Refactoring repository metadata session handling

Martin Stockhammer 2019-08-06 22:06:57 +02:00
parent eca0097d85
commit 21d5cf53fe
43 changed files with 1971 additions and 1745 deletions
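The commit fills in the RepositorySession argument that the parent commit left blank at each MetadataRepository call site, and moves persistence from MetadataRepository.save() to RepositorySession.save(), which now declares the checked MetadataSessionException. A condensed sketch of the call-site pattern applied throughout the files below (identifiers taken from the first hunk; a sketch assembled from the diff, not a compilable excerpt of any one file):

Collection<ArtifactMetadata> artifacts =
    metadataRepository.getArtifacts( repositorySession, repository.getId( ),
                                     reference.getGroupId( ), reference.getArtifactId( ), baseVersion );
try {
    // save() moved from MetadataRepository to the session and now throws a checked exception
    repositorySession.save( );
} catch ( MetadataSessionException e ) {
    log.error( "Could not save session {}", e.getMessage( ) );
}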

View File

@ -23,10 +23,7 @@ import org.apache.archiva.common.utils.VersionUtil;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.facets.AuditEvent;
import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.*;
import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.repository.ContentNotFoundException;
import org.apache.archiva.repository.ManagedRepositoryContent;
@ -203,7 +200,7 @@ public abstract class AbstractRepositoryPurge
{
try
{
metaResolved.put( metaBaseId, metadataRepository.getArtifacts( , repository.getId( ),
metaResolved.put( metaBaseId, metadataRepository.getArtifacts(repositorySession, repository.getId( ),
reference.getGroupId( ), reference.getArtifactId( ), baseVersion ) );
}
catch ( MetadataResolutionException e )
@ -314,18 +311,22 @@ public abstract class AbstractRepositoryPurge
}
projectLevelMetadata.add( info.projectVersionLevel( ) );
}
metadataRepository.save( );
try {
repositorySession.save( );
} catch (MetadataSessionException e) {
log.error("Could not save sesion {}", e.getMessage());
}
Collection<ArtifactMetadata> artifacts = null;
// Get remaining artifacts and remove project if empty
for ( ArtifactInfo info : projectLevelMetadata )
{
try
{
artifacts = metadataRepository.getArtifacts( , repository.getId( ), info.getNamespace( ),
artifacts = metadataRepository.getArtifacts(repositorySession , repository.getId( ), info.getNamespace( ),
info.getName( ), info.getProjectVersion( ) );
if ( artifacts.size( ) == 0 )
{
metadataRepository.removeProjectVersion( , repository.getId( ),
metadataRepository.removeProjectVersion(repositorySession , repository.getId( ),
info.getNamespace( ), info.getName( ), info.getProjectVersion( ) );
log.debug( "Removed project version from MetadataRepository {}", info );
}
@ -335,7 +336,12 @@ public abstract class AbstractRepositoryPurge
log.error( "Could not remove project version from MetadataRepository {}: {}", info, e.getMessage( ), e );
}
}
metadataRepository.save( );
try {
repositorySession.save( );
} catch (MetadataSessionException e) {
log.error("Could not save sesion {}", e.getMessage());
}
}
@ -360,14 +366,18 @@ public abstract class AbstractRepositoryPurge
version = artifactInfo.getProjectVersion( );
MavenArtifactFacet mavenArtifactFacetToCompare = new MavenArtifactFacet( );
mavenArtifactFacetToCompare.setClassifier( artifactInfo.getClassifier( ) );
metadataRepository.removeArtifact( , repository.getId( ), groupId,
metadataRepository.removeArtifact(repositorySession , repository.getId( ), groupId,
artifactId, version, mavenArtifactFacetToCompare );
metadataRepository.save( );
try {
repositorySession.save( );
} catch (MetadataSessionException e) {
log.error("Could not save session {}", e.getMessage());
}
}
}
else
{
metadataRepository.removeArtifact( , artifactMetadata, artifactInfo.getProjectVersion( ) );
metadataRepository.removeArtifact(repositorySession , artifactMetadata, artifactInfo.getProjectVersion( ) );
}
}

View File

@ -158,7 +158,7 @@ public class CleanupReleasedSnapshotsRepositoryPurge
artifactRef.getArtifactId( ), artifactRef.getVersion( ),
artifactFile.getFileName().toString() );
}
metadataRepository.removeProjectVersion( , repository.getId( ),
metadataRepository.removeProjectVersion( repositorySession, repository.getId( ),
artifactRef.getGroupId( ), artifactRef.getArtifactId( ), artifactRef.getVersion( ) );
needsMetadataUpdate = true;

View File

@ -127,7 +127,7 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
// Provide the metadata list
List<ArtifactMetadata> ml = getArtifactMetadataFromDir(TEST_REPO_ID , projectName, repo.getParent(), vDir );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts(repositorySession , TEST_REPO_ID,
projectNs, projectName, projectVersion )).thenReturn(ml);
@ -137,8 +137,8 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
// Verify the metadataRepository invocations
// complete snapshot version removal for released
verify(metadataRepository, times(1)).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq("2.3") );
verify(metadataRepository, times(1)).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq("2.3") );
// check if the snapshot was removed
assertDeleted( projectRoot + "/2.3-SNAPSHOT" );
@ -246,11 +246,11 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
// Provide the metadata list
List<ArtifactMetadata> ml = getArtifactMetadataFromDir(TEST_REPO_ID , projectName, repo.getParent(), vDir );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts(repositorySession , TEST_REPO_ID,
projectNs, projectName, projectVersion )).thenReturn(ml);
List<ArtifactMetadata> ml2 = getArtifactMetadataFromDir(RELEASES_TEST_REPO_ID , projectName, repo.getParent(), releaseDir );
when(metadataRepository.getArtifacts( , RELEASES_TEST_REPO_ID,
when(metadataRepository.getArtifacts(repositorySession , RELEASES_TEST_REPO_ID,
projectNs, projectName, releaseVersion )).thenReturn(ml2);
@ -260,8 +260,8 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
// Verify the metadataRepository invocations
// Complete version removal for cleanup
verify(metadataRepository, times(1)).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion( , eq(RELEASES_TEST_REPO_ID), eq(projectNs), eq(projectName), eq(releaseVersion) );
verify(metadataRepository, times(1)).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(RELEASES_TEST_REPO_ID), eq(projectNs), eq(projectName), eq(releaseVersion) );
// check if the snapshot was removed
@ -317,13 +317,13 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
// Provide the metadata list
List<ArtifactMetadata> ml = getArtifactMetadataFromDir(TEST_REPO_ID , projectName, repo.getParent(), vDir );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts(repositorySession , TEST_REPO_ID,
projectNs, projectName, projectVersion )).thenReturn(ml);
List<ArtifactMetadata> m2 = getArtifactMetadataFromDir(TEST_REPO_ID , projectName, repo.getParent(), vDir2 );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts(repositorySession , TEST_REPO_ID,
projectNs, projectName, "2.0.3-SNAPSHOT" )).thenReturn(ml);
List<ArtifactMetadata> m3 = getArtifactMetadataFromDir(TEST_REPO_ID , projectName, repo.getParent(), vDir3 );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts(repositorySession , TEST_REPO_ID,
projectNs, projectName, "2.0.4-SNAPSHOT" )).thenReturn(ml);
@ -333,11 +333,11 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
// Verify the metadataRepository invocations
// No removal
verify(metadataRepository, never()).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq("2.0.3-SNAPSHOT") );
verify(metadataRepository, never()).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq("2.0.4-SNAPSHOT") );
verify(metadataRepository, never()).removeArtifact( , any(ArtifactMetadata.class), any(String.class) );
verify(metadataRepository, never()).removeArtifact( , any(String.class), any(String.class), any(String.class), any(String.class), any( MetadataFacet.class) );
verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq("2.0.3-SNAPSHOT") );
verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq("2.0.4-SNAPSHOT") );
verify(metadataRepository, never()).removeArtifact(repositorySession , any(ArtifactMetadata.class), any(String.class) );
verify(metadataRepository, never()).removeArtifact(repositorySession , any(String.class), any(String.class), any(String.class), any(String.class), any( MetadataFacet.class) );

View File

@ -112,7 +112,7 @@ public class DaysOldRepositoryPurgeTest
// Provide the metadata list
List<ArtifactMetadata> ml = getArtifactMetadataFromDir(TEST_REPO_ID , projectName, repo.getParent(), vDir );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts( repositorySession, TEST_REPO_ID,
projectNs, projectName, projectVersion )).thenReturn(ml);
repoPurge.process( PATH_TO_BY_DAYS_OLD_ARTIFACT );
@ -120,9 +120,9 @@ public class DaysOldRepositoryPurgeTest
listenerControl.verify();
// Verify the metadataRepository invocations
verify(metadataRepository, never()).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
verify(metadataRepository, times(2)).removeArtifact( , metadataArg.capture(), eq(projectVersion) );
verify(metadataRepository, times(2)).removeArtifact(repositorySession , metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));
@ -197,7 +197,7 @@ public class DaysOldRepositoryPurgeTest
// Provide the metadata list
List<ArtifactMetadata> ml = getArtifactMetadataFromDir(TEST_REPO_ID , projectName, repo.getParent(), vDir );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts(repositorySession , TEST_REPO_ID,
projectNs, projectName, projectVersion )).thenReturn(ml);
@ -206,9 +206,9 @@ public class DaysOldRepositoryPurgeTest
listenerControl.verify();
// Verify the metadataRepository invocations
verify(metadataRepository, never()).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
verify(metadataRepository, times(deletedVersions.size())).removeArtifact( , metadataArg.capture(), eq(projectVersion) );
verify(metadataRepository, times(deletedVersions.size())).removeArtifact(repositorySession , metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));
@ -296,7 +296,7 @@ public class DaysOldRepositoryPurgeTest
// Provide the metadata list
List<ArtifactMetadata> ml = getArtifactMetadataFromDir(TEST_REPO_ID , projectName, repo.getParent(), vDir );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts(repositorySession , TEST_REPO_ID,
projectNs, projectName, projectVersion )).thenReturn(ml);
@ -305,9 +305,9 @@ public class DaysOldRepositoryPurgeTest
listenerControl.verify();
// Verify the metadataRepository invocations
verify(metadataRepository, never()).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
verify(metadataRepository, times(deletedVersions.size())).removeArtifact( , metadataArg.capture(), eq(projectVersion) );
verify(metadataRepository, times(deletedVersions.size())).removeArtifact( repositorySession, metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));

View File

@ -188,7 +188,7 @@ public class RepositoryPurgeConsumerTest
// Provide the metadata list
List<ArtifactMetadata> ml = getArtifactMetadataFromDir( TEST_REPO_ID, projectName, repo, vDir );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts( repositorySession, TEST_REPO_ID,
projectNs, projectName, projectVersion )).thenReturn(ml);
Set<String> deletedVersions = new HashSet<>();
deletedVersions.add("1.0RC1-20070504.153317-1");
@ -197,9 +197,9 @@ public class RepositoryPurgeConsumerTest
repoPurgeConsumer.processFile( PATH_TO_BY_RETENTION_COUNT_ARTIFACT );
// Verify the metadataRepository invocations
verify(metadataRepository, never()).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion( repositorySession, eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
verify(metadataRepository, times(2)).removeArtifact( , metadataArg.capture(), eq(projectVersion) );
verify(metadataRepository, times(2)).removeArtifact( repositorySession, metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));
@ -291,7 +291,7 @@ public class RepositoryPurgeConsumerTest
// Provide the metadata list
List<ArtifactMetadata> ml = getArtifactMetadataFromDir( TEST_REPO_ID, projectName, repo, vDir );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts( repositorySession, TEST_REPO_ID,
projectNs, projectName, projectVersion )).thenReturn(ml);
Set<String> deletedVersions = new HashSet<>();
deletedVersions.add("2.2-SNAPSHOT");
@ -300,9 +300,9 @@ public class RepositoryPurgeConsumerTest
repoPurgeConsumer.processFile( PATH_TO_BY_DAYS_OLD_ARTIFACT );
// Verify the metadataRepository invocations
verify(metadataRepository, never()).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion( repositorySession, eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
verify(metadataRepository, times(2)).removeArtifact( , metadataArg.capture(), eq(projectVersion) );
verify(metadataRepository, times(2)).removeArtifact( repositorySession, metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));
@ -373,16 +373,16 @@ public class RepositoryPurgeConsumerTest
// Provide the metadata list
List<ArtifactMetadata> ml = getArtifactMetadataFromDir( TEST_REPO_ID, projectName, repo, vDir );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts( repositorySession, TEST_REPO_ID,
projectNs, projectName, projectVersion )).thenReturn(ml);
repoPurgeConsumer.processFile(
CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
verify(metadataRepository, never()).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion( repositorySession, eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
verify(metadataRepository, never()).removeArtifact( , any(), any() );
verify(metadataRepository, never()).removeArtifact( , any(), any(), any(), any(), any(MetadataFacet.class) );
verify(metadataRepository, never()).removeArtifact( repositorySession, any(), any() );
verify(metadataRepository, never()).removeArtifact( repositorySession, any(), any(), any(), any(), any(MetadataFacet.class) );
// check if the snapshot wasn't removed
@ -436,15 +436,15 @@ public class RepositoryPurgeConsumerTest
// Provide the metadata list
List<ArtifactMetadata> ml = getArtifactMetadataFromDir(TEST_REPO_ID , projectName, repo.getParent(), vDir );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts( repositorySession, TEST_REPO_ID,
projectNs, projectName, projectVersion )).thenReturn(ml);
repoPurgeConsumer.processFile(
CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
verify(metadataRepository, times(1)).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, times(1)).removeProjectVersion( repositorySession, eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
verify(metadataRepository, never()).removeArtifact( , any(), any() );
verify(metadataRepository, never()).removeArtifact( repositorySession, any(), any() );
// check if the snapshot was removed
assertDeleted( projectRoot + "/2.3-SNAPSHOT" );

View File

@ -115,7 +115,7 @@ public class RetentionCountRepositoryPurgeTest
// Provide the metadata list
List<ArtifactMetadata> ml = getArtifactMetadataFromDir(TEST_REPO_ID , projectName, repo.getParent(), vDir );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts( repositorySession, TEST_REPO_ID,
projectNs, projectName, projectVersion )).thenReturn(ml);
@ -124,9 +124,9 @@ public class RetentionCountRepositoryPurgeTest
listenerControl.verify();
// Verify the metadataRepository invocations
verify(metadataRepository, never()).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion( repositorySession, eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
verify(metadataRepository, times(deletedVersions.size())).removeArtifact( , metadataArg.capture(), eq(projectVersion) );
verify(metadataRepository, times(deletedVersions.size())).removeArtifact( repositorySession, metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));
@ -202,7 +202,7 @@ public class RetentionCountRepositoryPurgeTest
// Provide the metadata list
List<ArtifactMetadata> ml = getArtifactMetadataFromDir(TEST_REPO_ID , projectName, repo.getParent(), vDir );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts( repositorySession, TEST_REPO_ID,
projectNs, projectName, projectVersion )).thenReturn(ml);
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_POM );
@ -210,9 +210,9 @@ public class RetentionCountRepositoryPurgeTest
listenerControl.verify();
// Verify the metadataRepository invocations
verify(metadataRepository, never()).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion( repositorySession, eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
verify(metadataRepository, times(deletedVersions.size())).removeArtifact( , metadataArg.capture(), eq(projectVersion) );
verify(metadataRepository, times(deletedVersions.size())).removeArtifact( repositorySession, metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));
@ -290,7 +290,7 @@ public class RetentionCountRepositoryPurgeTest
// Provide the metadata list
List<ArtifactMetadata> ml = getArtifactMetadataFromDir(TEST_REPO_ID , projectName, repo.getParent(), vDir );
when(metadataRepository.getArtifacts( , TEST_REPO_ID,
when(metadataRepository.getArtifacts( repositorySession, TEST_REPO_ID,
projectNs, projectName, projectVersion )).thenReturn(ml);
repoPurge.process( PATH_TO_TEST_ORDER_OF_DELETION );
@ -298,9 +298,9 @@ public class RetentionCountRepositoryPurgeTest
listenerControl.verify();
// Verify the metadataRepository invocations
verify(metadataRepository, never()).removeProjectVersion( , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
verify(metadataRepository, never()).removeProjectVersion( repositorySession, eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
verify(metadataRepository, times(deletedVersions.size())).removeArtifact( , metadataArg.capture(), eq(projectVersion) );
verify(metadataRepository, times(deletedVersions.size())).removeArtifact( repositorySession, metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));

View File

@ -29,10 +29,7 @@ import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.ProjectMetadata;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.*;
import org.apache.archiva.metadata.repository.storage.ReadMetadataRequest;
import org.apache.archiva.metadata.repository.storage.RepositoryStorage;
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataInvalidException;
@ -193,14 +190,14 @@ public class ArchivaMetadataCreationConsumer
// read the metadata and update it if it is newer or doesn't exist
artifact.setWhenGathered( whenGathered );
metadataRepository.updateArtifact( , repoId, project.getNamespace(), project.getId(),
metadataRepository.updateArtifact(repositorySession , repoId, project.getNamespace(), project.getId(),
projectVersion, artifact );
if ( createVersionMetadata )
{
metadataRepository.updateProjectVersion( , repoId, project.getNamespace(),
metadataRepository.updateProjectVersion(repositorySession , repoId, project.getNamespace(),
project.getId(), versionMetadata );
}
metadataRepository.updateProject( , repoId, project );
metadataRepository.updateProject(repositorySession , repoId, project );
repositorySession.save();
}
catch ( MetadataRepositoryException e )
@ -208,27 +205,28 @@ public class ArchivaMetadataCreationConsumer
log.warn(
"Error occurred persisting metadata for artifact:{} (repository:{}); message: {}" ,
path, repoId, e.getMessage(), e );
repositorySession.revert();
try {
repositorySession.revert();
} catch (MetadataSessionException ex) {
log.error("Reverting failed {}", ex.getMessage());
}
}
catch ( RepositoryStorageRuntimeException e )
{
log.warn(
"Error occurred persisting metadata for artifact:{} (repository:{}); message: {}",
path, repoId, e.getMessage(), e );
repositorySession.revert();
}
finally
try {
repositorySession.revert();
} catch (MetadataSessionException ex) {
log.error("Reverting failed {}", ex.getMessage());
}
} catch (MetadataSessionException e) {
throw new ConsumerException(e.getMessage(), e);
} finally
{
repositorySession.close();
}
catch ( org.apache.archiva.metadata.repository.MetadataSessionException e )
{
e.printStackTrace( );
}
catch ( org.apache.archiva.metadata.repository.MetadataSessionException e )
{
e.printStackTrace( );
}
}
@Override

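Distilled from the interleaved hunks above, the consumer's resulting error handling around the new checked exceptions has this shape (a condensed sketch; the RepositoryStorageRuntimeException branch, which reverts the same way, is omitted):

try {
    metadataRepository.updateArtifact( repositorySession, repoId, project.getNamespace( ), project.getId( ),
                                       projectVersion, artifact );
    repositorySession.save( );
} catch ( MetadataRepositoryException e ) {
    log.warn( "Error occurred persisting metadata for artifact:{} (repository:{}); message: {}",
              path, repoId, e.getMessage( ), e );
    // roll back the dirty session; revert() itself may now fail with a checked exception
    try {
        repositorySession.revert( );
    } catch ( MetadataSessionException ex ) {
        log.error( "Reverting failed {}", ex.getMessage( ) );
    }
} catch ( MetadataSessionException e ) {
    // save() failed; surface it to the consumer framework
    throw new ConsumerException( e.getMessage( ), e );
} finally {
    repositorySession.close( );
}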
View File

@ -32,10 +32,7 @@ import org.apache.archiva.indexer.ArchivaIndexManager;
import org.apache.archiva.indexer.IndexManagerFactory;
import org.apache.archiva.indexer.IndexUpdateFailedException;
import org.apache.archiva.metadata.model.facets.AuditEvent;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.*;
import org.apache.archiva.metadata.repository.stats.model.RepositoryStatisticsManager;
import org.apache.archiva.redback.components.cache.Cache;
import org.apache.archiva.redback.components.registry.RegistryException;
@ -352,7 +349,7 @@ public class DefaultManagedRepositoryAdmin
try
{
MetadataRepository metadataRepository = repositorySession.getRepository();
metadataRepository.removeRepository( , repository.getId() );
metadataRepository.removeRepository(repositorySession , repository.getId() );
//invalidate cache
namespacesCache.remove( repository.getId() );
log.debug( "call repositoryStatisticsManager.deleteStatistics" );
@ -363,15 +360,12 @@ public class DefaultManagedRepositoryAdmin
{
//throw new RepositoryAdminException( e.getMessage(), e );
log.warn( "skip error during removing repository from MetadataRepository:{}", e.getMessage(), e );
}
finally
} catch (MetadataSessionException e) {
log.warn( "skip error during removing repository from MetadataRepository:{}", e.getMessage(), e );
} finally
{
repositorySession.close();
}
catch ( org.apache.archiva.metadata.repository.MetadataSessionException e )
{
e.printStackTrace( );
}
}
if ( deleteContent )
@ -526,7 +520,7 @@ public class DefaultManagedRepositoryAdmin
}
}
catch ( MetadataRepositoryException e )
catch (MetadataRepositoryException | MetadataSessionException e )
{
throw new RepositoryAdminException( e.getMessage(), e );
}
@ -534,10 +528,6 @@ public class DefaultManagedRepositoryAdmin
{
repositorySession.close();
}
catch ( org.apache.archiva.metadata.repository.MetadataSessionException e )
{
e.printStackTrace( );
}
if ( updateIndexContext )
{

View File

@ -176,14 +176,14 @@ public class ArchivaRepositoryScanningTaskExecutor
{
throw new TaskExecutionException( "Unable to store updated statistics: " + e.getMessage(), e );
}
finally
{
repositorySession.close();
}
catch ( org.apache.archiva.metadata.repository.MetadataSessionException e )
{
e.printStackTrace( );
}
finally
{
repositorySession.close();
}
// log.info( "Scanning for removed repository content" );

View File

@ -70,6 +70,11 @@
<artifactId>repository-statistics</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>metadata-statistics-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva.redback</groupId>
<artifactId>redback-authorization-api</artifactId>

View File

@ -317,18 +317,15 @@ public class DefaultBrowseService
}
return versionMetadata;
}
finally
} catch (MetadataRepositoryException e) {
throw new ArchivaRestServiceException(e.getMessage(), e);
} finally
{
if ( repositorySession != null )
{
repositorySession.close();
}
}
catch ( MetadataRepositoryException e )
{
e.printStackTrace( );
}
}
@ -479,7 +476,7 @@ public class DefaultBrowseService
}
return sharedModel;
}
catch ( MetadataResolutionException e )
catch (MetadataResolutionException | MetadataRepositoryException e )
{
throw new ArchivaRestServiceException( e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e );
@ -491,10 +488,6 @@ public class DefaultBrowseService
repositorySession.close();
}
}
catch ( MetadataRepositoryException e )
{
e.printStackTrace( );
}
}
@Override
@ -662,11 +655,11 @@ public class DefaultBrowseService
{
MetadataRepository metadataRepository = repositorySession.getRepository();
metadataRepository.updateProjectVersion( , repositoryId, groupId, artifactId, projectVersionMetadata );
metadataRepository.updateProjectVersion(repositorySession , repositoryId, groupId, artifactId, projectVersionMetadata );
repositorySession.save();
}
catch ( MetadataRepositoryException e )
catch (MetadataRepositoryException | MetadataSessionException e )
{
log.error( e.getMessage(), e );
throw new ArchivaRestServiceException( e.getMessage(),
@ -676,10 +669,6 @@ public class DefaultBrowseService
{
repositorySession.close();
}
catch ( MetadataSessionException e )
{
e.printStackTrace( );
}
return Boolean.TRUE;
}
@ -723,11 +712,11 @@ public class DefaultBrowseService
{
MetadataRepository metadataRepository = repositorySession.getRepository();
metadataRepository.updateProjectVersion( , repositoryId, groupId, artifactId, projectVersionMetadata );
metadataRepository.updateProjectVersion(repositorySession , repositoryId, groupId, artifactId, projectVersionMetadata );
repositorySession.save();
}
catch ( MetadataRepositoryException e )
catch (MetadataRepositoryException | MetadataSessionException e )
{
log.error( e.getMessage(), e );
throw new ArchivaRestServiceException( e.getMessage(),
@ -737,10 +726,6 @@ public class DefaultBrowseService
{
repositorySession.close();
}
catch ( MetadataSessionException e )
{
e.printStackTrace( );
}
return Boolean.TRUE;
}
@ -1006,7 +991,7 @@ public class DefaultBrowseService
}
try
{
List<ArtifactMetadata> artifactMetadatas = repositorySession.getRepository().getArtifacts( , repositoryId );
List<ArtifactMetadata> artifactMetadatas = repositorySession.getRepository().getArtifacts(repositorySession , repositoryId );
return buildArtifacts( artifactMetadatas, repositoryId );
}
catch ( MetadataRepositoryException e )
@ -1034,7 +1019,7 @@ public class DefaultBrowseService
}
try
{
List<ArtifactMetadata> artifactMetadatas = repositorySession.getRepository().getArtifactsByProjectVersionMetadata( , key, value, repositoryId );
List<ArtifactMetadata> artifactMetadatas = repositorySession.getRepository().getArtifactsByProjectVersionMetadata(repositorySession , key, value, repositoryId );
return buildArtifacts( artifactMetadatas, repositoryId );
}
catch ( MetadataRepositoryException e )
@ -1062,7 +1047,7 @@ public class DefaultBrowseService
}
try
{
List<ArtifactMetadata> artifactMetadatas = repositorySession.getRepository().getArtifactsByMetadata( , key, value, repositoryId );
List<ArtifactMetadata> artifactMetadatas = repositorySession.getRepository().getArtifactsByMetadata(repositorySession , key, value, repositoryId );
return buildArtifacts( artifactMetadatas, repositoryId );
}
catch ( MetadataRepositoryException e )
@ -1090,7 +1075,7 @@ public class DefaultBrowseService
}
try
{
List<ArtifactMetadata> artifactMetadatas = repositorySession.getRepository().getArtifactsByProperty( , key, value, repositoryId );
List<ArtifactMetadata> artifactMetadatas = repositorySession.getRepository().getArtifactsByProperty(repositorySession , key, value, repositoryId );
return buildArtifacts( artifactMetadatas, repositoryId );
}
catch ( MetadataRepositoryException e )
@ -1137,7 +1122,7 @@ public class DefaultBrowseService
try
{
List<ArtifactMetadata> artifactMetadatas =
repositorySession.getRepository().searchArtifacts( , repositoryId, text, exact == null ? false : exact );
repositorySession.getRepository().searchArtifacts(repositorySession , repositoryId, text, exact == null ? false : exact );
return buildArtifacts( artifactMetadatas, repositoryId );
}
catch ( MetadataRepositoryException e )
@ -1166,7 +1151,7 @@ public class DefaultBrowseService
try
{
List<ArtifactMetadata> artifactMetadatas =
repositorySession.getRepository().searchArtifacts( , repositoryId, key, text, exact == null ? false : exact );
repositorySession.getRepository().searchArtifacts(repositorySession , repositoryId, key, text, exact == null ? false : exact );
return buildArtifacts( artifactMetadatas, repositoryId );
}
catch ( MetadataRepositoryException e )

View File

@ -128,7 +128,7 @@ public class DefaultMergeRepositoriesService
{
org.apache.archiva.repository.ManagedRepository managedRepo = repositoryRegistry.getManagedRepository(targetRepositoryId);
MetadataRepository metadataRepository = repositorySession.getRepository();
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( , sourceRepositoryId );
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts(repositorySession , sourceRepositoryId );
if ( managedRepo.getActiveReleaseSchemes().contains(ReleaseScheme.RELEASE) && !managedRepo.getActiveReleaseSchemes().contains(ReleaseScheme.SNAPSHOT) )
{
@ -174,7 +174,7 @@ public class DefaultMergeRepositoriesService
repositoryMerger.getConflictingArtifacts( repositorySession.getRepository(), sourceRepositoryId,
targetRepositoryId );
MetadataRepository metadataRepository = repositorySession.getRepository();
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( , sourceRepositoryId );
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts(repositorySession , sourceRepositoryId );
sourceArtifacts.removeAll( conflictSourceArtifacts );
org.apache.archiva.repository.ManagedRepository managedRepo = repositoryRegistry.getManagedRepository(targetRepositoryId);

View File

@ -173,10 +173,10 @@ public class DefaultReportRepositoriesService
MetadataRepository metadataRepository = repositorySession.getRepository();
for ( String repoId : observableRepositories )
{
for ( String name : metadataRepository.getMetadataFacets( , repoId, RepositoryProblemFacet.FACET_ID ) )
for ( String name : metadataRepository.getMetadataFacets(repositorySession , repoId, RepositoryProblemFacet.FACET_ID ) )
{
RepositoryProblemFacet metadataFacet =
(RepositoryProblemFacet) metadataRepository.getMetadataFacet( ,
(RepositoryProblemFacet) metadataRepository.getMetadataFacet(repositorySession ,
repoId,
RepositoryProblemFacet.FACET_ID, name );
if ( StringUtils.isEmpty( groupId ) || groupId.equals( metadataFacet.getNamespace() ) )

View File

@ -30,11 +30,7 @@ import org.apache.archiva.maven2.model.Artifact;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.facets.AuditEvent;
import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.*;
import org.apache.archiva.model.ArchivaRepositoryMetadata;
import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.model.VersionedReference;
@ -703,14 +699,14 @@ public class DefaultRepositoriesService
}
Collection<ArtifactMetadata> artifacts =
metadataRepository.getArtifacts( , repositoryId, namespace, projectId, version );
metadataRepository.getArtifacts(repositorySession , repositoryId, namespace, projectId, version );
for ( ArtifactMetadata artifactMetadata : artifacts )
{
metadataRepository.removeArtifact( , artifactMetadata, version );
metadataRepository.removeArtifact(repositorySession , artifactMetadata, version );
}
metadataRepository.removeProjectVersion( , repositoryId, namespace, projectId, version );
metadataRepository.removeProjectVersion(repositorySession , repositoryId, namespace, projectId, version );
}
catch ( MetadataRepositoryException e )
{
@ -727,14 +723,14 @@ public class DefaultRepositoriesService
finally
{
repositorySession.save();
try {
repositorySession.save();
} catch (MetadataSessionException e) {
log.error("Session save failed {}", e.getMessage());
}
repositorySession.close();
}
catch ( org.apache.archiva.metadata.repository.MetadataSessionException e )
{
e.printStackTrace( );
}
return Boolean.TRUE;
}
@ -869,13 +865,13 @@ public class DefaultRepositoriesService
{
String baseVersion = VersionUtil.getBaseVersion( artifact.getVersion() );
artifacts =
metadataRepository.getArtifacts( , repositoryId, artifact.getGroupId(),
metadataRepository.getArtifacts(repositorySession , repositoryId, artifact.getGroupId(),
artifact.getArtifactId(), baseVersion );
}
else
{
artifacts =
metadataRepository.getArtifacts( , repositoryId, artifact.getGroupId(),
metadataRepository.getArtifacts(repositorySession , repositoryId, artifact.getGroupId(),
artifact.getArtifactId(), artifact.getVersion() );
}
@ -887,13 +883,13 @@ public class DefaultRepositoriesService
{
// verify the metadata repository no longer contains the version
Collection<String> projectVersions =
metadataRepository.getProjectVersions( , repositoryId,
metadataRepository.getProjectVersions(repositorySession , repositoryId,
artifact.getGroupId(), artifact.getArtifactId() );
if ( projectVersions.contains( artifact.getVersion() ) )
{
log.warn( "artifact not found when deleted but version still here ! so force cleanup" );
metadataRepository.removeProjectVersion( , repositoryId,
metadataRepository.removeProjectVersion(repositorySession , repositoryId,
artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() );
}
@ -924,9 +920,9 @@ public class DefaultRepositoriesService
artifact.getVersion();
MavenArtifactFacet mavenArtifactFacetToCompare = new MavenArtifactFacet();
mavenArtifactFacetToCompare.setClassifier( artifact.getClassifier() );
metadataRepository.removeArtifact( , repositoryId, groupId, artifactId,
metadataRepository.removeArtifact(repositorySession , repositoryId, groupId, artifactId,
version, mavenArtifactFacetToCompare );
metadataRepository.save();
repositorySession.save();
}
}
@ -934,12 +930,12 @@ public class DefaultRepositoriesService
{
if ( snapshotVersion )
{
metadataRepository.removeArtifact( ,
metadataRepository.removeArtifact(repositorySession ,
artifactMetadata, VersionUtil.getBaseVersion( artifact.getVersion() ) );
}
else
{
metadataRepository.removeArtifact( ,
metadataRepository.removeArtifact(repositorySession ,
artifactMetadata.getRepositoryId(),
artifactMetadata.getNamespace(), artifactMetadata.getProject(),
artifact.getVersion(), artifactMetadata.getId() );
@ -970,7 +966,7 @@ public class DefaultRepositoriesService
{
throw new ArchivaRestServiceException( "Repository exception: " + e.getMessage(), 500, e );
}
catch ( MetadataResolutionException e )
catch (MetadataResolutionException | MetadataSessionException e )
{
throw new ArchivaRestServiceException( "Repository exception: " + e.getMessage(), 500, e );
}
@ -981,14 +977,14 @@ public class DefaultRepositoriesService
finally
{
repositorySession.save();
try {
repositorySession.save();
} catch (MetadataSessionException e) {
log.error("Could not save sesion {}", e.getMessage());
}
repositorySession.close();
}
catch ( org.apache.archiva.metadata.repository.MetadataSessionException e )
{
e.printStackTrace( );
}
return Boolean.TRUE;
}
@ -1029,16 +1025,16 @@ public class DefaultRepositoriesService
MetadataRepository metadataRepository = repositorySession.getRepository();
metadataRepository.removeNamespace( , repositoryId, groupId );
metadataRepository.removeNamespace(repositorySession , repositoryId, groupId );
// just invalidate cache entry
String cacheKey = repositoryId + "-" + groupId;
namespacesCache.remove( cacheKey );
namespacesCache.remove( repositoryId );
metadataRepository.save();
repositorySession.save();
}
catch ( MetadataRepositoryException e )
catch (MetadataRepositoryException | MetadataSessionException e )
{
log.error( e.getMessage(), e );
throw new ArchivaRestServiceException( "Repository exception: " + e.getMessage(), 500, e );
@ -1111,11 +1107,11 @@ public class DefaultRepositoriesService
MetadataRepository metadataRepository = repositorySession.getRepository();
metadataRepository.removeProject( , repositoryId, groupId, projectId );
metadataRepository.removeProject(repositorySession , repositoryId, groupId, projectId );
metadataRepository.save();
repositorySession.save();
}
catch ( MetadataRepositoryException e )
catch (MetadataRepositoryException | MetadataSessionException e )
{
log.error( e.getMessage(), e );
throw new ArchivaRestServiceException( "Repository exception: " + e.getMessage(), 500, e );

View File

@ -234,7 +234,7 @@ public class DefaultSearchService
for ( String repoId : checksumSearch.getRepositories() )
{
Collection<ArtifactMetadata> artifactMetadatas =
metadataRepository.getArtifactsByChecksum( , repoId, checksumSearch.getChecksum() );
metadataRepository.getArtifactsByChecksum( repositorySession, repoId, checksumSearch.getChecksum() );
artifactSet.addAll( buildArtifacts( artifactMetadatas, repoId ) );
}

View File

@ -24,6 +24,8 @@ import com.sun.syndication.io.FeedException;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.rss.RssFeedEntry;
import org.apache.archiva.rss.RssFeedGenerator;
import org.slf4j.Logger;
@ -60,6 +62,10 @@ public class NewArtifactsRssFeedProcessor
@Inject
private RssFeedGenerator generator;
@Inject
private RepositorySessionFactory repositorySessionFactory;
private Logger log = LoggerFactory.getLogger( NewArtifactsRssFeedProcessor.class );
private static final TimeZone GMT_TIME_ZONE = TimeZone.getTimeZone( "GMT" );
@ -91,9 +97,9 @@ public class NewArtifactsRssFeedProcessor
greaterThanThisDate.clear( Calendar.MILLISECOND );
List<ArtifactMetadata> artifacts;
try
try(RepositorySession session = repositorySessionFactory.createSession())
{
artifacts = metadataRepository.getArtifactsByDateRange( , repoId, greaterThanThisDate.getTime(), null );
artifacts = metadataRepository.getArtifactsByDateRange(session , repoId, greaterThanThisDate.getTime(), null );
}
catch ( MetadataRepositoryException e )
{

View File

@ -22,9 +22,9 @@ package org.apache.archiva.rss.processor;
import com.sun.syndication.feed.synd.SyndFeed;
import com.sun.syndication.io.FeedException;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.*;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.rss.RssFeedEntry;
import org.apache.archiva.rss.RssFeedGenerator;
import org.slf4j.Logger;
@ -57,6 +57,12 @@ public class NewVersionsOfArtifactRssFeedProcessor
@Inject
private RssFeedGenerator generator;
@Inject
private RepositoryRegistry repositoryRegistry;
@Inject
private RepositorySessionFactory repositorySessionFactory;
/**
* Process all versions of the artifact which had a rss feed request.
*/
@ -80,14 +86,15 @@ public class NewVersionsOfArtifactRssFeedProcessor
throws FeedException
{
List<ArtifactMetadata> artifacts = new ArrayList<>();
try
try(RepositorySession session = repositorySessionFactory.createSession())
{
for ( String repoId : metadataRepository.getRepositories() )
for ( Repository repo : repositoryRegistry.getRepositories() )
{
Collection<String> versions = metadataRepository.getProjectVersions( , repoId, groupId, artifactId );
final String repoId = repo.getId();
Collection<String> versions = metadataRepository.getProjectVersions( session, repoId, groupId, artifactId );
for ( String version : versions )
{
artifacts.addAll( metadataRepository.getArtifacts( , repoId, groupId, artifactId, version ) );
artifacts.addAll( metadataRepository.getArtifacts(session , repoId, groupId, artifactId, version ) );
}
}
}

View File

@ -24,6 +24,8 @@ import com.sun.syndication.feed.synd.SyndFeed;
import junit.framework.TestCase;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.rss.RssFeedGenerator;
import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
import org.easymock.IMocksControl;
@ -57,6 +59,10 @@ public class NewVersionsOfArtifactRssFeedProcessorTest
private MetadataRepository metadataRepository;
private IMocksControl factoryControl;
private RepositorySessionFactory repositorySessionFactory;
@Before
@Override
public void setUp()
@ -69,6 +75,9 @@ public class NewVersionsOfArtifactRssFeedProcessorTest
metadataRepositoryControl = createControl();
metadataRepository = metadataRepositoryControl.createMock( MetadataRepository.class );
factoryControl = createControl();
repositorySessionFactory = factoryControl.createMock(RepositorySessionFactory.class);
}
@SuppressWarnings("unchecked")
@ -89,15 +98,16 @@ public class NewVersionsOfArtifactRssFeedProcessorTest
reqParams.put( RssFeedProcessor.KEY_GROUP_ID, GROUP_ID );
reqParams.put( RssFeedProcessor.KEY_ARTIFACT_ID, ARTIFACT_ID );
expect( metadataRepository.getRepositories() ).andReturn( Collections.singletonList( TEST_REPO ) );
expect( metadataRepository.getProjectVersions( , TEST_REPO, GROUP_ID, ARTIFACT_ID ) ).andReturn(
Arrays.asList( "1.0.1", "1.0.2", "1.0.3-SNAPSHOT" ) );
expect( metadataRepository.getArtifacts( , TEST_REPO, GROUP_ID, ARTIFACT_ID, "1.0.1" ) ).andReturn(
Collections.singletonList( artifact1 ) );
expect( metadataRepository.getArtifacts( , TEST_REPO, GROUP_ID, ARTIFACT_ID, "1.0.2" ) ).andReturn(
Collections.singletonList( artifact2 ) );
expect( metadataRepository.getArtifacts( , TEST_REPO, GROUP_ID, ARTIFACT_ID, "1.0.3-SNAPSHOT" ) ).andReturn(
Collections.singletonList( artifact3 ) );
try(RepositorySession session = repositorySessionFactory.createSession()) {
expect(metadataRepository.getProjectVersions(session, TEST_REPO, GROUP_ID, ARTIFACT_ID)).andReturn(
Arrays.asList("1.0.1", "1.0.2", "1.0.3-SNAPSHOT"));
expect(metadataRepository.getArtifacts(session, TEST_REPO, GROUP_ID, ARTIFACT_ID, "1.0.1")).andReturn(
Collections.singletonList(artifact1));
expect(metadataRepository.getArtifacts(session, TEST_REPO, GROUP_ID, ARTIFACT_ID, "1.0.2")).andReturn(
Collections.singletonList(artifact2));
expect(metadataRepository.getArtifacts(session, TEST_REPO, GROUP_ID, ARTIFACT_ID, "1.0.3-SNAPSHOT")).andReturn(
Collections.singletonList(artifact3));
}
metadataRepositoryControl.replay();
SyndFeed feed = newVersionsProcessor.process( reqParams, metadataRepository );

View File

@ -94,11 +94,6 @@ public class TestMetadataRepository
return artifacts;
}
@Override
public Collection<String> getRepositories()
{
return Collections.singletonList( TEST_REPO );
}
@Override
public Collection<ArtifactMetadata> getArtifacts( RepositorySession session, String repoId, String namespace, String projectId,

View File

@ -69,18 +69,14 @@ public class RepositorySession
return dirty;
}
protected void setDirty(boolean value) {
this.dirty = value;
}
public void save() throws MetadataSessionException
{
setDirty( false );
this.dirty = false;
}
public void revert() throws MetadataSessionException
{
setDirty( false );
this.dirty = false;
}
/**
@ -127,4 +123,12 @@ public class RepositorySession
{
this.dirty = true;
}
public void refresh() throws MetadataSessionException {
}
public void refreshAndDiscard() throws MetadataSessionException {
}
}

View File

@ -28,4 +28,5 @@ public interface RepositorySessionFactory
RepositorySession createSession() throws MetadataRepositoryException;
void close();
}
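
Call sites in this commit obtain sessions from an injected RepositorySessionFactory and lean on try-with-resources for cleanup (see the RSS processor and DefaultAuditManager hunks below). A usage sketch following that pattern; the listFacetNames method and its parameters are illustrative, only the factory and session API come from this diff:

@Inject
private RepositorySessionFactory repositorySessionFactory;

public List<String> listFacetNames( MetadataRepository metadataRepository, String repositoryId, String facetId )
    throws MetadataRepositoryException
{
    // createSession() may fail with MetadataRepositoryException; the session is
    // closed automatically even if the metadata call inside the block throws
    try ( RepositorySession session = repositorySessionFactory.createSession( ) )
    {
        return metadataRepository.getMetadataFacets( session, repositoryId, facetId );
    }
}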

View File

@ -58,9 +58,9 @@ public class RepositoryWalkingStatisticsProvider implements RepositoryStatistics
{
try
{
for ( String ns : metadataRepository.getRootNamespaces( , repositoryId ) )
for ( String ns : metadataRepository.getRootNamespaces( repositorySession, repositoryId ) )
{
walkRepository( metadataRepository, repositoryStatistics, repositoryId, ns );
walkRepository( repositorySession, metadataRepository, repositoryStatistics, repositoryId, ns );
}
}
catch ( MetadataResolutionException e )
@ -69,16 +69,16 @@ public class RepositoryWalkingStatisticsProvider implements RepositoryStatistics
}
}
private void walkRepository( MetadataRepository metadataRepository, RepositoryStatistics stats, String repositoryId,
private void walkRepository( RepositorySession repositorySession, MetadataRepository metadataRepository, RepositoryStatistics stats, String repositoryId,
String ns )
throws MetadataResolutionException
{
for ( String namespace : metadataRepository.getNamespaces( , repositoryId, ns ) )
for ( String namespace : metadataRepository.getNamespaces( repositorySession , repositoryId, ns ) )
{
walkRepository( metadataRepository, stats, repositoryId, ns + "." + namespace );
walkRepository( repositorySession, metadataRepository, stats, repositoryId, ns + "." + namespace );
}
Collection<String> projects = metadataRepository.getProjects( , repositoryId, ns );
Collection<String> projects = metadataRepository.getProjects( repositorySession , repositoryId, ns );
if ( !projects.isEmpty() )
{
stats.setTotalGroupCount( stats.getTotalGroupCount() + 1 );
@ -86,9 +86,9 @@ public class RepositoryWalkingStatisticsProvider implements RepositoryStatistics
for ( String project : projects )
{
for ( String version : metadataRepository.getProjectVersions( , repositoryId, ns, project ) )
for ( String version : metadataRepository.getProjectVersions( repositorySession , repositoryId, ns, project ) )
{
for ( ArtifactMetadata artifact : metadataRepository.getArtifacts( , repositoryId, ns,
for ( ArtifactMetadata artifact : metadataRepository.getArtifacts( repositorySession , repositoryId, ns,
project, version ) )
{
stats.setTotalArtifactCount( stats.getTotalArtifactCount() + 1 );

View File

@ -22,10 +22,13 @@ package org.apache.archiva.audit;
import org.apache.archiva.metadata.model.facets.AuditEvent;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import javax.inject.Inject;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
@ -49,43 +52,46 @@ public class DefaultAuditManager
private static final TimeZone UTC_TIME_ZONE = TimeZone.getTimeZone( "UTC" );
@Inject
RepositorySessionFactory repositorySessionFactory;
@Override
public List<AuditEvent> getMostRecentAuditEvents( MetadataRepository metadataRepository,
List<String> repositoryIds )
throws MetadataRepositoryException
{
// TODO: consider a more efficient implementation that directly gets the last ten from the content repository
List<AuditRecord> records = new ArrayList<>();
for ( String repositoryId : repositoryIds )
{
List<String> names = metadataRepository.getMetadataFacets( , repositoryId, AuditEvent.FACET_ID );
for ( String name : names )
{
records.add( new AuditRecord( repositoryId, name ) );
try(RepositorySession session = repositorySessionFactory.createSession()) {
// TODO: consider a more efficient implementation that directly gets the last ten from the content repository
List<AuditRecord> records = new ArrayList<>();
for (String repositoryId : repositoryIds) {
List<String> names = metadataRepository.getMetadataFacets(session, repositoryId, AuditEvent.FACET_ID);
for (String name : names) {
records.add(new AuditRecord(repositoryId, name));
}
}
}
Collections.sort( records );
records = records.subList( 0, records.size() < NUM_RECENT_EVENTS ? records.size() : NUM_RECENT_EVENTS );
Collections.sort(records);
records = records.subList(0, records.size() < NUM_RECENT_EVENTS ? records.size() : NUM_RECENT_EVENTS);
List<AuditEvent> events = new ArrayList<>( records.size() );
for ( AuditRecord record : records )
{
AuditEvent auditEvent = (AuditEvent) metadataRepository.getMetadataFacet( ,
record.repositoryId,
AuditEvent.FACET_ID, record.name );
events.add( auditEvent );
List<AuditEvent> events = new ArrayList<>(records.size());
for (AuditRecord record : records) {
AuditEvent auditEvent = (AuditEvent) metadataRepository.getMetadataFacet(session,
record.repositoryId,
AuditEvent.FACET_ID, record.name);
events.add(auditEvent);
}
return events;
}
return events;
}
@Override
public void addAuditEvent( MetadataRepository repository, AuditEvent event )
throws MetadataRepositoryException
{
// ignore those with no repository - they will still be logged to the textual audit log
if ( event.getRepositoryId() != null )
{
repository.addMetadataFacet( , event.getRepositoryId(), event );
try(RepositorySession session = repositorySessionFactory.createSession()) {
// ignore those with no repository - they will still be logged to the textual audit log
if (event.getRepositoryId() != null) {
repository.addMetadataFacet(session, event.getRepositoryId(), event);
}
}
}
@ -93,7 +99,9 @@ public class DefaultAuditManager
public void deleteAuditEvents( MetadataRepository metadataRepository, String repositoryId )
throws MetadataRepositoryException
{
metadataRepository.removeMetadataFacets( , repositoryId, AuditEvent.FACET_ID );
try(RepositorySession session = repositorySessionFactory.createSession()) {
metadataRepository.removeMetadataFacets(session, repositoryId, AuditEvent.FACET_ID);
}
}
@Override
@ -110,44 +118,37 @@ public class DefaultAuditManager
Date endTime )
throws MetadataRepositoryException
{
List<AuditEvent> results = new ArrayList<>();
for ( String repositoryId : repositoryIds )
{
List<String> list = metadataRepository.getMetadataFacets( , repositoryId, AuditEvent.FACET_ID );
for ( String name : list )
{
try
{
Date date = createNameFormat().parse( name );
if ( ( startTime == null || !date.before( startTime ) ) && ( endTime == null || !date.after(
endTime ) ) )
{
AuditEvent event = (AuditEvent) metadataRepository.getMetadataFacet( ,
repositoryId,
AuditEvent.FACET_ID, name );
try(RepositorySession session = repositorySessionFactory.createSession()) {
List<AuditEvent> results = new ArrayList<>();
for (String repositoryId : repositoryIds) {
List<String> list = metadataRepository.getMetadataFacets(session, repositoryId, AuditEvent.FACET_ID);
for (String name : list) {
try {
Date date = createNameFormat().parse(name);
if ((startTime == null || !date.before(startTime)) && (endTime == null || !date.after(
endTime))) {
AuditEvent event = (AuditEvent) metadataRepository.getMetadataFacet(session,
repositoryId,
AuditEvent.FACET_ID, name);
if ( resource == null || event.getResource().startsWith( resource ) )
{
results.add( event );
if (resource == null || event.getResource().startsWith(resource)) {
results.add(event);
}
}
} catch (ParseException e) {
log.error("Invalid audit event found in the metadata repository: {}", e.getMessage());
// continue and ignore this one
}
}
catch ( ParseException e )
{
log.error( "Invalid audit event found in the metadata repository: {}", e.getMessage() );
// continue and ignore this one
}
Collections.sort(results, new Comparator<AuditEvent>() {
@Override
public int compare(AuditEvent o1, AuditEvent o2) {
return o2.getTimestamp().compareTo(o1.getTimestamp());
}
}
});
return results;
}
Collections.sort( results, new Comparator<AuditEvent>()
{
@Override
public int compare( AuditEvent o1, AuditEvent o2 )
{
return o2.getTimestamp().compareTo( o1.getTimestamp() );
}
} );
return results;
}
private static SimpleDateFormat createNameFormat()

View File

@ -77,14 +77,14 @@ public class MetadataAuditListener
{
log.warn( "Unable to write audit event to repository: {}", e.getMessage(), e );
}
catch ( org.apache.archiva.metadata.repository.MetadataSessionException e )
{
e.printStackTrace( );
}
finally
{
repositorySession.close();
}
}
}
}

View File

@ -23,6 +23,9 @@ import junit.framework.TestCase;
import org.apache.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.archiva.metadata.model.facets.AuditEvent;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
import org.easymock.EasyMock;
import org.easymock.IMocksControl;
@ -52,6 +55,8 @@ public class AuditManagerTest
private MetadataRepository metadataRepository;
private RepositorySessionFactory repositorySessionFactory;
private static final String AUDIT_EVENT_BASE = "2010/01/18/123456.";
private static final String TEST_REPO_ID = "test-repo";
@ -67,6 +72,7 @@ public class AuditManagerTest
private static final SimpleDateFormat TIMESTAMP_FORMAT = createTimestampFormat();
private static final DecimalFormat MILLIS_FORMAT = new DecimalFormat( "000" );
private IMocksControl factoryControl;
private static SimpleDateFormat createTimestampFormat()
{
@ -87,6 +93,9 @@ public class AuditManagerTest
metadataRepositoryControl = EasyMock.createControl();
metadataRepository = metadataRepositoryControl.createMock( MetadataRepository.class );
factoryControl = EasyMock.createControl();
repositorySessionFactory = factoryControl.createMock(RepositorySessionFactory.class);
ManagedRepositoryConfiguration repository = new ManagedRepositoryConfiguration();
repository.setId( TEST_REPO_ID );
repository.setLocation( "" );
@ -104,14 +113,15 @@ public class AuditManagerTest
expectedEvents.add( event );
}
try(RepositorySession session = repositorySessionFactory.createSession()) {
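// factoryControl has not been replayed yet, so createSession() returns null here (EasyMock record-mode default); the expectations below are recorded against that session value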
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
getEventNames(expectedEvents));
for (AuditEvent event : expectedEvents.subList(1, expectedEvents.size())) {
EasyMock.expect(
metadataRepository.getMetadataFacet(session, TEST_REPO_ID, AuditEvent.FACET_ID, event.getName())).andReturn(
event);
}
}
metadataRepositoryControl.replay();
@ -141,13 +151,14 @@ public class AuditManagerTest
expectedEvents.add( createEvent( AUDIT_EVENT_BASE + MILLIS_FORMAT.format( i ) ) );
}
try(RepositorySession session = repositorySessionFactory.createSession()) {
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
getEventNames(expectedEvents));
for (AuditEvent event : expectedEvents) {
EasyMock.expect(
metadataRepository.getMetadataFacet(session, TEST_REPO_ID, AuditEvent.FACET_ID, event.getName())).andReturn(
event);
}
}
metadataRepositoryControl.replay();
@ -184,15 +195,16 @@ public class AuditManagerTest
eventNames.get( repositoryId ).add( event.getName() );
}
try(RepositorySession session = repositorySessionFactory.createSession()) {
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
eventNames.get(TEST_REPO_ID));
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID_2, AuditEvent.FACET_ID)).andReturn(
eventNames.get(TEST_REPO_ID_2));
for (AuditEvent event : events.subList(1, events.size())) {
EasyMock.expect(metadataRepository.getMetadataFacet(session, event.getRepositoryId(),
AuditEvent.FACET_ID, event.getName())).andReturn(event);
}
}
metadataRepositoryControl.replay();
@ -218,8 +230,10 @@ public class AuditManagerTest
{
try(RepositorySession session = repositorySessionFactory.createSession()) {
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
Collections.<String>emptyList());
}
metadataRepositoryControl.replay();
assertTrue( auditManager.getMostRecentAuditEvents( metadataRepository,
@ -235,7 +249,9 @@ public class AuditManagerTest
{
AuditEvent event = createEvent( new Date() );
try(RepositorySession session = repositorySessionFactory.createSession()) {
metadataRepository.addMetadataFacet(session, TEST_REPO_ID, event);
}
metadataRepositoryControl.replay();
@ -265,7 +281,9 @@ public class AuditManagerTest
throws Exception
{
try(RepositorySession session = repositorySessionFactory.createSession()) {
metadataRepository.removeMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID);
}
metadataRepositoryControl.replay();
@ -286,12 +304,14 @@ public class AuditManagerTest
AuditEvent expectedEvent = createEvent( expectedTimestamp );
AuditEvent event3 = createEvent( new Date( current.getTime() - 1000 ) );
try(RepositorySession session = repositorySessionFactory.createSession()) {
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
Arrays.asList(event1.getName(), expectedEvent.getName(), event3.getName()));
// only match the middle one
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent.getName())).andReturn(expectedEvent);
}
metadataRepositoryControl.replay();
@ -318,13 +338,15 @@ public class AuditManagerTest
Date ts3 = new Date( current.getTime() - 1000 );
AuditEvent expectedEvent3 = createEvent( ts3 );
try(RepositorySession session = repositorySessionFactory.createSession()) {
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
Arrays.asList(event1.getName(), expectedEvent2.getName(), expectedEvent3.getName()));
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent2.getName())).andReturn(expectedEvent2);
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent3.getName())).andReturn(expectedEvent3);
}
metadataRepositoryControl.replay();
List<AuditEvent> events =
@ -350,15 +372,16 @@ public class AuditManagerTest
AuditEvent expectedEvent2 = createEvent( expectedTimestamp );
AuditEvent event3 = createEvent( new Date( current.getTime() - 1000 ) );
try(RepositorySession session = repositorySessionFactory.createSession()) {
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
Arrays.asList(expectedEvent1.getName(), expectedEvent2.getName(), event3.getName()));
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent1.getName())).andReturn(expectedEvent1);
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent2.getName())).andReturn(expectedEvent2);
}
metadataRepositoryControl.replay();
List<AuditEvent> events =
@ -386,17 +409,19 @@ public class AuditManagerTest
Date ts3 = new Date( current.getTime() - 1000 );
AuditEvent expectedEvent3 = createEvent( ts3 );
try(RepositorySession session = repositorySessionFactory.createSession()) {
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
Arrays.asList(expectedEvent1.getName(), expectedEvent2.getName(), expectedEvent3.getName()));
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent1.getName())).andReturn(expectedEvent1);
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent2.getName())).andReturn(expectedEvent2);
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent3.getName())).andReturn(expectedEvent3);
}
metadataRepositoryControl.replay();
List<AuditEvent> events =
@ -426,18 +451,20 @@ public class AuditManagerTest
AuditEvent expectedEvent3 = createEvent( ts3 );
try(RepositorySession session = repositorySessionFactory.createSession()) {
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
Arrays.asList(expectedEvent1.getName(), expectedEvent2.getName(), expectedEvent3.getName()));
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent1.getName())).andReturn(expectedEvent1);
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent2.getName())).andReturn(expectedEvent2);
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent3.getName())).andReturn(expectedEvent3);
}
metadataRepositoryControl.replay();
List<AuditEvent> events =
@ -464,16 +491,18 @@ public class AuditManagerTest
AuditEvent expectedEvent3 = createEvent( new Date( current.getTime() - 1000 ) );
try(RepositorySession session = repositorySessionFactory.createSession()) {
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
Arrays.asList(expectedEvent1.getName(), expectedEvent2.getName(), expectedEvent3.getName()));
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID, AuditEvent.FACET_ID, expectedEvent1.getName())).andReturn(expectedEvent1);
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID, AuditEvent.FACET_ID, expectedEvent2.getName())).andReturn(expectedEvent2);
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID, AuditEvent.FACET_ID, expectedEvent3.getName())).andReturn(expectedEvent3);
}
metadataRepositoryControl.replay();
List<AuditEvent> events =
@ -499,23 +528,23 @@ public class AuditManagerTest
Date ts3 = new Date( current.getTime() - 1000 );
AuditEvent expectedEvent3 = createEvent( ts3 );
try(RepositorySession session = repositorySessionFactory.createSession()) {
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID))
.andReturn(Arrays.asList(expectedEvent1.getName(), expectedEvent3.getName()));
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID_2, AuditEvent.FACET_ID))
.andReturn(Arrays.asList(expectedEvent2.getName()));
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID, AuditEvent.FACET_ID, expectedEvent1.getName()))
.andReturn(expectedEvent1);
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID_2, AuditEvent.FACET_ID, expectedEvent2.getName()))
.andReturn(expectedEvent2);
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID, AuditEvent.FACET_ID, expectedEvent3.getName()))
.andReturn(expectedEvent3);
}
metadataRepositoryControl.replay();
List<AuditEvent> events =
@ -542,9 +571,10 @@ public class AuditManagerTest
String name2 = createEvent( expectedTimestamp ).getName();
String name3 = createEvent( new Date( current.getTime() - 1000 ) ).getName();
try(RepositorySession session = repositorySessionFactory.createSession()) {
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID))
.andReturn(Arrays.asList(name1, name2, name3));
}
metadataRepositoryControl.replay();
List<AuditEvent> events =

View File

@ -87,10 +87,10 @@ public class CassandraMetadataRepositoryTest
.withId( TEST_PROJECT_VERSION ) //
.build();
this.cmr.removeProjectVersion( null, TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION );
assertThat(
repository.getProjectVersion( null , TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION ) ).isNull();
assertThat( cmr.getMailingLists( key ) ).isNotNull().isEmpty();

View File

@ -85,13 +85,12 @@ public class RepositoriesNamespaceTest
try
{
cmr.updateNamespace( null , "release", "org" );
r = cmr.getRepository( "release" );
assertThat( r ).isNotNull();
assertThat( cmr.getRepositories() ).isNotEmpty().hasSize( 1 );
assertThat( cmr.getNamespaces( "release" ) ).isNotEmpty().hasSize( 1 );
n = cmr.getNamespace( "release", "org" );
@ -99,14 +98,14 @@ public class RepositoriesNamespaceTest
assertThat( n ).isNotNull();
assertThat( n.getRepository() ).isNotNull();
cmr.updateNamespace( null, "release", "org.apache" );
r = cmr.getRepository( "release" );
assertThat( r ).isNotNull();
assertThat( cmr.getNamespaces( "release" ) ).isNotEmpty().hasSize( 2 );
cmr.removeNamespace(null , "release", "org.apache" );
assertThat( cmr.getNamespaces( "release" ) ).isNotEmpty().hasSize( 1 );
assertThat( cmr.getNamespaces( "release" ) ).containsExactly( "org" );
@ -114,16 +113,16 @@ public class RepositoriesNamespaceTest
projectMetadata.setId( "theproject" );
projectMetadata.setNamespace( "org" );
cmr.updateProject(null , "release", projectMetadata );
assertThat( cmr.getProjects(null , "release", "org" ) ).isNotEmpty().hasSize( 1 ).containsExactly(
"theproject" );
cmr.removeProject(null , "release", "org", "theproject" );
assertThat( cmr.getProjects(null , "release", "org" ) ).isEmpty();
cmr.removeRepository(null , "release" );
r = cmr.getRepository( "release" );

View File

@ -1458,7 +1458,7 @@ public class JcrMetadataRepository
public List<ArtifactMetadata> searchArtifacts( RepositorySession session, String repositoryId, String text, boolean exact )
throws MetadataRepositoryException
{
return searchArtifacts( session, repositoryId, null, text, exact );
}
@Override

View File

@ -20,12 +20,7 @@ package org.apache.archiva.metadata.repository.jcr;
*/
import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.repository.AbstractRepositorySessionFactory;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.MetadataResolver;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.RepositorySessionFactoryBean;
import org.apache.archiva.metadata.repository.*;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.StopWatch;
import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;
@ -95,7 +90,7 @@ public class JcrRepositorySessionFactory extends AbstractRepositorySessionFactor
return this.metadataResolver;
}
protected void initialize()
{
// skip initialisation if not jcr
@ -107,7 +102,9 @@ public class JcrRepositorySessionFactory extends AbstractRepositorySessionFactor
StopWatch stopWatch = new StopWatch();
stopWatch.start();
if (applicationContext!=null) {
metadataFacetFactories = applicationContext.getBeansOfType(MetadataFacetFactory.class);
}
// olamy with spring the "id" is now "metadataFacetFactory#hint"
// whereas was only hint with plexus so let remove metadataFacetFactory#
Map<String, MetadataFacetFactory> cleanedMetadataFacetFactories =
@ -115,8 +112,13 @@ public class JcrRepositorySessionFactory extends AbstractRepositorySessionFactor
for ( Map.Entry<String, MetadataFacetFactory> entry : metadataFacetFactories.entrySet() )
{
if (entry.getKey().contains("#")) {
cleanedMetadataFacetFactories.put( StringUtils.substringAfterLast( entry.getKey(), "#" ),
entry.getValue() );
} else {
cleanedMetadataFacetFactories.put(entry.getKey(), entry.getValue());
}
}
metadataFacetFactories = cleanedMetadataFacetFactories;
@ -159,4 +161,17 @@ public class JcrRepositorySessionFactory extends AbstractRepositorySessionFactor
{
super.close();
}
public void setMetadataResolver(MetadataResolver metadataResolver) {
this.metadataResolver = metadataResolver;
}
public JcrMetadataRepository getMetadataRepository() {
return jcrMetadataRepository;
}
public void setMetadataFacetFactories(Map<String, MetadataFacetFactory> metadataFacetFactories) {
this.metadataFacetFactories = metadataFacetFactories;
}
}

View File

@ -67,6 +67,9 @@ public class JcrSession extends RepositorySession implements AutoCloseable
@Override
protected boolean isDirty( )
{
if (super.isDirty()) {
return true;
}
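// otherwise ask JCR directly: hasPendingChanges() reports unsaved transient modifications on this session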
try
{
return jcrSession.hasPendingChanges( );
@ -105,4 +108,22 @@ public class JcrSession extends RepositorySession implements AutoCloseable
throw new MetadataSessionException( e.getMessage( ), e );
}
}
@Override
public void refresh() throws MetadataSessionException {
try {
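// Session.refresh(true) re-reads persisted state while keeping unsaved transient changes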
jcrSession.refresh(true);
} catch (RepositoryException e) {
throw new MetadataSessionException(e.getMessage(), e);
}
}
@Override
public void refreshAndDiscard() throws MetadataSessionException {
try {
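// Session.refresh(false) re-reads persisted state and discards all unsaved transient changes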
jcrSession.refresh(false);
} catch (RepositoryException e) {
throw new MetadataSessionException(e.getMessage(), e);
}
}
}
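A minimal caller sketch of the two refresh variants (the session factory wiring here is illustrative, not part of this change):

try (RepositorySession session = repositorySessionFactory.createSession()) {
    session.refresh();            // re-read persisted state, keep unsaved local changes
    // ... inspect or update metadata ...
    session.refreshAndDiscard();  // re-read persisted state, drop unsaved local changes
}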

View File

@ -21,12 +21,10 @@ package org.apache.archiva.metadata.repository.jcr;
import com.google.common.collect.ImmutableSet;
import org.apache.commons.lang.time.StopWatch;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.oak.Oak;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.jcr.Jcr;
import org.apache.jackrabbit.oak.plugins.index.IndexUtils;
import org.apache.jackrabbit.oak.plugins.index.lucene.ExtractedTextCache;
import org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier;
import org.apache.jackrabbit.oak.plugins.index.lucene.IndexTracker;
import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexEditorProvider;
@ -35,6 +33,8 @@ import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.DocumentQueue;
import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.LocalIndexObserver;
import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.NRTIndexFactory;
import org.apache.jackrabbit.oak.plugins.index.lucene.reader.DefaultIndexReaderFactory;
import org.apache.jackrabbit.oak.plugins.index.search.ExtractedTextCache;
import org.apache.jackrabbit.oak.plugins.name.Namespaces;
import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders;
import org.apache.jackrabbit.oak.segment.file.FileStore;
import org.apache.jackrabbit.oak.segment.file.FileStoreBuilder;
@ -43,6 +43,7 @@ import org.apache.jackrabbit.oak.spi.commit.Observer;
import org.apache.jackrabbit.oak.spi.lifecycle.RepositoryInitializer;
import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider;
import org.apache.jackrabbit.oak.spi.mount.Mounts;
import org.apache.jackrabbit.oak.spi.namespace.NamespaceConstants;
import org.apache.jackrabbit.oak.spi.query.QueryIndexProvider;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
@ -61,6 +62,9 @@ import java.util.concurrent.Executors;
import static org.apache.archiva.metadata.repository.jcr.RepositoryFactory.StoreType.IN_MEMORY_TYPE;
import static org.apache.archiva.metadata.repository.jcr.RepositoryFactory.StoreType.SEGMENT_FILE_TYPE;
import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
import static org.apache.jackrabbit.JcrConstants.NT_UNSTRUCTURED;
import static org.apache.jackrabbit.oak.api.Type.NAME;
import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.INCLUDE_PROPERTY_TYPES;
/**
@ -117,10 +121,18 @@ public class RepositoryFactory
@Override
public void initialize( @Nonnull NodeBuilder root )
{
log.info( "Creating index " );
NodeBuilder namespaces;
if (!root.hasChildNode(NamespaceConstants.REP_NAMESPACES)) {
namespaces = Namespaces.createStandardMappings(root);
Namespaces.buildIndexNode(namespaces); // index node for faster lookup
} else {
namespaces = root.getChildNode(NamespaceConstants.REP_NAMESPACES);
}
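// register the custom archiva prefix up front; the index rules below refer to archiva:* node names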
Namespaces.addCustomMapping(namespaces, "http://archiva.apache.org/jcr/", "archiva");
log.info( "Creating index " );
NodeBuilder lucene = IndexUtils.getOrCreateOakIndex( root ).child( "lucene" );
lucene.setProperty( JCR_PRIMARYTYPE, "oak:QueryIndexDefinition", NAME );
lucene.setProperty( "compatVersion", 2 );
lucene.setProperty( "type", "lucene" );
@ -129,68 +141,74 @@ public class RepositoryFactory
// lucene.setProperty("refresh",true);
lucene.setProperty( "async", ImmutableSet.of( "async", "sync" ), Type.STRINGS );
NodeBuilder rules = lucene.child( "indexRules" ).
setProperty( JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME );
rules.setProperty( ":childOrder", ImmutableSet.of( "archiva:projectVersion", //
"archiva:artifact", //
"archiva:facet", //
"archiva:namespace", //
"archiva:project" ), //
Type.STRINGS );
NodeBuilder allProps = rules.child( "archiva:projectVersion" )
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME)//
.child( "properties" ) //
.setProperty( JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME ) //
.setProperty( ":childOrder", ImmutableSet.of( "allProps" ), Type.STRINGS ) //
.setProperty( "indexNodeName", true ) //
.child( "allProps" ) //
.setProperty( JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME );
allProps.setProperty( "name", ".*" );
allProps.setProperty( "isRegexp", true );
allProps.setProperty( "nodeScopeIndex", true );
allProps.setProperty( "index", true );
allProps.setProperty( "analyzed", true );
// allProps.setProperty("propertyIndex",true);
allProps = rules.child("archiva:artifact") //
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME)
.child("properties") //
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME) //
.setProperty(":childOrder", ImmutableSet.of("allProps"), Type.STRINGS) //
.setProperty("indexNodeName", true)
.child("allProps")
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME);
allProps.setProperty( "name", ".*" );
allProps.setProperty( "isRegexp", true );
allProps.setProperty( "nodeScopeIndex", true );
allProps.setProperty( "index", true );
allProps.setProperty( "analyzed", true );
allProps = rules.child( "archiva:facet" ) //
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME)
.child( "properties" ) //
.setProperty( JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME ) //
.setProperty( ":childOrder", ImmutableSet.of( "allProps" ), Type.STRINGS ) //
.setProperty( "indexNodeName", true ) //
.child( "allProps" ) //
.setProperty( JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME );
allProps.setProperty( "name", ".*" );
allProps.setProperty( "isRegexp", true );
allProps.setProperty( "nodeScopeIndex", true );
allProps.setProperty( "index", true );
allProps.setProperty( "analyzed", true );
allProps = rules.child( "archiva:namespace" ) //
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME)
.child( "properties" ) //
.setProperty( JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME ) //
.setProperty( ":childOrder", ImmutableSet.of( "allProps" ), Type.STRINGS ) //
.setProperty( "indexNodeName", true ) //
.child( "allProps" ) //
.setProperty( JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME );
allProps.setProperty( "name", ".*" );
allProps.setProperty( "isRegexp", true );
allProps.setProperty( "nodeScopeIndex", true );
allProps.setProperty( "index", true );
allProps.setProperty( "analyzed", true );
allProps = rules.child( "archiva:project" ) //
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME)
.child( "properties" ) //
.setProperty( JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME ) //
.setProperty( ":childOrder", ImmutableSet.of( "allProps" ), Type.STRINGS ) //
.setProperty( "indexNodeName", true ) //
.child( "allProps" ) //
.setProperty( JCR_PRIMARYTYPE, NT_UNSTRUCTURED, NAME );
allProps.setProperty( "name", ".*" );
allProps.setProperty( "isRegexp", true );
allProps.setProperty( "nodeScopeIndex", true );

View File

@ -21,6 +21,8 @@ package org.apache.archiva.metadata.repository.jcr;
import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.repository.AbstractMetadataRepositoryTest;
import org.apache.archiva.metadata.repository.DefaultMetadataResolver;
import org.apache.archiva.metadata.repository.MetadataResolver;
import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;
import org.junit.After;
import org.junit.Before;
@ -40,7 +42,6 @@ import java.util.Map;
public class JcrMetadataRepositoryTest
extends AbstractMetadataRepositoryTest
{
@Inject
private ApplicationContext applicationContext;
@ -70,26 +71,34 @@ public class JcrMetadataRepositoryTest
Map<String, MetadataFacetFactory> factories = createTestMetadataFacetFactories();
// // TODO: probably don't need to use Spring for this
// jcrMetadataRepository = new JcrMetadataRepository( factories, jcrRepository );
//
// try
// {
// Session session = jcrMetadataRepository.login();
//
// // set up namespaces, etc.
// JcrMetadataRepository.initializeNodeTypes( session );
//
// // removing content is faster than deleting and re-copying the files from target/jcr
// session.getRootNode().getNode( "repositories" ).remove();
// session.save();
// }
// catch ( RepositoryException e )
// {
// // ignore
// }
// this.repository = jcrMetadataRepository;
JcrRepositorySessionFactory jcrSessionFactory = new JcrRepositorySessionFactory();
jcrSessionFactory.setMetadataResolver(new DefaultMetadataResolver());
jcrSessionFactory.setMetadataFacetFactories(factories);
jcrSessionFactory.open();
this.sessionFactory = jcrSessionFactory;
this.repository = jcrSessionFactory.getMetadataRepository();
}
@ -98,7 +107,8 @@ public class JcrMetadataRepositoryTest
public void tearDown()
throws Exception
{
repository.close();
sessionFactory.close();
super.tearDown();
}

View File

@ -22,8 +22,10 @@ package org.apache.archiva.metadata.repository.stats;
import junit.framework.TestCase;
import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.repository.AbstractMetadataRepositoryTest;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.jcr.JcrMetadataRepository;
import org.apache.archiva.metadata.repository.jcr.JcrRepositorySessionFactory;
import org.apache.archiva.metadata.repository.jcr.RepositoryFactory;
import org.apache.archiva.metadata.repository.stats.model.DefaultRepositoryStatistics;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
@ -68,6 +70,8 @@ public class JcrRepositoryStatisticsGatheringTest
private static final String TEST_REPO = "test-repo";
JcrMetadataRepository repository;
JcrRepositorySessionFactory sessionFactory;
@Inject
private RepositorySessionFactory repositorySessionFactory;
@ -75,7 +79,7 @@ public class JcrRepositoryStatisticsGatheringTest
@Inject
private ApplicationContext applicationContext;
Session jcrSession;
private static Repository jcrRepository;
@ -108,17 +112,17 @@ public class JcrRepositoryStatisticsGatheringTest
// TODO: probably don't need to use Spring for this
JcrMetadataRepository jcrMetadataRepository = new JcrMetadataRepository( factories, jcrRepository );
jcrSession = jcrMetadataRepository.login();
try
{
jcrSession = jcrMetadataRepository.login();
// set up namespaces, etc.
JcrMetadataRepository.initializeNodeTypes(jcrSession);
// removing content is faster than deleting and re-copying the files from target/jcr
jcrSession.getRootNode().getNode( "repositories" ).remove();
}
catch ( RepositoryException e )
{
@ -126,6 +130,7 @@ public class JcrRepositoryStatisticsGatheringTest
}
this.repository = jcrMetadataRepository;
this.sessionFactory = new JcrRepositorySessionFactory();
}
private static void registerMixinNodeType( NodeTypeManager nodeTypeManager, String type )
@ -152,73 +157,74 @@ public class JcrRepositoryStatisticsGatheringTest
public void testJcrStatisticsQuery()
throws Exception
{
try(RepositorySession repSession = sessionFactory.createSession()) {
Calendar cal = Calendar.getInstance();
Date endTime = cal.getTime();
cal.add(Calendar.HOUR, -1);
Date startTime = cal.getTime();
loadContentIntoRepo(TEST_REPO);
loadContentIntoRepo("another-repo");
DefaultRepositoryStatistics testedStatistics = new DefaultRepositoryStatistics();
testedStatistics.setNewFileCount(NEW_FILE_COUNT);
testedStatistics.setTotalFileCount(TOTAL_FILE_COUNT);
testedStatistics.setScanStartTime(startTime);
testedStatistics.setScanEndTime(endTime);
repository.populateStatistics(repSession, repository, TEST_REPO, testedStatistics);
DefaultRepositoryStatistics expectedStatistics = new DefaultRepositoryStatistics();
expectedStatistics.setNewFileCount(NEW_FILE_COUNT);
expectedStatistics.setTotalFileCount(TOTAL_FILE_COUNT);
expectedStatistics.setScanEndTime(endTime);
expectedStatistics.setScanStartTime(startTime);
expectedStatistics.setTotalArtifactFileSize(95954585);
expectedStatistics.setTotalArtifactCount(269);
expectedStatistics.setTotalGroupCount(1);
expectedStatistics.setTotalProjectCount(43);
expectedStatistics.setTotalCountForType("zip", 1);
expectedStatistics.setTotalCountForType("gz", 1); // FIXME: should be tar.gz
expectedStatistics.setTotalCountForType("java-source", 10);
expectedStatistics.setTotalCountForType("jar", 108);
expectedStatistics.setTotalCountForType("xml", 3);
expectedStatistics.setTotalCountForType("war", 2);
expectedStatistics.setTotalCountForType("pom", 144);
expectedStatistics.setRepositoryId(TEST_REPO);
logger.info("getTotalCountForType: {}", testedStatistics.getTotalCountForType() );
assertEquals( NEW_FILE_COUNT, testedStatistics.getNewFileCount() );
assertEquals( TOTAL_FILE_COUNT, testedStatistics.getTotalFileCount() );
assertEquals( endTime, testedStatistics.getScanEndTime() );
assertEquals( startTime, testedStatistics.getScanStartTime() );
assertEquals( 95954585, testedStatistics.getTotalArtifactFileSize() );
assertEquals( 269, testedStatistics.getTotalArtifactCount() );
assertEquals( 1, testedStatistics.getTotalGroupCount() );
assertEquals( 43, testedStatistics.getTotalProjectCount() );
assertEquals( 1, testedStatistics.getTotalCountForType( "zip" ) );
assertEquals( 1, testedStatistics.getTotalCountForType( "gz" ) );
assertEquals( 10, testedStatistics.getTotalCountForType( "java-source" ) );
assertEquals( 108, testedStatistics.getTotalCountForType( "jar" ) );
assertEquals( 3, testedStatistics.getTotalCountForType( "xml" ) );
assertEquals( 2, testedStatistics.getTotalCountForType( "war" ) );
assertEquals( 144, testedStatistics.getTotalCountForType( "pom" ) );
assertEquals( 10, testedStatistics.getTotalCountForType( "java-source" ) );
logger.info("getTotalCountForType: {}", testedStatistics.getTotalCountForType());
assertEquals(NEW_FILE_COUNT, testedStatistics.getNewFileCount());
assertEquals(TOTAL_FILE_COUNT, testedStatistics.getTotalFileCount());
assertEquals(endTime, testedStatistics.getScanEndTime());
assertEquals(startTime, testedStatistics.getScanStartTime());
assertEquals(95954585, testedStatistics.getTotalArtifactFileSize());
assertEquals(269, testedStatistics.getTotalArtifactCount());
assertEquals(1, testedStatistics.getTotalGroupCount());
assertEquals(43, testedStatistics.getTotalProjectCount());
assertEquals(1, testedStatistics.getTotalCountForType("zip"));
assertEquals(1, testedStatistics.getTotalCountForType("gz"));
assertEquals(10, testedStatistics.getTotalCountForType("java-source"));
assertEquals(108, testedStatistics.getTotalCountForType("jar"));
assertEquals(3, testedStatistics.getTotalCountForType("xml"));
assertEquals(2, testedStatistics.getTotalCountForType("war"));
assertEquals(144, testedStatistics.getTotalCountForType("pom"));
assertEquals(10, testedStatistics.getTotalCountForType("java-source"));
}
}
private void loadContentIntoRepo( String repoId )
throws RepositoryException, IOException
{
Node n = JcrUtils.getOrAddNode( jcrSession.getRootNode(), "repositories" );
n = JcrUtils.getOrAddNode( n, repoId );
n = JcrUtils.getOrAddNode( n, "content" );
n = JcrUtils.getOrAddNode( n, "org" );
n = JcrUtils.getOrAddNode( n, "apache" );
GZIPInputStream inputStream = new GZIPInputStream( getClass().getResourceAsStream( "/artifacts.xml.gz" ) );
jcrSession.importXML( n.getPath(), inputStream, ImportUUIDBehavior.IMPORT_UUID_CREATE_NEW );
jcrSession.save();
}
}

View File

@ -30,6 +30,7 @@
</appenders>
<loggers>
<logger name="org.apache.jackrabbit" level="info"/>
<logger name="org.apache.jackrabbit.oak" level="debug"/>
<root level="info" includeLocation="true">
<appender-ref ref="console"/>
</root>

View File

@ -24,12 +24,15 @@ import org.apache.archiva.metadata.model.facets.RepositoryProblemFacet;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataException;
import org.apache.archiva.repository.events.RepositoryListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import javax.inject.Inject;
/**
* Process repository management events and respond appropriately.
*
@ -40,6 +43,9 @@ public class RepositoryProblemEventListener
{
private Logger log = LoggerFactory.getLogger( RepositoryProblemEventListener.class );
@Inject
private RepositorySessionFactory repositorySessionFactory;
// FIXME: move to session
@Override
public void deleteArtifact( MetadataRepository metadataRepository, String repositoryId, String namespace,
@ -47,9 +53,9 @@ public class RepositoryProblemEventListener
{
String name = RepositoryProblemFacet.createName( namespace, project, version, id );
try(RepositorySession session = repositorySessionFactory.createSession())
{
metadataRepository.removeMetadataFacet(session , repositoryId, RepositoryProblemFacet.FACET_ID, name );
}
catch ( MetadataRepositoryException e )
{
@ -67,7 +73,7 @@ public class RepositoryProblemEventListener
try
{
MetadataRepository metadataRepository = session.getRepository();
metadataRepository.removeMetadataFacet(session , repoId, RepositoryProblemFacet.FACET_ID, name );
session.markDirty();
}
catch ( MetadataRepositoryException e )
@ -91,7 +97,7 @@ public class RepositoryProblemEventListener
try
{
session.getRepository().addMetadataFacet(session , repoId, problem );
session.markDirty();
}
catch ( MetadataRepositoryException e )

View File

@ -97,6 +97,7 @@ public class DuplicateArtifactsConsumer
@Named ( value = "repositoryPathTranslator#maven2" )
private RepositoryPathTranslator pathTranslator;
private RepositorySession repositorySession;
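// note: nothing in this hunk assigns this field; presumably it is injected or set when a scan begins (not shown in this diff)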
@Override
@ -172,7 +173,7 @@ public class DuplicateArtifactsConsumer
Collection<ArtifactMetadata> results;
try
{
results = metadataRepository.getArtifactsByChecksum(repositorySession , repoId, checksumSha1 );
}
catch ( MetadataRepositoryException e )
{
@ -224,7 +225,7 @@ public class DuplicateArtifactsConsumer
try
{
metadataRepository.addMetadataFacet(repositorySession , repoId, problem );
}
catch ( MetadataRepositoryException e )
{

View File

@ -85,6 +85,7 @@ public class DuplicateArtifactsConsumerTest
@Inject
ApplicationContext applicationContext;
RepositorySessionFactory repositorySessionFactory;
@Before
@Override
@ -99,6 +100,7 @@ public class DuplicateArtifactsConsumerTest
config.setLocation( Paths.get( "target/test-repository" ).toAbsolutePath().toUri() );
metadataRepository = mock( MetadataRepository.class );
repositorySessionFactory = mock(RepositorySessionFactory.class);
RepositorySession session = mock( RepositorySession.class );
when( session.getRepository() ).thenReturn( metadataRepository );
@ -114,14 +116,15 @@ public class DuplicateArtifactsConsumerTest
public void testConsumerArtifactNotDuplicated()
throws Exception
{
RepositorySession session = repositorySessionFactory.createSession();
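// repositorySessionFactory is an un-stubbed Mockito mock, so createSession() returns null by default; the stubbing below matches that same null reference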
when( metadataRepository.getArtifactsByChecksum(session , TEST_REPO, TEST_CHECKSUM ) ).thenReturn(
Arrays.asList( TEST_METADATA ) );
consumer.beginScan( config, new Date() );
consumer.processFile( TEST_FILE );
consumer.completeScan();
verify( metadataRepository, never() ).addMetadataFacet(session , eq( TEST_REPO ), Matchers.<MetadataFacet>anyObject() );
}
// TODO: Doesn't currently work
@ -142,7 +145,8 @@ public class DuplicateArtifactsConsumerTest
public void testConsumerArtifactDuplicated()
throws Exception
{
RepositorySession session = repositorySessionFactory.createSession();
when( metadataRepository.getArtifactsByChecksum(session , TEST_REPO, TEST_CHECKSUM ) ).thenReturn(
Arrays.asList( TEST_METADATA, createMetadata( "1.0" ) ) );
consumer.beginScan( config, new Date() );
@ -150,7 +154,7 @@ public class DuplicateArtifactsConsumerTest
consumer.completeScan();
ArgumentCaptor<RepositoryProblemFacet> argument = ArgumentCaptor.forClass( RepositoryProblemFacet.class );
verify( metadataRepository ).addMetadataFacet(session , eq( TEST_REPO ), argument.capture() );
RepositoryProblemFacet problem = argument.getValue();
assertProblem( problem );
}
@ -159,7 +163,8 @@ public class DuplicateArtifactsConsumerTest
public void testConsumerArtifactDuplicatedButSelfNotInMetadataRepository()
throws Exception
{
RepositorySession session = repositorySessionFactory.createSession();
when( metadataRepository.getArtifactsByChecksum(session , TEST_REPO, TEST_CHECKSUM ) ).thenReturn(
Arrays.asList( createMetadata( "1.0" ) ) );
consumer.beginScan( config, new Date() );
@ -167,7 +172,7 @@ public class DuplicateArtifactsConsumerTest
consumer.completeScan();
ArgumentCaptor<RepositoryProblemFacet> argument = ArgumentCaptor.forClass( RepositoryProblemFacet.class );
verify( metadataRepository ).addMetadataFacet(session , eq( TEST_REPO ), argument.capture() );
RepositoryProblemFacet problem = argument.getValue();
assertProblem( problem );
}
@ -190,8 +195,8 @@ public class DuplicateArtifactsConsumerTest
{
consumer.completeScan();
}
RepositorySession session = repositorySessionFactory.createSession();
verify( metadataRepository, never() ).addMetadataFacet(session , eq( TEST_REPO ), Matchers.<MetadataFacet>anyObject() );
}
@Test
@ -202,15 +207,16 @@ public class DuplicateArtifactsConsumerTest
// No exception unnecessarily for something we can't report on
consumer.processFile( "com/example/invalid-artifact.txt" );
consumer.completeScan();
RepositorySession session = repositorySessionFactory.createSession();
verify( metadataRepository, never() ).addMetadataFacet(session , eq( TEST_REPO ), Matchers.<MetadataFacet>anyObject() );
}
@Test
public void testConsumerArtifactNotAnArtifactPathResults()
throws Exception
{
RepositorySession session = repositorySessionFactory.createSession();
when( metadataRepository.getArtifactsByChecksum(session , eq( TEST_REPO ), anyString() ) ).thenReturn(
Arrays.asList( TEST_METADATA, createMetadata( "1.0" ) ) );
// override, this feels a little overspecified though
@ -222,7 +228,7 @@ public class DuplicateArtifactsConsumerTest
consumer.processFile( "com/example/invalid-artifact.txt" );
consumer.completeScan();
verify( metadataRepository, never() ).addMetadataFacet(session , eq( TEST_REPO ), Matchers.<MetadataFacet>anyObject() );
}
private static void assertProblem( RepositoryProblemFacet problem )

View File

@ -21,6 +21,8 @@ package org.apache.archiva.metadata.repository.stats;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.stats.model.DefaultRepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.model.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.model.RepositoryStatisticsManager;
@ -31,6 +33,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import javax.inject.Inject;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
@ -52,11 +55,16 @@ public class DefaultRepositoryStatisticsManager
private RepositoryWalkingStatisticsProvider walkingProvider = new RepositoryWalkingStatisticsProvider();
@Inject
RepositorySessionFactory repositorySessionFactory;
@Override
public boolean hasStatistics( MetadataRepository metadataRepository, String repositoryId )
throws MetadataRepositoryException
{
try(RepositorySession session = repositorySessionFactory.createSession()) {
return metadataRepository.hasMetadataFacet(session, repositoryId, DefaultRepositoryStatistics.FACET_ID);
}
}
@Override
@ -65,26 +73,24 @@ public class DefaultRepositoryStatisticsManager
{
StopWatch stopWatch = new StopWatch();
stopWatch.start();
try(RepositorySession session = repositorySessionFactory.createSession()) {
// TODO: consider a more efficient implementation that directly gets the last one from the content repository
List<String> scans = metadataRepository.getMetadataFacets(session, repositoryId, DefaultRepositoryStatistics.FACET_ID);
if (scans == null) {
return null;
}
Collections.sort(scans);
if (!scans.isEmpty()) {
String name = scans.get(scans.size() - 1);
RepositoryStatistics repositoryStatistics =
RepositoryStatistics.class.cast(metadataRepository.getMetadataFacet(session, repositoryId,
RepositoryStatistics.FACET_ID, name));
stopWatch.stop();
log.debug("time to find last RepositoryStatistics: {} ms", stopWatch.getTime());
return repositoryStatistics;
} else {
return null;
}
}
}
@ -93,42 +99,43 @@ public class DefaultRepositoryStatisticsManager
Date endTime, long totalFiles, long newFiles )
throws MetadataRepositoryException
{
try(RepositorySession session = repositorySessionFactory.createSession()) {
DefaultRepositoryStatistics repositoryStatistics = new DefaultRepositoryStatistics();
repositoryStatistics.setRepositoryId(repositoryId);
repositoryStatistics.setScanStartTime(startTime);
repositoryStatistics.setScanEndTime(endTime);
repositoryStatistics.setTotalFileCount(totalFiles);
repositoryStatistics.setNewFileCount(newFiles);
// TODO
// In the future, instead of being tied to a scan we might want to record information in the fly based on
// events that are occurring. Even without these totals we could query much of the information on demand based
// on information from the metadata content repository. In the mean time, we lock information in at scan time.
// Note that if new types are later discoverable due to a code change or new plugin, historical stats will not
// be updated and the repository will need to be rescanned.
long startGather = System.currentTimeMillis();
if (metadataRepository instanceof RepositoryStatisticsProvider) {
((RepositoryStatisticsProvider) metadataRepository).populateStatistics(session,
metadataRepository, repositoryId, repositoryStatistics);
} else {
walkingProvider.populateStatistics(session, metadataRepository, repositoryId, repositoryStatistics);
}
log.info("Gathering statistics executed in {} ms", (System.currentTimeMillis() - startGather));
metadataRepository.addMetadataFacet(session, repositoryId, repositoryStatistics);
}
}
@Override
public void deleteStatistics( MetadataRepository metadataRepository, String repositoryId )
throws MetadataRepositoryException
{
try(RepositorySession session = repositorySessionFactory.createSession()) {
metadataRepository.removeMetadataFacets(session, repositoryId, DefaultRepositoryStatistics.FACET_ID);
}
}
@Override
@ -136,31 +143,28 @@ public class DefaultRepositoryStatisticsManager
Date startTime, Date endTime )
throws MetadataRepositoryException
{
List<RepositoryStatistics> results = new ArrayList<>();
List<String> list = metadataRepository.getMetadataFacets( , repositoryId, DefaultRepositoryStatistics.FACET_ID );
Collections.sort( list, Collections.reverseOrder() );
for ( String name : list )
{
try
{
Date date = createNameFormat().parse( name );
if ( ( startTime == null || !date.before( startTime ) ) && ( endTime == null || !date.after(
endTime ) ) )
{
RepositoryStatistics stats =
(RepositoryStatistics) metadataRepository.getMetadataFacet( ,
repositoryId,
DefaultRepositoryStatistics.FACET_ID, name );
results.add( stats );
try(RepositorySession session = repositorySessionFactory.createSession()) {
List<RepositoryStatistics> results = new ArrayList<>();
List<String> list = metadataRepository.getMetadataFacets(session, repositoryId, DefaultRepositoryStatistics.FACET_ID);
Collections.sort(list, Collections.reverseOrder());
for (String name : list) {
try {
Date date = createNameFormat().parse(name);
if ((startTime == null || !date.before(startTime)) && (endTime == null || !date.after(
endTime))) {
RepositoryStatistics stats =
(RepositoryStatistics) metadataRepository.getMetadataFacet(session,
repositoryId,
DefaultRepositoryStatistics.FACET_ID, name);
results.add(stats);
}
} catch (ParseException e) {
log.error("Invalid scan result found in the metadata repository: {}", e.getMessage());
// continue and ignore this one
}
}
catch ( ParseException e )
{
log.error( "Invalid scan result found in the metadata repository: {}", e.getMessage() );
// continue and ignore this one
}
return results;
}
return results;
}
private static SimpleDateFormat createNameFormat()
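
The range query above works because each statistics facet is stored under a name derived from its scan timestamp, so selecting a date range reduces to parsing facet names and comparing dates. A sketch of that filtering step in isolation, assuming the names parse with the same SimpleDateFormat the manager builds in createNameFormat():

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.ArrayList;
    import java.util.Date;
    import java.util.List;

    static List<String> facetNamesInRange( List<String> names, SimpleDateFormat nameFormat,
                                           Date startTime, Date endTime )
    {
        List<String> inRange = new ArrayList<>();
        for ( String name : names )
        {
            try
            {
                Date date = nameFormat.parse( name );
                // null bounds are open ended, mirroring the checks in the manager
                if ( ( startTime == null || !date.before( startTime ) )
                    && ( endTime == null || !date.after( endTime ) ) )
                {
                    inRange.add( name );
                }
            }
            catch ( ParseException e )
            {
                // skip malformed names, as the manager does
            }
        }
        return inRange;
    }
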
@ -23,6 +23,8 @@ import junit.framework.TestCase;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.stats.model.DefaultRepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.model.RepositoryStatistics;
import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
@ -64,6 +66,9 @@ public class RepositoryStatisticsManagerTest
private static final SimpleDateFormat TIMESTAMP_FORMAT = createTimestampFormat();
private RepositorySessionFactory repositorySessionFactory;
private IMocksControl factoryControl;
private static SimpleDateFormat createTimestampFormat()
{
SimpleDateFormat fmt = new SimpleDateFormat( DefaultRepositoryStatistics.SCAN_TIMESTAMP_FORMAT );
@ -82,6 +87,9 @@ public class RepositoryStatisticsManagerTest
metadataRepositoryControl = createControl();
metadataRepository = metadataRepositoryControl.createMock( MetadataRepository.class );
factoryControl = createControl();
repositorySessionFactory = factoryControl.createMock(RepositorySessionFactory.class);
}
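
setUp() now creates the factory mock, but the tests below call createSession() on it directly while the control is still in record state. One way to hand out a deterministic mock session instead, assuming EasyMock's standard expect/andReturn/replay API (this helper is hypothetical, not part of the commit):

    import static org.easymock.EasyMock.expect;

    private RepositorySession stubSession()
    {
        // Have every createSession() call return the same mock session.
        RepositorySession session = factoryControl.createMock( RepositorySession.class );
        expect( repositorySessionFactory.createSession() ).andReturn( session ).anyTimes();
        factoryControl.replay();
        return session;
    }
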
@Test
@ -103,12 +111,13 @@ public class RepositoryStatisticsManagerTest
stats.setTotalFileCount( 56229 );
expect( metadataRepository.getMetadataFacets( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn(
Arrays.asList( FIRST_TEST_SCAN, SECOND_TEST_SCAN ) );
expect( metadataRepository.getMetadataFacet( , TEST_REPO_ID,
DefaultRepositoryStatistics.FACET_ID, SECOND_TEST_SCAN ) ).andReturn( stats );
try(RepositorySession session = repositorySessionFactory.createSession()) {
expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID)).andReturn(
Arrays.asList(FIRST_TEST_SCAN, SECOND_TEST_SCAN));
expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
DefaultRepositoryStatistics.FACET_ID, SECOND_TEST_SCAN)).andReturn(stats);
}
metadataRepositoryControl.replay();
stats = repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID );
@ -131,7 +140,8 @@ public class RepositoryStatisticsManagerTest
throws Exception
{
expect( metadataRepository.getMetadataFacets( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn(
RepositorySession session = repositorySessionFactory.createSession();
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn(
Collections.<String>emptyList() );
metadataRepositoryControl.replay();
@ -151,13 +161,15 @@ public class RepositoryStatisticsManagerTest
RepositoryStatistics stats = createTestStats( startTime, current );
walkRepository( 1 );
RepositorySession session = repositorySessionFactory.createSession();
metadataRepository.addMetadataFacet( , TEST_REPO_ID, stats );
expect( metadataRepository.getMetadataFacets( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn(
metadataRepository.addMetadataFacet(session , TEST_REPO_ID, stats );
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn(
Arrays.asList( stats.getName() ) );
expect( metadataRepository.getMetadataFacet( , TEST_REPO_ID,
expect( metadataRepository.getMetadataFacet(session , TEST_REPO_ID,
DefaultRepositoryStatistics.FACET_ID, stats.getName() ) ).andReturn( stats );
metadataRepositoryControl.replay();
@ -189,21 +201,23 @@ public class RepositoryStatisticsManagerTest
Date startTime1 = new Date( current.getTime() - 12345 );
DefaultRepositoryStatistics stats1 = createTestStats( startTime1, new Date( current.getTime() - 6000 ) );
metadataRepository.addMetadataFacet( , TEST_REPO_ID, stats1 );
RepositorySession session = repositorySessionFactory.createSession();
metadataRepository.addMetadataFacet(session , TEST_REPO_ID, stats1 );
Date startTime2 = new Date( current.getTime() - 3000 );
DefaultRepositoryStatistics stats2 = createTestStats( startTime2, current );
metadataRepository.addMetadataFacet( , TEST_REPO_ID, stats2 );
metadataRepository.addMetadataFacet(session , TEST_REPO_ID, stats2 );
expect( metadataRepository.getMetadataFacets( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn(
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn(
Arrays.asList( stats1.getName(), stats2.getName() ) );
expect( metadataRepository.getMetadataFacet( , TEST_REPO_ID,
expect( metadataRepository.getMetadataFacet(session , TEST_REPO_ID,
DefaultRepositoryStatistics.FACET_ID, stats2.getName() ) ).andReturn( stats2 );
metadataRepository.removeMetadataFacets( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID );
metadataRepository.removeMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID );
expect( metadataRepository.getMetadataFacets( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn(
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn(
Collections.<String>emptyList() );
metadataRepositoryControl.replay();
@ -226,10 +240,11 @@ public class RepositoryStatisticsManagerTest
public void testDeleteStatsWhenEmpty()
throws Exception
{
RepositorySession session = repositorySessionFactory.createSession();
expect( metadataRepository.getMetadataFacets( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn(
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn(
Collections.<String>emptyList() ).times( 2 );
metadataRepository.removeMetadataFacets( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID );
metadataRepository.removeMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID );
metadataRepositoryControl.replay();
@ -256,12 +271,14 @@ public class RepositoryStatisticsManagerTest
ArrayList<String> keys = new ArrayList<>( statsCreated.keySet() );
expect( metadataRepository.getMetadataFacets( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
RepositorySession session = repositorySessionFactory.createSession();
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
// only match the middle one
String key = keys.get( 1 );
expect( metadataRepository.getMetadataFacet( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
expect( metadataRepository.getMetadataFacet(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
statsCreated.get( key ) );
@ -299,16 +316,19 @@ public class RepositoryStatisticsManagerTest
List<String> keys = new ArrayList<>( statsCreated.keySet() );
expect( metadataRepository.getMetadataFacets( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
RepositorySession session = repositorySessionFactory.createSession();
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
String key = keys.get( 1 );
expect( metadataRepository.getMetadataFacet( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
expect( metadataRepository.getMetadataFacet(session, TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
statsCreated.get( key ) );
key = keys.get( 2 );
expect( metadataRepository.getMetadataFacet( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
expect( metadataRepository.getMetadataFacet(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
statsCreated.get( key ) );
@ -346,15 +366,17 @@ public class RepositoryStatisticsManagerTest
List<String> keys = new ArrayList<>( statsCreated.keySet() );
expect( metadataRepository.getMetadataFacets( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
RepositorySession session = repositorySessionFactory.createSession();
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
String key = keys.get( 0 );
expect( metadataRepository.getMetadataFacet( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
expect( metadataRepository.getMetadataFacet(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
statsCreated.get( key ) );
key = keys.get( 1 );
expect( metadataRepository.getMetadataFacet( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
expect( metadataRepository.getMetadataFacet(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
statsCreated.get( key ) );
metadataRepositoryControl.replay();
@ -392,19 +414,21 @@ public class RepositoryStatisticsManagerTest
ArrayList<String> keys = new ArrayList<>( statsCreated.keySet() );
expect( metadataRepository.getMetadataFacets( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
RepositorySession session = repositorySessionFactory.createSession();
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
String key = keys.get( 0 );
expect( metadataRepository.getMetadataFacet( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
expect( metadataRepository.getMetadataFacet(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
statsCreated.get( key ) );
key = keys.get( 1 );
expect( metadataRepository.getMetadataFacet( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
expect( metadataRepository.getMetadataFacet(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
statsCreated.get( key ) );
key = keys.get( 2 );
expect( metadataRepository.getMetadataFacet( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
expect( metadataRepository.getMetadataFacet(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID, key ) ).andReturn(
statsCreated.get( key ) );
metadataRepositoryControl.replay();
@ -442,7 +466,9 @@ public class RepositoryStatisticsManagerTest
ArrayList<String> keys = new ArrayList<>( statsCreated.keySet() );
expect( metadataRepository.getMetadataFacets( , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
RepositorySession session = repositorySessionFactory.createSession();
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
metadataRepositoryControl.replay();
@ -466,8 +492,10 @@ public class RepositoryStatisticsManagerTest
private void addStats( Date startTime, Date endTime )
throws Exception
{
RepositorySession session = repositorySessionFactory.createSession();
DefaultRepositoryStatistics stats = createTestStats( startTime, endTime );
metadataRepository.addMetadataFacet( , TEST_REPO_ID, stats );
metadataRepository.addMetadataFacet(session , TEST_REPO_ID, stats );
statsCreated.put( stats.getName(), stats );
}
@ -509,107 +537,111 @@ public class RepositoryStatisticsManagerTest
private void walkRepository( int count )
throws Exception
{
RepositorySession session = repositorySessionFactory.createSession();
for ( int i = 0; i < count; i++ )
{
expect( metadataRepository.getRootNamespaces( , TEST_REPO_ID ) ).andReturn( Arrays.asList( "com", "org" ) );
expect( metadataRepository.getProjects( , TEST_REPO_ID, "com" ) ).andReturn( Arrays.<String>asList() );
expect( metadataRepository.getRootNamespaces(session , TEST_REPO_ID ) ).andReturn( Arrays.asList( "com", "org" ) );
expect( metadataRepository.getNamespaces( , TEST_REPO_ID, "com" ) ).andReturn( Arrays.asList( "example" ) );
expect( metadataRepository.getProjects(session , TEST_REPO_ID, "com" ) ).andReturn( Arrays.<String>asList() );
expect( metadataRepository.getNamespaces( , TEST_REPO_ID, "com.example" ) ).andReturn(
expect( metadataRepository.getNamespaces(session , TEST_REPO_ID, "com" ) ).andReturn( Arrays.asList( "example" ) );
expect( metadataRepository.getNamespaces(session , TEST_REPO_ID, "com.example" ) ).andReturn(
Arrays.<String>asList() );
expect( metadataRepository.getProjects( , TEST_REPO_ID, "com.example" ) ).andReturn(
expect( metadataRepository.getProjects(session , TEST_REPO_ID, "com.example" ) ).andReturn(
Arrays.asList( "example-project" ) );
expect( metadataRepository.getProjectVersions( , TEST_REPO_ID, "com.example", "example-project" ) ).andReturn(
expect( metadataRepository.getProjectVersions(session , TEST_REPO_ID, "com.example", "example-project" ) ).andReturn(
Arrays.asList( "1.0", "1.1" ) );
expect(
metadataRepository.getArtifacts( , TEST_REPO_ID, "com.example", "example-project", "1.0" ) ).andReturn(
metadataRepository.getArtifacts(session , TEST_REPO_ID, "com.example", "example-project", "1.0" ) ).andReturn(
Arrays.asList( createArtifact( "com.example", "example-project", "1.0", "jar" ),
createArtifact( "com.example", "example-project", "1.0", "pom" ) ) );
expect(
metadataRepository.getArtifacts( , TEST_REPO_ID, "com.example", "example-project", "1.1" ) ).andReturn(
metadataRepository.getArtifacts(session , TEST_REPO_ID, "com.example", "example-project", "1.1" ) ).andReturn(
Arrays.asList( createArtifact( "com.example", "example-project", "1.1", "jar" ),
createArtifact( "com.example", "example-project", "1.1", "pom" ) ) );
expect( metadataRepository.getNamespaces( , TEST_REPO_ID, "org" ) ).andReturn( Arrays.asList( "apache", "codehaus" ) );
expect( metadataRepository.getNamespaces(session , TEST_REPO_ID, "org" ) ).andReturn( Arrays.asList( "apache", "codehaus" ) );
expect( metadataRepository.getNamespaces( , TEST_REPO_ID, "org.apache" ) ).andReturn( Arrays.asList( "archiva", "maven" ) );
expect( metadataRepository.getNamespaces(session , TEST_REPO_ID, "org.apache" ) ).andReturn( Arrays.asList( "archiva", "maven" ) );
expect( metadataRepository.getProjects( , TEST_REPO_ID, "org.apache" ) ).andReturn( Arrays.<String>asList() );
expect( metadataRepository.getProjects(session , TEST_REPO_ID, "org.apache" ) ).andReturn( Arrays.<String>asList() );
expect( metadataRepository.getNamespaces( , TEST_REPO_ID, "org.apache.archiva" ) ).andReturn( Arrays.<String>asList() );
expect( metadataRepository.getNamespaces(session , TEST_REPO_ID, "org.apache.archiva" ) ).andReturn( Arrays.<String>asList() );
expect( metadataRepository.getProjects( , TEST_REPO_ID, "org.apache.archiva" ) ).andReturn( Arrays.asList( "metadata-repository-api", "metadata-model" ) );
expect( metadataRepository.getProjects(session , TEST_REPO_ID, "org.apache.archiva" ) ).andReturn( Arrays.asList( "metadata-repository-api", "metadata-model" ) );
expect( metadataRepository.getProjectVersions( , TEST_REPO_ID, "org.apache.archiva", "metadata-repository-api" ) )
expect( metadataRepository.getProjectVersions(session , TEST_REPO_ID, "org.apache.archiva", "metadata-repository-api" ) )
.andReturn( Arrays.asList( "1.3-SNAPSHOT", "1.3" ) );
expect( metadataRepository.getArtifacts( , TEST_REPO_ID, "org.apache.archiva", "metadata-repository-api", "1.3-SNAPSHOT" ) )
expect( metadataRepository.getArtifacts(session , TEST_REPO_ID, "org.apache.archiva", "metadata-repository-api", "1.3-SNAPSHOT" ) )
.andReturn( Arrays.asList( createArtifact( "org.apache.archiva", "metadata-repository-api", "1.3-SNAPSHOT", "jar" ),
createArtifact( "org.apache.archiva", "metadata-repository-api", "1.3-SNAPSHOT",
"pom" ) ) );
expect( metadataRepository.getArtifacts( , TEST_REPO_ID, "org.apache.archiva", "metadata-repository-api", "1.3" ) )
expect( metadataRepository.getArtifacts(session , TEST_REPO_ID, "org.apache.archiva", "metadata-repository-api", "1.3" ) )
.andReturn( Arrays.asList( createArtifact( "org.apache.archiva", "metadata-repository-api", "1.3", "jar" ),
createArtifact( "org.apache.archiva", "metadata-repository-api", "1.3", "pom" ) ) );
expect( metadataRepository.getProjectVersions( , TEST_REPO_ID, "org.apache.archiva", "metadata-model" ) )
expect( metadataRepository.getProjectVersions(session , TEST_REPO_ID, "org.apache.archiva", "metadata-model" ) )
.andReturn( Arrays.asList( "1.3-SNAPSHOT", "1.3" ) );
expect( metadataRepository.getArtifacts( , TEST_REPO_ID, "org.apache.archiva", "metadata-model", "1.3-SNAPSHOT" ) )
expect( metadataRepository.getArtifacts(session , TEST_REPO_ID, "org.apache.archiva", "metadata-model", "1.3-SNAPSHOT" ) )
.andReturn( Arrays.asList( createArtifact( "org.apache.archiva", "metadata-model", "1.3-SNAPSHOT", "jar" ),
createArtifact( "org.apache.archiva", "metadata-model", "1.3-SNAPSHOT", "pom" ) ) );
expect( metadataRepository.getArtifacts( , TEST_REPO_ID, "org.apache.archiva", "metadata-model", "1.3" ) )
expect( metadataRepository.getArtifacts(session , TEST_REPO_ID, "org.apache.archiva", "metadata-model", "1.3" ) )
.andReturn( Arrays.asList( createArtifact( "org.apache.archiva", "metadata-model", "1.3", "jar" ),
createArtifact( "org.apache.archiva", "metadata-model", "1.3", "pom" ) ) );
expect( metadataRepository.getNamespaces( , TEST_REPO_ID, "org.apache.maven" ) ).andReturn( Arrays.<String>asList() );
expect( metadataRepository.getNamespaces(session , TEST_REPO_ID, "org.apache.maven" ) ).andReturn( Arrays.<String>asList() );
expect( metadataRepository.getProjects( , TEST_REPO_ID, "org.apache.maven" ) )
expect( metadataRepository.getProjects(session , TEST_REPO_ID, "org.apache.maven" ) )
.andReturn( Arrays.asList( "maven-model" ) );
expect( metadataRepository.getProjectVersions( , TEST_REPO_ID, "org.apache.maven", "maven-model" ) )
expect( metadataRepository.getProjectVersions(session , TEST_REPO_ID, "org.apache.maven", "maven-model" ) )
.andReturn( Arrays.asList( "2.2.1" ) );
expect( metadataRepository.getArtifacts( , TEST_REPO_ID, "org.apache.maven", "maven-model", "2.2.1" ) )
expect( metadataRepository.getArtifacts(session , TEST_REPO_ID, "org.apache.maven", "maven-model", "2.2.1" ) )
.andReturn( Arrays.asList( createArtifact( "org.apache.archiva", "maven-model", "2.2.1", "jar" ),
createArtifact( "org.apache.archiva", "maven-model", "2.2.1", "pom" ) ) );
expect( metadataRepository.getNamespaces( , TEST_REPO_ID, "org.codehaus" ) ).andReturn( Arrays.asList( "plexus" ) );
expect( metadataRepository.getNamespaces(session , TEST_REPO_ID, "org.codehaus" ) ).andReturn( Arrays.asList( "plexus" ) );
expect( metadataRepository.getProjects( , TEST_REPO_ID, "org" ) ).andReturn( Arrays.<String>asList( ) );
expect( metadataRepository.getProjects(session , TEST_REPO_ID, "org" ) ).andReturn( Arrays.<String>asList( ) );
expect( metadataRepository.getProjects( , TEST_REPO_ID, "org.codehaus" ) )
expect( metadataRepository.getProjects(session , TEST_REPO_ID, "org.codehaus" ) )
.andReturn( Arrays.<String>asList( ) );
expect( metadataRepository.getNamespaces( , TEST_REPO_ID, "org.codehaus.plexus" ) )
expect( metadataRepository.getNamespaces(session , TEST_REPO_ID, "org.codehaus.plexus" ) )
.andReturn( Arrays.<String>asList( ) );
expect( metadataRepository.getProjects( , TEST_REPO_ID, "org.codehaus.plexus" ) )
expect( metadataRepository.getProjects(session , TEST_REPO_ID, "org.codehaus.plexus" ) )
.andReturn( Arrays.asList( "plexus-spring" ) );
expect( metadataRepository.getProjectVersions( , TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring" ) )
expect( metadataRepository.getProjectVersions(session, TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring" ) )
.andReturn( Arrays.asList( "1.0", "1.1", "1.2" ) );
expect( metadataRepository.getArtifacts( , TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.0" ) )
expect( metadataRepository.getArtifacts(session , TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.0" ) )
.andReturn( Arrays.asList( createArtifact( "org.codehaus.plexus", "plexus-spring", "1.0", "jar" ),
createArtifact( "org.codehaus.plexus", "plexus-spring", "1.0", "pom" ) ) );
expect( metadataRepository.getArtifacts( , TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.1" ) )
expect( metadataRepository.getArtifacts(session, TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.1" ) )
.andReturn( Arrays.asList( createArtifact( "org.codehaus.plexus", "plexus-spring", "1.1", "jar" ),
createArtifact( "org.codehaus.plexus", "plexus-spring", "1.1", "pom" ) ) );
expect( metadataRepository.getArtifacts( , TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.2" ) )
expect( metadataRepository.getArtifacts(session , TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.2" ) )
.andReturn( Arrays.asList( createArtifact( "org.codehaus.plexus", "plexus-spring", "1.2", "jar" ),
createArtifact( "org.codehaus.plexus", "plexus-spring", "1.2", "pom" ) ) );
}
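
The expectation chain above encodes a depth-first traversal: root namespaces, then child namespaces recursively, then each namespace's projects, versions, and artifacts. A hedged sketch of such a walk using the repository methods named in the expectations; treating this as the walking provider's exact algorithm is an assumption:

    // Depth-first metadata walk; returns the artifact count under a namespace.
    private long walk( RepositorySession session, MetadataRepository repo,
                       String repoId, String namespace )
        throws MetadataResolutionException
    {
        long artifacts = 0;
        for ( String child : repo.getNamespaces( session, repoId, namespace ) )
        {
            artifacts += walk( session, repo, repoId, namespace + "." + child );
        }
        for ( String project : repo.getProjects( session, repoId, namespace ) )
        {
            for ( String version : repo.getProjectVersions( session, repoId, namespace, project ) )
            {
                artifacts += repo.getArtifacts( session, repoId, namespace, project, version ).size();
            }
        }
        return artifacts;
    }
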
@ -30,6 +30,8 @@ import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.filter.Filter;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;
import org.apache.archiva.model.ArchivaRepositoryMetadata;
import org.apache.archiva.repository.RepositoryException;
@ -82,6 +84,9 @@ public class Maven2RepositoryMerger
private static final String METADATA_FILENAME = "maven-metadata.xml";
@Inject
private RepositorySessionFactory repositorySessionFactory;
@Inject
public Maven2RepositoryMerger(
@Named (value = "archivaConfiguration#default") ArchivaConfiguration archivaConfiguration,
@ -101,9 +106,9 @@ public class Maven2RepositoryMerger
throws RepositoryMergerException
{
try
try(RepositorySession session = repositorySessionFactory.createSession())
{
List<ArtifactMetadata> artifactsInSourceRepo = metadataRepository.getArtifacts( , sourceRepoId );
List<ArtifactMetadata> artifactsInSourceRepo = metadataRepository.getArtifacts(session , sourceRepoId );
for ( ArtifactMetadata artifactMetadata : artifactsInSourceRepo )
{
artifactMetadata.setRepositoryId( targetRepoId );
@ -130,9 +135,9 @@ public class Maven2RepositoryMerger
Filter<ArtifactMetadata> filter )
throws RepositoryMergerException
{
try
try(RepositorySession session = repositorySessionFactory.createSession())
{
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( , sourceRepoId );
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts(session , sourceRepoId );
for ( ArtifactMetadata metadata : sourceArtifacts )
{
if ( filter.accept( metadata ) )
@ -396,12 +401,12 @@ public class Maven2RepositoryMerger
String targetRepo )
throws RepositoryMergerException
{
try
try(RepositorySession session = repositorySessionFactory.createSession())
{
TreeSet<ArtifactMetadata> targetArtifacts = new TreeSet<>(META_COMPARATOR);
targetArtifacts.addAll(metadataRepository.getArtifacts( , targetRepo ));
targetArtifacts.addAll(metadataRepository.getArtifacts(session , targetRepo ));
TreeSet<ArtifactMetadata> sourceArtifacts = new TreeSet<>(META_COMPARATOR);
sourceArtifacts.addAll(metadataRepository.getArtifacts( , sourceRepo ));
sourceArtifacts.addAll(metadataRepository.getArtifacts(session , sourceRepo ));
sourceArtifacts.retainAll(targetArtifacts);
return new ArrayList<>(sourceArtifacts);
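
The conflict check above is a set intersection under a custom comparator: both repositories' artifacts go into TreeSets ordered by META_COMPARATOR, and retainAll() keeps only the artifacts present on both sides. The same technique in isolation, with a generic element type and comparator as stand-ins:

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;
    import java.util.TreeSet;

    static <T> List<T> intersect( List<T> source, List<T> target, Comparator<T> identity )
    {
        TreeSet<T> targetSet = new TreeSet<>( identity );
        targetSet.addAll( target );
        TreeSet<T> overlap = new TreeSet<>( identity );
        overlap.addAll( source );
        // contains() checks against targetSet use the comparator, so "identity"
        // decides what counts as the same artifact in both repositories
        overlap.retainAll( targetSet );
        return new ArrayList<>( overlap );
    }
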
@ -26,6 +26,8 @@ import org.apache.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.archiva.configuration.RepositoryScanningConfiguration;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.junit.Before;
import org.junit.Test;
@ -58,6 +60,8 @@ public class Maven2RepositoryMergerTest
private MetadataRepository metadataRepository;
private RepositorySessionFactory repositorySessionFactory;
@Before
@Override
public void setUp()
@ -66,6 +70,7 @@ public class Maven2RepositoryMergerTest
super.setUp();
MockitoAnnotations.initMocks( this );
metadataRepository = mock( MetadataRepository.class );
repositorySessionFactory = mock(RepositorySessionFactory.class);
}
private List<ArtifactMetadata> getArtifacts()
@ -120,9 +125,11 @@ public class Maven2RepositoryMergerTest
c.addManagedRepository( targetRepo );
configuration.save( c );
when( metadataRepository.getArtifacts( , TEST_REPO_ID ) ).thenReturn( getArtifacts() );
repositoryMerger.merge( metadataRepository, TEST_REPO_ID, "target-rep" );
verify( metadataRepository ).getArtifacts( , TEST_REPO_ID );
try(RepositorySession session = repositorySessionFactory.createSession()) {
when(metadataRepository.getArtifacts(session, TEST_REPO_ID)).thenReturn(getArtifacts());
repositoryMerger.merge(metadataRepository, TEST_REPO_ID, "target-rep");
verify(metadataRepository).getArtifacts(session, TEST_REPO_ID);
}
assertTrue( Files.exists(mergedArtifact) );
assertTrue( Files.exists(mavenMetadata) );
assertTrue( Files.exists(pom) );
@ -169,12 +176,14 @@ public class Maven2RepositoryMergerTest
"/target/test-repository/com/example/test/test-artifact/1.0-SNAPSHOT/test-artifact-1.0-20100308.230825-1.jar" );
targetRepoFile.toFile().setReadOnly();
when( metadataRepository.getArtifacts( , sourceRepoId ) ).thenReturn( sourceRepoArtifactsList );
when( metadataRepository.getArtifacts( , TEST_REPO_ID ) ).thenReturn( targetRepoArtifactsList );
try(RepositorySession session = repositorySessionFactory.createSession()) {
when(metadataRepository.getArtifacts(session, sourceRepoId)).thenReturn(sourceRepoArtifactsList);
when(metadataRepository.getArtifacts(session, TEST_REPO_ID)).thenReturn(targetRepoArtifactsList);
assertEquals( 1, repositoryMerger.getConflictingArtifacts( metadataRepository, sourceRepoId,
TEST_REPO_ID ).size() );
verify( metadataRepository ).getArtifacts( , TEST_REPO_ID );
assertEquals(1, repositoryMerger.getConflictingArtifacts(metadataRepository, sourceRepoId,
TEST_REPO_ID).size());
verify(metadataRepository).getArtifacts(session, TEST_REPO_ID);
}
}
}
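
In these merger tests the factory is a plain Mockito mock, so createSession() yields its default return value inside the try blocks. If a concrete mock session were wanted, a conventional Mockito stub (an assumption, not something this commit adds) would be:

    // Hypothetical addition to setUp(): give the factory a real mock session.
    RepositorySession session = mock( RepositorySession.class );
    when( repositorySessionFactory.createSession() ).thenReturn( session );
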
@ -81,7 +81,7 @@
<surefire.redirectTestOutputToFile>true</surefire.redirectTestOutputToFile>
<lucene.version>4.10.4</lucene.version>
<jcr-oak.version>1.7.11</jcr-oak.version>
<jcr-oak.version>1.14.0</jcr-oak.version>
<jackrabbit.version>2.15.4</jackrabbit.version>
<felix.scr.version>1.12.0</felix.scr.version>
<metrics-core.version>3.1.0</metrics-core.version>
@ -357,6 +357,7 @@
<artifactId>jackrabbit-webdav</artifactId>
<version>${jackrabbit.version}</version>
</dependency>
<dependency>
<groupId>org.apache.jackrabbit</groupId>
<artifactId>jackrabbit-jcr-commons</artifactId>