[MRM-1293] Introduce repository statistics as a new plugin that operates over the metadata repository. Currently a work in progress: repository size statistics are not yet stored, and only the scanner and scheduler have been hooked in, so reports will be out of date.

git-svn-id: https://svn.apache.org/repos/asf/archiva/branches/MRM-1025@888164 13f79535-47bb-0310-9956-ffa450edef68
Brett Porter 2009-12-07 22:32:10 +00:00
parent 8afc66c9d2
commit 7c7fffc71c
47 changed files with 1430 additions and 642 deletions
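For orientation before the file diffs: the changes below replace the JDO-backed ArchivaDAO/RepositoryContentStatistics path with a RepositoryStatisticsManager from the new repository-statistics module. The following is a minimal sketch of that contract, reconstructed only from the calls made in this commit; the bean is shown with just the fields the executor and tests below touch, and everything else about the module (interface vs. class, exact layout) is an assumption.

package org.apache.archiva.metadata.repository.stats;

import java.util.Date;

/**
 * Sketch only: the statistics manager contract as implied by the calls in this commit,
 * not the actual source of the new repository-statistics module.
 */
public interface RepositoryStatisticsManager
{
    /** Statistics recorded by the most recent scan of the repository, or null if it was never scanned. */
    RepositoryStatistics getLastStatistics( String repositoryId );

    /** Store the statistics gathered by a scan; counts not set by the caller are derived from the metadata repository. */
    void addStatisticsAfterScan( String repositoryId, RepositoryStatistics repositoryStatistics );
}

/** Minimal view of the statistics bean, limited to the fields used in the diffs below. */
class RepositoryStatistics
{
    private Date scanStartTime;

    private Date scanEndTime;

    private long totalFileCount;

    private long newFileCount;

    // the diffs below also reference totalArtifactCount, totalGroupCount, totalProjectCount,
    // totalArtifactFileSize and a FACET_ID constant used for metadata facet storage

    public Date getScanStartTime() { return scanStartTime; }

    public void setScanStartTime( Date scanStartTime ) { this.scanStartTime = scanStartTime; }

    public Date getScanEndTime() { return scanEndTime; }

    public void setScanEndTime( Date scanEndTime ) { this.scanEndTime = scanEndTime; }

    public long getTotalFileCount() { return totalFileCount; }

    public void setTotalFileCount( long totalFileCount ) { this.totalFileCount = totalFileCount; }

    public long getNewFileCount() { return newFileCount; }

    public void setNewFileCount( long newFileCount ) { this.newFileCount = newFileCount; }
}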

View File

@ -30,7 +30,7 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import org.apache.archiva.scheduler.repository.RepositoryArchivaTaskScheduler;
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
import org.apache.archiva.scheduler.repository.RepositoryTask;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FileUtils;
@ -134,7 +134,7 @@ public class DefaultRepositoryProxyConnectors
/**
* @plexus.requirement role="org.apache.archiva.scheduler.ArchivaTaskScheduler" role-hint="repository"
*/
private RepositoryArchivaTaskScheduler scheduler;
private ArchivaTaskScheduler scheduler;
public File fetchFromProxies( ManagedRepositoryContent repository, ArtifactReference artifact )
throws ProxyDownloadException

View File

@ -0,0 +1,121 @@
package org.apache.archiva.metadata.repository;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.MetadataFacet;
import org.apache.archiva.metadata.model.ProjectMetadata;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
public class TestMetadataRepository
implements MetadataRepository
{
public ProjectMetadata getProject( String repoId, String namespace, String projectId )
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public ProjectVersionMetadata getProjectVersion( String repoId, String namespace, String projectId,
String projectVersion )
throws MetadataResolverException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Collection<String> getArtifactVersions( String repoId, String namespace, String projectId,
String projectVersion )
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Collection<ProjectVersionReference> getProjectReferences( String repoId, String namespace, String projectId,
String projectVersion )
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Collection<String> getRootNamespaces( String repoId )
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Collection<String> getNamespaces( String repoId, String namespace )
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Collection<String> getProjects( String repoId, String namespace )
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Collection<String> getProjectVersions( String repoId, String namespace, String projectId )
throws MetadataResolverException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public void updateProject( String repoId, ProjectMetadata project )
{
//To change body of implemented methods use File | Settings | File Templates.
}
public void updateArtifact( String repoId, String namespace, String projectId, String projectVersion,
ArtifactMetadata artifactMeta )
{
//To change body of implemented methods use File | Settings | File Templates.
}
public void updateProjectVersion( String repoId, String namespace, String projectId, ProjectVersionMetadata versionMetadata )
{
//To change body of implemented methods use File | Settings | File Templates.
}
public void updateProjectReference( String repoId, String namespace, String projectId, String projectVersion,
ProjectVersionReference reference )
{
//To change body of implemented methods use File | Settings | File Templates.
}
public void updateNamespace( String repoId, String namespace )
{
//To change body of implemented methods use File | Settings | File Templates.
}
public List<String> getMetadataFacets( String repodId, String facetId )
{
return Collections.emptyList();
}
public MetadataFacet getMetadataFacet( String repositoryId, String facetId, String name )
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public void addMetadataFacet( String repositoryId, String facetId, String name, MetadataFacet metadataFacet )
{
//To change body of implemented methods use File | Settings | File Templates.
}
}
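The stub above is registered in the proxy module's Plexus test descriptors in place of the old ArchivaDAOStub (see the components.xml hunks that follow). A hypothetical check, mirroring the lookup style used by the tests later in this commit, would obtain it by role; the test class and method here are illustrative only and are not part of the commit.

package org.apache.archiva.metadata.repository;

import org.codehaus.plexus.spring.PlexusInSpringTestCase;

/**
 * Hypothetical test, not part of this commit: shows how the stub wired in components.xml
 * is obtained by role from the Plexus container.
 */
public class TestMetadataRepositoryLookupTest
    extends PlexusInSpringTestCase
{
    public void testStubIsWired()
        throws Exception
    {
        MetadataRepository metadataRepository = (MetadataRepository) lookup( MetadataRepository.class );

        assertTrue( metadataRepository instanceof TestMetadataRepository );
    }
}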

View File

@ -1,68 +0,0 @@
package org.apache.maven.archiva.proxy.stubs;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.Serializable;
import java.util.List;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.RepositoryContentStatisticsDAO;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.database.SimpleConstraint;
/**
* Using a stub for faster tests! Not really used for the unit tests, just for dependency injection.
*/
public class ArchivaDAOStub
implements ArchivaDAO
{
public ArtifactDAO getArtifactDAO()
{
// TODO Auto-generated method stub
return null;
}
public RepositoryContentStatisticsDAO getRepositoryContentStatisticsDAO()
{
// TODO Auto-generated method stub
return null;
}
public RepositoryProblemDAO getRepositoryProblemDAO()
{
// TODO Auto-generated method stub
return null;
}
public List<?> query( SimpleConstraint constraint )
{
// TODO Auto-generated method stub
return null;
}
public Object save( Serializable obj )
{
// TODO Auto-generated method stub
return null;
}
}

View File

@ -107,9 +107,8 @@
</component>
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
<role>org.apache.archiva.metadata.repository.MetadataRepository</role>
<implementation>org.apache.archiva.metadata.repository.TestMetadataRepository</implementation>
</component>
</components>
</component-set>

View File

@ -105,9 +105,8 @@
</component>
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
<role>org.apache.archiva.metadata.repository.MetadataRepository</role>
<implementation>org.apache.archiva.metadata.repository.TestMetadataRepository</implementation>
</component>
</components>
</component-set>

View File

@ -110,9 +110,8 @@
</component>
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
<role>org.apache.archiva.metadata.repository.MetadataRepository</role>
<implementation>org.apache.archiva.metadata.repository.TestMetadataRepository</implementation>
</component>
</components>
</component-set>

View File

@ -101,9 +101,8 @@
</component>
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
<role>org.apache.archiva.metadata.repository.MetadataRepository</role>
<implementation>org.apache.archiva.metadata.repository.TestMetadataRepository</implementation>
</component>
</components>
</component-set>

View File

@ -105,9 +105,8 @@
</component>
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
<role>org.apache.archiva.metadata.repository.MetadataRepository</role>
<implementation>org.apache.archiva.metadata.repository.TestMetadataRepository</implementation>
</component>
</components>
</component-set>

View File

@ -105,9 +105,8 @@
</component>
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
<role>org.apache.archiva.metadata.repository.MetadataRepository</role>
<implementation>org.apache.archiva.metadata.repository.TestMetadataRepository</implementation>
</component>
</components>
</component-set>

View File

@ -124,9 +124,8 @@
</component>
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
<role>org.apache.archiva.metadata.repository.MetadataRepository</role>
<implementation>org.apache.archiva.metadata.repository.TestMetadataRepository</implementation>
</component>
</components>
</component-set>

View File

@ -105,9 +105,8 @@
</component>
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
<role>org.apache.archiva.metadata.repository.MetadataRepository</role>
<implementation>org.apache.archiva.metadata.repository.TestMetadataRepository</implementation>
</component>
</components>
</component-set>

View File

@ -105,9 +105,8 @@
</component>
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
<role>org.apache.archiva.metadata.repository.MetadataRepository</role>
<implementation>org.apache.archiva.metadata.repository.TestMetadataRepository</implementation>
</component>
</components>
</component-set>

View File

@ -65,21 +65,6 @@ public class RepositoryScanStatistics
*/
private long newFileCount = 0;
/**
* Field totalProjectCount
*/
private long totalProjectCount = 0;
/**
* Field totalGroupCount
*/
private long totalGroupCount = 0;
/**
* Field totalArtifactCount
*/
private long totalArtifactCount = 0;
/**
* Field totalSize
*/
@ -216,21 +201,6 @@ public class RepositoryScanStatistics
return newFileCount;
}
public long getTotalProjectCount()
{
return totalProjectCount;
}
public long getTotalGroupCount()
{
return totalGroupCount;
}
public long getTotalArtifactCount()
{
return totalArtifactCount;
}
public long getTotalSize()
{
return totalSize;

View File

@ -14,17 +14,22 @@
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-scheduler-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>repository-statistics</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-configuration</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-database</artifactId>
<artifactId>archiva-repository-scanner</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-scanner</artifactId>
<artifactId>archiva-repository-layer</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
@ -34,11 +39,6 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-database-consumers</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-spring</artifactId>
@ -49,11 +49,6 @@
<artifactId>slf4j-simple</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
@ -63,12 +58,13 @@
<executions>
<execution>
<id>merge</id>
<phase>process-resources</phase>
<goals>
<goal>merge-metadata</goal>
</goals>
<configuration>
<descriptors>
<descriptor>${basedir}/src/main/resources/META-INF/plexus/components.xml</descriptor>
<descriptor>${basedir}/src/main/components-fragment.xml</descriptor>
<descriptor>${project.build.outputDirectory}/META-INF/plexus/components.xml</descriptor>
</descriptors>
</configuration>

View File

@ -19,29 +19,17 @@ package org.apache.archiva.scheduler.repository;
* under the License.
*/
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Date;
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.archiva.repository.scanner.RepositoryContentConsumers;
import org.apache.archiva.repository.scanner.RepositoryScanStatistics;
import org.apache.archiva.repository.scanner.RepositoryScanner;
import org.apache.archiva.repository.scanner.RepositoryScannerException;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.database.constraints.ArtifactsByRepositoryConstraint;
import org.apache.maven.archiva.database.constraints.MostRecentRepositoryScanStatistics;
import org.apache.maven.archiva.database.constraints.UniqueArtifactIdConstraint;
import org.apache.maven.archiva.database.constraints.UniqueGroupIdConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.RepositoryContentStatistics;
import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.taskqueue.Task;
@ -54,9 +42,7 @@ import org.slf4j.LoggerFactory;
* ArchivaRepositoryScanningTaskExecutor
*
* @version $Id$
*
* @plexus.component
* role="org.codehaus.plexus.taskqueue.execution.TaskExecutor"
* @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor"
* role-hint="repository-scanning"
*/
public class ArchivaRepositoryScanningTaskExecutor
@ -64,13 +50,6 @@ public class ArchivaRepositoryScanningTaskExecutor
{
private Logger log = LoggerFactory.getLogger( ArchivaRepositoryScanningTaskExecutor.class );
/**
* TODO: just for stats, remove this and use the main stats module
*
* @plexus.requirement role-hint="jdo"
*/
private ArchivaDAO dao;
/**
* @plexus.requirement
*/
@ -83,11 +62,6 @@ public class ArchivaRepositoryScanningTaskExecutor
*/
private RepositoryScanner repoScanner;
/**
* @plexus.requirement role="org.apache.maven.archiva.repository.events.RepositoryListener"
*/
private List<RepositoryListener> repositoryListeners;
/**
* @plexus.requirement
*/
@ -95,6 +69,11 @@ public class ArchivaRepositoryScanningTaskExecutor
private Task task;
/**
* @plexus.requirement
*/
private RepositoryStatisticsManager repositoryStatisticsManager;
public void initialize()
throws InitializationException
{
@ -118,12 +97,14 @@ public class ArchivaRepositoryScanningTaskExecutor
RepositoryTask repoTask = (RepositoryTask) task;
if ( StringUtils.isBlank( repoTask.getRepositoryId() ) )
String repoId = repoTask.getRepositoryId();
if ( StringUtils.isBlank( repoId ) )
{
throw new TaskExecutionException( "Unable to execute RepositoryTask with blank repository Id." );
}
ManagedRepositoryConfiguration arepo = archivaConfiguration.getConfiguration().findManagedRepositoryById( repoTask.getRepositoryId() );
ManagedRepositoryConfiguration arepo =
archivaConfiguration.getConfiguration().findManagedRepositoryById( repoId );
// execute consumers on resource file if set
if ( repoTask.getResourceFile() != null )
@ -140,19 +121,20 @@ public class ArchivaRepositoryScanningTaskExecutor
{
if ( arepo == null )
{
throw new TaskExecutionException( "Unable to execute RepositoryTask with invalid repository id: " + repoTask.getRepositoryId() );
throw new TaskExecutionException(
"Unable to execute RepositoryTask with invalid repository id: " + repoId );
}
long sinceWhen = RepositoryScanner.FRESH_SCAN;
long previousFileCount = 0;
List<RepositoryContentStatistics> results = (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( arepo.getId() ) );
if ( CollectionUtils.isNotEmpty( results ) )
{
RepositoryContentStatistics lastStats = results.get( 0 );
if ( !repoTask.isScanAll() )
{
sinceWhen = lastStats.getWhenGathered().getTime() - lastStats.getDuration();
RepositoryStatistics previousStats = repositoryStatisticsManager.getLastStatistics( repoId );
if ( previousStats != null )
{
sinceWhen = previousStats.getScanStartTime().getTime();
previousFileCount = previousStats.getTotalFileCount();
}
}
@ -160,9 +142,14 @@ public class ArchivaRepositoryScanningTaskExecutor
log.info( "Finished first scan: " + stats.toDump( arepo ) );
RepositoryContentStatistics dbstats = constructRepositoryStatistics( arepo, sinceWhen, results, stats );
dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( dbstats );
RepositoryStatistics repositoryStatistics = new RepositoryStatistics();
repositoryStatistics.setScanStartTime( stats.getWhenGathered() );
repositoryStatistics.setScanEndTime(
new Date( stats.getWhenGathered().getTime() + stats.getDuration() ) );
repositoryStatistics.setTotalFileCount( stats.getTotalFileCount() );
repositoryStatistics.setNewFileCount( stats.getTotalFileCount() - previousFileCount );
// further statistics will be populated by the following method
repositoryStatisticsManager.addStatisticsAfterScan( repoId, repositoryStatistics );
// log.info( "Scanning for removed repository content" );
@ -180,53 +167,6 @@ public class ArchivaRepositoryScanningTaskExecutor
}
}
@SuppressWarnings("unchecked")
private RepositoryContentStatistics constructRepositoryStatistics( ManagedRepositoryConfiguration arepo,
long sinceWhen,
List<RepositoryContentStatistics> results,
RepositoryScanStatistics stats )
{
// I hate jpox and modello <-- and so do I
RepositoryContentStatistics dbstats = new RepositoryContentStatistics();
dbstats.setDuration( stats.getDuration() );
dbstats.setNewFileCount( stats.getNewFileCount() );
dbstats.setRepositoryId( stats.getRepositoryId() );
dbstats.setTotalFileCount( stats.getTotalFileCount() );
dbstats.setWhenGathered( stats.getWhenGathered() );
// total artifact count
try
{
List<ArchivaArtifact> artifacts = dao.getArtifactDAO().queryArtifacts(
new ArtifactsByRepositoryConstraint( arepo.getId(), stats.getWhenGathered(), "groupId", true ) );
dbstats.setTotalArtifactCount( artifacts.size() );
}
catch ( ObjectNotFoundException oe )
{
log.error( "Object not found in the database : " + oe.getMessage() );
}
catch ( ArchivaDatabaseException ae )
{
log.error( "Error occurred while querying artifacts for artifact count : " + ae.getMessage() );
}
// total repo size -- TODO: needs to exclude ignored files (eg .svn)
long size = FileUtils.sizeOfDirectory( new File( arepo.getLocation() ) );
dbstats.setTotalSize( size );
// total unique groups
List<String> repos = new ArrayList<String>();
repos.add( arepo.getId() );
List<String> groupIds = (List<String>) dao.query( new UniqueGroupIdConstraint( repos ) );
dbstats.setTotalGroupCount( groupIds.size() );
List<Object[]> artifactIds = (List<Object[]>) dao.query( new UniqueArtifactIdConstraint( arepo.getId(), true ) );
dbstats.setTotalProjectCount( artifactIds.size() );
return dbstats;
}
public Task getCurrentTaskInExecution()
{
return task;
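Because the hunks above interleave the removed JDO code with the new logic, here is the new statistics flow of executeTask() consolidated in one place. The helper class shape is illustrative, including the split into two static methods; the field and method names are taken from the hunks above.

package org.apache.archiva.scheduler.repository;

import java.util.Date;

import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.archiva.repository.scanner.RepositoryScanStatistics;
import org.apache.archiva.repository.scanner.RepositoryScanner;

/**
 * Illustrative consolidation of the executeTask() changes above; not the committed method.
 */
class StatisticsFlowSketch
{
    /** An incremental scan resumes from the start time of the previous scan, if any. */
    static long sinceWhen( RepositoryStatisticsManager manager, String repoId, boolean scanAll )
    {
        if ( !scanAll )
        {
            RepositoryStatistics previousStats = manager.getLastStatistics( repoId );
            if ( previousStats != null )
            {
                return previousStats.getScanStartTime().getTime();
            }
        }
        return RepositoryScanner.FRESH_SCAN;
    }

    /** After the scan, only times and file counts are set here; the manager populates the remaining statistics. */
    static void recordScan( RepositoryStatisticsManager manager, String repoId,
                            RepositoryScanStatistics stats, long previousFileCount )
    {
        RepositoryStatistics repositoryStatistics = new RepositoryStatistics();
        repositoryStatistics.setScanStartTime( stats.getWhenGathered() );
        repositoryStatistics.setScanEndTime( new Date( stats.getWhenGathered().getTime() + stats.getDuration() ) );
        repositoryStatistics.setTotalFileCount( stats.getTotalFileCount() );
        repositoryStatistics.setNewFileCount( stats.getTotalFileCount() - previousFileCount );

        manager.addStatisticsAfterScan( repoId, repositoryStatistics );
    }
}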

View File

@ -25,15 +25,13 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.archiva.repository.scanner.RepositoryScanStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationEvent;
import org.apache.maven.archiva.configuration.ConfigurationListener;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.constraints.MostRecentRepositoryScanStatistics;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.StoppingException;
@ -74,9 +72,9 @@ public class RepositoryArchivaTaskScheduler
private ArchivaConfiguration archivaConfiguration;
/**
* @plexus.requirement role-hint="jdo"
* @plexus.requirement
*/
private ArchivaDAO dao;
private RepositoryStatisticsManager repositoryStatisticsManager;
private static final String REPOSITORY_SCAN_GROUP = "rg";
@ -264,15 +262,7 @@ public class RepositoryArchivaTaskScheduler
@SuppressWarnings("unchecked")
private boolean isPreviouslyScanned( ManagedRepositoryConfiguration repoConfig )
{
List<RepositoryScanStatistics> results =
(List<RepositoryScanStatistics>) dao.query( new MostRecentRepositoryScanStatistics( repoConfig.getId() ) );
if ( results != null && !results.isEmpty() )
{
return true;
}
return false;
return repositoryStatisticsManager.getLastStatistics( repoConfig.getId() ) != null;
}
// MRM-848: Pre-configured repository initially appear to be empty

View File

@ -20,27 +20,20 @@ package org.apache.archiva.scheduler.repository;
*/
import java.io.File;
import java.net.URL;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.constraints.MostRecentRepositoryScanStatistics;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.RepositoryContentStatistics;
import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
import org.codehaus.plexus.jdo.JdoFactory;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.model.ArtifactReference;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import org.codehaus.plexus.util.FileUtils;
import org.jpox.SchemaTool;
/**
* ArchivaRepositoryScanningTaskExecutorTest
@ -52,88 +45,19 @@ public class ArchivaRepositoryScanningTaskExecutorTest
{
private TaskExecutor taskExecutor;
protected ArchivaDAO dao;
private File repoDir;
private static final String TEST_REPO_ID = "testRepo";
private RepositoryStatisticsManager repositoryStatisticsManager;
private TestConsumer testConsumer;
protected void setUp()
throws Exception
{
super.setUp();
DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
assertEquals( DefaultConfigurableJdoFactory.class.getName(), jdoFactory.getClass().getName() );
jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" );
/* derby version
File derbyDbDir = new File( "target/plexus-home/testdb" );
if ( derbyDbDir.exists() )
{
FileUtils.deleteDirectory( derbyDbDir );
}
jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.apache.derby.jdbc.EmbeddedDriver" ) );
jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:derby:" + derbyDbDir.getAbsolutePath() + ";create=true" ) );
*/
jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );
jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:hsqldb:mem:" + getName() ) );
jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );
jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) );
jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );
jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );
jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );
jdoFactory.setProperty( "javax.jdo.option.RetainValues", "true" );
jdoFactory.setProperty( "javax.jdo.option.RestoreValues", "true" );
// jdoFactory.setProperty( "org.jpox.autoCreateColumns", "true" );
jdoFactory.setProperty( "org.jpox.validateTables", "true" );
jdoFactory.setProperty( "org.jpox.validateColumns", "true" );
jdoFactory.setProperty( "org.jpox.validateConstraints", "true" );
Properties properties = jdoFactory.getProperties();
for ( Map.Entry<Object, Object> entry : properties.entrySet() )
{
System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
}
URL jdoFileUrls[] = new URL[]{getClass().getResource( "/org/apache/maven/archiva/model/package.jdo" )};
if ( ( jdoFileUrls == null ) || ( jdoFileUrls[0] == null ) )
{
fail( "Unable to process test " + getName() + " - missing package.jdo." );
}
File propsFile = null; // intentional
boolean verbose = true;
SchemaTool.deleteSchemaTables( jdoFileUrls, new URL[]{}, propsFile, verbose );
SchemaTool.createSchemaTables( jdoFileUrls, new URL[]{}, propsFile, verbose, null );
PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();
assertNotNull( pmf );
PersistenceManager pm = pmf.getPersistenceManager();
pm.close();
this.dao = (ArchivaDAO) lookup( ArchivaDAO.class.getName(), "jdo" );
taskExecutor = (TaskExecutor) lookup( TaskExecutor.class, "test-repository-scanning" );
File sourceRepoDir = new File( getBasedir(), "src/test/repositories/default-repository" );
@ -152,6 +76,7 @@ public class ArchivaRepositoryScanningTaskExecutorTest
{
f.setLastModified( cal.getTimeInMillis() );
}
// TODO: test they are excluded instead
for ( String dir : (List<String>) FileUtils.getDirectoryNames( repoDir, "**/.svn", null, false ) )
{
FileUtils.deleteDirectory( new File( repoDir, dir ) );
@ -169,6 +94,9 @@ public class ArchivaRepositoryScanningTaskExecutorTest
repositoryConfiguration.setLocation( repoDir.getAbsolutePath() );
archivaConfig.getConfiguration().getManagedRepositories().clear();
archivaConfig.getConfiguration().addManagedRepository( repositoryConfiguration );
repositoryStatisticsManager = (RepositoryStatisticsManager) lookup( RepositoryStatisticsManager.class );
testConsumer = (TestConsumer) lookup( KnownRepositoryContentConsumer.class, "test-consumer" );
}
protected void tearDown()
@ -190,8 +118,7 @@ public class ArchivaRepositoryScanningTaskExecutorTest
taskExecutor.executeTask( repoTask );
ArtifactDAO adao = dao.getArtifactDAO();
List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( null );
Collection<ArtifactReference> unprocessedResultList = testConsumer.getConsumed();
assertNotNull( unprocessedResultList );
assertEquals( "Incorrect number of unprocessed artifacts detected.", 8, unprocessedResultList.size() );
@ -210,25 +137,21 @@ public class ArchivaRepositoryScanningTaskExecutorTest
taskExecutor.executeTask( repoTask );
// check no artifacts processed
ArtifactDAO adao = dao.getArtifactDAO();
List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( null );
Collection<ArtifactReference> unprocessedResultList = testConsumer.getConsumed();
assertNotNull( unprocessedResultList );
assertEquals( "Incorrect number of unprocessed artifacts detected. No new artifacts should have been found.", 0,
unprocessedResultList.size() );
// check correctness of new stats
List<RepositoryContentStatistics> results =
(List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( TEST_REPO_ID ) );
RepositoryContentStatistics newStats = results.get( 0 );
RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertEquals( 0, newStats.getNewFileCount() );
assertEquals( TEST_REPO_ID, newStats.getRepositoryId() );
assertEquals( 31, newStats.getTotalFileCount() );
// TODO: can't test these as they weren't stored in the database
// TODO: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
// assertEquals( 8, newStats.getTotalArtifactCount() );
// assertEquals( 3, newStats.getTotalGroupCount() );
// assertEquals( 5, newStats.getTotalProjectCount() );
assertEquals( 14159, newStats.getTotalSize() );
// assertEquals( 14159, newStats.getTotalArtifactFileSize() );
File newArtifactGroup = new File( repoDir, "org/apache/archiva" );
@ -246,23 +169,20 @@ public class ArchivaRepositoryScanningTaskExecutorTest
taskExecutor.executeTask( repoTask );
unprocessedResultList = adao.queryArtifacts( null );
unprocessedResultList = testConsumer.getConsumed();
assertNotNull( unprocessedResultList );
assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1,
unprocessedResultList.size() );
// check correctness of new stats
results =
(List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( TEST_REPO_ID ) );
RepositoryContentStatistics updatedStats = results.get( 0 );
RepositoryStatistics updatedStats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertEquals( 2, updatedStats.getNewFileCount() );
assertEquals( TEST_REPO_ID, updatedStats.getRepositoryId() );
assertEquals( 33, updatedStats.getTotalFileCount() );
// TODO: can't test these as they weren't stored in the database
// TODO: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
// assertEquals( 8, newStats.getTotalArtifactCount() );
// assertEquals( 3, newStats.getTotalGroupCount() );
// assertEquals( 5, newStats.getTotalProjectCount() );
assertEquals( 19301, updatedStats.getTotalSize() );
// assertEquals( 19301, updatedStats.getTotalArtifactFileSize() );
}
public void testExecutorScanOnlyNewArtifactsChangeTimes()
@ -293,24 +213,20 @@ public class ArchivaRepositoryScanningTaskExecutorTest
taskExecutor.executeTask( repoTask );
// check no artifacts processed
ArtifactDAO adao = dao.getArtifactDAO();
List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( null );
Collection<ArtifactReference> unprocessedResultList = testConsumer.getConsumed();
assertNotNull( unprocessedResultList );
assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1,
unprocessedResultList.size() );
// check correctness of new stats
List<RepositoryContentStatistics> results =
(List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( TEST_REPO_ID ) );
RepositoryContentStatistics newStats = results.get( 0 );
RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertEquals( 2, newStats.getNewFileCount() );
assertEquals( TEST_REPO_ID, newStats.getRepositoryId() );
assertEquals( 33, newStats.getTotalFileCount() );
// TODO: can't test these as they weren't stored in the database
// TODO: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
// assertEquals( 8, newStats.getTotalArtifactCount() );
// assertEquals( 3, newStats.getTotalGroupCount() );
// assertEquals( 5, newStats.getTotalProjectCount() );
assertEquals( 19301, newStats.getTotalSize() );
// assertEquals( 19301, newStats.getTotalArtifactFileSize() );
}
public void testExecutorScanOnlyNewArtifactsMidScan()
@ -341,40 +257,20 @@ public class ArchivaRepositoryScanningTaskExecutorTest
taskExecutor.executeTask( repoTask );
// check no artifacts processed
ArtifactDAO adao = dao.getArtifactDAO();
List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( null );
Collection<ArtifactReference> unprocessedResultList = testConsumer.getConsumed();
assertNotNull( unprocessedResultList );
assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1,
unprocessedResultList.size() );
// check correctness of new stats
List<RepositoryContentStatistics> results =
(List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( TEST_REPO_ID ) );
RepositoryContentStatistics newStats = results.get( 0 );
RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertEquals( 2, newStats.getNewFileCount() );
assertEquals( TEST_REPO_ID, newStats.getRepositoryId() );
assertEquals( 33, newStats.getTotalFileCount() );
// TODO: can't test these as they weren't stored in the database
// TODO: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
// assertEquals( 8, newStats.getTotalArtifactCount() );
// assertEquals( 3, newStats.getTotalGroupCount() );
// assertEquals( 5, newStats.getTotalProjectCount() );
assertEquals( 19301, newStats.getTotalSize() );
}
private void createAndSaveTestStats()
{
RepositoryContentStatistics stats = new RepositoryContentStatistics();
stats.setDuration( 1234567 );
stats.setNewFileCount( 31 );
stats.setRepositoryId( TEST_REPO_ID );
stats.setTotalArtifactCount( 8 );
stats.setTotalFileCount( 31 );
stats.setTotalGroupCount( 3 );
stats.setTotalProjectCount( 5 );
stats.setTotalSize( 38545 );
stats.setWhenGathered( Calendar.getInstance().getTime() );
dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( stats );
// assertEquals( 19301, newStats.getTotalArtifactFileSize() );
}
public void testExecutorForceScanAll()
@ -385,25 +281,40 @@ public class ArchivaRepositoryScanningTaskExecutorTest
repoTask.setRepositoryId( TEST_REPO_ID );
repoTask.setScanAll( true );
RepositoryContentStatistics stats = new RepositoryContentStatistics();
stats.setDuration( 1234567 );
Date date = Calendar.getInstance().getTime();
RepositoryStatistics stats = new RepositoryStatistics();
stats.setScanStartTime( new Date( date.getTime() - 1234567 ) );
stats.setScanEndTime( date );
stats.setNewFileCount( 8 );
stats.setRepositoryId( TEST_REPO_ID );
stats.setTotalArtifactCount( 8 );
stats.setTotalFileCount( 8 );
stats.setTotalGroupCount( 3 );
stats.setTotalProjectCount( 5 );
stats.setTotalSize( 999999 );
stats.setWhenGathered( Calendar.getInstance().getTime() );
stats.setTotalArtifactFileSize( 999999 );
dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( stats );
repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats );
taskExecutor.executeTask( repoTask );
ArtifactDAO adao = dao.getArtifactDAO();
List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( null );
Collection<ArtifactReference> unprocessedResultList = testConsumer.getConsumed();
assertNotNull( unprocessedResultList );
assertEquals( "Incorrect number of unprocessed artifacts detected.", 8, unprocessedResultList.size() );
}
private void createAndSaveTestStats()
{
Date date = Calendar.getInstance().getTime();
RepositoryStatistics stats = new RepositoryStatistics();
stats.setScanStartTime( new Date( date.getTime() - 1234567 ) );
stats.setScanEndTime( date );
stats.setNewFileCount( 31 );
stats.setTotalArtifactCount( 8 );
stats.setTotalFileCount( 31 );
stats.setTotalGroupCount( 3 );
stats.setTotalProjectCount( 5 );
stats.setTotalArtifactFileSize( 38545 );
repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats );
}
}

View File

@ -0,0 +1,114 @@
package org.apache.archiva.scheduler.repository;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.layout.LayoutException;
public class TestConsumer
extends AbstractMonitoredConsumer
implements KnownRepositoryContentConsumer
{
private Set<ArtifactReference> consumed = new HashSet<ArtifactReference>();
// injected
private RepositoryContentFactory factory;
private ManagedRepositoryContent repository;
public String getId()
{
return "test-consumer";
}
public String getDescription()
{
return null;
}
public boolean isPermanent()
{
return false;
}
public List<String> getIncludes()
{
return Collections.singletonList( "**/**" );
}
public List<String> getExcludes()
{
return null;
}
public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered )
throws ConsumerException
{
consumed.clear();
try
{
this.repository = factory.getManagedRepositoryContent( repository.getId() );
}
catch ( RepositoryException e )
{
throw new ConsumerException( e.getMessage(), e );
}
}
public void processFile( String path )
throws ConsumerException
{
if ( !path.endsWith( ".sha1" ) && !path.endsWith( ".md5" ) )
{
try
{
consumed.add( repository.toArtifactReference( path ) );
}
catch ( LayoutException e )
{
throw new ConsumerException( e.getMessage(), e );
}
}
}
public void completeScan()
{
}
public Collection<ArtifactReference> getConsumed()
{
return consumed;
}
}

View File

@ -0,0 +1,59 @@
package org.apache.archiva.scheduler.repository;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
public class TestRepositoryStatisticsManager
implements RepositoryStatisticsManager
{
private Map<String, List<RepositoryStatistics>> repoStats = new HashMap<String, List<RepositoryStatistics>>();
public RepositoryStatistics getLastStatistics( String repositoryId )
{
List<RepositoryStatistics> repositoryStatisticsList = getStatsList( repositoryId );
return !repositoryStatisticsList.isEmpty()
? repositoryStatisticsList.get( repositoryStatisticsList.size() - 1 )
: null;
}
public void addStatisticsAfterScan( String repositoryId, RepositoryStatistics repositoryStatistics )
{
List<RepositoryStatistics> stats = getStatsList( repositoryId );
stats.add( repositoryStatistics );
}
private List<RepositoryStatistics> getStatsList( String repositoryId )
{
List<RepositoryStatistics> stats = repoStats.get( repositoryId );
if ( stats == null )
{
stats = new ArrayList<RepositoryStatistics>();
repoStats.put( repositoryId, stats );
}
return stats;
}
}

View File

@ -91,29 +91,7 @@
</fileType>
</fileTypes>
<knownContentConsumers>
<knownContentConsumer>update-db-artifact</knownContentConsumer>
<knownContentConsumer>create-missing-checksums</knownContentConsumer>
<knownContentConsumer>update-db-repository-metadata</knownContentConsumer>
<knownContentConsumer>validate-checksum</knownContentConsumer>
<knownContentConsumer>validate-signature</knownContentConsumer>
<knownContentConsumer>index-content</knownContentConsumer>
<knownContentConsumer>auto-remove</knownContentConsumer>
<knownContentConsumer>auto-rename</knownContentConsumer>
<knownContentConsumer>test-consumer</knownContentConsumer>
</knownContentConsumers>
<invalidContentConsumers>
<invalidContentConsumer>update-db-bad-content</invalidContentConsumer>
</invalidContentConsumers>
</repositoryScanning>
<databaseScanning>
<cronExpression>0 0 * * * ?</cronExpression>
<unprocessedConsumers>
<unprocessedConsumer>test-db-unprocessed</unprocessedConsumer>
<unprocessedConsumer>update-db-artifact</unprocessedConsumer>
</unprocessedConsumers>
<cleanupConsumers>
<cleanupConsumer>test-db-cleanup</cleanupConsumer>
</cleanupConsumers>
</databaseScanning>
</configuration>

View File

@ -25,13 +25,7 @@
<role-hint>test-repository-scanning</role-hint>
<implementation>org.apache.archiva.scheduler.repository.ArchivaRepositoryScanningTaskExecutor
</implementation>
<description></description>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<field-name>dao</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<field-name>archivaConfiguration</field-name>
@ -40,6 +34,9 @@
<role>org.apache.archiva.repository.scanner.RepositoryScanner</role>
<field-name>repoScanner</field-name>
</requirement>
<requirement>
<role>org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager</role>
</requirement>
</requirements>
</component>
@ -76,19 +73,19 @@
</component>
<component>
<role>org.codehaus.plexus.jdo.JdoFactory</role>
<role-hint>archiva</role-hint>
<implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
<configuration>
<persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
<otherProperties>
<property>
<name>javax.jdo.PersistenceManagerFactoryClass</name>
<value>org.jpox.PersistenceManagerFactoryImpl</value>
</property>
</otherProperties>
</configuration>
<role>org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager</role>
<implementation>org.apache.archiva.scheduler.repository.TestRepositoryStatisticsManager</implementation>
</component>
<component>
<role>org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer</role>
<role-hint>test-consumer</role-hint>
<implementation>org.apache.archiva.scheduler.repository.TestConsumer</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
</requirement>
</requirements>
</component>
</components>
</component-set>

View File

@ -33,6 +33,10 @@
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-report-manager</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>repository-statistics</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-scheduler-repository</artifactId>

View File

@ -19,30 +19,29 @@ package org.apache.maven.archiva.web.action.admin.repositories;
* under the License.
*/
import org.apache.struts2.interceptor.ServletRequestAware;
import com.opensymphony.xwork2.Preparable;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.configuration.RemoteRepositoryConfiguration;
import org.apache.maven.archiva.configuration.functors.RepositoryConfigurationComparator;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.constraints.MostRecentRepositoryScanStatistics;
import org.apache.maven.archiva.model.RepositoryContentStatistics;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
import org.apache.maven.archiva.web.util.ContextUtils;
import org.apache.maven.archiva.web.action.PlexusActionSupport;
import org.codehaus.plexus.redback.rbac.Resource;
import org.codehaus.redback.integration.interceptor.SecureAction;
import org.codehaus.redback.integration.interceptor.SecureActionBundle;
import org.codehaus.redback.integration.interceptor.SecureActionException;
import javax.servlet.http.HttpServletRequest;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import com.opensymphony.xwork2.Preparable;
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.configuration.RemoteRepositoryConfiguration;
import org.apache.maven.archiva.configuration.functors.RepositoryConfigurationComparator;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
import org.apache.maven.archiva.web.action.PlexusActionSupport;
import org.apache.maven.archiva.web.util.ContextUtils;
import org.apache.struts2.interceptor.ServletRequestAware;
import org.codehaus.plexus.redback.rbac.Resource;
import org.codehaus.redback.integration.interceptor.SecureAction;
import org.codehaus.redback.integration.interceptor.SecureActionBundle;
import org.codehaus.redback.integration.interceptor.SecureActionException;
/**
* Shows the Repositories Tab for the administrator.
@ -63,20 +62,20 @@ public class RepositoriesAction
private List<RemoteRepositoryConfiguration> remoteRepositories;
private Map<String, RepositoryContentStatistics> repositoryStatistics;
private Map<String, RepositoryStatistics> repositoryStatistics;
private Map<String, List<String>> repositoryToGroupMap;
/**
* @plexus.requirement role-hint="jdo"
*/
private ArchivaDAO dao;
/**
* Used to construct the repository WebDAV URL in the repository action.
*/
private String baseUrl;
/**
* @plexus.requirement
*/
private RepositoryStatisticsManager repositoryStatisticsManager;
public void setServletRequest( HttpServletRequest request )
{
// TODO: is there a better way to do this?
@ -106,14 +105,13 @@ public class RepositoriesAction
Collections.sort( managedRepositories, new RepositoryConfigurationComparator() );
Collections.sort( remoteRepositories, new RepositoryConfigurationComparator() );
repositoryStatistics = new HashMap<String, RepositoryContentStatistics>();
repositoryStatistics = new HashMap<String, RepositoryStatistics>();
for ( ManagedRepositoryConfiguration repo : managedRepositories )
{
List<RepositoryContentStatistics> results =
(List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( repo.getId() ) );
if ( !results.isEmpty() )
RepositoryStatistics stats = repositoryStatisticsManager.getLastStatistics( repo.getId() );
if ( stats != null )
{
repositoryStatistics.put( repo.getId(), results.get( 0 ) );
repositoryStatistics.put( repo.getId(), stats );
}
}
}
@ -128,7 +126,7 @@ public class RepositoriesAction
return remoteRepositories;
}
public Map<String, RepositoryContentStatistics> getRepositoryStatistics()
public Map<String, RepositoryStatistics> getRepositoryStatistics()
{
return repositoryStatistics;
}

View File

@ -225,7 +225,7 @@
<table>
<tr>
<th>Last Scanned</th>
<td>${stats.whenGathered}</td>
<td>${stats.scanStartTime}</td>
</tr>
<tr>
<th>Duration</th>

View File

@ -1,10 +1,8 @@
package org.apache.maven.archiva.web.action.admin.repositories;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import junit.framework.Assert;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArtifactDAO;
@ -14,7 +12,6 @@ import org.apache.maven.archiva.database.SimpleConstraint;
import org.apache.maven.archiva.database.constraints.UniqueArtifactIdConstraint;
import org.apache.maven.archiva.database.constraints.UniqueGroupIdConstraint;
import org.apache.maven.archiva.database.constraints.UniqueVersionConstraint;
import org.apache.maven.archiva.model.RepositoryContentStatistics;
/*
* Licensed to the Apache Software Foundation (ASF) under one
@ -67,19 +64,7 @@ public class ArchivaDAOStub
{
return artifacts;
}
else
{
Assert.assertEquals( RepositoryContentStatistics.class, constraint.getResultClass() );
List<RepositoryContentStatistics> stats = new ArrayList<RepositoryContentStatistics>();
for ( String repo : configuration.getConfiguration().getManagedRepositoriesAsMap().keySet() )
{
RepositoryContentStatistics statistics = new RepositoryContentStatistics();
statistics.setRepositoryId( repo );
stats.add( statistics );
}
return stats;
}
throw new UnsupportedOperationException();
}
public Object save( Serializable obj )

View File

@ -19,12 +19,19 @@ package org.apache.maven.archiva.web.action.admin.repositories;
* under the License.
*/
import java.util.Arrays;
import com.meterware.servletunit.ServletRunner;
import com.meterware.servletunit.ServletUnitClient;
import com.opensymphony.xwork2.Action;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.stats.DefaultRepositoryStatisticsManager;
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.codehaus.redback.integration.interceptor.SecureActionBundle;
import org.codehaus.redback.integration.interceptor.SecureActionException;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.easymock.MockControl;
/**
* Test the repositories action returns the correct data.
@ -48,6 +55,24 @@ public class RepositoriesActionTest
public void testGetRepositories()
throws Exception
{
MockControl control = MockControl.createControl( MetadataRepository.class );
MetadataRepository metadataRepository = (MetadataRepository) control.getMock();
control.expectAndReturn( metadataRepository.getMetadataFacets( "internal", RepositoryStatistics.FACET_ID ),
Arrays.asList( "20091125.123456.678" ) );
control.expectAndReturn(
metadataRepository.getMetadataFacet( "internal", RepositoryStatistics.FACET_ID, "20091125.123456.678" ),
new RepositoryStatistics() );
control.expectAndReturn( metadataRepository.getMetadataFacets( "snapshots", RepositoryStatistics.FACET_ID ),
Arrays.asList( "20091112.012345.012" ) );
control.expectAndReturn(
metadataRepository.getMetadataFacet( "snapshots", RepositoryStatistics.FACET_ID, "20091112.012345.012" ),
new RepositoryStatistics() );
control.replay();
DefaultRepositoryStatisticsManager statsManager =
(DefaultRepositoryStatisticsManager) lookup( RepositoryStatisticsManager.class );
statsManager.setMetadataRepository( metadataRepository );
ServletRunner sr = new ServletRunner();
ServletUnitClient sc = sr.newClient();
@ -66,6 +91,8 @@ public class RepositoriesActionTest
assertEquals( 2, action.getManagedRepositories().size() );
assertEquals( 2, action.getRemoteRepositories().size() );
assertEquals( 2, action.getRepositoryStatistics().size() );
control.verify();
}
public void testSecureActionBundle()

View File

@ -1,52 +0,0 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<plexus>
<components>
<component>
<role>com.opensymphony.xwork2.Action</role>
<role-hint>repositoriesAction</role-hint>
<implementation>org.apache.maven.archiva.web.action.admin.repositories.RepositoriesAction</implementation>
<instantiation-strategy>per-lookup</instantiation-strategy>
<description>Shows the Repositories Tab for the administrator.</description>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<field-name>archivaConfiguration</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>stub</role-hint>
<field-name>dao</field-name>
</requirement>
</requirements>
</component>
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>stub</role-hint>
<implementation>org.apache.maven.archiva.web.action.admin.repositories.ArchivaDAOStub</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<field-name>configuration</field-name>
</requirement>
</requirements>
</component>
</components>
</plexus>

View File

@ -0,0 +1,121 @@
package org.apache.archiva.metadata.repository;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.MetadataFacet;
import org.apache.archiva.metadata.model.ProjectMetadata;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
public class TestMetadataRepository
implements MetadataRepository
{
public ProjectMetadata getProject( String repoId, String namespace, String projectId )
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public ProjectVersionMetadata getProjectVersion( String repoId, String namespace, String projectId,
String projectVersion )
throws MetadataResolverException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Collection<String> getArtifactVersions( String repoId, String namespace, String projectId,
String projectVersion )
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Collection<ProjectVersionReference> getProjectReferences( String repoId, String namespace, String projectId,
String projectVersion )
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Collection<String> getRootNamespaces( String repoId )
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Collection<String> getNamespaces( String repoId, String namespace )
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Collection<String> getProjects( String repoId, String namespace )
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Collection<String> getProjectVersions( String repoId, String namespace, String projectId )
throws MetadataResolverException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public void updateProject( String repoId, ProjectMetadata project )
{
//To change body of implemented methods use File | Settings | File Templates.
}
public void updateArtifact( String repoId, String namespace, String projectId, String projectVersion,
ArtifactMetadata artifactMeta )
{
//To change body of implemented methods use File | Settings | File Templates.
}
public void updateProjectVersion( String repoId, String namespace, String projectId, ProjectVersionMetadata versionMetadata )
{
//To change body of implemented methods use File | Settings | File Templates.
}
public void updateProjectReference( String repoId, String namespace, String projectId, String projectVersion,
ProjectVersionReference reference )
{
//To change body of implemented methods use File | Settings | File Templates.
}
public void updateNamespace( String repoId, String namespace )
{
//To change body of implemented methods use File | Settings | File Templates.
}
public List<String> getMetadataFacets( String repoId, String facetId )
{
return Collections.emptyList();
}
public MetadataFacet getMetadataFacet( String repositoryId, String facetId, String name )
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public void addMetadataFacet( String repositoryId, String facetId, String name, MetadataFacet metadataFacet )
{
//To change body of implemented methods use File | Settings | File Templates.
}
}

View File

@ -29,5 +29,10 @@
<resource>archiva-mime-types.txt</resource>
</configuration>
</component>
<component>
<role>org.apache.archiva.metadata.repository.MetadataRepository</role>
<implementation>org.apache.archiva.metadata.repository.TestMetadataRepository</implementation>
</component>
</components>
</component-set>

View File

@ -94,35 +94,60 @@ The following is the intended content model for the metadata content repository:
| |-- maven:groupId=
| |-- maven:plugins.compiler.artifactId=
| `-- maven:plugins.compiler.name=
`-- references/
`-- org/
`-- apache/
`-- archiva/
|-- parent/
| `-- 1/
|-- references/
| `-- org/
| `-- apache/
| `-- archiva/
| |-- parent/
| | `-- 1/
| | `-- references/
| | `-- org/
| | `-- apache/
| | `-- archiva/
| | |-- platform/
| | | `-- scanner/
| | | `-- 1.0-SNAPSHOT/
| | | `-- referenceType=parent
| | `-- web/
| | `-- webapp/
| | `-- 1.0-SNAPSHOT/
| | `-- referenceType=parent
| `-- platform/
| `-- scanner/
| `-- 1.0-SNAPSHOT/
| `-- references/
| `-- org/
| `-- apache/
| `-- archiva/
| |-- platform/
| | `-- scanner/
| | `-- 1.0-SNAPSHOT/
| | `-- referenceType=parent
| `-- web/
| `-- webapp/
| `-- 1.0-SNAPSHOT/
| `-- referenceType=parent
`-- platform/
`-- scanner/
`-- 1.0-SNAPSHOT/
`-- references/
`-- org/
`-- apache/
`-- archiva/
`-- web/
`-- webapp/
`-- 1.0-SNAPSHOT/
`-- referenceType=dependency
| `-- referenceType=dependency
`-- stats/
`-- 2009/
`-- 12/
|-- 02/
| `-- 23/
| `-- 47/
| `-- 00/
| |-- scanEndTime=
| |-- scanStartTime=
| |-- totalArtifactCount=
| |-- totalArtifactFileSize=
| |-- totalFileCount=
| |-- totalGroupCount=
| `-- totalProjectCount=
`-- 03/
`-- 09/
`-- 00/
`-- 00/
|-- scanEndTime=
|-- scanStartTime=
|-- totalArtifactCount=
|-- totalArtifactFileSize=
|-- totalFileCount=
|-- totalGroupCount=
`-- totalProjectCount=
(To update, run "tree --dirsfirst -F" on the unpacked content-model.zip from the sandbox)
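For illustration only (not part of this change): assuming the yyyyMMdd.HHmmss.SSS scan-timestamp naming used by DefaultRepositoryStatisticsManager later in this commit, a statistics facet name maps onto the nested stats/ path above roughly as in the sketch below. The class and method names are hypothetical.
package org.apache.archiva.metadata.repository.stats;
public final class StatsPaths
{
    // Hypothetical helper: expand a scan timestamp facet name such as
    // "20091202.234700.000" into the nested path "stats/2009/12/02/23/47/00".
    public static String statsPath( String facetName )
    {
        String date = facetName.substring( 0, 8 );  // yyyyMMdd
        String time = facetName.substring( 9, 15 ); // HHmmss
        return "stats/" + date.substring( 0, 4 ) + "/" + date.substring( 4, 6 ) + "/" + date.substring( 6, 8 )
            + "/" + time.substring( 0, 2 ) + "/" + time.substring( 2, 4 ) + "/" + time.substring( 4, 6 );
    }
}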
@ -179,6 +204,9 @@ Notes:
*) some of the above needs to be reviewed before going into production. For example:
- the maven specific aspects of dependencies should become a faceted part of the content
- more of the metadata might be faceted in general, keeping the content model basic by default
- determine whether any of the stats can be derived as functions of the content repository rather than stored and
kept up to date. Historical data might be retained by versioning and taking a snapshot at a given point in time.
The current approach of tying them to the scanning process is not optimal (a sketch follows these notes)
- metadata currently stored as 0-indexed lists would be better stored as child nodes. This might require additional
levels in the current repository (.../scanner/versions/1.0-SNAPSHOT/artifacts/scanner-1.0-20091120.012345-1.pom),
or for listed information to be kept in a separate tree
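As referenced in the stats note above, a rough sketch (not part of this commit) of deriving group and project totals on demand by walking the MetadataRepository instead of persisting them after each scan. It assumes getNamespaces() returns fully qualified child namespaces that can be passed straight back in, and counts a namespace as a group only when it contains projects; the class name is hypothetical.
package org.apache.archiva.metadata.repository.stats;
import java.util.Collection;
import org.apache.archiva.metadata.repository.MetadataRepository;
public final class DerivedStatistics
{
    // Hypothetical sketch: compute totals by navigating the metadata repository
    // rather than storing them with each scan.
    public static RepositoryStatistics derive( MetadataRepository repository, String repoId )
    {
        RepositoryStatistics stats = new RepositoryStatistics();
        for ( String rootNamespace : repository.getRootNamespaces( repoId ) )
        {
            walk( repository, repoId, rootNamespace, stats );
        }
        return stats;
    }
    private static void walk( MetadataRepository repository, String repoId, String namespace,
                              RepositoryStatistics stats )
    {
        Collection<String> projects = repository.getProjects( repoId, namespace );
        if ( !projects.isEmpty() )
        {
            // assumption: a namespace that contains projects counts as one group
            stats.setTotalGroupCount( stats.getTotalGroupCount() + 1 );
            stats.setTotalProjectCount( stats.getTotalProjectCount() + projects.size() );
        }
        for ( String child : repository.getNamespaces( repoId, namespace ) )
        {
            walk( repository, repoId, child, stats );
        }
    }
}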

View File

@ -21,7 +21,7 @@ package org.apache.archiva.metadata.model;
import java.util.Map;
public interface ProjectVersionFacet
public interface MetadataFacet
{
String getFacetId();

View File

@ -21,5 +21,5 @@ package org.apache.archiva.metadata.model;
public interface MetadataFacetFactory
{
ProjectVersionFacet createProjectVersionFacet();
MetadataFacet createMetadataFacet();
}

View File

@ -45,7 +45,7 @@ public class ProjectVersionMetadata
private List<License> licenses = new ArrayList<License>();
private Map<String, ProjectVersionFacet> facets = new HashMap<String, ProjectVersionFacet>();
private Map<String, MetadataFacet> facets = new HashMap<String, MetadataFacet>();
private List<MailingList> mailingLists = new ArrayList<MailingList>();
@ -151,22 +151,22 @@ public class ProjectVersionMetadata
this.licenses.add( license );
}
public void addFacet( ProjectVersionFacet projectVersionFacet )
public void addFacet( MetadataFacet metadataFacet )
{
this.facets.put( projectVersionFacet.getFacetId(), projectVersionFacet );
this.facets.put( metadataFacet.getFacetId(), metadataFacet );
}
public ProjectVersionFacet getFacet( String facetId )
public MetadataFacet getFacet( String facetId )
{
return this.facets.get( facetId );
}
public Map<String, ProjectVersionFacet> getFacets()
public Map<String, MetadataFacet> getFacets()
{
return facets;
}
public Collection<ProjectVersionFacet> getFacetList()
public Collection<MetadataFacet> getFacetList()
{
return this.facets.values();
}

View File

@ -19,7 +19,10 @@ package org.apache.archiva.metadata.repository;
* under the License.
*/
import java.util.List;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.MetadataFacet;
import org.apache.archiva.metadata.model.ProjectMetadata;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
@ -45,4 +48,10 @@ public interface MetadataRepository
ProjectVersionReference reference );
void updateNamespace( String repoId, String namespace );
List<String> getMetadataFacets( String repoId, String facetId );
MetadataFacet getMetadataFacet( String repositoryId, String facetId, String name );
void addMetadataFacet( String repositoryId, String facetId, String name, MetadataFacet metadataFacet );
}

View File

@ -22,10 +22,10 @@ package org.apache.archiva.metadata.repository.storage.maven2;
import java.util.HashMap;
import java.util.Map;
import org.apache.archiva.metadata.model.ProjectVersionFacet;
import org.apache.archiva.metadata.model.MetadataFacet;
public class MavenProjectFacet
implements ProjectVersionFacet
implements MetadataFacet
{
private String groupId;

View File

@ -19,8 +19,8 @@ package org.apache.archiva.metadata.repository.storage.maven2;
* under the License.
*/
import org.apache.archiva.metadata.model.MetadataFacet;
import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.model.ProjectVersionFacet;
/**
* @plexus.component role="org.apache.archiva.metadata.model.MetadataFacetFactory" role-hint="org.apache.archiva.metadata.repository.storage.maven2"
@ -28,7 +28,7 @@ import org.apache.archiva.metadata.model.ProjectVersionFacet;
public class MavenProjectFacetFactory
implements MetadataFacetFactory
{
public ProjectVersionFacet createProjectVersionFacet()
public MetadataFacet createMetadataFacet()
{
return new MavenProjectFacet();
}

View File

@ -27,6 +27,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
@ -40,10 +41,10 @@ import org.apache.archiva.metadata.model.Dependency;
import org.apache.archiva.metadata.model.IssueManagement;
import org.apache.archiva.metadata.model.License;
import org.apache.archiva.metadata.model.MailingList;
import org.apache.archiva.metadata.model.MetadataFacet;
import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.model.Organization;
import org.apache.archiva.metadata.model.ProjectMetadata;
import org.apache.archiva.metadata.model.ProjectVersionFacet;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.model.Scm;
@ -78,6 +79,8 @@ public class FileMetadataRepository
private static final String NAMESPACE_METADATA_KEY = "namespace-metadata";
private static final String METADATA_KEY = "metadata";
public void updateProject( String repoId, ProjectMetadata project )
{
updateProject( repoId, project.getNamespace(), project.getId() );
@ -95,7 +98,6 @@ public class FileMetadataRepository
properties.setProperty( "namespace", namespace );
properties.setProperty( "id", id );
writeProperties( properties, new File( namespaceDirectory, id ), PROJECT_METADATA_KEY );
}
catch ( IOException e )
{
@ -112,7 +114,7 @@ public class FileMetadataRepository
File directory =
new File( this.directory, repoId + "/" + namespace + "/" + projectId + "/" + versionMetadata.getId() );
Properties properties = readProperties( directory, PROJECT_VERSION_METADATA_KEY );
Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
// remove properties that are not references or artifacts
for ( String name : properties.stringPropertyNames() )
{
@ -180,7 +182,7 @@ public class FileMetadataRepository
facetIds.addAll( Arrays.asList( properties.getProperty( "facetIds", "" ).split( "," ) ) );
properties.setProperty( "facetIds", join( facetIds ) );
for ( ProjectVersionFacet facet : versionMetadata.getFacetList() )
for ( MetadataFacet facet : versionMetadata.getFacetList() )
{
properties.putAll( facet.toProperties() );
}
@ -201,7 +203,7 @@ public class FileMetadataRepository
{
File directory = new File( this.directory, repoId + "/" + namespace + "/" + projectId + "/" + projectVersion );
Properties properties = readProperties( directory, PROJECT_VERSION_METADATA_KEY );
Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
int i = Integer.valueOf( properties.getProperty( "ref:lastReferenceNum", "-1" ) ) + 1;
setProperty( properties, "ref:lastReferenceNum", Integer.toString( i ) );
setProperty( properties, "ref:reference." + i + ".namespace", reference.getNamespace() );
@ -237,6 +239,68 @@ public class FileMetadataRepository
}
}
public List<String> getMetadataFacets( String repoId, String facetId )
{
File directory = getMetadataDirectory( repoId, facetId );
String[] list = directory.list();
return list != null ? Arrays.asList( list ) : Collections.<String>emptyList();
}
public MetadataFacet getMetadataFacet( String repositoryId, String facetId, String name )
{
Properties properties;
try
{
properties =
readProperties( new File( getMetadataDirectory( repositoryId, facetId ), name ), METADATA_KEY );
}
catch ( FileNotFoundException e )
{
return null;
}
catch ( IOException e )
{
// TODO
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
return null;
}
MetadataFacet metadataFacet = null;
MetadataFacetFactory metadataFacetFactory = metadataFacetFactories.get( facetId );
if ( metadataFacetFactory != null )
{
metadataFacet = metadataFacetFactory.createMetadataFacet();
Map<String, String> map = new HashMap<String, String>();
for ( String key : properties.stringPropertyNames() )
{
map.put( key, properties.getProperty( key ) );
}
metadataFacet.fromProperties( map );
}
return metadataFacet;
}
public void addMetadataFacet( String repositoryId, String facetId, String name, MetadataFacet metadataFacet )
{
Properties properties = new Properties();
properties.putAll( metadataFacet.toProperties() );
try
{
writeProperties( properties, new File( getMetadataDirectory( repositoryId, facetId ), name ),
METADATA_KEY );
}
catch ( IOException e )
{
// TODO!
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
}
}
private File getMetadataDirectory( String repositoryId, String facetId )
{
return new File( this.directory, repositoryId + "/.meta/" + facetId );
}
private String join( Collection<String> ids )
{
if ( !ids.isEmpty() )
@ -265,7 +329,7 @@ public class FileMetadataRepository
{
File directory = new File( this.directory, repoId + "/" + namespace + "/" + projectId + "/" + projectVersion );
Properties properties = readProperties( directory, PROJECT_VERSION_METADATA_KEY );
Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
properties.setProperty( "artifact:updated:" + artifact.getId(),
Long.toString( artifact.getFileLastModified().getTime() ) );
@ -287,7 +351,26 @@ public class FileMetadataRepository
}
}
private Properties readOrCreateProperties( File directory, String propertiesKey )
{
try
{
return readProperties( directory, propertiesKey );
}
catch ( FileNotFoundException e )
{
// ignore and return new properties
}
catch ( IOException e )
{
// TODO
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
}
return new Properties();
}
private Properties readProperties( File directory, String propertiesKey )
throws IOException
{
Properties properties = new Properties();
FileInputStream in = null;
@ -296,15 +379,6 @@ public class FileMetadataRepository
in = new FileInputStream( new File( directory, propertiesKey + ".properties" ) );
properties.load( in );
}
catch ( FileNotFoundException e )
{
// skip - use blank properties
}
catch ( IOException e )
{
// TODO
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
}
finally
{
IOUtils.closeQuietly( in );
@ -316,7 +390,7 @@ public class FileMetadataRepository
{
File directory = new File( this.directory, repoId + "/" + namespace + "/" + projectId );
Properties properties = readProperties( directory, PROJECT_VERSION_METADATA_KEY );
Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
ProjectMetadata project = new ProjectMetadata();
project.setNamespace( properties.getProperty( "namespace" ) );
@ -329,7 +403,7 @@ public class FileMetadataRepository
{
File directory = new File( this.directory, repoId + "/" + namespace + "/" + projectId + "/" + projectVersion );
Properties properties = readProperties( directory, PROJECT_VERSION_METADATA_KEY );
Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
String id = properties.getProperty( "id" );
ProjectVersionMetadata versionMetadata = null;
if ( id != null )
@ -461,7 +535,7 @@ public class FileMetadataRepository
}
else
{
ProjectVersionFacet facet = factory.createProjectVersionFacet();
MetadataFacet facet = factory.createMetadataFacet();
Map<String, String> map = new HashMap<String, String>();
for ( String key : properties.stringPropertyNames() )
{
@ -475,7 +549,7 @@ public class FileMetadataRepository
}
}
for ( ProjectVersionFacet facet : versionMetadata.getFacetList() )
for ( MetadataFacet facet : versionMetadata.getFacetList() )
{
properties.putAll( facet.toProperties() );
}
@ -488,7 +562,7 @@ public class FileMetadataRepository
{
File directory = new File( this.directory, repoId + "/" + namespace + "/" + projectId + "/" + projectVersion );
Properties properties = readProperties( directory, PROJECT_VERSION_METADATA_KEY );
Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
List<String> versions = new ArrayList<String>();
for ( Map.Entry entry : properties.entrySet() )
@ -507,7 +581,7 @@ public class FileMetadataRepository
{
File directory = new File( this.directory, repoId + "/" + namespace + "/" + projectId + "/" + projectVersion );
Properties properties = readProperties( directory, PROJECT_VERSION_METADATA_KEY );
Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
int numberOfRefs = Integer.valueOf( properties.getProperty( "ref:lastReferenceNum", "-1" ) ) + 1;
List<ProjectVersionReference> references = new ArrayList<ProjectVersionReference>();

View File

@ -1,18 +1,5 @@
package org.apache.archiva.metadata.repository.file;
import java.io.File;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import org.apache.archiva.metadata.model.MailingList;
import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.model.ProjectVersionFacet;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.commons.io.FileUtils;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@ -32,6 +19,19 @@ import org.codehaus.plexus.spring.PlexusInSpringTestCase;
* under the License.
*/
import java.io.File;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.archiva.metadata.model.MailingList;
import org.apache.archiva.metadata.model.MetadataFacet;
import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.commons.io.FileUtils;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
public class FileMetadataRepositoryTest
extends PlexusInSpringTestCase
{
@ -45,15 +45,32 @@ public class FileMetadataRepositoryTest
private static final String TEST_PROJECT_VERSION = "1.0";
private static final String TEST_FACET_ID = "test-facet-id";
private static final String TEST_NAME = "test-name";
private static final String TEST_VALUE = "test-value";
private static final String UNKNOWN = "unknown";
public void setUp()
throws Exception
{
super.setUp();
repository = (FileMetadataRepository) lookup( MetadataRepository.class );
repository = new FileMetadataRepository();
File directory = getTestFile( "target/test-repository" );
FileUtils.deleteDirectory( directory );
repository.setDirectory( directory );
repository.setMetadataFacetFactories(
Collections.<String, MetadataFacetFactory>singletonMap( TEST_FACET_ID, new MetadataFacetFactory()
{
public MetadataFacet createMetadataFacet()
{
return new TestMetadataFacet( "test-metadata" );
}
} ) );
}
public void testRootNamespaceWithNoMetadataRepository()
@ -75,38 +92,78 @@ public class FileMetadataRepositoryTest
public void testUpdateProjectVersionMetadataWithExistingFacets()
{
repository.setMetadataFacetFactories(
Collections.<String, MetadataFacetFactory>singletonMap( "test", new MetadataFacetFactory()
{
public ProjectVersionFacet createProjectVersionFacet()
{
return new TestProjectVersionFacet( "bar" );
}
} ) );
ProjectVersionMetadata metadata = new ProjectVersionMetadata();
metadata.setId( TEST_PROJECT_VERSION );
ProjectVersionFacet facet = new TestProjectVersionFacet( "baz" );
MetadataFacet facet = new TestMetadataFacet( "baz" );
metadata.addFacet( facet );
repository.updateProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, metadata );
metadata = repository.getProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION );
assertEquals( Collections.singleton( "test" ), metadata.getFacetIds() );
assertEquals( Collections.singleton( TEST_FACET_ID ), metadata.getFacetIds() );
metadata = new ProjectVersionMetadata();
metadata.setId( TEST_PROJECT_VERSION );
repository.updateProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, metadata );
metadata = repository.getProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION );
assertEquals( Collections.singleton( "test" ), metadata.getFacetIds() );
TestProjectVersionFacet testFacet = (TestProjectVersionFacet) metadata.getFacet( "test" );
assertEquals( Collections.singleton( TEST_FACET_ID ), metadata.getFacetIds() );
TestMetadataFacet testFacet = (TestMetadataFacet) metadata.getFacet( TEST_FACET_ID );
assertEquals( "baz", testFacet.getValue() );
}
private static class TestProjectVersionFacet
implements ProjectVersionFacet
public void testGetMetadataFacet()
{
private TestProjectVersionFacet( String value )
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME, new TestMetadataFacet( TEST_VALUE ) );
assertEquals( new TestMetadataFacet( TEST_VALUE ),
repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
}
public void testGetMetadataFacetWhenEmpty()
{
assertNull( repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
}
public void testGetMetadataFacetWhenUnknownName()
{
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME, new TestMetadataFacet( TEST_VALUE ) );
assertNull( repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, UNKNOWN ) );
}
public void testGetMetadataFacetWhenDefaultValue()
{
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME, new TestMetadataFacet( null ) );
assertEquals( new TestMetadataFacet( "test-metadata" ),
repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
}
public void testGetMetadataFacetWhenUnknownFacetId()
{
repository.addMetadataFacet( TEST_REPO_ID, UNKNOWN, TEST_NAME, new TestMetadataFacet( TEST_VALUE ) );
assertNull( repository.getMetadataFacet( TEST_REPO_ID, UNKNOWN, TEST_NAME ) );
}
public void testGetMetadataFacets()
{
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME, new TestMetadataFacet( TEST_VALUE ) );
assertEquals( Collections.singletonList( TEST_NAME ),
repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID ) );
}
public void testGetMetadataFacetsWhenEmpty()
{
List<String> facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
assertTrue( facets.isEmpty() );
}
private static class TestMetadataFacet
implements MetadataFacet
{
private TestMetadataFacet( String value )
{
this.value = value;
}
@ -115,22 +172,67 @@ public class FileMetadataRepositoryTest
public String getFacetId()
{
return "test";
return TEST_FACET_ID;
}
public Map<String, String> toProperties()
{
return Collections.singletonMap( "test:foo", value );
if ( value != null )
{
return Collections.singletonMap( TEST_FACET_ID + ":foo", value );
}
else
{
return Collections.emptyMap();
}
}
public void fromProperties( Map<String, String> properties )
{
value = properties.get( "test:foo" );
String value = properties.get( TEST_FACET_ID + ":foo" );
if ( value != null )
{
this.value = value;
}
}
public String getValue()
{
return value;
}
@Override
public String toString()
{
return "TestMetadataFacet{" + "value='" + value + '\'' + '}';
}
@Override
public boolean equals( Object o )
{
if ( this == o )
{
return true;
}
if ( o == null || getClass() != o.getClass() )
{
return false;
}
TestMetadataFacet that = (TestMetadataFacet) o;
if ( value != null ? !value.equals( that.value ) : that.value != null )
{
return false;
}
return true;
}
@Override
public int hashCode()
{
return value != null ? value.hashCode() : 0;
}
}
}

View File

@ -29,5 +29,6 @@
<modules>
<module>metadata-repository-file</module>
<module>maven2-repository</module>
<module>repository-statistics</module>
</modules>
</project>

View File

@ -0,0 +1,51 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>plugins</artifactId>
<groupId>org.apache.archiva</groupId>
<version>1.3-SNAPSHOT</version>
</parent>
<artifactId>repository-statistics</artifactId>
<name>Repository Statistics</name>
<dependencies>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>metadata-repository-api</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-spring</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,80 @@
package org.apache.archiva.metadata.repository.stats;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.List;
import org.apache.archiva.metadata.repository.MetadataRepository;
/**
* @plexus.component role="org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager" role-hint="default"
*/
public class DefaultRepositoryStatisticsManager
implements RepositoryStatisticsManager
{
/**
* @plexus.requirement
*/
private MetadataRepository metadataRepository;
static final DateFormat SCAN_TIMESTAMP = new SimpleDateFormat( "yyyyMMdd.HHmmss.SSS" );
public RepositoryStatistics getLastStatistics( String repositoryId )
{
// TODO: consider a more efficient implementation that directly gets the last one from the content repository
List<String> scans = metadataRepository.getMetadataFacets( repositoryId, RepositoryStatistics.FACET_ID );
Collections.sort( scans );
if ( !scans.isEmpty() )
{
String name = scans.get( scans.size() - 1 );
return (RepositoryStatistics) metadataRepository.getMetadataFacet( repositoryId,
RepositoryStatistics.FACET_ID, name );
}
else
{
return null;
}
}
public void addStatisticsAfterScan( String repositoryId, RepositoryStatistics repositoryStatistics )
{
// TODO
// populate total artifact count from content repository
// repositoryStatistics.setTotalArtifactCount( );
// populate total size from content repository
// repositoryStatistics.setTotalArtifactFileSize( );
// populate total group count from content repository
// repositoryStatistics.setTotalGroupCount( );
// populate total project count from content repository
// repositoryStatistics.setTotalProjectCount( );
metadataRepository.addMetadataFacet( repositoryId, RepositoryStatistics.FACET_ID,
SCAN_TIMESTAMP.format( repositoryStatistics.getScanStartTime() ),
repositoryStatistics );
}
public void setMetadataRepository( MetadataRepository metadataRepository )
{
this.metadataRepository = metadataRepository;
}
}
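A minimal usage sketch (not part of this commit) showing how a scan might record statistics through the manager and then read the latest snapshot back. The MetadataRepository argument, the "internal" repository id, and the class name are illustrative; in the component above the metadataRepository field is injected as a Plexus requirement.
package org.apache.archiva.metadata.repository.stats;
import java.util.Date;
import org.apache.archiva.metadata.repository.MetadataRepository;
public final class StatisticsUsageExample
{
    // Hypothetical sketch: record statistics for a completed scan, then fetch
    // the most recent snapshot (stored under the scan start timestamp).
    public static RepositoryStatistics recordAndFetch( MetadataRepository metadataRepository )
    {
        DefaultRepositoryStatisticsManager statsManager = new DefaultRepositoryStatisticsManager();
        statsManager.setMetadataRepository( metadataRepository );
        RepositoryStatistics stats = new RepositoryStatistics();
        stats.setScanStartTime( new Date( System.currentTimeMillis() - 12345 ) );
        stats.setScanEndTime( new Date() );
        stats.setNewFileCount( 45 );
        stats.setTotalFileCount( 56345 );
        statsManager.addStatisticsAfterScan( "internal", stats );
        return statsManager.getLastStatistics( "internal" );
    }
}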

View File

@ -0,0 +1,164 @@
package org.apache.archiva.metadata.repository.stats;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.apache.archiva.metadata.model.MetadataFacet;
public class RepositoryStatistics
implements MetadataFacet
{
private Date scanEndTime;
private Date scanStartTime;
private long totalArtifactCount;
private long totalArtifactFileSize;
private long totalFileCount;
private long totalGroupCount;
private long totalProjectCount;
private long newFileCount;
public static String FACET_ID = "org.apache.archiva.metadata.repository.stats";
public Date getScanEndTime()
{
return scanEndTime;
}
public void setScanEndTime( Date scanEndTime )
{
this.scanEndTime = scanEndTime;
}
public Date getScanStartTime()
{
return scanStartTime;
}
public void setScanStartTime( Date scanStartTime )
{
this.scanStartTime = scanStartTime;
}
public long getTotalArtifactCount()
{
return totalArtifactCount;
}
public void setTotalArtifactCount( long totalArtifactCount )
{
this.totalArtifactCount = totalArtifactCount;
}
public long getTotalArtifactFileSize()
{
return totalArtifactFileSize;
}
public void setTotalArtifactFileSize( long totalArtifactFileSize )
{
this.totalArtifactFileSize = totalArtifactFileSize;
}
public long getTotalFileCount()
{
return totalFileCount;
}
public void setTotalFileCount( long totalFileCount )
{
this.totalFileCount = totalFileCount;
}
public long getTotalGroupCount()
{
return totalGroupCount;
}
public void setTotalGroupCount( long totalGroupCount )
{
this.totalGroupCount = totalGroupCount;
}
public long getTotalProjectCount()
{
return totalProjectCount;
}
public void setTotalProjectCount( long totalProjectCount )
{
this.totalProjectCount = totalProjectCount;
}
public void setNewFileCount( long newFileCount )
{
this.newFileCount = newFileCount;
}
public long getNewFileCount()
{
return newFileCount;
}
public long getDuration()
{
return scanEndTime.getTime() - scanStartTime.getTime();
}
public String getFacetId()
{
return FACET_ID;
}
public Map<String, String> toProperties()
{
Map<String, String> properties = new HashMap<String, String>();
properties.put( "scanEndTime", String.valueOf( scanEndTime.getTime() ) );
properties.put( "scanStartTime", String.valueOf( scanStartTime.getTime() ) );
properties.put( "totalArtifactCount", String.valueOf( totalArtifactCount ) );
properties.put( "totalArtifactFileSize", String.valueOf( totalArtifactFileSize ) );
properties.put( "totalFileCount", String.valueOf( totalFileCount ) );
properties.put( "totalGroupCount", String.valueOf( totalGroupCount ) );
properties.put( "totalProjectCount", String.valueOf( totalProjectCount ) );
properties.put( "newFileCount", String.valueOf( newFileCount ) );
return properties;
}
public void fromProperties( Map<String, String> properties )
{
scanEndTime = new Date( Long.valueOf( properties.get( "scanEndTime" ) ) );
scanStartTime = new Date( Long.valueOf( properties.get( "scanStartTime" ) ) );
totalArtifactCount = Long.valueOf( properties.get( "totalArtifactCount" ) );
totalArtifactFileSize = Long.valueOf( properties.get( "totalArtifactFileSize" ) );
totalFileCount = Long.valueOf( properties.get( "totalFileCount" ) );
totalGroupCount = Long.valueOf( properties.get( "totalGroupCount" ) );
totalProjectCount = Long.valueOf( properties.get( "totalProjectCount" ) );
newFileCount = Long.valueOf( properties.get( "newFileCount" ) );
}
}
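For reference, a short round-trip sketch (not part of this commit): toProperties() flattens every field to a string keyed by name (these become the scanStartTime=, totalArtifactCount=, and similar entries in the content model) and fromProperties() restores them. Both scan times must be set before serializing, since toProperties() dereferences them; the class name and values below are illustrative.
package org.apache.archiva.metadata.repository.stats;
import java.util.Date;
import java.util.Map;
public final class RepositoryStatisticsRoundTrip
{
    // Hypothetical sketch: serialize a statistics facet to flat properties and back.
    public static void main( String[] args )
    {
        RepositoryStatistics stats = new RepositoryStatistics();
        stats.setScanStartTime( new Date( 1259797620000L ) ); // illustrative epoch millis
        stats.setScanEndTime( new Date( 1259797680000L ) );
        stats.setTotalArtifactCount( 10386 );
        Map<String, String> properties = stats.toProperties(); // e.g. totalArtifactCount=10386
        RepositoryStatistics copy = new RepositoryStatistics();
        copy.fromProperties( properties );
        // copy.getDuration() == stats.getDuration() == 60000
    }
}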

View File

@ -0,0 +1,35 @@
package org.apache.archiva.metadata.repository.stats;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.metadata.model.MetadataFacet;
import org.apache.archiva.metadata.model.MetadataFacetFactory;
/**
* @plexus.component role="org.apache.archiva.metadata.model.MetadataFacetFactory" role-hint="org.apache.archiva.metadata.repository.stats"
*/
public class RepositoryStatisticsFactory
implements MetadataFacetFactory
{
public MetadataFacet createMetadataFacet()
{
return new RepositoryStatistics();
}
}

View File

@ -1,4 +1,4 @@
package org.apache.archiva.metadata.model;
package org.apache.archiva.metadata.repository.stats;
/*
* Licensed to the Apache Software Foundation (ASF) under one
@ -19,10 +19,9 @@ package org.apache.archiva.metadata.model;
* under the License.
*/
/**
* Information about the repository as a whole.
*/
public class RepositoryMetadata
public interface RepositoryStatisticsManager
{
// TODO
RepositoryStatistics getLastStatistics( String repositoryId );
void addStatisticsAfterScan( String repositoryId, RepositoryStatistics repositoryStatistics );
}

View File

@ -0,0 +1,152 @@
package org.apache.archiva.metadata.repository.stats;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.text.ParseException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import junit.framework.TestCase;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.easymock.MockControl;
public class RepositoryStatisticsManagerTest
extends TestCase
{
private DefaultRepositoryStatisticsManager repositoryStatisticsManager;
private static final String TEST_REPO_ID = "test-repo";
private MockControl metadataRepositoryControl;
private MetadataRepository metadataRepository;
private static final String FIRST_TEST_SCAN = "20091201.123456.789";
private static final String SECOND_TEST_SCAN = "20091202.012345.678";
@Override
protected void setUp()
throws Exception
{
super.setUp();
repositoryStatisticsManager = new DefaultRepositoryStatisticsManager();
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
repositoryStatisticsManager.setMetadataRepository( metadataRepository );
}
public void testGetLatestStats()
throws ParseException
{
Date endTime =
new Date( DefaultRepositoryStatisticsManager.SCAN_TIMESTAMP.parse( SECOND_TEST_SCAN ).getTime() + 60000 );
RepositoryStatistics stats = new RepositoryStatistics();
stats.setScanStartTime( DefaultRepositoryStatisticsManager.SCAN_TIMESTAMP.parse( SECOND_TEST_SCAN ) );
stats.setScanEndTime( endTime );
stats.setTotalArtifactFileSize( 1314527915L );
stats.setNewFileCount( 123 );
stats.setTotalArtifactCount( 10386 );
stats.setTotalProjectCount( 2031 );
stats.setTotalGroupCount( 529 );
stats.setTotalFileCount( 56229 );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ),
Arrays.asList( FIRST_TEST_SCAN, SECOND_TEST_SCAN ) );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, SECOND_TEST_SCAN ),
stats );
metadataRepositoryControl.replay();
stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertNotNull( stats );
assertEquals( 1314527915L, stats.getTotalArtifactFileSize() );
assertEquals( 123, stats.getNewFileCount() );
assertEquals( 10386, stats.getTotalArtifactCount() );
assertEquals( 2031, stats.getTotalProjectCount() );
assertEquals( 529, stats.getTotalGroupCount() );
assertEquals( 56229, stats.getTotalFileCount() );
assertEquals( SECOND_TEST_SCAN,
DefaultRepositoryStatisticsManager.SCAN_TIMESTAMP.format( stats.getScanStartTime() ) );
assertEquals( endTime, stats.getScanEndTime() );
metadataRepositoryControl.verify();
}
public void testGetLatestStatsWhenEmpty()
{
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ),
Collections.emptyList() );
metadataRepositoryControl.replay();
RepositoryStatistics stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertNull( stats );
metadataRepositoryControl.verify();
}
public void testAddNewStats()
{
Date current = new Date();
Date startTime = new Date( current.getTime() - 12345 );
RepositoryStatistics stats = new RepositoryStatistics();
stats.setScanStartTime( startTime );
stats.setScanEndTime( current );
stats.setTotalArtifactFileSize( 1400032000L );
stats.setNewFileCount( 45 );
stats.setTotalArtifactCount( 10412 );
stats.setTotalProjectCount( 2036 );
stats.setTotalGroupCount( 531 );
stats.setTotalFileCount( 56345 );
String startTimeAsString = DefaultRepositoryStatisticsManager.SCAN_TIMESTAMP.format( startTime );
metadataRepository.addMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, startTimeAsString, stats );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ),
Arrays.asList( startTimeAsString ) );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, startTimeAsString ),
stats );
metadataRepositoryControl.replay();
repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats );
stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertNotNull( stats );
assertEquals( 1400032000L, stats.getTotalArtifactFileSize() );
assertEquals( 45, stats.getNewFileCount() );
assertEquals( 10412, stats.getTotalArtifactCount() );
assertEquals( 2036, stats.getTotalProjectCount() );
assertEquals( 531, stats.getTotalGroupCount() );
assertEquals( 56345, stats.getTotalFileCount() );
assertEquals( current.getTime() - 12345, stats.getScanStartTime().getTime() );
assertEquals( current, stats.getScanEndTime() );
metadataRepositoryControl.verify();
}
}

View File

@ -100,6 +100,7 @@
<executions>
<execution>
<id>generate</id>
<phase>generate-resources</phase>
<goals>
<goal>generate-metadata</goal>
</goals>
@ -303,6 +304,11 @@
<artifactId>metadata-repository-file</artifactId>
<version>1.3-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>repository-statistics</artifactId>
<version>1.3-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>maven2-repository</artifactId>