mirror of https://github.com/apache/archiva.git
[MRM-1294] avoid starting the search period at twice the duration since the start of the previous scan
git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@887027 13f79535-47bb-0310-9956-ffa450edef68
parent 58b9f885e0
commit 30f8605a1d
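In short, the start of the incremental-scan window is now the timestamp stored with the previous scan's statistics, no longer that timestamp plus the previous scan's duration. A minimal sketch of the changed logic, using only names that appear in the patch below (the nested conditions of the real method are collapsed into one here):

    // How far back should the repository scanner look?
    long sinceWhen = RepositoryScanner.FRESH_SCAN;
    if ( CollectionUtils.isNotEmpty( results ) && !repoTask.isScanAll() )
    {
        RepositoryContentStatistics lastStats = results.get( 0 );
        // Before this commit: lastStats.getWhenGathered().getTime() + lastStats.getDuration(),
        // which (per MRM-1294) starts the search period roughly twice the duration after the
        // start of the previous scan and can skip files changed in the meantime.
        sinceWhen = lastStats.getWhenGathered().getTime();
    }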
@@ -19,6 +19,10 @@ package org.apache.maven.archiva.scheduled.executors;
  * under the License.
  */
 
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
@@ -46,18 +50,12 @@ import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
 /**
  * ArchivaRepositoryScanningTaskExecutor
  *
  * @version $Id$
- *
- * @plexus.component
- *   role="org.codehaus.plexus.taskqueue.execution.TaskExecutor"
- *   role-hint="repository-scanning"
+ * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor"
+ *                   role-hint="repository-scanning"
  */
 public class ArchivaRepositoryScanningTaskExecutor
     implements TaskExecutor, Initializable
@@ -104,13 +102,14 @@ public class ArchivaRepositoryScanningTaskExecutor
 
         if ( StringUtils.isBlank( repoTask.getRepositoryId() ) )
         {
-            throw new TaskExecutionException("Unable to execute RepositoryTask with blank repository Id.");
+            throw new TaskExecutionException( "Unable to execute RepositoryTask with blank repository Id." );
         }
 
-        ManagedRepositoryConfiguration arepo = archivaConfiguration.getConfiguration().findManagedRepositoryById( repoTask.getRepositoryId() );
+        ManagedRepositoryConfiguration arepo =
+            archivaConfiguration.getConfiguration().findManagedRepositoryById( repoTask.getRepositoryId() );
 
         // execute consumers on resource file if set
-        if( repoTask.getResourceFile() != null )
+        if ( repoTask.getResourceFile() != null )
         {
             log.debug( "Executing task from queue with job name: " + repoTask );
             consumers.executeConsumers( arepo, repoTask.getResourceFile(), repoTask.isUpdateRelatedArtifacts() );
@@ -124,19 +123,21 @@ public class ArchivaRepositoryScanningTaskExecutor
         {
             if ( arepo == null )
             {
-                throw new TaskExecutionException( "Unable to execute RepositoryTask with invalid repository id: " + repoTask.getRepositoryId() );
+                throw new TaskExecutionException(
+                    "Unable to execute RepositoryTask with invalid repository id: " + repoTask.getRepositoryId() );
             }
 
             long sinceWhen = RepositoryScanner.FRESH_SCAN;
 
-            List<RepositoryContentStatistics> results = (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( arepo.getId() ) );
+            List<RepositoryContentStatistics> results = (List<RepositoryContentStatistics>) dao.query(
+                new MostRecentRepositoryScanStatistics( arepo.getId() ) );
 
             if ( CollectionUtils.isNotEmpty( results ) )
             {
                 RepositoryContentStatistics lastStats = results.get( 0 );
-                if( !repoTask.isScanAll() )
+                if ( !repoTask.isScanAll() )
                 {
-                    sinceWhen = lastStats.getWhenGathered().getTime() + lastStats.getDuration();
+                    sinceWhen = lastStats.getWhenGathered().getTime();
                 }
             }
 
@@ -144,7 +145,7 @@ public class ArchivaRepositoryScanningTaskExecutor
 
             log.info( "Finished repository task: " + stats.toDump( arepo ) );
 
-            RepositoryContentStatistics dbstats = constructRepositoryStatistics( arepo, sinceWhen, results, stats );
+            RepositoryContentStatistics dbstats = constructRepositoryStatistics( arepo, stats );
 
             dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( dbstats );
 
@@ -159,8 +160,6 @@ public class ArchivaRepositoryScanningTaskExecutor
 
     @SuppressWarnings("unchecked")
     private RepositoryContentStatistics constructRepositoryStatistics( ManagedRepositoryConfiguration arepo,
-                                                                        long sinceWhen,
-                                                                        List<RepositoryContentStatistics> results,
                                                                         RepositoryScanStatistics stats )
     {
         // I hate jpox and modello <-- and so do I
@@ -175,7 +174,7 @@ public class ArchivaRepositoryScanningTaskExecutor
         try
         {
             List<ArchivaArtifact> artifacts = dao.getArtifactDAO().queryArtifacts(
-                  new ArtifactsByRepositoryConstraint( arepo.getId(), stats.getWhenGathered(), "groupId", true ) );
+                new ArtifactsByRepositoryConstraint( arepo.getId(), stats.getWhenGathered(), "groupId", true ) );
             dbstats.setTotalArtifactCount( artifacts.size() );
         }
         catch ( ObjectNotFoundException oe )
@@ -191,14 +190,15 @@ public class ArchivaRepositoryScanningTaskExecutor
         long size = FileUtils.sizeOfDirectory( new File( arepo.getLocation() ) );
         dbstats.setTotalSize( size );
 
-          // total unique groups
+        // total unique groups
         List<String> repos = new ArrayList<String>();
         repos.add( arepo.getId() );
 
         List<String> groupIds = (List<String>) dao.query( new UniqueGroupIdConstraint( repos ) );
         dbstats.setTotalGroupCount( groupIds.size() );
 
-        List<Object[]> artifactIds = (List<Object[]>) dao.query( new UniqueArtifactIdConstraint( arepo.getId(), true ) );
+        List<Object[]> artifactIds =
+            (List<Object[]>) dao.query( new UniqueArtifactIdConstraint( arepo.getId(), true ) );
         dbstats.setTotalProjectCount( artifactIds.size() );
 
         return dbstats;
@@ -264,6 +264,65 @@ public class ArchivaRepositoryScanningTaskExecutorTest
         assertEquals( 43687, updatedStats.getTotalSize() );
     }
 
+    public void testExecutorScanOnlyNewArtifactsChangeTimes()
+        throws Exception
+    {
+        RepositoryTask repoTask = new RepositoryTask();
+
+        repoTask.setRepositoryId( TEST_REPO_ID );
+        repoTask.setScanAll( false );
+
+        RepositoryContentStatistics stats = new RepositoryContentStatistics();
+        stats.setDuration( 1234567 );
+        stats.setNewFileCount( 31 );
+        stats.setRepositoryId( TEST_REPO_ID );
+        stats.setTotalArtifactCount( 8 );
+        stats.setTotalFileCount( 31 );
+        stats.setTotalGroupCount( 3 );
+        stats.setTotalProjectCount( 5 );
+        stats.setTotalSize( 38545 );
+        stats.setWhenGathered( Calendar.getInstance().getTime() );
+
+        dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( stats );
+
+        File newArtifactGroup = new File( repoDir, "org/apache/archiva" );
+
+        FileUtils.copyDirectoryStructure( new File( getBasedir(), "target/test-classes/test-repo/org/apache/archiva" ),
+                                          newArtifactGroup );
+
+        // update last modified date
+        new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
+            Calendar.getInstance().getTimeInMillis() + 1000 );
+        new File( newArtifactGroup,
+                  "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
+            Calendar.getInstance().getTimeInMillis() + 1000 );
+
+        assertTrue( newArtifactGroup.exists() );
+
+        // scan using the really long previous duration
+        taskExecutor.executeTask( repoTask );
+
+        // check no artifacts processed
+        ArtifactDAO adao = dao.getArtifactDAO();
+        List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) );
+        assertNotNull( unprocessedResultList );
+        assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1,
+                      unprocessedResultList.size() );
+
+        // check correctness of new stats
+        List<RepositoryContentStatistics> results =
+            (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( TEST_REPO_ID ) );
+        RepositoryContentStatistics newStats = results.get( 0 );
+        assertEquals( 2, newStats.getNewFileCount() );
+        assertEquals( TEST_REPO_ID, newStats.getRepositoryId() );
+        assertEquals( 33, newStats.getTotalFileCount() );
+        // TODO: can't test these as they weren't stored in the database
+        // assertEquals( 8, newStats.getTotalArtifactCount() );
+        // assertEquals( 3, newStats.getTotalGroupCount() );
+        // assertEquals( 5, newStats.getTotalProjectCount() );
+        assertEquals( 43687, newStats.getTotalSize() );
+    }
+
     public void testExecutorForceScanAll()
         throws Exception
     {