mirror of https://github.com/apache/archiva.git
[MRM-1056] Option to force scanning of an artifact/repository regardless of file dates
o add checkbox for processing all artifacts beside the Scan Repository Now button in the repositories page
o scan all artifacts if the flag is set
o added unit tests

git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@816084 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent 8604c2f389
commit 40e247c7e4
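Before the hunks, a brief orientation: the commit threads a new boolean scanAll flag from TaskCreator into RepositoryTask, and the scanning executor consults it to decide whether to honor file dates. A minimal sketch of the intended call flow, assuming only the signatures shown in the hunks below (the taskScheduler variable is illustrative):

    // Sketch only, based on the TaskCreator/RepositoryTask changes in this commit.
    // "taskScheduler" is assumed to be the ArchivaTaskScheduler component and is not
    // defined here.
    RepositoryTask task = TaskCreator.createRepositoryTask( "internal", "", true ); // scanAll = true

    // The executor reads the flag via task.isScanAll(); when it is true, the scan
    // ignores the previously recorded statistics and revisits every artifact.
    taskScheduler.queueRepositoryTask( task );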
@@ -585,7 +585,7 @@ public class DefaultRepositoryProxyConnectors
     private void queueRepositoryTask( String repositoryId, File localFile )
     {
-        RepositoryTask task = TaskCreator.createRepositoryTask( repositoryId, localFile.getName(), localFile, true );
+        RepositoryTask task = TaskCreator.createRepositoryTask( repositoryId, localFile.getName(), localFile, true, true );
 
         try
         {
@@ -428,7 +428,7 @@ public class DefaultArchivaTaskScheduler
     private synchronized void queueInitialRepoScan( ManagedRepositoryConfiguration repoConfig )
     {
         String repoId = repoConfig.getId();
-        RepositoryTask task = TaskCreator.createRepositoryTask( repoId, "initial-scan" );
+        RepositoryTask task = TaskCreator.createRepositoryTask( repoId, "initial-scan", true );
 
         if ( queuedRepos.contains( repoId ) )
         {
@@ -59,7 +59,7 @@ public class RepositoryTaskJob
         TaskQueue taskQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
         String queuePolicy = dataMap.get( TASK_QUEUE_POLICY ).toString();
 
-        RepositoryTask task = TaskCreator.createRepositoryTask( (String) dataMap.get( TASK_REPOSITORY ), "" );
+        RepositoryTask task = TaskCreator.createRepositoryTask( (String) dataMap.get( TASK_REPOSITORY ), "", true );
         task.setName( context.getJobDetail().getName() );
 
         try
@@ -134,8 +134,11 @@ public class ArchivaRepositoryScanningTaskExecutor
         if ( CollectionUtils.isNotEmpty( results ) )
         {
             RepositoryContentStatistics lastStats = results.get( 0 );
+            if( !repoTask.isScanAll() )
+            {
                 sinceWhen = lastStats.getWhenGathered().getTime() + lastStats.getDuration();
+            }
         }
 
         RepositoryScanStatistics stats = repoScanner.scan( arepo, sinceWhen );
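Put differently, the executor only advances sinceWhen past the previous scan when scanAll is false; a forced scan leaves it at its starting value so every file is processed regardless of modification dates. A condensed, hypothetical restatement of that branch (the zero default and the surrounding variables are assumed from context, not taken from the diff):

    // Hypothetical condensation of the hunk above; 0 stands in for the executor's
    // "scan everything" starting point.
    long sinceWhen = 0;
    if ( !repoTask.isScanAll() && lastStats != null )
    {
        // incremental scan: only files newer than the previous run are revisited
        sinceWhen = lastStats.getWhenGathered().getTime() + lastStats.getDuration();
    }
    RepositoryScanStatistics stats = repoScanner.scan( arepo, sinceWhen );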
@@ -42,6 +42,18 @@ public class RepositoryTask
 
     boolean updateRelatedArtifacts;
 
+    boolean scanAll;
+
+    public boolean isScanAll()
+    {
+        return scanAll;
+    }
+
+    public void setScanAll( boolean scanAll )
+    {
+        this.scanAll = scanAll;
+    }
+
     public String getRepositoryId()
     {
         return repositoryId;
@@ -31,7 +31,7 @@ import org.apache.maven.archiva.scheduled.DefaultArchivaTaskScheduler;
  */
 public class TaskCreator
 {
-    public static RepositoryTask createRepositoryTask( String repositoryId, String taskNameSuffix )
+    public static RepositoryTask createRepositoryTask( String repositoryId, String taskNameSuffix, boolean scanAll )
     {
         String suffix = "";
         if( !StringUtils.isEmpty( taskNameSuffix ) )
@@ -43,14 +43,15 @@ public class TaskCreator
         task.setRepositoryId( repositoryId );
         task.setName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repositoryId + suffix );
         task.setQueuePolicy( ArchivaTask.QUEUE_POLICY_WAIT );
+        task.setScanAll( scanAll );
 
         return task;
     }
 
     public static RepositoryTask createRepositoryTask( String repositoryId, String taskNameSuffix, File resourceFile,
-                                                       boolean updateRelatedArtifacts )
+                                                       boolean updateRelatedArtifacts, boolean scanAll )
     {
-        RepositoryTask task = createRepositoryTask( repositoryId, taskNameSuffix );
+        RepositoryTask task = createRepositoryTask( repositoryId, taskNameSuffix, scanAll );
         task.setResourceFile( resourceFile );
         task.setUpdateRelatedArtifacts( updateRelatedArtifacts );
 
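For reference, a usage sketch of the two reworked factory methods; the repository id and file path are illustrative, not taken from the commit:

    // Whole-repository scan task; the added third argument is the scanAll flag.
    RepositoryTask repoScan = TaskCreator.createRepositoryTask( "internal", "", true );

    // Single-resource task; updateRelatedArtifacts and scanAll are independent flags.
    File artifact = new File( "/tmp/example-artifact-1.0.jar" );  // illustrative path
    RepositoryTask fileScan =
        TaskCreator.createRepositoryTask( "internal", artifact.getName(), artifact, true, true );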
@@ -21,6 +21,7 @@ package org.apache.maven.archiva.scheduled.executors;
 
 import java.io.File;
 import java.net.URL;
+import java.util.Calendar;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -34,11 +35,13 @@ import org.apache.maven.archiva.database.ArchivaDAO;
 import org.apache.maven.archiva.database.ArtifactDAO;
 import org.apache.maven.archiva.database.constraints.ArtifactsProcessedConstraint;
 import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.model.RepositoryContentStatistics;
 import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
 import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
 import org.codehaus.plexus.jdo.JdoFactory;
 import org.codehaus.plexus.spring.PlexusInSpringTestCase;
 import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
+import org.codehaus.plexus.util.FileUtils;
 import org.jpox.SchemaTool;
 
 /**
@@ -53,6 +56,8 @@ public class ArchivaRepositoryScanningTaskExecutorTest
 
     protected ArchivaDAO dao;
 
+    private File repoDir;
+
     protected void setUp()
         throws Exception
     {
@@ -131,12 +136,28 @@ public class ArchivaRepositoryScanningTaskExecutorTest
         this.dao = (ArchivaDAO) lookup( ArchivaDAO.class.getName(), "jdo" );
 
         taskExecutor = (TaskExecutor) lookup( TaskExecutor.class, "test-repository-scanning" );
+
+        File sourceRepoDir = new File( getBasedir(), "src/test/repositories/default-repository" );
+        repoDir = new File( getBasedir(), "target/default-repository" );
+
+        repoDir.mkdir();
+
+        FileUtils.copyDirectoryStructure( sourceRepoDir, repoDir );
+
+        assertTrue( repoDir.exists() );
     }
 
+    protected void tearDown() throws Exception
+    {
+        FileUtils.deleteDirectory( repoDir );
+
+        assertFalse( repoDir.exists() );
+
+        super.tearDown();
+    }
+
     public void testExecutor() throws Exception
     {
-        File repoDir = new File( getBasedir(), "src/test/repositories/default-repository" );
-
         assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
 
         ArchivaConfiguration archivaConfig = (ArchivaConfiguration) lookup( ArchivaConfiguration.class );
@@ -162,6 +183,106 @@ public class ArchivaRepositoryScanningTaskExecutorTest
         assertEquals( "Incorrect number of unprocessed artifacts detected.", 8, unprocessedResultList.size() );
     }
 
+    public void testExecutorScanOnlyNewArtifacts()
+        throws Exception
+    {
+        assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
+
+        ArchivaConfiguration archivaConfig = (ArchivaConfiguration) lookup( ArchivaConfiguration.class );
+        assertNotNull( archivaConfig );
+
+        // Create it
+        ManagedRepositoryConfiguration repo = createRepository( "testRepo", "Test Repository", repoDir );
+        assertNotNull( repo );
+        archivaConfig.getConfiguration().getManagedRepositories().clear();
+        archivaConfig.getConfiguration().addManagedRepository( repo );
+
+        RepositoryTask repoTask = new RepositoryTask();
+
+        repoTask.setName( "testRepoTask" );
+        repoTask.setRepositoryId( "testRepo" );
+        repoTask.setScanAll( false );
+
+        RepositoryContentStatistics stats = new RepositoryContentStatistics();
+        stats.setDuration( 1234567 );
+        stats.setNewFileCount( 8 );
+        stats.setRepositoryId( "testRepo" );
+        stats.setTotalArtifactCount( 8 );
+        stats.setTotalFileCount( 8 );
+        stats.setTotalGroupCount( 3 );
+        stats.setTotalProjectCount( 5 );
+        stats.setTotalSize( 999999 );
+        stats.setWhenGathered( Calendar.getInstance().getTime() );
+
+        dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( stats );
+
+        taskExecutor.executeTask( repoTask );
+
+        ArtifactDAO adao = dao.getArtifactDAO();
+        List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) );
+
+        assertNotNull( unprocessedResultList );
+        assertEquals( "Incorrect number of unprocessed artifacts detected. No new artifacts should have been found.", 0, unprocessedResultList.size() );
+
+        File newArtifactGroup = new File( repoDir, "org/apache/archiva" );
+
+        FileUtils.copyDirectoryStructure( new File( getBasedir(), "target/test-classes/test-repo/org/apache/archiva" ), newArtifactGroup );
+
+        // update last modified date
+        new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified( Calendar.getInstance().getTimeInMillis() + 1000 );
+        new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified( Calendar.getInstance().getTimeInMillis() + 1000 );
+
+        assertTrue( newArtifactGroup.exists() );
+
+        taskExecutor.executeTask( repoTask );
+
+        unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) );
+        assertNotNull( unprocessedResultList );
+        assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1, unprocessedResultList.size() );
+    }
+
+    public void testExecutorForceScanAll()
+        throws Exception
+    {
+        assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
+
+        ArchivaConfiguration archivaConfig = (ArchivaConfiguration) lookup( ArchivaConfiguration.class );
+        assertNotNull( archivaConfig );
+
+        // Create it
+        ManagedRepositoryConfiguration repo = createRepository( "testRepo", "Test Repository", repoDir );
+        assertNotNull( repo );
+        archivaConfig.getConfiguration().getManagedRepositories().clear();
+        archivaConfig.getConfiguration().addManagedRepository( repo );
+
+        RepositoryTask repoTask = new RepositoryTask();
+
+        repoTask.setName( "testRepoTask" );
+        repoTask.setRepositoryId( "testRepo" );
+        repoTask.setScanAll( true );
+
+        RepositoryContentStatistics stats = new RepositoryContentStatistics();
+        stats.setDuration( 1234567 );
+        stats.setNewFileCount( 8 );
+        stats.setRepositoryId( "testRepo" );
+        stats.setTotalArtifactCount( 8 );
+        stats.setTotalFileCount( 8 );
+        stats.setTotalGroupCount( 3 );
+        stats.setTotalProjectCount( 5 );
+        stats.setTotalSize( 999999 );
+        stats.setWhenGathered( Calendar.getInstance().getTime() );
+
+        dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( stats );
+
+        taskExecutor.executeTask( repoTask );
+
+        ArtifactDAO adao = dao.getArtifactDAO();
+        List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) );
+
+        assertNotNull( unprocessedResultList );
+        assertEquals( "Incorrect number of unprocessed artifacts detected.", 8, unprocessedResultList.size() );
+    }
+
     protected ManagedRepositoryConfiguration createRepository( String id, String name, File location )
     {
         ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();
@@ -617,7 +617,7 @@ public class UploadAction
 
     private void queueRepositoryTask( String repositoryId, File localFile )
     {
-        RepositoryTask task = TaskCreator.createRepositoryTask( repositoryId, localFile.getName(), localFile, true );
+        RepositoryTask task = TaskCreator.createRepositoryTask( repositoryId, localFile.getName(), localFile, true, true );
 
         try
         {
@@ -51,6 +51,8 @@ public class SchedulerAction
 
     private String repoid;
 
+    private boolean scanAll;
+
     public String scanRepository()
     {
         if ( StringUtils.isBlank( repoid ) )
@@ -59,7 +61,7 @@ public class SchedulerAction
             return SUCCESS;
         }
 
-        RepositoryTask task = TaskCreator.createRepositoryTask( repoid, "" );
+        RepositoryTask task = TaskCreator.createRepositoryTask( repoid, "", scanAll );
 
         boolean scheduleTask = false;
 
@@ -181,4 +183,14 @@ public class SchedulerAction
     {
         this.repoid = repoid;
     }
+
+    public boolean getScanAll()
+    {
+        return scanAll;
+    }
+
+    public void setScanAll( boolean scanAll )
+    {
+        this.scanAll = scanAll;
+    }
 }
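The accessor pair above is what lets the web tier toggle the flag: Struts 2 binds the scanAll checkbox from the form in the next hunk onto the action before scanRepository() runs. A hypothetical trace of that binding (the setter name for repoid is assumed to follow the usual convention; it is not shown in the diff):

    // Illustrative only; in practice Struts 2 performs this binding from the
    // submitted form parameters, so no such code appears in the action.
    SchedulerAction action = new SchedulerAction();
    action.setRepoid( "internal" );  // from the hidden "repoid" field (setter name assumed)
    action.setScanAll( true );       // from the "scanAll" checkbox
    action.scanRepository();         // queues a RepositoryTask with scanAll = true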
@@ -197,19 +197,20 @@
         <th>
           Actions
         </th>
         <td>
           <table>
             <tr>
               <td>
                 <redback:ifAuthorized permission="archiva-run-indexer">
                   <s:form action="indexRepository" theme="simple">
                     <s:hidden name="repoid" value="%{#attr.repository.id}"/>
-                    <s:submit value="Scan Repository Now"/>
-                  </s:form>
-                </redback:ifAuthorized>
-              </td>
+                    <table>
+                      <tr>
+                        <td><s:checkbox name="scanAll" value="scanAll"/>Process All Artifacts</td>
+                      </tr>
+                      <tr>
+                        <td><s:submit value="Scan Repository Now"/></td>
+                      </tr>
+                    </table>
+                  </s:form>
+                </redback:ifAuthorized>
+              </td>
             </tr>
             <tr>
@@ -645,7 +645,7 @@ public class ArchivaDavResource
 
     private void queueRepositoryTask( File localFile )
    {
-        RepositoryTask task = TaskCreator.createRepositoryTask( repository.getId(), localFile.getName(), localFile, false );
+        RepositoryTask task = TaskCreator.createRepositoryTask( repository.getId(), localFile.getName(), localFile, false, true );
 
         try
         {
@@ -313,7 +313,7 @@ public class AdministrationServiceImpl
             }
         }
 
-        RepositoryTask task = TaskCreator.createRepositoryTask( repoId, "" );
+        RepositoryTask task = TaskCreator.createRepositoryTask( repoId, "", false );
 
         taskScheduler.queueRepositoryTask( task );
 