Migrating scheduler to java.nio

Martin Stockhammer 2017-09-10 17:47:17 +02:00
parent 7503cb83e3
commit f886688c25
15 changed files with 173 additions and 142 deletions

NexusIndexerConsumer.java

@ -148,7 +148,7 @@ public class NexusIndexerConsumer
Path artifactFile = managedRepository.resolve(path);
ArtifactIndexingTask task =
new ArtifactIndexingTask( repository, artifactFile.toFile(), ArtifactIndexingTask.Action.ADD, getIndexingContext() );
new ArtifactIndexingTask( repository, artifactFile, ArtifactIndexingTask.Action.ADD, getIndexingContext() );
try
{
log.debug( "Queueing indexing task '{}' to add or update the artifact in the index.", task );
@ -174,7 +174,7 @@ public class NexusIndexerConsumer
// specify in indexing task that this is not a repo scan request!
ArtifactIndexingTask task =
new ArtifactIndexingTask( repository, artifactFile.toFile(), ArtifactIndexingTask.Action.ADD,
new ArtifactIndexingTask( repository, artifactFile, ArtifactIndexingTask.Action.ADD,
getIndexingContext(), false );
// only update index we don't need to scan the full repo here
task.setOnlyUpdate( true );
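
The consumer-side change is mechanical: the artifact location comes out of managedRepository.resolve( path ) as a java.nio.file.Path and is handed to the task constructor without the old toFile() bridge. A minimal sketch of the pattern, assuming the Path-taking ArtifactIndexingTask constructor shown above; the class name, repository base and main() wrapper are illustrative only:

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class PathResolveSketch
{
    public static void main( String[] args )
    {
        // Assumed repository base; in NexusIndexerConsumer this comes from managedRepository.resolve( path ).
        Path repositoryBase = Paths.get( "target", "test-repo" );
        Path artifactFile = repositoryBase.resolve( "org/example/lib/1.0/lib-1.0.jar" ).normalize();

        // Old: new ArtifactIndexingTask( repository, artifactFile.toFile(), Action.ADD, context )
        // New: the task takes the Path directly, so no File conversion is needed.
        System.out.println( "queue indexing task for " + artifactFile
                                + " (exists=" + Files.exists( artifactFile ) + ")" );
    }
}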

NexusIndexerConsumerTest.java

@ -68,7 +68,7 @@ public class NexusIndexerConsumerTest
switch ( task.getAction() )
{
case ADD:
indexed.add( task.getResourceFile().toPath() );
indexed.add( task.getResourceFile() );
break;
case DELETE:
indexed.remove( task.getResourceFile() );

DefaultRepositoryProxyConnectors.java

@ -807,7 +807,7 @@ public class DefaultRepositoryProxyConnectors
{
RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repositoryId );
task.setResourceFile( localFile.toFile() );
task.setResourceFile( localFile );
task.setUpdateRelatedArtifacts( true );
task.setScanAll( true );

ArchivaIndexingTaskExecutor.java

@ -43,8 +43,8 @@ import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
/**
* ArchivaIndexingTaskExecutor Executes all indexing tasks. Adding, updating and removing artifacts from the index are
@ -115,7 +115,7 @@ public class ArchivaIndexingTaskExecutor
log.debug( "Creating indexing context on resource: {}", //
( indexingTask.getResourceFile() == null
? "none"
: indexingTask.getResourceFile().getPath() ) );
: indexingTask.getResourceFile() ) );
context = managedRepositoryAdmin.createIndexContext( repository );
}
catch ( RepositoryAdminException e )
@ -132,20 +132,20 @@ public class ArchivaIndexingTaskExecutor
try
{
File artifactFile = indexingTask.getResourceFile();
Path artifactFile = indexingTask.getResourceFile();
if ( artifactFile == null )
{
log.debug( "no artifact pass in indexing task so skip it" );
}
else
{
ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile );
ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile.toFile() );
if ( ac != null )
{
// MRM-1779 pom must be indexed too
// TODO make that configurable?
if ( artifactFile.getPath().endsWith( ".pom" ) )
if ( artifactFile.getFileName().toString().endsWith( ".pom" ) )
{
ac.getArtifactInfo().setFileExtension( "pom" );
ac.getArtifactInfo().setPackaging( "pom" );
@ -205,7 +205,7 @@ public class ArchivaIndexingTaskExecutor
if ( !indexingTask.isExecuteOnEntireRepo() )
{
log.debug( "Finishing indexing task on resource file : {}", indexingTask.getResourceFile() != null
? indexingTask.getResourceFile().getPath()
? indexingTask.getResourceFile()
: " none " );
finishIndexingTask( indexingTask, repository, context );
}
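
Two idioms recur in this executor: the suffix check moves from File.getPath().endsWith( ".pom" ) to Path.getFileName().toString().endsWith( ".pom" ), and the Maven Indexer API, which still expects java.io.File, is fed through Path.toFile(). A small sketch of both, with isPom() and the bare toFile() call standing in for the real artifactContextProducer.getArtifactContext(...) hand-off:

import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;

public class PomCheckSketch
{
    // Mirrors the diff: compare the file-name component, not the full path string.
    static boolean isPom( Path artifactFile )
    {
        return artifactFile.getFileName().toString().endsWith( ".pom" );
    }

    public static void main( String[] args )
    {
        Path artifact = Paths.get( "repo", "g", "a", "1.0", "a-1.0.pom" );
        System.out.println( "pom? " + isPom( artifact ) );

        // Bridge for APIs that still take java.io.File.
        File legacy = artifact.toFile();
        System.out.println( "legacy file: " + legacy.getPath() );
    }
}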

ArtifactIndexingTask.java

@ -23,7 +23,8 @@ import org.apache.archiva.admin.model.beans.ManagedRepository;
import org.apache.archiva.redback.components.taskqueue.Task;
import org.apache.maven.index.context.IndexingContext;
import java.io.File;
import java.nio.file.Path;
public class ArtifactIndexingTask
implements Task
@ -37,7 +38,7 @@ public class ArtifactIndexingTask
private final ManagedRepository repository;
private final File resourceFile;
private final Path resourceFile;
private final Action action;
@ -50,7 +51,7 @@ public class ArtifactIndexingTask
*/
private boolean onlyUpdate = false;
public ArtifactIndexingTask( ManagedRepository repository, File resourceFile, Action action,
public ArtifactIndexingTask( ManagedRepository repository, Path resourceFile, Action action,
IndexingContext context )
{
this.repository = repository;
@ -59,14 +60,14 @@ public class ArtifactIndexingTask
this.context = context;
}
public ArtifactIndexingTask( ManagedRepository repository, File resourceFile, Action action,
public ArtifactIndexingTask( ManagedRepository repository, Path resourceFile, Action action,
IndexingContext context, boolean executeOnEntireRepo )
{
this( repository, resourceFile, action, context );
this.executeOnEntireRepo = executeOnEntireRepo;
}
public ArtifactIndexingTask( ManagedRepository repository, File resourceFile, Action action,
public ArtifactIndexingTask( ManagedRepository repository, Path resourceFile, Action action,
IndexingContext context, boolean executeOnEntireRepo, boolean onlyUpdate )
{
this( repository, resourceFile, action, context, executeOnEntireRepo );
@ -95,7 +96,7 @@ public class ArtifactIndexingTask
return 0;
}
public File getResourceFile()
public Path getResourceFile()
{
return resourceFile;
}
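
The task bean is the pivot of the migration: it now stores a Path, so producers (consumers, proxy connectors) and the executor stay in the NIO world end to end. A condensed, self-contained sketch of the same shape; the repository, indexing context and real constructor overloads are omitted here for brevity:

import java.nio.file.Path;
import java.nio.file.Paths;

// Condensed sketch of a Path-holding task bean, mirroring ArtifactIndexingTask after the migration.
public class PathTaskSketch
{
    enum Action { ADD, DELETE, FINISH }

    private final Path resourceFile;
    private final Action action;

    PathTaskSketch( Path resourceFile, Action action )
    {
        this.resourceFile = resourceFile;
        this.action = action;
    }

    public Path getResourceFile()
    {
        return resourceFile;
    }

    public static void main( String[] args )
    {
        PathTaskSketch task = new PathTaskSketch( Paths.get( "repo/g/a/1.0/a-1.0.jar" ), Action.ADD );
        System.out.println( task.getResourceFile().getFileName() );
    }
}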

DownloadRemoteIndexTask.java

@ -23,7 +23,6 @@ import org.apache.archiva.admin.model.beans.RemoteRepository;
import org.apache.archiva.admin.model.remote.RemoteRepositoryAdmin;
import org.apache.archiva.proxy.common.WagonFactory;
import org.apache.archiva.proxy.common.WagonFactoryRequest;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.time.StopWatch;
import org.apache.maven.index.context.IndexingContext;
import org.apache.maven.index.updater.IndexUpdateRequest;
@ -45,12 +44,13 @@ import org.apache.maven.wagon.shared.http.HttpMethodConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
@ -106,7 +106,7 @@ public class DownloadRemoteIndexTask
}
this.runningRemoteDownloadIds.add( this.remoteRepository.getId() );
}
File tempIndexDirectory = null;
Path tempIndexDirectory = null;
StopWatch stopWatch = new StopWatch();
stopWatch.start();
try
@ -115,15 +115,15 @@ public class DownloadRemoteIndexTask
IndexingContext indexingContext = remoteRepositoryAdmin.createIndexContext( this.remoteRepository );
// create a temp directory to download files
tempIndexDirectory = new File( indexingContext.getIndexDirectoryFile().getParent(), ".tmpIndex" );
File indexCacheDirectory = new File( indexingContext.getIndexDirectoryFile().getParent(), ".indexCache" );
indexCacheDirectory.mkdirs();
if ( tempIndexDirectory.exists() )
tempIndexDirectory = Paths.get(indexingContext.getIndexDirectoryFile().getParent(), ".tmpIndex" );
Path indexCacheDirectory = Paths.get( indexingContext.getIndexDirectoryFile().getParent(), ".indexCache" );
Files.createDirectories( indexCacheDirectory );
if ( Files.exists(tempIndexDirectory) )
{
FileUtils.deleteDirectory( tempIndexDirectory );
org.apache.archiva.common.utils.FileUtils.deleteDirectory( tempIndexDirectory );
}
tempIndexDirectory.mkdirs();
tempIndexDirectory.deleteOnExit();
Files.createDirectories( tempIndexDirectory );
tempIndexDirectory.toFile().deleteOnExit();
String baseIndexUrl = indexingContext.getIndexUpdateUrl();
String wagonProtocol = new URL( this.remoteRepository.getUrl() ).getProtocol();
@ -167,17 +167,17 @@ public class DownloadRemoteIndexTask
wagon.connect( new Repository( this.remoteRepository.getId(), baseIndexUrl ), authenticationInfo,
proxyInfo );
File indexDirectory = indexingContext.getIndexDirectoryFile();
if ( !indexDirectory.exists() )
Path indexDirectory = indexingContext.getIndexDirectoryFile().toPath();
if ( !Files.exists(indexDirectory) )
{
indexDirectory.mkdirs();
Files.createDirectories( indexDirectory );
}
ResourceFetcher resourceFetcher =
new WagonResourceFetcher( log, tempIndexDirectory, wagon, remoteRepository );
IndexUpdateRequest request = new IndexUpdateRequest( indexingContext, resourceFetcher );
request.setForceFullUpdate( this.fullDownload );
request.setLocalIndexCacheDir( indexCacheDirectory );
request.setLocalIndexCacheDir( indexCacheDirectory.toFile() );
this.indexUpdater.fetchAndUpdateIndex( request );
stopWatch.stop();
@ -204,11 +204,11 @@ public class DownloadRemoteIndexTask
log.info( "end download remote index for remote repository {}", this.remoteRepository.getId() );
}
private void deleteDirectoryQuiet( File f )
private void deleteDirectoryQuiet( Path f )
{
try
{
FileUtils.deleteDirectory( f );
org.apache.archiva.common.utils.FileUtils.deleteDirectory( f );
}
catch ( IOException e )
{
@ -280,13 +280,13 @@ public class DownloadRemoteIndexTask
Logger log;
File tempIndexDirectory;
Path tempIndexDirectory;
Wagon wagon;
RemoteRepository remoteRepository;
private WagonResourceFetcher( Logger log, File tempIndexDirectory, Wagon wagon,
private WagonResourceFetcher( Logger log, Path tempIndexDirectory, Wagon wagon,
RemoteRepository remoteRepository )
{
this.log = log;
@ -316,11 +316,11 @@ public class DownloadRemoteIndexTask
try
{
log.info( "index update retrieve file, name:{}", name );
File file = new File( tempIndexDirectory, name );
Files.deleteIfExists( file.toPath() );
file.deleteOnExit();
wagon.get( addParameters( name, this.remoteRepository ), file );
return Files.newInputStream( file.toPath() );
Path file = tempIndexDirectory.resolve( name );
Files.deleteIfExists( file );
file.toFile().deleteOnExit();
wagon.get( addParameters( name, this.remoteRepository ), file.toFile() );
return Files.newInputStream( file );
}
catch ( AuthorizationException | TransferFailedException e )
{
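
This class collects most of the recurring replacements: Paths.get/resolve instead of the two-argument File constructor, Files.createDirectories instead of mkdirs(), Files.exists/deleteIfExists/newInputStream for the per-file work, and the project helper org.apache.archiva.common.utils.FileUtils.deleteDirectory where commons-io used to remove the tree. A sketch of those idioms follows; the bottom-up recursive delete is an assumption about what such a helper typically does, not the Archiva implementation, and the directory names are illustrative:

import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Comparator;
import java.util.stream.Stream;

public class TempIndexDirSketch
{
    // Assumed stand-in for the project helper: walk the tree and delete bottom-up.
    static void deleteDirectory( Path dir ) throws IOException
    {
        if ( !Files.exists( dir ) )
        {
            return;
        }
        try ( Stream<Path> walk = Files.walk( dir ) )
        {
            walk.sorted( Comparator.reverseOrder() ).forEach( p -> {
                try
                {
                    Files.delete( p );
                }
                catch ( IOException e )
                {
                    throw new UncheckedIOException( e );
                }
            } );
        }
    }

    public static void main( String[] args ) throws IOException
    {
        Path indexDirectory = Paths.get( "target", "remote-index" );             // stand-in for getIndexDirectoryFile()
        Path tempIndexDirectory = indexDirectory.resolveSibling( ".tmpIndex" );  // sibling dir, as in the diff

        deleteDirectory( tempIndexDirectory );          // clean slate before the download
        Files.createDirectories( tempIndexDirectory );  // replaces File.mkdirs()

        Path fetched = tempIndexDirectory.resolve( "nexus-maven-repository-index.properties" );
        Files.deleteIfExists( fetched );                // replaces File.delete() before wagon.get(...)
        Files.write( fetched, "nexus.index.id=test".getBytes( StandardCharsets.UTF_8 ) );

        try ( InputStream in = Files.newInputStream( fetched ) )  // replaces returning a FileInputStream
        {
            System.out.println( "read " + in.available() + " bytes from " + fetched );
        }
    }
}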

ArchivaIndexingTaskExecutorTest.java

@ -32,13 +32,13 @@ import org.apache.maven.index.context.IndexCreator;
import org.apache.maven.index.context.IndexingContext;
import org.apache.maven.index.expr.SourcedSearchExpression;
import org.apache.maven.index.expr.StringSearchExpression;
import org.apache.maven.index.updater.DefaultIndexUpdater;
import org.apache.maven.index.updater.IndexUpdateRequest;
import org.apache.maven.index.updater.IndexUpdater;
import org.apache.maven.index_shaded.lucene.search.BooleanClause;
import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
import org.apache.maven.index_shaded.lucene.search.IndexSearcher;
import org.apache.maven.index_shaded.lucene.search.TopDocs;
import org.apache.maven.index.updater.DefaultIndexUpdater;
import org.apache.maven.index.updater.IndexUpdateRequest;
import org.apache.maven.index.updater.IndexUpdater;
import org.assertj.core.api.Assertions;
import org.junit.After;
import org.junit.Before;
@ -47,8 +47,10 @@ import org.junit.runner.RunWith;
import org.springframework.test.context.ContextConfiguration;
import javax.inject.Inject;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Set;
@ -87,7 +89,7 @@ public class ArchivaIndexingTaskExecutorTest
repositoryConfig = new ManagedRepository();
repositoryConfig.setId( "test-repo" );
repositoryConfig.setLocation(
new File( System.getProperty( "basedir" ), "target/test-classes/test-repo" ).getAbsolutePath() );
Paths.get( System.getProperty( "basedir" ), "target/test-classes/test-repo" ).toAbsolutePath().toString() );
repositoryConfig.setLayout( "default" );
repositoryConfig.setName( "Test Repository" );
repositoryConfig.setScanned( true );
@ -130,7 +132,7 @@ public class ArchivaIndexingTaskExecutorTest
public void testAddArtifactToIndex()
throws Exception
{
File artifactFile = new File( repositoryConfig.getLocation(),
Path artifactFile = Paths.get( repositoryConfig.getLocation(),
"org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
ArtifactIndexingTask task =
@ -150,8 +152,8 @@ public class ArchivaIndexingTaskExecutorTest
{
IndexingContext context = indexer.addIndexingContext( repositoryConfig.getId(), //
repositoryConfig.getId(), //
new File( repositoryConfig.getLocation() ), //
new File( repositoryConfig.getLocation(), ".indexer" )
Paths.get( repositoryConfig.getLocation() ).toFile(), //
Paths.get( repositoryConfig.getLocation(), ".indexer" ).toFile()
//
, null, null, indexCreators );
context.setSearchable( true );
@ -160,8 +162,8 @@ public class ArchivaIndexingTaskExecutorTest
FlatSearchRequest request = new FlatSearchRequest( q );
FlatSearchResponse response = indexer.searchFlat( request );
assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
assertFalse( new File( repositoryConfig.getLocation(), ".index" ).exists() );
assertTrue( Files.exists(Paths.get( repositoryConfig.getLocation(), ".indexer" )) );
assertFalse( Files.exists(Paths.get( repositoryConfig.getLocation(), ".index" )) );
assertEquals( 1, response.getTotalHits() );
Set<ArtifactInfo> results = response.getResults();
@ -177,7 +179,7 @@ public class ArchivaIndexingTaskExecutorTest
public void testUpdateArtifactInIndex()
throws Exception
{
File artifactFile = new File( repositoryConfig.getLocation(),
Path artifactFile = Paths.get( repositoryConfig.getLocation(),
"org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
ArtifactIndexingTask task =
@ -202,8 +204,8 @@ public class ArchivaIndexingTaskExecutorTest
//searcher.close();
ctx.releaseIndexSearcher( searcher );
assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
assertFalse( new File( repositoryConfig.getLocation(), ".index" ).exists() );
assertTrue( Files.exists(Paths.get( repositoryConfig.getLocation(), ".indexer" )) );
assertFalse( Files.exists(Paths.get( repositoryConfig.getLocation(), ".index" )) );
// should only return 1 hit!
assertEquals( 1, topDocs.totalHits );
@ -213,7 +215,7 @@ public class ArchivaIndexingTaskExecutorTest
public void testRemoveArtifactFromIndex()
throws Exception
{
File artifactFile = new File( repositoryConfig.getLocation(),
Path artifactFile = Paths.get( repositoryConfig.getLocation(),
"org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
ArtifactIndexingTask task =
@ -235,8 +237,8 @@ public class ArchivaIndexingTaskExecutorTest
FlatSearchResponse response = indexer.searchFlat( flatSearchRequest );
assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
assertFalse( new File( repositoryConfig.getLocation(), ".index" ).exists() );
assertTrue( Files.exists(Paths.get( repositoryConfig.getLocation(), ".indexer" )) );
assertFalse( Files.exists(Paths.get( repositoryConfig.getLocation(), ".index" )) );
// should return 1 hit
assertEquals( 1, response.getTotalHitsCount() );
@ -257,8 +259,8 @@ public class ArchivaIndexingTaskExecutorTest
new SourcedSearchExpression( "archiva-index-methods-jar-test" ) ),
BooleanClause.Occur.SHOULD );
assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
assertFalse( new File( repositoryConfig.getLocation(), ".index" ).exists() );
assertTrue( Files.exists(Paths.get( repositoryConfig.getLocation(), ".indexer" )) );
assertFalse( Files.exists(Paths.get( repositoryConfig.getLocation(), ".index" )) );
flatSearchRequest = new FlatSearchRequest( q, getIndexingContext() );
@ -274,22 +276,23 @@ public class ArchivaIndexingTaskExecutorTest
throws Exception
{
File indexerDirectory = new File( repositoryConfig.getLocation(), ".indexer" );
Path indexerDirectory =Paths.get( repositoryConfig.getLocation(), ".indexer" );
indexerDirectory.listFiles( new FilenameFilter()
{
@Override
public boolean accept( File file, String s )
Files.list(indexerDirectory).filter( path -> path.getFileName().toString().startsWith("nexus-maven-repository-index") )
.forEach( path ->
{
if ( s.startsWith( "nexus-maven-repository-index" ) )
try
{
new File( file, s ).delete();
Files.delete( path );
}
return false;
}
} );
catch ( IOException e )
{
e.printStackTrace( );
}
} );
File artifactFile = new File( repositoryConfig.getLocation(),
Path artifactFile = Paths.get( repositoryConfig.getLocation(),
"org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
ArtifactIndexingTask task =
@ -306,19 +309,19 @@ public class ArchivaIndexingTaskExecutorTest
indexingExecutor.executeTask( task );
assertTrue( indexerDirectory.exists() );
assertTrue( Files.exists(indexerDirectory) );
// test packed index file creation
//no more zip
//Assertions.assertThat(new File( indexerDirectory, "nexus-maven-repository-index.zip" )).exists();
Assertions.assertThat( new File( indexerDirectory, "nexus-maven-repository-index.properties" ) ).exists();
Assertions.assertThat( new File( indexerDirectory, "nexus-maven-repository-index.gz" ) ).exists();
Assertions.assertThat( Files.exists(indexerDirectory.resolve("nexus-maven-repository-index.properties" ) ));
Assertions.assertThat( Files.exists(indexerDirectory.resolve("nexus-maven-repository-index.gz" ) ));
// unpack .zip index
File destDir = new File( repositoryConfig.getLocation(), ".indexer/tmp" );
Path destDir = Paths.get( repositoryConfig.getLocation(), ".indexer/tmp" );
//unzipIndex( indexerDirectory.getPath(), destDir.getPath() );
DefaultIndexUpdater.FileFetcher fetcher = new DefaultIndexUpdater.FileFetcher( indexerDirectory );
DefaultIndexUpdater.FileFetcher fetcher = new DefaultIndexUpdater.FileFetcher( indexerDirectory.toFile() );
IndexUpdateRequest updateRequest = new IndexUpdateRequest( getIndexingContext(), fetcher );
//updateRequest.setLocalIndexCacheDir( indexerDirectory );
indexUpdater.fetchAndUpdateIndex( updateRequest );
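
The anonymous FilenameFilter becomes a Files.list pipeline. One caveat: Files.list keeps a directory handle open until the stream is closed, so the idiomatic form wraps it in try-with-resources (the inline call above works in a short-lived test but can leak the handle). A hedged sketch of the cleanup step in that form, with an assumed target-directory layout:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;

public class IndexCleanupSketch
{
    public static void main( String[] args ) throws IOException
    {
        Path indexerDirectory = Paths.get( "target", "test-repo", ".indexer" ); // assumed test layout
        if ( !Files.isDirectory( indexerDirectory ) )
        {
            return; // nothing to clean
        }

        // Delete stale nexus-maven-repository-index* files before re-running the indexing task.
        try ( Stream<Path> entries = Files.list( indexerDirectory ) )
        {
            entries.filter( p -> p.getFileName().toString().startsWith( "nexus-maven-repository-index" ) )
                   .forEach( p -> {
                       try
                       {
                           Files.delete( p );
                       }
                       catch ( IOException e )
                       {
                           e.printStackTrace();
                       }
                   } );
        }
    }
}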

DownloadRemoteIndexTaskTest.java

@ -45,8 +45,10 @@ import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import org.springframework.test.context.ContextConfiguration;
import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
@ -83,17 +85,17 @@ public class DownloadRemoteIndexTaskTest
server = new Server( );
serverConnector = new ServerConnector( server, new HttpConnectionFactory());
server.addConnector( serverConnector );
createContext( server, new File( "src/test/" ) );
createContext( server, Paths.get( "src/test/" ) );
this.server.start();
this.port = serverConnector.getLocalPort();
log.info( "start server on port {}", this.port );
}
protected void createContext( Server server, File repositoryDirectory )
protected void createContext( Server server, Path repositoryDirectory )
throws IOException
{
ServletContextHandler context = new ServletContextHandler();
context.setResourceBase( repositoryDirectory.getAbsolutePath() );
context.setResourceBase( repositoryDirectory.toAbsolutePath().toString() );
context.setContextPath( "/" );
ServletHolder sh = new ServletHolder( DefaultServlet.class );
context.addServlet( sh, "/" );
@ -141,16 +143,16 @@ public class DownloadRemoteIndexTaskTest
}
protected RemoteRepository getRemoteRepository()
protected RemoteRepository getRemoteRepository() throws IOException
{
RemoteRepository remoteRepository = new RemoteRepository();
File indexDirectory =
new File( FileUtils.getBasedir(), "target/index/test-" + Long.toString( System.currentTimeMillis() ) );
indexDirectory.mkdirs();
indexDirectory.deleteOnExit();
Path indexDirectory =
Paths.get( FileUtils.getBasedir(), "target/index/test-" + Long.toString( System.currentTimeMillis() ) );
Files.createDirectories( indexDirectory );
indexDirectory.toFile().deleteOnExit();
remoteRepository.setName( "foo" );
remoteRepository.setIndexDirectory( indexDirectory.getAbsolutePath() );
remoteRepository.setIndexDirectory( indexDirectory.toAbsolutePath().toString() );
remoteRepository.setDownloadRemoteIndex( true );
remoteRepository.setId( "test-repo" );
remoteRepository.setUrl( "http://localhost:" + port );

RepositoryTask.java

@ -2,7 +2,8 @@ package org.apache.archiva.scheduler.repository.model;
import org.apache.archiva.redback.components.taskqueue.Task;
import java.io.File;
import java.nio.file.Path;
/*
* Licensed to the Apache Software Foundation (ASF) under one
@ -32,7 +33,7 @@ public class RepositoryTask
{
private String repositoryId;
private File resourceFile;
private Path resourceFile;
private boolean updateRelatedArtifacts;
@ -80,12 +81,12 @@ public class RepositoryTask
return 0;
}
public File getResourceFile()
public Path getResourceFile()
{
return resourceFile;
}
public void setResourceFile( File resourceFile )
public void setResourceFile( Path resourceFile )
{
this.resourceFile = resourceFile;
}

ArchivaRepositoryScanningTaskExecutor.java

@ -113,7 +113,7 @@ public class ArchivaRepositoryScanningTaskExecutor
if ( task.getResourceFile() != null )
{
log.debug( "Executing task from queue with job name: {}", task );
consumers.executeConsumers( arepo, task.getResourceFile().toPath(), task.isUpdateRelatedArtifacts() );
consumers.executeConsumers( arepo, task.getResourceFile(), task.isUpdateRelatedArtifacts() );
}
else
{

AbstractArchivaRepositoryScanningTaskExecutorTest.java

@ -36,8 +36,17 @@ import org.springframework.test.context.ContextConfiguration;
import javax.inject.Inject;
import javax.inject.Named;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.FileVisitOption;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
import java.util.Calendar;
import java.util.Comparator;
import java.util.stream.Stream;
import static org.mockito.Mockito.mock;
@ -70,7 +79,7 @@ public abstract class AbstractArchivaRepositoryScanningTaskExecutorTest
@Named( value = "repositorySessionFactory#mock" )
private MockRepositorySessionFactory factory;
protected File repoDir;
protected Path repoDir;
protected static final String TEST_REPO_ID = "testRepo";
@ -83,28 +92,40 @@ public abstract class AbstractArchivaRepositoryScanningTaskExecutorTest
{
super.setUp();
File sourceRepoDir = new File( "./src/test/repositories/default-repository" );
repoDir = new File( "./target/default-repository" );
Path sourceRepoDir = Paths.get( "src/test/repositories/default-repository" );
repoDir = Paths.get( "target/default-repository" );
FileUtils.deleteDirectory( repoDir );
assertFalse( "Default Test Repository should not exist.", repoDir.exists() );
org.apache.archiva.common.utils.FileUtils.deleteDirectory( repoDir );
assertFalse( "Default Test Repository should not exist.", Files.exists(repoDir) );
repoDir.mkdir();
Files.createDirectories(repoDir);
FileUtils.copyDirectoryStructure( sourceRepoDir, repoDir );
FileUtils.copyDirectoryStructure( sourceRepoDir.toFile(), repoDir.toFile() );
// set the timestamps to a time well in the past
Calendar cal = Calendar.getInstance();
cal.add( Calendar.YEAR, -1 );
FileUtils.getFiles( repoDir, "**", null ) //
.stream().forEach( file -> file.setLastModified( cal.getTimeInMillis() ) );
// TODO: test they are excluded instead
for ( String dir : FileUtils.getDirectoryNames( repoDir, "**/.svn", null, false ) )
{
FileUtils.deleteDirectory( new File( repoDir, dir ) );
try(Stream<Path> stream = Files.walk( repoDir,FileVisitOption.FOLLOW_LINKS)) {
stream.forEach( path ->
{
try
{
Files.setLastModifiedTime( path, FileTime.fromMillis( cal.getTimeInMillis( ) ) );
}
catch ( IOException e )
{
e.printStackTrace( );
}
} );
}
PathMatcher m = FileSystems.getDefault().getPathMatcher( "glob:**/.svn" );
Files.walk(repoDir, FileVisitOption.FOLLOW_LINKS).filter(Files::isDirectory)
.sorted( Comparator.reverseOrder( ))
.filter( path -> m.matches( path ) )
.forEach( path ->
org.apache.archiva.common.utils.FileUtils.deleteQuietly( path )
);
assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
assertTrue( "Default Test Repository should exist.", Files.exists(repoDir) && Files.isDirectory( repoDir) );
assertNotNull( archivaConfig );
@ -112,7 +133,7 @@ public abstract class AbstractArchivaRepositoryScanningTaskExecutorTest
ManagedRepositoryConfiguration repositoryConfiguration = new ManagedRepositoryConfiguration();
repositoryConfiguration.setId( TEST_REPO_ID );
repositoryConfiguration.setName( "Test Repository" );
repositoryConfiguration.setLocation( repoDir.getAbsolutePath() );
repositoryConfiguration.setLocation( repoDir.toAbsolutePath().toString() );
archivaConfig.getConfiguration().getManagedRepositories().clear();
archivaConfig.getConfiguration().addManagedRepository( repositoryConfiguration );
@ -126,9 +147,9 @@ public abstract class AbstractArchivaRepositoryScanningTaskExecutorTest
public void tearDown()
throws Exception
{
FileUtils.deleteDirectory( repoDir );
org.apache.archiva.common.utils.FileUtils.deleteDirectory( repoDir );
assertFalse( repoDir.exists() );
assertFalse( Files.exists(repoDir) );
super.tearDown();
}
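
The test setup now combines several NIO pieces: Files.walk with FOLLOW_LINKS, Files.setLastModifiedTime with a FileTime to back-date entries, and a glob PathMatcher plus a reverse-ordered delete for the .svn directories. A compact sketch of the same combination, with both walks in try-with-resources and a plain-NIO bottom-up delete standing in for org.apache.archiva.common.utils.FileUtils.deleteQuietly; paths are illustrative:

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.FileSystems;
import java.nio.file.FileVisitOption;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
import java.util.Calendar;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class RepoSetupSketch
{
    public static void main( String[] args ) throws IOException
    {
        Path repoDir = Paths.get( "target", "default-repository" ); // same target dir as the test
        if ( !Files.isDirectory( repoDir ) )
        {
            return;
        }

        // Back-date every entry by a year, as the test does before scanning.
        Calendar cal = Calendar.getInstance();
        cal.add( Calendar.YEAR, -1 );
        FileTime lastYear = FileTime.fromMillis( cal.getTimeInMillis() );
        try ( Stream<Path> stream = Files.walk( repoDir, FileVisitOption.FOLLOW_LINKS ) )
        {
            stream.forEach( path -> {
                try
                {
                    Files.setLastModifiedTime( path, lastYear );
                }
                catch ( IOException e )
                {
                    throw new UncheckedIOException( e );
                }
            } );
        }

        // Collect the .svn directories first (closing the walk), then delete each tree bottom-up.
        PathMatcher svn = FileSystems.getDefault().getPathMatcher( "glob:**/.svn" );
        List<Path> svnDirs;
        try ( Stream<Path> stream = Files.walk( repoDir, FileVisitOption.FOLLOW_LINKS ) )
        {
            svnDirs = stream.filter( Files::isDirectory ).filter( svn::matches ).collect( Collectors.toList() );
        }
        for ( Path dir : svnDirs )
        {
            try ( Stream<Path> tree = Files.walk( dir ) )
            {
                tree.sorted( Comparator.reverseOrder() ).forEach( p -> {
                    try
                    {
                        Files.delete( p );
                    }
                    catch ( IOException e )
                    {
                        throw new UncheckedIOException( e );
                    }
                } );
            }
        }
    }
}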

ArchivaRepositoryScanningTaskExecutorPhase2Test.java

@ -28,7 +28,10 @@ import org.codehaus.plexus.util.FileUtils;
import org.junit.Test;
import org.springframework.test.context.ContextConfiguration;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
@ -72,20 +75,20 @@ public class ArchivaRepositoryScanningTaskExecutorPhase2Test
// assertEquals( 5, newStats.getTotalProjectCount() );
// assertEquals( 14159, newStats.getTotalArtifactFileSize() );
File newArtifactGroup = new File( repoDir, "org/apache/archiva" );
assertFalse( "newArtifactGroup should not exist.", newArtifactGroup.exists() );
Path newArtifactGroup = repoDir.resolve( "org/apache/archiva" );
assertFalse( "newArtifactGroup should not exist.", Files.exists(newArtifactGroup) );
FileUtils.copyDirectoryStructure( new File( "target/test-classes/test-repo/org/apache/archiva" ),
newArtifactGroup );
FileUtils.copyDirectoryStructure( Paths.get( "target/test-classes/test-repo/org/apache/archiva" ).toFile(),
newArtifactGroup.toFile() );
// update last modified date
new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
Calendar.getInstance().getTimeInMillis() + 1000 );
new File( newArtifactGroup,
"archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
Calendar.getInstance().getTimeInMillis() + 1000 );
Files.setLastModifiedTime(newArtifactGroup.resolve( "archiva-index-methods-jar-test/1.0/pom.xml" ), FileTime.fromMillis(
Calendar.getInstance().getTimeInMillis() + 1000 ));
Files.setLastModifiedTime( newArtifactGroup.resolve(
"archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ), FileTime.fromMillis(
Calendar.getInstance().getTimeInMillis() + 1000 ));
assertTrue( newArtifactGroup.exists() );
assertTrue( Files.exists(newArtifactGroup) );
taskExecutor.executeTask( repoTask );
@ -117,20 +120,20 @@ public class ArchivaRepositoryScanningTaskExecutorPhase2Test
createAndSaveTestStats();
File newArtifactGroup = new File( repoDir, "org/apache/archiva" );
assertFalse( "newArtifactGroup should not exist.", newArtifactGroup.exists() );
Path newArtifactGroup = repoDir.resolve( "org/apache/archiva" );
assertFalse( "newArtifactGroup should not exist.", Files.exists(newArtifactGroup) );
FileUtils.copyDirectoryStructure( new File( "target/test-classes/test-repo/org/apache/archiva" ),
newArtifactGroup );
FileUtils.copyDirectoryStructure( Paths.get( "target/test-classes/test-repo/org/apache/archiva" ).toFile(),
newArtifactGroup.toFile() );
// update last modified date, placing shortly after last scan
new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
Calendar.getInstance().getTimeInMillis() + 1000 );
new File( newArtifactGroup,
"archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
Calendar.getInstance().getTimeInMillis() + 1000 );
Files.setLastModifiedTime(newArtifactGroup.resolve("archiva-index-methods-jar-test/1.0/pom.xml" ), FileTime.fromMillis(
Calendar.getInstance().getTimeInMillis() + 1000 ));
Files.setLastModifiedTime( newArtifactGroup.resolve(
"archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ), FileTime.fromMillis(
Calendar.getInstance().getTimeInMillis() + 1000 ));
assertTrue( newArtifactGroup.exists() );
assertTrue( Files.exists(newArtifactGroup) );
// scan using the really long previous duration
taskExecutor.executeTask( repoTask );
@ -164,20 +167,20 @@ public class ArchivaRepositoryScanningTaskExecutorPhase2Test
createAndSaveTestStats();
File newArtifactGroup = new File( repoDir, "org/apache/archiva" );
assertFalse( "newArtifactGroup should not exist.", newArtifactGroup.exists() );
Path newArtifactGroup = repoDir.resolve( "org/apache/archiva" );
assertFalse( "newArtifactGroup should not exist.", Files.exists(newArtifactGroup) );
FileUtils.copyDirectoryStructure( new File( "target/test-classes/test-repo/org/apache/archiva" ),
newArtifactGroup );
FileUtils.copyDirectoryStructure( Paths.get( "target/test-classes/test-repo/org/apache/archiva" ).toFile(),
newArtifactGroup.toFile() );
// update last modified date, placing in middle of last scan
new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
Calendar.getInstance().getTimeInMillis() - 50000 );
new File( newArtifactGroup,
"archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
Calendar.getInstance().getTimeInMillis() - 50000 );
Files.setLastModifiedTime( newArtifactGroup.resolve("archiva-index-methods-jar-test/1.0/pom.xml" ), FileTime.fromMillis(
Calendar.getInstance().getTimeInMillis() - 50000 ));
Files.setLastModifiedTime( newArtifactGroup.resolve(
"archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ), FileTime.fromMillis(
Calendar.getInstance().getTimeInMillis() - 50000 ));
assertTrue( newArtifactGroup.exists() );
assertTrue( Files.exists(newArtifactGroup) );
// scan using the really long previous duration
taskExecutor.executeTask( repoTask );
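
Here new File( parent, child ) becomes Path.resolve, and File.setLastModified, which only returns a boolean, becomes Files.setLastModifiedTime, which throws IOException on failure. A short sketch of the touch pattern the phase-2 tests use to mark the POM and JAR as recently modified; the helper name and paths are illustrative:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;

public class TouchArtifactSketch
{
    // Bump the timestamp of one artifact file, skipping it quietly if it is missing.
    static void touch( Path artifactDir, String relativePath, long millis ) throws IOException
    {
        Path file = artifactDir.resolve( relativePath );  // replaces new File( parent, child )
        if ( Files.exists( file ) )
        {
            Files.setLastModifiedTime( file, FileTime.fromMillis( millis ) ); // replaces File.setLastModified(...)
        }
    }

    public static void main( String[] args ) throws IOException
    {
        Path newArtifactGroup = Paths.get( "target", "default-repository", "org/apache/archiva" );
        long justAfterScan = System.currentTimeMillis() + 1000;
        touch( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml", justAfterScan );
        touch( newArtifactGroup, "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar", justAfterScan );
        System.out.println( "bumped timestamps under " + newArtifactGroup );
    }
}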

DefaultRepositoriesService.java

@ -503,7 +503,7 @@ public class DefaultRepositoriesService
{
RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repositoryId );
task.setResourceFile( localFile );
task.setResourceFile( localFile.toPath() );
task.setUpdateRelatedArtifacts( true );
//task.setScanAll( true );

DefaultFileUploadService.java

@ -559,7 +559,7 @@ public class DefaultFileUploadService
{
RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repositoryId );
task.setResourceFile( localFile );
task.setResourceFile( localFile.toPath() );
task.setUpdateRelatedArtifacts( true );
task.setScanAll( false );

ArchivaDavResource.java

@ -713,7 +713,7 @@ public class ArchivaDavResource
{
RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repository.getId() );
task.setResourceFile( localFile );
task.setResourceFile( localFile.toPath() );
task.setUpdateRelatedArtifacts( false );
task.setScanAll( false );
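
The remaining call sites (DefaultRepositoriesService, DefaultFileUploadService, ArchivaDavResource) still hold a java.io.File and bridge into the Path-based RepositoryTask with File.toPath(). A minimal sketch of that two-way bridge; the file location is hypothetical and the task itself is left out:

import java.io.File;
import java.nio.file.Path;

public class FilePathBridgeSketch
{
    public static void main( String[] args )
    {
        // File-based code handing its artifact to the Path-based scheduler API.
        File localFile = new File( "target/uploads/lib-1.0.jar" ); // hypothetical upload location
        Path resourceFile = localFile.toPath();                    // what task.setResourceFile(...) now receives

        // The reverse bridge is used wherever a library still expects File.
        File legacyView = resourceFile.toFile();

        System.out.println( resourceFile + " / " + legacyView.getPath() );
    }
}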