Migrating web modules to java.nio

commit abba44b0a8
parent b12b8a3ec9
Repository: https://github.com/apache/archiva.git (apache/archiva)
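
The hunks below consistently swap java.io.File calls for their java.nio.file equivalents. A condensed summary of the mapping used throughout this commit (drawn from the diff itself; it is an overview, not part of the change):

    // old (java.io)                        // new (java.nio.file)
    new File( parent, child )               parentPath.resolve( child )
    file.exists()                           Files.exists( path )
    file.isDirectory()                      Files.isDirectory( path )
    file.mkdirs()                           Files.createDirectories( path )
    file.delete()                           Files.deleteIfExists( path )
    file.length()                           Files.size( path )
    file.lastModified()                     Files.getLastModifiedTime( path ).toMillis()
    file.getName()                          path.getFileName().toString()
    file.getAbsolutePath()                  path.toAbsolutePath().toString()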
@@ -24,12 +24,12 @@
import org.apache.archiva.admin.model.admin.ArchivaAdministration;
import org.apache.archiva.admin.model.beans.ManagedRepository;
import org.apache.archiva.admin.model.managed.ManagedRepositoryAdmin;
import org.apache.archiva.metadata.model.facets.AuditEvent;
import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile;
import org.apache.archiva.common.utils.VersionComparator;
import org.apache.archiva.common.utils.VersionUtil;
import org.apache.archiva.maven2.metadata.MavenMetadataReader;
import org.apache.archiva.metadata.model.facets.AuditEvent;
import org.apache.archiva.model.ArchivaRepositoryMetadata;
import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.model.SnapshotVersion;
@@ -65,11 +65,12 @@
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
@@ -136,13 +137,13 @@ public FileMetadata post( MultipartBody multipartBody )
//Content-Disposition: form-data; name="files[]"; filename="org.apache.karaf.features.command-2.2.2.jar"
String fileName = file.getContentDisposition().getParameter( "filename" );

File tmpFile = File.createTempFile( "upload-artifact", ".tmp" );
tmpFile.deleteOnExit();
IOUtils.copy( file.getDataHandler().getInputStream(), new FileOutputStream( tmpFile ) );
FileMetadata fileMetadata = new FileMetadata( fileName, tmpFile.length(), "theurl" );
fileMetadata.setServerFileName( tmpFile.getPath() );
Path tmpFile = Files.createTempFile( "upload-artifact", ".tmp" );
tmpFile.toFile().deleteOnExit();
IOUtils.copy( file.getDataHandler().getInputStream(), new FileOutputStream( tmpFile.toFile() ) );
FileMetadata fileMetadata = new FileMetadata( fileName, Files.size(tmpFile), "theurl" );
fileMetadata.setServerFileName( tmpFile.toString() );
fileMetadata.setClassifier( classifier );
fileMetadata.setDeleteUrl( tmpFile.getName() );
fileMetadata.setDeleteUrl( tmpFile.getFileName().toString() );
fileMetadata.setPomFile( pomFile );
fileMetadata.setPackaging( packaging );

@@ -183,17 +184,21 @@ protected synchronized List<FileMetadata> getSessionFilesList()
public Boolean deleteFile( String fileName )
throws ArchivaRestServiceException
{
File file = new File( SystemUtils.getJavaIoTmpDir(), fileName );
log.debug( "delete file:{},exists:{}", file.getPath(), file.exists() );
Path file = SystemUtils.getJavaIoTmpDir().toPath().resolve( fileName );
log.debug( "delete file:{},exists:{}", file, Files.exists(file) );
boolean removed = getSessionFileMetadatas().remove( new FileMetadata( fileName ) );
// try with full name as ui only know the file name
if ( !removed )
{
/* unused */ getSessionFileMetadatas().remove( new FileMetadata( file.getPath() ) );
/* unused */ getSessionFileMetadatas().remove( new FileMetadata( file.toString() ) );
}
if ( file.exists() )
try
{
return file.delete();
Files.deleteIfExists( file );
}
catch ( IOException e )
{
log.error("Could not delete file {}: {}", file, e.getMessage(), e);
}
return Boolean.FALSE;
}
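
The hunks above move post() and deleteFile() onto java.nio: Files.createTempFile, Files.size and Files.deleteIfExists replace File.createTempFile, File.length() and the exists()/delete() pair. A minimal, self-contained sketch of the upload side (readUpload and the stream source are hypothetical; the "upload-artifact" prefix comes from the diff):

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.StandardCopyOption;

    public class UploadSketch
    {
        // Copies an uploaded stream into a temp file and reports its size, as the new code does.
        static Path readUpload( InputStream uploadedData ) throws IOException
        {
            Path tmpFile = Files.createTempFile( "upload-artifact", ".tmp" );
            tmpFile.toFile().deleteOnExit();   // Path has no deleteOnExit(), hence the toFile() bridge
            // The commit keeps IOUtils.copy + FileOutputStream; Files.copy is the pure-NIO equivalent.
            Files.copy( uploadedData, tmpFile, StandardCopyOption.REPLACE_EXISTING );
            long size = Files.size( tmpFile ); // replaces File.length()
            System.out.println( tmpFile + " (" + size + " bytes)" );
            return tmpFile;
        }

        public static void main( String[] args ) throws IOException
        {
            readUpload( new ByteArrayInputStream( "demo".getBytes() ) );
        }
    }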
@@ -205,7 +210,7 @@ public Boolean clearUploadedFiles()
List<FileMetadata> fileMetadatas = new ArrayList( getSessionFileMetadatas() );
for ( FileMetadata fileMetadata : fileMetadatas )
{
deleteFile( new File( fileMetadata.getServerFileName() ).getPath() );
deleteFile( Paths.get( fileMetadata.getServerFileName() ).toString() );
}
getSessionFileMetadatas().clear();
return Boolean.TRUE;
@@ -331,7 +336,7 @@ protected void savePomFile( String repositoryId, FileMetadata fileMetadata, Stri
int lastIndex = artifactPath.lastIndexOf( '/' );

String path = artifactPath.substring( 0, lastIndex );
File targetPath = new File( repoConfig.getLocation(), path );
Path targetPath = Paths.get( repoConfig.getLocation(), path );

String pomFilename = artifactPath.substring( lastIndex + 1 );
if ( StringUtils.isNotEmpty( fileMetadata.getClassifier() ) )
@@ -340,9 +345,9 @@ protected void savePomFile( String repositoryId, FileMetadata fileMetadata, Stri
}
pomFilename = FilenameUtils.removeExtension( pomFilename ) + ".pom";

copyFile( new File( fileMetadata.getServerFileName() ), targetPath, pomFilename, fixChecksums );
copyFile( Paths.get( fileMetadata.getServerFileName() ), targetPath, pomFilename, fixChecksums );
triggerAuditEvent( repoConfig.getId(), path + "/" + pomFilename, AuditEvent.UPLOAD_FILE );
queueRepositoryTask( repoConfig.getId(), new File( targetPath, pomFilename ) );
queueRepositoryTask( repoConfig.getId(), targetPath.resolve(pomFilename ) );
}
catch ( IOException ie )
{
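
savePomFile() now builds target locations with Paths.get and Path.resolve instead of the two-argument File constructor. A small sketch of that pattern (the repository location and artifact path are made-up values):

    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class PathBuildingSketch
    {
        public static void main( String[] args )
        {
            String repoLocation = "/var/archiva/repositories/internal"; // hypothetical repository root
            String path = "org/example/demo/1.0";                       // hypothetical artifact directory

            // old: File targetPath = new File( repoConfig.getLocation(), path );
            Path targetPath = Paths.get( repoLocation, path );

            // old: new File( targetPath, pomFilename )
            Path pomFile = targetPath.resolve( "demo-1.0.pom" );

            System.out.println( pomFile.toAbsolutePath() );
        }
    }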
@@ -385,7 +390,7 @@ protected void saveFile( String repositoryId, FileMetadata fileMetadata, boolean
int lastIndex = artifactPath.lastIndexOf( '/' );

String path = artifactPath.substring( 0, lastIndex );
File targetPath = new File( repoConfig.getLocation(), path );
Path targetPath = Paths.get( repoConfig.getLocation(), path );

log.debug( "artifactPath: {} found targetPath: {}", artifactPath, targetPath );

@@ -393,7 +398,7 @@ protected void saveFile( String repositoryId, FileMetadata fileMetadata, boolean
int newBuildNumber = -1;
String timestamp = null;

File versionMetadataFile = new File( targetPath, MetadataTools.MAVEN_METADATA );
Path versionMetadataFile = targetPath.resolve( MetadataTools.MAVEN_METADATA );
ArchivaRepositoryMetadata versionMetadata = getMetadata( versionMetadataFile );

if ( VersionUtil.isSnapshot( version ) )
@@ -412,9 +417,9 @@ protected void saveFile( String repositoryId, FileMetadata fileMetadata, boolean
}
}

if ( !targetPath.exists() )
if ( !Files.exists(targetPath) )
{
targetPath.mkdirs();
Files.createDirectories( targetPath );
}

String filename = artifactPath.substring( lastIndex + 1 );
@@ -428,8 +433,8 @@ protected void saveFile( String repositoryId, FileMetadata fileMetadata, boolean

try
{
File targetFile = new File( targetPath, filename );
if ( targetFile.exists() && !VersionUtil.isSnapshot( version ) && repoConfig.isBlockRedeployments() )
Path targetFile = targetPath.resolve( filename );
if ( Files.exists(targetFile) && !VersionUtil.isSnapshot( version ) && repoConfig.isBlockRedeployments() )
{
throw new ArchivaRestServiceException(
"Overwriting released artifacts in repository '" + repoConfig.getId() + "' is not allowed.",
@@ -437,7 +442,7 @@ protected void saveFile( String repositoryId, FileMetadata fileMetadata, boolean
}
else
{
copyFile( new File( fileMetadata.getServerFileName() ), targetPath, filename, fixChecksums );
copyFile( Paths.get( fileMetadata.getServerFileName() ), targetPath, filename, fixChecksums );
triggerAuditEvent( repository.getId(), path + "/" + filename, AuditEvent.UPLOAD_FILE );
queueRepositoryTask( repository.getId(), targetFile );
}
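
saveFile() now checks and creates the target directory with Files.exists and Files.createDirectories instead of File.exists()/mkdirs(). A sketch of that idiom (directory and file names are illustrative):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class EnsureDirectorySketch
    {
        public static void main( String[] args ) throws IOException
        {
            Path targetPath = Paths.get( "target", "repo", "org", "example" );

            // Unlike File.mkdirs(), createDirectories() throws IOException on failure
            // and is already a no-op when the directory exists.
            if ( !Files.exists( targetPath ) )
            {
                Files.createDirectories( targetPath );
            }

            Path targetFile = targetPath.resolve( "example-1.0.jar" ); // replaces new File( targetPath, filename )
            System.out.println( Files.exists( targetFile ) );
        }
    }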
@@ -461,7 +466,7 @@ protected void saveFile( String repositoryId, FileMetadata fileMetadata, boolean

try
{
File generatedPomFile =
Path generatedPomFile =
createPom( targetPath, pomFilename, fileMetadata, groupId, artifactId, version, packaging );
triggerAuditEvent( repoConfig.getId(), path + "/" + pomFilename, AuditEvent.UPLOAD_FILE );
if ( fixChecksums )
@@ -481,7 +486,7 @@ protected void saveFile( String repositoryId, FileMetadata fileMetadata, boolean
// explicitly update only if metadata-updater consumer is not enabled!
if ( !archivaAdministration.getKnownContentConsumers().contains( "metadata-updater" ) )
{
updateProjectMetadata( targetPath.getAbsolutePath(), lastUpdatedTimestamp, timestamp, newBuildNumber,
updateProjectMetadata( targetPath.toAbsolutePath().toString(), lastUpdatedTimestamp, timestamp, newBuildNumber,
fixChecksums, fileMetadata, groupId, artifactId, version, packaging );

if ( VersionUtil.isSnapshot( version ) )
@@ -507,17 +512,22 @@ protected void saveFile( String repositoryId, FileMetadata fileMetadata, boolean
throw new ArchivaRestServiceException( "RepositoryAdmin exception: " + e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e );
}
catch ( IOException e )
{
throw new ArchivaRestServiceException("Repository exception "+ e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
}
}

private ArchivaRepositoryMetadata getMetadata( File metadataFile )
private ArchivaRepositoryMetadata getMetadata( Path metadataFile )
throws RepositoryMetadataException
{
ArchivaRepositoryMetadata metadata = new ArchivaRepositoryMetadata();
if ( metadataFile.exists() )
if ( Files.exists(metadataFile) )
{
try
{
metadata = MavenMetadataReader.read( metadataFile.toPath() );
metadata = MavenMetadataReader.read( metadataFile );
}
catch ( XMLException e )
{
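
getMetadata() now takes a Path, so the metadataFile.toPath() bridge disappears. Where an API still expects the other type, the conversion is a single call in either direction; a tiny sketch (legacyRead and nioRead are hypothetical stand-ins for File-based and Path-based APIs):

    import java.io.File;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class BridgingSketch
    {
        // Stand-in for an API that still wants java.io.File.
        static void legacyRead( File f ) { System.out.println( "reading " + f ); }

        // NIO-first method, as the migrated code prefers.
        static void nioRead( Path p ) { System.out.println( "reading " + p ); }

        public static void main( String[] args )
        {
            Path metadataFile = Paths.get( "target", "maven-metadata.xml" );
            legacyRead( metadataFile.toFile() );                      // Path -> File when a legacy API requires it
            nioRead( new File( "maven-metadata.xml" ).toPath() );     // File -> Path going the other way
        }
    }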
@@ -527,7 +537,7 @@ private ArchivaRepositoryMetadata getMetadata( File metadataFile )
return metadata;
}

private File createPom( File targetPath, String filename, FileMetadata fileMetadata, String groupId,
private Path createPom( Path targetPath, String filename, FileMetadata fileMetadata, String groupId,
String artifactId, String version, String packaging )
throws IOException
{
@@ -538,10 +548,10 @@ private File createPom( File targetPath, String filename, FileMetadata fileMetad
projectModel.setVersion( version );
projectModel.setPackaging( packaging );

File pomFile = new File( targetPath, filename );
Path pomFile = targetPath.resolve( filename );
MavenXpp3Writer writer = new MavenXpp3Writer();

try (FileWriter w = new FileWriter( pomFile ))
try (FileWriter w = new FileWriter( pomFile.toFile() ))
{
writer.write( w, projectModel );
}
@@ -549,17 +559,17 @@ private File createPom( File targetPath, String filename, FileMetadata fileMetad
return pomFile;
}

private void fixChecksums( File file )
private void fixChecksums( Path file )
{
ChecksummedFile checksum = new ChecksummedFile( file.toPath() );
ChecksummedFile checksum = new ChecksummedFile( file );
checksum.fixChecksums( algorithms );
}

private void queueRepositoryTask( String repositoryId, File localFile )
private void queueRepositoryTask( String repositoryId, Path localFile )
{
RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repositoryId );
task.setResourceFile( localFile.toPath() );
task.setResourceFile( localFile );
task.setUpdateRelatedArtifacts( true );
task.setScanAll( false );
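
createPom() now returns a Path but still opens the file through new FileWriter( pomFile.toFile() ). A fully NIO variant would go through Files.newBufferedWriter; a hedged sketch of that alternative (the explicit UTF-8 charset is an assumption, the original FileWriter uses the platform default):

    import java.io.IOException;
    import java.io.Writer;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class WritePomSketch
    {
        public static void main( String[] args ) throws IOException
        {
            Path pomFile = Paths.get( "target", "generated-demo.pom" ); // hypothetical location
            try ( Writer w = Files.newBufferedWriter( pomFile, StandardCharsets.UTF_8 ) )
            {
                w.write( "<project/>" ); // the real code hands the writer to MavenXpp3Writer
            }
        }
    }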
@@ -570,20 +580,20 @@ private void queueRepositoryTask( String repositoryId, File localFile )
catch ( TaskQueueException e )
{
log.error( "Unable to queue repository task to execute consumers on resource file ['{}"
+ "'].", localFile.getName() );
+ "'].", localFile.getFileName() );
}
}

private void copyFile( File sourceFile, File targetPath, String targetFilename, boolean fixChecksums )
private void copyFile( Path sourceFile, Path targetPath, String targetFilename, boolean fixChecksums )
throws IOException
{

Files.copy( sourceFile.toPath(), new File( targetPath, targetFilename ).toPath(), StandardCopyOption.REPLACE_EXISTING,
Files.copy( sourceFile, targetPath.resolve( targetFilename ), StandardCopyOption.REPLACE_EXISTING,
StandardCopyOption.COPY_ATTRIBUTES );

if ( fixChecksums )
{
fixChecksums( new File( targetPath, targetFilename ) );
fixChecksums( targetPath.resolve( targetFilename ) );
}
}

@@ -598,12 +608,12 @@ private void updateProjectMetadata( String targetPath, Date lastUpdatedTimestamp
List<String> availableVersions = new ArrayList<>();
String latestVersion = version;

File projectDir = new File( targetPath ).getParentFile();
File projectMetadataFile = new File( projectDir, MetadataTools.MAVEN_METADATA );
Path projectDir = Paths.get(targetPath).getParent();
Path projectMetadataFile = projectDir.resolve( MetadataTools.MAVEN_METADATA );

ArchivaRepositoryMetadata projectMetadata = getMetadata( projectMetadataFile );

if ( projectMetadataFile.exists() )
if ( Files.exists(projectMetadataFile) )
{
availableVersions = projectMetadata.getAvailableVersions();

@@ -643,7 +653,7 @@ private void updateProjectMetadata( String targetPath, Date lastUpdatedTimestamp
projectMetadata.setReleasedVersion( latestVersion );
}

RepositoryMetadataWriter.write( projectMetadata, projectMetadataFile.toPath() );
RepositoryMetadataWriter.write( projectMetadata, projectMetadataFile );

if ( fixChecksums )
{
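
copyFile() now passes Path arguments straight to Files.copy with the same copy options as before. A self-contained sketch of that call (source and target names are invented):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.StandardCopyOption;

    public class CopyFileSketch
    {
        public static void main( String[] args ) throws IOException
        {
            Path sourceFile = Files.createTempFile( "source", ".jar" );
            Path targetPath = Files.createTempDirectory( "repo" );

            // Overwrites an existing target and keeps file attributes, as in the diff.
            Files.copy( sourceFile, targetPath.resolve( "artifact.jar" ),
                        StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.COPY_ATTRIBUTES );
        }
    }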
@@ -655,13 +665,13 @@ private void updateProjectMetadata( String targetPath, Date lastUpdatedTimestamp
* Update version level metadata for snapshot artifacts. If it does not exist, create the metadata and fix checksums
* if necessary.
*/
private void updateVersionMetadata( ArchivaRepositoryMetadata metadata, File metadataFile,
private void updateVersionMetadata( ArchivaRepositoryMetadata metadata, Path metadataFile,
Date lastUpdatedTimestamp, String timestamp, int buildNumber,
boolean fixChecksums, FileMetadata fileMetadata, String groupId,
String artifactId, String version, String packaging )
throws RepositoryMetadataException
{
if ( !metadataFile.exists() )
if ( !Files.exists(metadataFile) )
{
metadata.setGroupId( groupId );
metadata.setArtifactId( artifactId );
@@ -677,7 +687,7 @@ private void updateVersionMetadata( ArchivaRepositoryMetadata metadata, File met
metadata.getSnapshotVersion().setTimestamp( timestamp );
metadata.setLastUpdatedTimestamp( lastUpdatedTimestamp );

RepositoryMetadataWriter.write( metadata, metadataFile.toPath() );
RepositoryMetadataWriter.write( metadata, metadataFile );

if ( fixChecksums )
{
@ -29,7 +29,9 @@
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Collections;
|
||||
import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
|
||||
import org.junit.runner.RunWith;
|
||||
@ -46,13 +48,13 @@ public class RuntimeInfoServiceTest
|
||||
public void startServer()
|
||||
throws Exception
|
||||
{
|
||||
File appServerBase = new File( System.getProperty( "appserver.base" ) );
|
||||
Path appServerBase = Paths.get( System.getProperty( "appserver.base" ) );
|
||||
|
||||
File jcrDirectory = new File( appServerBase, "jcr" );
|
||||
Path jcrDirectory = appServerBase.resolve( "jcr" );
|
||||
|
||||
if ( jcrDirectory.exists() )
|
||||
if ( Files.exists(jcrDirectory) )
|
||||
{
|
||||
FileUtils.deleteDirectory( jcrDirectory );
|
||||
org.apache.archiva.common.utils.FileUtils.deleteDirectory( jcrDirectory );
|
||||
}
|
||||
|
||||
super.startServer();
|
||||
|
@ -33,7 +33,6 @@
|
||||
|
||||
import javax.ws.rs.RedirectionException;
|
||||
import javax.ws.rs.core.Response;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
import java.nio.file.Files;
|
||||
@ -54,8 +53,8 @@ public static void setAppServerBase()
|
||||
{
|
||||
previousAppServerBase = System.getProperty( "appserver.base" );
|
||||
System.setProperty( "appserver.base",
|
||||
new File( System.getProperty( "java.io.tmpdir" ) ).getCanonicalPath() + "/target/"
|
||||
+ DownloadArtifactFromQueryTest.class.getName() );
|
||||
Paths.get( System.getProperty( "java.io.tmpdir" ) ).toAbsolutePath().resolve("target")
|
||||
.resolve(DownloadArtifactFromQueryTest.class.getName() ).toString());
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
|
@ -43,9 +43,10 @@
|
||||
import javax.servlet.http.HttpServlet;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.List;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipFile;
|
||||
@@ -169,24 +170,21 @@ public void downloadWithRemoteRedirect()

getUserService( authorizationHeader ).removeFromCache( "guest" );

File file = new File( "target/junit-4.9.jar" );
if ( file.exists() )
{
file.delete();
}
Path file = Paths.get( "target/junit-4.9.jar" );
Files.deleteIfExists( file );

HttpWagon httpWagon = new HttpWagon();
httpWagon.connect( new Repository( "foo", "http://localhost:" + port ) );

httpWagon.get( "repository/internal/junit/junit/4.9/junit-4.9.jar", file );
httpWagon.get( "repository/internal/junit/junit/4.9/junit-4.9.jar", file.toFile() );

ZipFile zipFile = new ZipFile( file );
ZipFile zipFile = new ZipFile( file.toFile() );
List<String> entries = getZipEntriesNames( zipFile );
ZipEntry zipEntry = zipFile.getEntry( "org/junit/runners/JUnit4.class" );
assertNotNull( "cannot find zipEntry org/junit/runners/JUnit4.class, entries: " + entries + ", content is: "
+ FileUtils.readFileToString( file ), zipEntry );
+ FileUtils.readFileToString( file.toFile() ), zipEntry );
zipFile.close();
file.deleteOnExit();
file.toFile().deleteOnExit();
}

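
The test above keeps a Path for the downloaded artifact and converts with toFile() wherever a library still expects java.io.File (Wagon, ZipFile, commons-io). A compact sketch of that pattern (the jar path is illustrative and the download step is elided):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.zip.ZipFile;

    public class ToFileBridgeSketch
    {
        public static void main( String[] args ) throws IOException
        {
            Path file = Paths.get( "target/junit-4.9.jar" );
            Files.deleteIfExists( file );              // replaces the exists()/delete() pair

            // ... download into 'file' with a File-based client, e.g. wagon.get( resource, file.toFile() ) ...

            if ( Files.exists( file ) )
            {
                try ( ZipFile zipFile = new ZipFile( file.toFile() ) ) // ZipFile still wants java.io.File
                {
                    System.out.println( zipFile.size() + " entries" );
                }
            }
            file.toFile().deleteOnExit();              // Path has no deleteOnExit()
        }
    }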
@ -217,8 +215,8 @@ public static class RepoServlet
|
||||
protected void doGet( HttpServletRequest req, HttpServletResponse resp )
|
||||
throws ServletException, IOException
|
||||
{
|
||||
File jar = new File( System.getProperty( "basedir" ), "src/test/junit-4.9.jar" );
|
||||
Files.copy( jar.toPath(), resp.getOutputStream() );
|
||||
Path jar = Paths.get( System.getProperty( "basedir" ), "src/test/junit-4.9.jar" );
|
||||
Files.copy( jar, resp.getOutputStream() );
|
||||
|
||||
}
|
||||
}
|
||||
|
@ -32,18 +32,20 @@
|
||||
import org.apache.archiva.rest.api.services.RepositoryGroupService;
|
||||
import org.apache.archiva.rest.api.services.SearchService;
|
||||
import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import org.junit.After;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
/**
|
||||
* @author Olivier Lamy
|
||||
*/
|
||||
@ -76,10 +78,10 @@ public void cleanup()
|
||||
throws Exception
|
||||
{
|
||||
super.tearDown();
|
||||
File tmpIndexDir = new File( System.getProperty( "java.io.tmpdir" ) + "/tmpIndex" );
|
||||
if ( tmpIndexDir.exists() )
|
||||
Path tmpIndexDir = Paths.get( System.getProperty( "java.io.tmpdir" ), "tmpIndex" );
|
||||
if ( Files.exists(tmpIndexDir) )
|
||||
{
|
||||
FileUtils.deleteDirectory( tmpIndexDir );
|
||||
org.apache.archiva.common.utils.FileUtils.deleteDirectory( tmpIndexDir );
|
||||
}
|
||||
}
|
||||
|
||||
@ -88,10 +90,10 @@ public void cleanup()
|
||||
public void downloadMergedIndexWithNonDefaultPath()
|
||||
throws Exception
|
||||
{
|
||||
File tmpIndexDir = new File( System.getProperty( "java.io.tmpdir" ) + "/tmpIndex" );
|
||||
if ( tmpIndexDir.exists() )
|
||||
Path tmpIndexDir = Paths.get( System.getProperty( "java.io.tmpdir" ), "tmpIndex" );
|
||||
if ( Files.exists(tmpIndexDir) )
|
||||
{
|
||||
FileUtils.deleteDirectory( tmpIndexDir );
|
||||
org.apache.archiva.common.utils.FileUtils.deleteDirectory( tmpIndexDir );
|
||||
}
|
||||
String id = Long.toString( System.currentTimeMillis() );
|
||||
ManagedRepository managedRepository = new ManagedRepository();
|
||||
|
@ -39,7 +39,6 @@
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
@ -63,8 +62,8 @@ public static void setAppServerBase()
|
||||
{
|
||||
previousAppServerBase = System.getProperty( "appserver.base" );
|
||||
System.setProperty( "appserver.base",
|
||||
new File( System.getProperty( "java.io.tmpdir" ) ).getCanonicalPath() + "/target/"
|
||||
+ DownloadMergedIndexTest.class.getName()
|
||||
Paths.get(System.getProperty( "java.io.tmpdir" ) ).toAbsolutePath().resolve( "target").resolve(
|
||||
DownloadMergedIndexTest.class.getName()).toString()
|
||||
);
|
||||
}
|
||||
|
||||
@ -85,10 +84,10 @@ public void cleanup()
|
||||
throws Exception
|
||||
{
|
||||
super.tearDown();
|
||||
File tmpIndexDir = new File( System.getProperty( "java.io.tmpdir" ) + "/tmpIndex" );
|
||||
if ( tmpIndexDir.exists() )
|
||||
Path tmpIndexDir = Paths.get( System.getProperty( "java.io.tmpdir" ) + "/tmpIndex" );
|
||||
if ( Files.exists(tmpIndexDir) )
|
||||
{
|
||||
FileUtils.deleteDirectory( tmpIndexDir );
|
||||
org.apache.archiva.common.utils.FileUtils.deleteDirectory( tmpIndexDir );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -34,7 +34,9 @@
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.List;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipFile;
|
||||
@ -74,10 +76,10 @@ public void downloadSNAPSHOT()
|
||||
throws Exception
|
||||
{
|
||||
|
||||
File tmpIndexDir = new File( System.getProperty( "java.io.tmpdir" ) + "/tmpIndex" );
|
||||
if ( tmpIndexDir.exists() )
|
||||
Path tmpIndexDir = Paths.get( System.getProperty( "java.io.tmpdir" ) + "/tmpIndex" );
|
||||
if ( Files.exists(tmpIndexDir) )
|
||||
{
|
||||
FileUtils.deleteDirectory( tmpIndexDir );
|
||||
org.apache.archiva.common.utils.FileUtils.deleteDirectory( tmpIndexDir );
|
||||
}
|
||||
String id = Long.toString( System.currentTimeMillis() );
|
||||
ManagedRepository managedRepository = new ManagedRepository();
|
||||
@ -111,24 +113,21 @@ public void downloadSNAPSHOT()
|
||||
|
||||
getUserService( authorizationHeader ).removeFromCache( "guest" );
|
||||
|
||||
File file = new File( "target/archiva-model-1.4-M4-SNAPSHOT.jar" );
|
||||
if ( file.exists() )
|
||||
{
|
||||
file.delete();
|
||||
}
|
||||
Path file = Paths.get( "target/archiva-model-1.4-M4-SNAPSHOT.jar" );
|
||||
Files.deleteIfExists(file);
|
||||
|
||||
HttpWagon httpWagon = new HttpWagon();
|
||||
httpWagon.connect( new Repository( "foo", "http://localhost:" + port ) );
|
||||
|
||||
httpWagon.get( "/repository/"+ id +"/org/apache/archiva/archiva-model/1.4-M4-SNAPSHOT/archiva-model-1.4-M4-SNAPSHOT.jar", file );
|
||||
httpWagon.get( "/repository/"+ id +"/org/apache/archiva/archiva-model/1.4-M4-SNAPSHOT/archiva-model-1.4-M4-SNAPSHOT.jar", file.toFile() );
|
||||
|
||||
ZipFile zipFile = new ZipFile( file );
|
||||
ZipFile zipFile = new ZipFile( file.toFile() );
|
||||
List<String> entries = getZipEntriesNames( zipFile );
|
||||
ZipEntry zipEntry = zipFile.getEntry( "org/apache/archiva/model/ArchivaArtifact.class" );
|
||||
assertNotNull( "cannot find zipEntry org/apache/archiva/model/ArchivaArtifact.class, entries: " + entries + ", content is: "
|
||||
+ FileUtils.readFileToString( file ), zipEntry );
|
||||
+ FileUtils.readFileToString( file.toFile() ), zipEntry );
|
||||
zipFile.close();
|
||||
file.deleteOnExit();
|
||||
file.toFile().deleteOnExit();
|
||||
|
||||
|
||||
|
||||
|
@ -34,8 +34,9 @@
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@ -72,7 +73,7 @@ public void checkRemoteConnectivity()
|
||||
{
|
||||
|
||||
Server repoServer =
|
||||
buildStaticServer( new File( System.getProperty( "basedir" ) + "/src/test/repositories/test-repo" ) );
|
||||
buildStaticServer( Paths.get( System.getProperty( "basedir" ), "src/test/repositories/test-repo" ) );
|
||||
|
||||
ServerConnector serverConnector = new ServerConnector( repoServer, new HttpConnectionFactory());
|
||||
repoServer.addConnector( serverConnector );
|
||||
@ -107,7 +108,7 @@ public void checkRemoteConnectivityEmptyRemote()
|
||||
throws Exception
|
||||
{
|
||||
|
||||
File tmpDir = Files.createTempDirectory( "test" ).toFile();
|
||||
Path tmpDir = Files.createTempDirectory( "test" );
|
||||
Server repoServer = buildStaticServer( tmpDir );
|
||||
ServerConnector serverConnector = new ServerConnector( repoServer, new HttpConnectionFactory());
|
||||
repoServer.addConnector( serverConnector );
|
||||
@ -133,7 +134,7 @@ public void checkRemoteConnectivityEmptyRemote()
|
||||
finally
|
||||
{
|
||||
service.deleteRemoteRepository( "id-new" );
|
||||
FileUtils.deleteQuietly( tmpDir );
|
||||
org.apache.archiva.common.utils.FileUtils.deleteQuietly( tmpDir );
|
||||
repoServer.stop();
|
||||
}
|
||||
}
|
||||
@ -165,14 +166,14 @@ public void checkRemoteConnectivityFail()
|
||||
}
|
||||
}
|
||||
|
||||
protected Server buildStaticServer( File path )
|
||||
protected Server buildStaticServer( Path path )
|
||||
{
|
||||
Server repoServer = new Server( );
|
||||
|
||||
ResourceHandler resourceHandler = new ResourceHandler();
|
||||
resourceHandler.setDirectoriesListed( true );
|
||||
resourceHandler.setWelcomeFiles( new String[]{ "index.html" } );
|
||||
resourceHandler.setResourceBase( path.getAbsolutePath() );
|
||||
resourceHandler.setResourceBase( path.toAbsolutePath().toString() );
|
||||
|
||||
HandlerList handlers = new HandlerList();
|
||||
handlers.setHandlers( new Handler[]{ resourceHandler, new DefaultHandler() } );
|
||||
|
@ -33,7 +33,6 @@
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
@ -210,9 +209,8 @@ public static Path takeScreenShot( String fileName, WebDriver driver) {
|
||||
FileUtils.writeStringToFile( htmlFile.toFile(), pageSource, encoding);
|
||||
try
|
||||
{
|
||||
File scrs = ((TakesScreenshot)driver).getScreenshotAs( OutputType.FILE );
|
||||
result = scrs.toPath();
|
||||
Files.copy(result, screenShotFile);
|
||||
Path scrs = ((TakesScreenshot)driver).getScreenshotAs( OutputType.FILE ).toPath();
|
||||
Files.copy(scrs, screenShotFile);
|
||||
}
|
||||
catch ( Exception e )
|
||||
{
|
||||
|
@ -20,13 +20,13 @@
|
||||
*/
|
||||
|
||||
import org.apache.archiva.admin.model.beans.ManagedRepository;
|
||||
import org.apache.archiva.metadata.model.facets.AuditEvent;
|
||||
import org.apache.archiva.repository.events.AuditListener;
|
||||
import org.apache.archiva.common.filelock.FileLockException;
|
||||
import org.apache.archiva.common.filelock.FileLockManager;
|
||||
import org.apache.archiva.common.filelock.FileLockTimeoutException;
|
||||
import org.apache.archiva.common.filelock.Lock;
|
||||
import org.apache.archiva.metadata.model.facets.AuditEvent;
|
||||
import org.apache.archiva.redback.components.taskqueue.TaskQueueException;
|
||||
import org.apache.archiva.repository.events.AuditListener;
|
||||
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
|
||||
import org.apache.archiva.scheduler.repository.model.RepositoryArchivaTaskScheduler;
|
||||
import org.apache.archiva.scheduler.repository.model.RepositoryTask;
|
||||
@ -64,13 +64,15 @@
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
/**
|
||||
*/
|
||||
@ -83,7 +85,7 @@ public class ArchivaDavResource
|
||||
|
||||
private final DavResourceFactory factory;
|
||||
|
||||
private final File localResource;
|
||||
private final Path localResource;
|
||||
|
||||
private final String logicalResource;
|
||||
|
||||
@ -116,7 +118,7 @@ public ArchivaDavResource( String localResource, String logicalResource, Managed
|
||||
MimeTypes mimeTypes, List<AuditListener> auditListeners,
|
||||
RepositoryArchivaTaskScheduler scheduler, FileLockManager fileLockManager )
|
||||
{
|
||||
this.localResource = new File( localResource );
|
||||
this.localResource = Paths.get( localResource );
|
||||
this.logicalResource = logicalResource;
|
||||
this.locator = locator;
|
||||
this.factory = factory;
|
||||
@ -160,13 +162,13 @@ public String getSupportedMethods()
|
||||
@Override
|
||||
public boolean exists()
|
||||
{
|
||||
return localResource.exists();
|
||||
return Files.exists(localResource);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isCollection()
|
||||
{
|
||||
return localResource.isDirectory();
|
||||
return Files.isDirectory(localResource);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -182,7 +184,7 @@ public DavResourceLocator getLocator()
|
||||
return locator;
|
||||
}
|
||||
|
||||
public File getLocalResource()
|
||||
public Path getLocalResource()
|
||||
{
|
||||
return localResource;
|
||||
}
|
||||
@@ -202,7 +204,15 @@ public String getHref()
@Override
public long getModificationTime()
{
return localResource.lastModified();
try
{
return Files.getLastModifiedTime(localResource).toMillis();
}
catch ( IOException e )
{
log.error("Could not get modification time of {}: {}", localResource, e.getMessage(), e);
return 0;
}
}

@Override
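
getModificationTime() now goes through Files.getLastModifiedTime, which returns a FileTime and, unlike File.lastModified(), signals failure with an IOException rather than returning 0. A minimal sketch of the new shape (the 0 fallback mirrors the diff):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class ModificationTimeSketch
    {
        static long modificationTime( Path localResource )
        {
            try
            {
                return Files.getLastModifiedTime( localResource ).toMillis();
            }
            catch ( IOException e )
            {
                // File.lastModified() silently returned 0L; with NIO the error is handled explicitly.
                return 0;
            }
        }

        public static void main( String[] args )
        {
            System.out.println( modificationTime( Paths.get( "." ) ) );
        }
    }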
@ -211,15 +221,15 @@ public void spool( OutputContext outputContext )
|
||||
{
|
||||
if ( !isCollection() )
|
||||
{
|
||||
outputContext.setContentLength( localResource.length() );
|
||||
outputContext.setContentType( mimeTypes.getMimeType( localResource.getName() ) );
|
||||
outputContext.setContentLength( Files.size( localResource ) );
|
||||
outputContext.setContentType( mimeTypes.getMimeType( localResource.getFileName().toString() ) );
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
if ( !isCollection() && outputContext.hasStream() )
|
||||
{
|
||||
Lock lock = fileLockManager.readFileLock( localResource );
|
||||
Lock lock = fileLockManager.readFileLock( localResource.toFile() );
|
||||
try (InputStream is = Files.newInputStream( lock.getFile().toPath() ))
|
||||
{
|
||||
IOUtils.copy( is, outputContext.getOutputStream() );
|
||||
@ -314,12 +324,12 @@ public DavResource getCollection()
|
||||
public void addMember( DavResource resource, InputContext inputContext )
|
||||
throws DavException
|
||||
{
|
||||
File localFile = new File( localResource, resource.getDisplayName() );
|
||||
boolean exists = localFile.exists();
|
||||
Path localFile = localResource.resolve( resource.getDisplayName() );
|
||||
boolean exists = Files.exists(localFile);
|
||||
|
||||
if ( isCollection() && inputContext.hasStream() ) // New File
|
||||
{
|
||||
try (OutputStream stream = Files.newOutputStream( localFile.toPath() ))
|
||||
try (OutputStream stream = Files.newOutputStream( localFile ))
|
||||
{
|
||||
IOUtils.copy( inputContext.getInputStream(), stream );
|
||||
}
|
||||
@ -330,14 +340,22 @@ public void addMember( DavResource resource, InputContext inputContext )
|
||||
|
||||
// TODO: a bad deployment shouldn't delete an existing file - do we need to write to a temporary location first?
|
||||
long expectedContentLength = inputContext.getContentLength();
|
||||
long actualContentLength = localFile.length();
|
||||
long actualContentLength = 0;
|
||||
try
|
||||
{
|
||||
actualContentLength = Files.size(localFile);
|
||||
}
|
||||
catch ( IOException e )
|
||||
{
|
||||
log.error( "Could not get length of file {}: {}", localFile, e.getMessage(), e );
|
||||
}
|
||||
// length of -1 is given for a chunked request or unknown length, in which case we accept what was uploaded
|
||||
if ( expectedContentLength >= 0 && expectedContentLength != actualContentLength )
|
||||
{
|
||||
String msg = "Content Header length was " + expectedContentLength + " but was " + actualContentLength;
|
||||
log.debug( "Upload failed: {}", msg );
|
||||
|
||||
FileUtils.deleteQuietly( localFile );
|
||||
org.apache.archiva.common.utils.FileUtils.deleteQuietly( localFile );
|
||||
throw new DavException( HttpServletResponse.SC_BAD_REQUEST, msg );
|
||||
}
|
||||
|
||||
@ -350,7 +368,14 @@ public void addMember( DavResource resource, InputContext inputContext )
|
||||
}
|
||||
else if ( !inputContext.hasStream() && isCollection() ) // New directory
|
||||
{
|
||||
localFile.mkdir();
|
||||
try
|
||||
{
|
||||
Files.createDirectories( localFile );
|
||||
}
|
||||
catch ( IOException e )
|
||||
{
|
||||
log.error("Could not create directory {}: {}", localFile, e.getMessage(), e);
|
||||
}
|
||||
|
||||
log.debug( "Directory '{}' (current user '{}')", resource.getDisplayName(), this.principal );
|
||||
|
||||
@ -371,28 +396,34 @@ public DavResourceIterator getMembers()
|
||||
List<DavResource> list = new ArrayList<>();
|
||||
if ( exists() && isCollection() )
|
||||
{
|
||||
for ( String item : localResource.list() )
|
||||
try ( Stream<Path> stream = Files.list(localResource))
|
||||
{
|
||||
try
|
||||
stream.forEach ( p ->
|
||||
{
|
||||
if ( !item.startsWith( HIDDEN_PATH_PREFIX ) )
|
||||
String item = p.toString();
|
||||
try
|
||||
{
|
||||
String path = locator.getResourcePath() + '/' + item;
|
||||
DavResourceLocator resourceLocator =
|
||||
locator.getFactory().createResourceLocator( locator.getPrefix(), path );
|
||||
DavResource resource = factory.createResource( resourceLocator, session );
|
||||
|
||||
if ( resource != null )
|
||||
if ( !item.startsWith( HIDDEN_PATH_PREFIX ) )
|
||||
{
|
||||
list.add( resource );
|
||||
String path = locator.getResourcePath( ) + '/' + item;
|
||||
DavResourceLocator resourceLocator =
|
||||
locator.getFactory( ).createResourceLocator( locator.getPrefix( ), path );
|
||||
DavResource resource = factory.createResource( resourceLocator, session );
|
||||
|
||||
if ( resource != null )
|
||||
{
|
||||
list.add( resource );
|
||||
}
|
||||
log.debug( "Resource '{}' retrieved by '{}'", item, this.principal );
|
||||
}
|
||||
log.debug( "Resource '{}' retrieved by '{}'", item, this.principal );
|
||||
}
|
||||
}
|
||||
catch ( DavException e )
|
||||
{
|
||||
// Should not occur
|
||||
}
|
||||
catch ( DavException e )
|
||||
{
|
||||
// Should not occur
|
||||
}
|
||||
});
|
||||
} catch (IOException e) {
|
||||
log.error("Error while listing {}", localResource);
|
||||
}
|
||||
}
|
||||
return new DavResourceIteratorImpl( list );
|
||||
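
The getMembers() hunk above replaces File.list() with Files.list(...). Unlike the String[] returned by File.list(), the Stream<Path> holds an open directory handle and must be closed, which is why the diff wraps it in try-with-resources. A reduced sketch of the idiom (the leading-dot filter is a stand-in for Archiva's HIDDEN_PATH_PREFIX check):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.stream.Stream;

    public class ListDirectorySketch
    {
        public static void main( String[] args ) throws IOException
        {
            Path localResource = Paths.get( "." );
            try ( Stream<Path> stream = Files.list( localResource ) ) // must be closed to release the directory handle
            {
                stream.map( p -> p.getFileName().toString() )
                      .filter( name -> !name.startsWith( "." ) )      // stand-in for the HIDDEN_PATH_PREFIX filter
                      .forEach( System.out::println );
            }
        }
    }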
@ -402,32 +433,24 @@ public DavResourceIterator getMembers()
|
||||
public void removeMember( DavResource member )
|
||||
throws DavException
|
||||
{
|
||||
File resource = checkDavResourceIsArchivaDavResource( member ).getLocalResource();
|
||||
Path resource = checkDavResourceIsArchivaDavResource( member ).getLocalResource();
|
||||
|
||||
if ( resource.exists() )
|
||||
if ( Files.exists(resource) )
|
||||
{
|
||||
try
|
||||
{
|
||||
if ( resource.isDirectory() )
|
||||
if ( Files.isDirectory(resource) )
|
||||
{
|
||||
if ( !FileUtils.deleteQuietly( resource ) )
|
||||
{
|
||||
throw new IOException( "Could not remove directory" );
|
||||
}
|
||||
|
||||
org.apache.archiva.common.utils.FileUtils.deleteDirectory( resource );
|
||||
triggerAuditEvent( member, AuditEvent.REMOVE_DIR );
|
||||
}
|
||||
else
|
||||
{
|
||||
if ( !resource.delete() )
|
||||
{
|
||||
throw new IOException( "Could not remove file" );
|
||||
}
|
||||
|
||||
Files.deleteIfExists( resource );
|
||||
triggerAuditEvent( member, AuditEvent.REMOVE_FILE );
|
||||
}
|
||||
|
||||
log.debug( "{}{}' removed (current user '{}')", ( resource.isDirectory() ? "Directory '" : "File '" ),
|
||||
log.debug( "{}{}' removed (current user '{}')", ( Files.isDirectory(resource) ? "Directory '" : "File '" ),
|
||||
member.getDisplayName(), this.principal );
|
||||
|
||||
}
|
||||
@ -471,19 +494,19 @@ public void move( DavResource destination )
|
||||
ArchivaDavResource resource = checkDavResourceIsArchivaDavResource( destination );
|
||||
if ( isCollection() )
|
||||
{
|
||||
FileUtils.moveDirectory( getLocalResource(), resource.getLocalResource() );
|
||||
FileUtils.moveDirectory( getLocalResource().toFile(), resource.getLocalResource().toFile() );
|
||||
|
||||
triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.MOVE_DIRECTORY );
|
||||
}
|
||||
else
|
||||
{
|
||||
FileUtils.moveFile( getLocalResource(), resource.getLocalResource() );
|
||||
FileUtils.moveFile( getLocalResource().toFile(), resource.getLocalResource().toFile() );
|
||||
|
||||
triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.MOVE_FILE );
|
||||
}
|
||||
|
||||
log.debug( "{}{}' moved to '{}' (current user '{}')", ( isCollection() ? "Directory '" : "File '" ),
|
||||
getLocalResource().getName(), destination, this.principal );
|
||||
getLocalResource().getFileName(), destination, this.principal );
|
||||
|
||||
}
|
||||
catch ( IOException e )
|
||||
@ -511,19 +534,19 @@ public void copy( DavResource destination, boolean shallow )
|
||||
ArchivaDavResource resource = checkDavResourceIsArchivaDavResource( destination );
|
||||
if ( isCollection() )
|
||||
{
|
||||
FileUtils.copyDirectory( getLocalResource(), resource.getLocalResource() );
|
||||
FileUtils.copyDirectory( getLocalResource().toFile(), resource.getLocalResource().toFile() );
|
||||
|
||||
triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.COPY_DIRECTORY );
|
||||
}
|
||||
else
|
||||
{
|
||||
FileUtils.copyFile( getLocalResource(), resource.getLocalResource() );
|
||||
FileUtils.copyFile( getLocalResource().toFile(), resource.getLocalResource().toFile() );
|
||||
|
||||
triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.COPY_FILE );
|
||||
}
|
||||
|
||||
log.debug( "{}{}' copied to '{}' (current user '{}')", ( isCollection() ? "Directory '" : "File '" ),
|
||||
getLocalResource().getName(), destination, this.principal );
|
||||
getLocalResource().getFileName(), destination, this.principal );
|
||||
|
||||
}
|
||||
catch ( IOException e )
|
||||
@ -672,7 +695,16 @@ protected DavPropertySet initProperties()
|
||||
}
|
||||
|
||||
// Need to get the ISO8601 date for properties
|
||||
DateTime dt = new DateTime( localResource.lastModified() );
|
||||
DateTime dt = null;
|
||||
try
|
||||
{
|
||||
dt = new DateTime( Files.getLastModifiedTime( localResource ).toMillis() );
|
||||
}
|
||||
catch ( IOException e )
|
||||
{
|
||||
log.error("Could not get modification time of {}: {}", localResource, e.getMessage(), e);
|
||||
dt = new DateTime();
|
||||
}
|
||||
DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
|
||||
String modifiedDate = fmt.print( dt );
|
||||
|
||||
@ -680,7 +712,15 @@ protected DavPropertySet initProperties()
|
||||
|
||||
properties.add( new DefaultDavProperty( DavPropertyName.CREATIONDATE, modifiedDate ) );
|
||||
|
||||
properties.add( new DefaultDavProperty( DavPropertyName.GETCONTENTLENGTH, localResource.length() ) );
|
||||
try
|
||||
{
|
||||
properties.add( new DefaultDavProperty( DavPropertyName.GETCONTENTLENGTH, Files.size(localResource) ) );
|
||||
}
|
||||
catch ( IOException e )
|
||||
{
|
||||
log.error("Could not get file size of {}: {}", localResource, e.getMessage(), e);
|
||||
properties.add( new DefaultDavProperty( DavPropertyName.GETCONTENTLENGTH, 0 ) );
|
||||
}
|
||||
|
||||
this.properties = properties;
|
||||
|
||||
@ -709,11 +749,11 @@ private void triggerAuditEvent( String remoteIP, String repositoryId, String res
|
||||
}
|
||||
}
|
||||
|
||||
private void queueRepositoryTask( File localFile )
|
||||
private void queueRepositoryTask( Path localFile )
|
||||
{
|
||||
RepositoryTask task = new RepositoryTask();
|
||||
task.setRepositoryId( repository.getId() );
|
||||
task.setResourceFile( localFile.toPath() );
|
||||
task.setResourceFile( localFile );
|
||||
task.setUpdateRelatedArtifacts( false );
|
||||
task.setScanAll( false );
|
||||
|
||||
@ -724,7 +764,7 @@ private void queueRepositoryTask( File localFile )
|
||||
catch ( TaskQueueException e )
|
||||
{
|
||||
log.error( "Unable to queue repository task to execute consumers on resource file ['{}"
|
||||
+ "'].", localFile.getName() );
|
||||
+ "'].", localFile.getFileName() );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -74,7 +74,6 @@
|
||||
import org.apache.archiva.webdav.util.TemporaryGroupIndexSessionCleaner;
|
||||
import org.apache.archiva.webdav.util.WebdavMethodUtil;
|
||||
import org.apache.archiva.xml.XMLException;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.FilenameUtils;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.commons.lang.SystemUtils;
|
||||
@ -102,10 +101,10 @@
|
||||
import javax.inject.Named;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import javax.servlet.http.HttpSession;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
@ -276,10 +275,10 @@ public DavResource createResource( final DavResourceLocator locator, final DavSe
|
||||
{
|
||||
String logicalResource = getLogicalResource( archivaLocator, null, false );
|
||||
IndexingContext indexingContext = remoteRepositoryAdmin.createIndexContext( remoteRepository );
|
||||
File resourceFile = StringUtils.equals( logicalResource, "/" )
|
||||
? new File( indexingContext.getIndexDirectoryFile().getParent() )
|
||||
: new File( indexingContext.getIndexDirectoryFile().getParent(), logicalResource );
|
||||
resource = new ArchivaDavResource( resourceFile.getAbsolutePath(), //
|
||||
Path resourceFile = StringUtils.equals( logicalResource, "/" )
|
||||
? Paths.get( indexingContext.getIndexDirectoryFile().getParent() )
|
||||
: Paths.get( indexingContext.getIndexDirectoryFile().getParent(), logicalResource );
|
||||
resource = new ArchivaDavResource( resourceFile.toAbsolutePath().toString(), //
|
||||
locator.getResourcePath(), //
|
||||
null, //
|
||||
request.getRemoteAddr(), //
|
||||
@ -328,7 +327,7 @@ public DavResource createResource( final DavResourceLocator locator, final DavSe
|
||||
|
||||
String logicalResource = getLogicalResource( archivaLocator, null, false );
|
||||
resourcesInAbsolutePath.add(
|
||||
new File( managedRepositoryContent.getRepoRoot(), logicalResource ).getAbsolutePath() );
|
||||
Paths.get( managedRepositoryContent.getRepoRoot(), logicalResource ).toAbsolutePath().toString() );
|
||||
|
||||
}
|
||||
catch ( RepositoryAdminException e )
|
||||
@ -350,23 +349,23 @@ public DavResource createResource( final DavResourceLocator locator, final DavSe
|
||||
|
||||
ArchivaDavResource res = (ArchivaDavResource) resource;
|
||||
String filePath =
|
||||
StringUtils.substringBeforeLast( res.getLocalResource().getAbsolutePath().replace( '\\', '/' ),
|
||||
StringUtils.substringBeforeLast( res.getLocalResource().toAbsolutePath().toString().replace( '\\', '/' ),
|
||||
"/" );
|
||||
filePath = filePath + "/maven-metadata-" + repoGroupConfig.getId() + ".xml";
|
||||
|
||||
// for MRM-872 handle checksums of the merged metadata files
|
||||
if ( repositoryRequest.isSupportFile( requestedResource ) )
|
||||
{
|
||||
File metadataChecksum =
|
||||
new File( filePath + "." + StringUtils.substringAfterLast( requestedResource, "." ) );
|
||||
Path metadataChecksum =
|
||||
Paths.get( filePath + "." + StringUtils.substringAfterLast( requestedResource, "." ) );
|
||||
|
||||
if ( metadataChecksum.exists() )
|
||||
if ( Files.exists(metadataChecksum) )
|
||||
{
|
||||
LogicalResource logicalResource =
|
||||
new LogicalResource( getLogicalResource( archivaLocator, null, false ) );
|
||||
|
||||
resource =
|
||||
new ArchivaDavResource( metadataChecksum.getAbsolutePath(), logicalResource.getPath(), null,
|
||||
new ArchivaDavResource( metadataChecksum.toAbsolutePath().toString(), logicalResource.getPath(), null,
|
||||
request.getRemoteAddr(), activePrincipal, request.getDavSession(),
|
||||
archivaLocator, this, mimeTypes, auditListeners, scheduler,
|
||||
fileLockManager );
|
||||
@ -382,8 +381,8 @@ public DavResource createResource( final DavResourceLocator locator, final DavSe
|
||||
{
|
||||
try
|
||||
{
|
||||
File metadataFile = new File( resourceAbsPath );
|
||||
ArchivaRepositoryMetadata repoMetadata = MavenMetadataReader.read( metadataFile.toPath() );
|
||||
Path metadataFile = Paths.get( resourceAbsPath );
|
||||
ArchivaRepositoryMetadata repoMetadata = MavenMetadataReader.read( metadataFile );
|
||||
mergedMetadata = RepositoryMetadataMerge.merge( mergedMetadata, repoMetadata );
|
||||
}
|
||||
catch ( XMLException e )
|
||||
@ -400,13 +399,13 @@ public DavResource createResource( final DavResourceLocator locator, final DavSe
|
||||
|
||||
try
|
||||
{
|
||||
File resourceFile = writeMergedMetadataToFile( mergedMetadata, filePath );
|
||||
Path resourceFile = writeMergedMetadataToFile( mergedMetadata, filePath );
|
||||
|
||||
LogicalResource logicalResource =
|
||||
new LogicalResource( getLogicalResource( archivaLocator, null, false ) );
|
||||
|
||||
resource =
|
||||
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(), null,
|
||||
new ArchivaDavResource( resourceFile.toAbsolutePath().toString(), logicalResource.getPath(), null,
|
||||
request.getRemoteAddr(), activePrincipal,
|
||||
request.getDavSession(), archivaLocator, this, mimeTypes,
|
||||
auditListeners, scheduler, fileLockManager );
|
||||
@ -460,11 +459,11 @@ private DavResource processRepositoryGroup( final DavServletRequest request,
|
||||
{
|
||||
// we are in the case of index file request
|
||||
String requestedFileName = StringUtils.substringAfterLast( pathInfo, "/" );
|
||||
File temporaryIndexDirectory =
|
||||
Path temporaryIndexDirectory =
|
||||
buildMergedIndexDirectory( repositories, activePrincipal, request, repoGroupConfig );
|
||||
|
||||
File resourceFile = new File( temporaryIndexDirectory, requestedFileName );
|
||||
resource = new ArchivaDavResource( resourceFile.getAbsolutePath(), requestedFileName, null,
|
||||
Path resourceFile = temporaryIndexDirectory.resolve( requestedFileName );
|
||||
resource = new ArchivaDavResource( resourceFile.toAbsolutePath().toString(), requestedFileName, null,
|
||||
request.getRemoteAddr(), activePrincipal, request.getDavSession(),
|
||||
archivaLocator, this, mimeTypes, auditListeners, scheduler,
|
||||
fileLockManager );
|
||||
@ -505,7 +504,7 @@ private DavResource processRepositoryGroup( final DavServletRequest request,
|
||||
logicalResource = logicalResource.substring( 1 );
|
||||
}
|
||||
resourcesInAbsolutePath.add(
|
||||
new File( managedRepositoryContent.getRepoRoot(), logicalResource ).getAbsolutePath() );
|
||||
Paths.get( managedRepositoryContent.getRepoRoot(), logicalResource ).toAbsolutePath().toString() );
|
||||
}
|
||||
catch ( DavException e )
|
||||
{
|
||||
@ -606,15 +605,15 @@ private DavResource processRepository( final DavServletRequest request, ArchivaD
|
||||
path = path.substring( 1 );
|
||||
}
|
||||
LogicalResource logicalResource = new LogicalResource( path );
|
||||
File resourceFile = new File( managedRepositoryContent.getRepoRoot(), path );
|
||||
Path resourceFile = Paths.get( managedRepositoryContent.getRepoRoot(), path );
|
||||
resource =
|
||||
new ArchivaDavResource( resourceFile.getAbsolutePath(), path, managedRepositoryContent.getRepository(),
|
||||
new ArchivaDavResource( resourceFile.toAbsolutePath().toString(), path, managedRepositoryContent.getRepository(),
|
||||
request.getRemoteAddr(), activePrincipal, request.getDavSession(),
|
||||
archivaLocator, this, mimeTypes, auditListeners, scheduler, fileLockManager );
|
||||
|
||||
if ( WebdavMethodUtil.isReadMethod( request.getMethod() ) )
|
||||
{
|
||||
if ( archivaLocator.getHref( false ).endsWith( "/" ) && !resourceFile.isDirectory() )
|
||||
if ( archivaLocator.getHref( false ).endsWith( "/" ) && !Files.isDirectory( resourceFile ) )
|
||||
{
|
||||
// force a resource not found
|
||||
throw new DavException( HttpServletResponse.SC_NOT_FOUND, "Resource does not exist" );
|
||||
@ -623,7 +622,7 @@ private DavResource processRepository( final DavServletRequest request, ArchivaD
|
||||
{
|
||||
if ( !resource.isCollection() )
|
||||
{
|
||||
boolean previouslyExisted = resourceFile.exists();
|
||||
boolean previouslyExisted = Files.exists(resourceFile);
|
||||
|
||||
boolean fromProxy = fetchContentFromProxies( managedRepositoryContent, request, logicalResource );
|
||||
|
||||
@ -635,9 +634,9 @@ private DavResource processRepository( final DavServletRequest request, ArchivaD
|
||||
// repository expected path.
|
||||
String localResourcePath =
|
||||
repositoryRequest.toNativePath( logicalResource.getPath(), managedRepositoryContent );
|
||||
resourceFile = new File( managedRepositoryContent.getRepoRoot(), localResourcePath );
|
||||
resourceFile = Paths.get( managedRepositoryContent.getRepoRoot(), localResourcePath );
|
||||
resource =
|
||||
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
|
||||
new ArchivaDavResource( resourceFile.toAbsolutePath().toString(), logicalResource.getPath(),
|
||||
managedRepositoryContent.getRepository(),
|
||||
request.getRemoteAddr(), activePrincipal,
|
||||
request.getDavSession(), archivaLocator, this, mimeTypes,
|
||||
@ -645,7 +644,7 @@ private DavResource processRepository( final DavServletRequest request, ArchivaD
|
||||
}
|
||||
catch ( LayoutException e )
|
||||
{
|
||||
if ( !resourceFile.exists() )
|
||||
if ( !Files.exists(resourceFile) )
|
||||
{
|
||||
throw new DavException( HttpServletResponse.SC_NOT_FOUND, e );
|
||||
}
|
||||
@ -657,13 +656,13 @@ private DavResource processRepository( final DavServletRequest request, ArchivaD
|
||||
+ PROXIED_SUFFIX;
|
||||
|
||||
log.debug( "Proxied artifact '{}' in repository '{}' (current user '{}')",
|
||||
resourceFile.getName(), managedRepositoryContent.getId(), activePrincipal );
|
||||
resourceFile.getFileName(), managedRepositoryContent.getId(), activePrincipal );
|
||||
|
||||
triggerAuditEvent( request.getRemoteAddr(), archivaLocator.getRepositoryId(),
|
||||
logicalResource.getPath(), action, activePrincipal );
|
||||
}
|
||||
|
||||
if ( !resourceFile.exists() )
|
||||
if ( !Files.exists(resourceFile) )
|
||||
{
|
||||
throw new DavException( HttpServletResponse.SC_NOT_FOUND, "Resource does not exist" );
|
||||
}
|
||||
@ -711,15 +710,23 @@ private DavResource processRepository( final DavServletRequest request, ArchivaD
|
||||
* create the collections themselves.
|
||||
*/
|
||||
|
||||
File rootDirectory = new File( managedRepositoryContent.getRepoRoot() );
File destDir = new File( rootDirectory, logicalResource.getPath() ).getParentFile();
Path rootDirectory = Paths.get( managedRepositoryContent.getRepoRoot() );
Path destDir = rootDirectory.resolve( logicalResource.getPath() ).getParent();

if ( !destDir.exists() )
if ( !Files.exists(destDir) )
{
destDir.mkdirs();
String relPath = PathUtil.getRelative( rootDirectory.getAbsolutePath(), destDir );
try
{
Files.createDirectories( destDir );
}
catch ( IOException e )
{
log.error("Could not create directory {}: {}", destDir, e.getMessage(), e);
throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Could not create directory "+destDir );
}
String relPath = PathUtil.getRelative( rootDirectory.toAbsolutePath().toString(), destDir.toFile() );

log.debug( "Creating destination directory '{}' (current user '{}')", destDir.getName(),
log.debug( "Creating destination directory '{}' (current user '{}')", destDir.getFileName(),
activePrincipal );

triggerAuditEvent( request.getRemoteAddr(), managedRepositoryContent.getId(), relPath,
@ -761,8 +768,8 @@ public DavResource createResource( final DavResourceLocator locator, final DavSe
{
logicalResource = logicalResource.substring( 1 );
}
File resourceFile = new File( managedRepositoryContent.getRepoRoot(), logicalResource );
resource = new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource,
Path resourceFile = Paths.get( managedRepositoryContent.getRepoRoot(), logicalResource );
resource = new ArchivaDavResource( resourceFile.toAbsolutePath().toString(), logicalResource,
managedRepositoryContent.getRepository(), davSession, archivaLocator,
this, mimeTypes, auditListeners, scheduler, fileLockManager );

@ -877,7 +884,7 @@ private void setHeaders( DavServletResponse response, DavResourceLocator locator
// [MRM-503] - Metadata file need Pragma:no-cache response
// header.
if ( locator.getResourcePath().endsWith( "/maven-metadata.xml" ) || ( resource instanceof ArchivaDavResource
&& ( ArchivaDavResource.class.cast( resource ).getLocalResource().isDirectory() ) ) )
&& ( Files.isDirectory( ArchivaDavResource.class.cast( resource ).getLocalResource()) ) ) )
{
response.setHeader( "Pragma", "no-cache" );
response.setHeader( "Cache-Control", "no-cache" );
@ -886,8 +893,8 @@ private void setHeaders( DavServletResponse response, DavResourceLocator locator
// if the resource is a directory don't cache it as new groupId deployed will be available
// without need of refreshing browser
else if ( locator.getResourcePath().endsWith( "/maven-metadata.xml" ) || (
resource instanceof ArchivaVirtualDavResource && ( new File(
ArchivaVirtualDavResource.class.cast( resource ).getLogicalResource() ).isDirectory() ) ) )
resource instanceof ArchivaVirtualDavResource && ( Files.isDirectory(Paths.get(
ArchivaVirtualDavResource.class.cast( resource ).getLogicalResource() )) ) ) )
{
response.setHeader( "Pragma", "no-cache" );
response.setHeader( "Cache-Control", "no-cache" );
@ -1024,14 +1031,14 @@ private DavResource getResourceFromGroup( DavServletRequest request, List<String
if ( repositoryGroupConfiguration.getRepositories() == null
|| repositoryGroupConfiguration.getRepositories().isEmpty() )
{
File file =
new File( System.getProperty( "appserver.base" ), "groups/" + repositoryGroupConfiguration.getId() );
Path file =
Paths.get( System.getProperty( "appserver.base" ), "groups/" + repositoryGroupConfiguration.getId() );

return new ArchivaDavResource( file.getPath(), "groups/" + repositoryGroupConfiguration.getId(), null,
return new ArchivaDavResource( file.toString(), "groups/" + repositoryGroupConfiguration.getId(), null,
request.getDavSession(), locator, this, mimeTypes, auditListeners, scheduler,
fileLockManager );
}
List<File> mergedRepositoryContents = new ArrayList<>();
List<Path> mergedRepositoryContents = new ArrayList<>();
// multiple repo types so we guess they are all the same type
// so use the first one
// FIXME add a method with group in the repository storage
@ -1061,7 +1068,7 @@ private DavResource getResourceFromGroup( DavServletRequest request, List<String

if ( StringUtils.endsWith( pathInfo, repositoryGroupConfiguration.getMergedIndexPath() ) )
{
File mergedRepoDir =
Path mergedRepoDir =
buildMergedIndexDirectory( repositories, activePrincipal, request, repositoryGroupConfiguration );
mergedRepositoryContents.add( mergedRepoDir );
}
@ -1069,20 +1076,27 @@ private DavResource getResourceFromGroup( DavServletRequest request, List<String
{
if ( StringUtils.equalsIgnoreCase( pathInfo, "/" + repositoryGroupConfiguration.getId() ) )
{
File tmpDirectory = new File( SystemUtils.getJavaIoTmpDir(),
repositoryGroupConfiguration.getId() + "/"
+ repositoryGroupConfiguration.getMergedIndexPath() );
if ( !tmpDirectory.exists() )
Path tmpDirectory = Paths.get( SystemUtils.getJavaIoTmpDir().toString(),
repositoryGroupConfiguration.getId(),
repositoryGroupConfiguration.getMergedIndexPath() );
if ( !Files.exists(tmpDirectory) )
{
synchronized ( tmpDirectory.getAbsolutePath() )
synchronized ( tmpDirectory.toAbsolutePath().toString() )
{
if ( !tmpDirectory.exists() )
if ( !Files.exists(tmpDirectory) )
{
tmpDirectory.mkdirs();
try
{
Files.createDirectories( tmpDirectory );
}
catch ( IOException e )
{
throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Could not create directory "+tmpDirectory );
}
}
}
}
mergedRepositoryContents.add( tmpDirectory.getParentFile() );
mergedRepositoryContents.add( tmpDirectory.getParent() );
}
for ( String repository : repositories )
{
@ -1103,29 +1117,29 @@ private DavResource getResourceFromGroup( DavServletRequest request, List<String
"Invalid managed repository <" + repository + ">: " + e.getMessage() );
}

File resourceFile = new File( managedRepository.getRepoRoot(), logicalResource.getPath() );
if ( resourceFile.exists() )
Path resourceFile = Paths.get( managedRepository.getRepoRoot(), logicalResource.getPath() );
if ( Files.exists(resourceFile) )
{
// in case of group displaying index directory doesn't have sense !!
String repoIndexDirectory = managedRepository.getRepository().getIndexDirectory();
if ( StringUtils.isNotEmpty( repoIndexDirectory ) )
{
if ( !new File( repoIndexDirectory ).isAbsolute() )
if ( !Paths.get( repoIndexDirectory ).isAbsolute() )
{
repoIndexDirectory = new File( managedRepository.getRepository().getLocation(),
repoIndexDirectory = Paths.get( managedRepository.getRepository().getLocation(),
StringUtils.isEmpty( repoIndexDirectory )
? ".indexer"
: repoIndexDirectory ).getAbsolutePath();
: repoIndexDirectory ).toAbsolutePath().toString();
}
}
if ( StringUtils.isEmpty( repoIndexDirectory ) )
{
repoIndexDirectory = new File( managedRepository.getRepository().getLocation(),
".indexer" ).getAbsolutePath();
repoIndexDirectory = Paths.get( managedRepository.getRepository().getLocation(),
".indexer" ).toAbsolutePath().toString();
}

if ( !StringUtils.equals( FilenameUtils.normalize( repoIndexDirectory ),
FilenameUtils.normalize( resourceFile.getAbsolutePath() ) ) )
FilenameUtils.normalize( resourceFile.toAbsolutePath().toString() ) ) )
{
// for prompted authentication
if ( httpAuth.getSecuritySession( request.getSession( true ) ) != null )
@ -1260,17 +1274,17 @@ private boolean isAllowedToContinue( DavServletRequest request, List<String> rep
return allow;
}

private File writeMergedMetadataToFile( ArchivaRepositoryMetadata mergedMetadata, String outputFilename )
private Path writeMergedMetadataToFile( ArchivaRepositoryMetadata mergedMetadata, String outputFilename )
throws RepositoryMetadataException, DigesterException, IOException
{
File outputFile = new File( outputFilename );
if ( outputFile.exists() )
Path outputFile = Paths.get( outputFilename );
if ( Files.exists(outputFile) )
{
FileUtils.deleteQuietly( outputFile );
org.apache.archiva.common.utils.FileUtils.deleteQuietly( outputFile );
}

outputFile.getParentFile().mkdirs();
RepositoryMetadataWriter.write( mergedMetadata, outputFile.toPath() );
Files.createDirectories(outputFile.getParent());
RepositoryMetadataWriter.write( mergedMetadata, outputFile );

createChecksumFile( outputFilename, digestSha1 );
createChecksumFile( outputFilename, digestMd5 );
@ -1281,13 +1295,13 @@ private File writeMergedMetadataToFile( ArchivaRepositoryMetadata mergedMetadata
private void createChecksumFile( String path, Digester digester )
throws DigesterException, IOException
{
File checksumFile = new File( path + digester.getFilenameExtension() );
if ( !checksumFile.exists() )
Path checksumFile = Paths.get( path + digester.getFilenameExtension() );
if ( !Files.exists(checksumFile) )
{
FileUtils.deleteQuietly( checksumFile );
checksum.createChecksum( new File( path ), digester );
org.apache.archiva.common.utils.FileUtils.deleteQuietly( checksumFile );
checksum.createChecksum( Paths.get( path ).toFile(), digester );
}
else if ( !checksumFile.isFile() )
else if ( !Files.isRegularFile( checksumFile) )
{
log.error( "Checksum file is not a file." );
}
@ -1306,7 +1320,7 @@ private boolean isProjectReference( String requestedResource )
}
}

protected File buildMergedIndexDirectory( List<String> repositories, String activePrincipal,
protected Path buildMergedIndexDirectory( List<String> repositories, String activePrincipal,
DavServletRequest request,
RepositoryGroupConfiguration repositoryGroupConfiguration )
throws DavException
@ -1339,7 +1353,7 @@ protected File buildMergedIndexDirectory( List<String> repositories, String acti
{
log.debug( MarkerFactory.getMarker( "group.merged.index" ),
"merged index for group '{}' found in cache", repositoryGroupConfiguration.getId() );
return tmp.getDirectory().toFile();
return tmp.getDirectory();
}
}

@ -1368,14 +1382,14 @@ protected File buildMergedIndexDirectory( List<String> repositories, String acti
log.info( "generate temporary merged index for repository group '{}' for repositories '{}'",
repositoryGroupConfiguration.getId(), authzRepos );

File tempRepoFile = Files.createTempDirectory( "temp" ).toFile();
tempRepoFile.deleteOnExit();
Path tempRepoFile = Files.createTempDirectory( "temp" );
tempRepoFile.toFile().deleteOnExit();

IndexMergerRequest indexMergerRequest =
new IndexMergerRequest( authzRepos, true, repositoryGroupConfiguration.getId(),
repositoryGroupConfiguration.getMergedIndexPath(),
repositoryGroupConfiguration.getMergedIndexTtl() ).mergedIndexDirectory(
tempRepoFile.toPath() ).temporary( true );
tempRepoFile ).temporary( true );

MergedRemoteIndexesTaskRequest taskRequest =
new MergedRemoteIndexesTaskRequest( indexMergerRequest, indexMerger );
@ -1384,9 +1398,9 @@ protected File buildMergedIndexDirectory( List<String> repositories, String acti

IndexingContext indexingContext = job.execute().getIndexingContext();

File mergedRepoDir = indexingContext.getIndexDirectoryFile();
Path mergedRepoDir = indexingContext.getIndexDirectoryFile().toPath();
TemporaryGroupIndex temporaryGroupIndex =
new TemporaryGroupIndex( mergedRepoDir.toPath(), indexingContext.getId(), repositoryGroupConfiguration.getId(),
new TemporaryGroupIndex( mergedRepoDir, indexingContext.getId(), repositoryGroupConfiguration.getId(),
repositoryGroupConfiguration.getMergedIndexTtl() ) //
.setCreationTime( new Date().getTime() );
temporaryGroupIndexMap.put( repositoryGroupConfiguration.getId(), temporaryGroupIndex );
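The recurring migration pattern in this hunk is: probe with Files.exists, create with Files.createDirectories, and map the checked IOException onto a WebDAV error. A minimal standalone sketch of that pattern, assuming an slf4j logger named log and the imports java.io.IOException, java.nio.file.Files/Path (message text here is illustrative, not quoted from the Archiva sources):

    // Sketch only: replace File.mkdirs() with Files.createDirectories and surface failures.
    if ( !Files.exists( destDir ) )
    {
        try
        {
            Files.createDirectories( destDir );
        }
        catch ( IOException e )
        {
            log.error( "Could not create directory {}: {}", destDir, e.getMessage(), e );
            throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                                    "Could not create directory " + destDir );
        }
    }

Unlike File.mkdirs(), which only signals failure through a boolean, Files.createDirectories reports the cause of the failure, which is what makes the explicit error mapping above possible.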
@ -19,6 +19,8 @@
* under the License.
*/

import org.apache.archiva.webdav.util.IndexWriter;
import org.apache.archiva.webdav.util.MimeTypes;
import org.apache.jackrabbit.util.Text;
import org.apache.jackrabbit.webdav.DavException;
import org.apache.jackrabbit.webdav.DavResource;
@ -40,14 +42,13 @@
import org.apache.jackrabbit.webdav.property.DavPropertySet;
import org.apache.jackrabbit.webdav.property.DefaultDavProperty;
import org.apache.jackrabbit.webdav.property.ResourceType;
import org.apache.archiva.webdav.util.IndexWriter;
import org.apache.archiva.webdav.util.MimeTypes;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@ -72,9 +73,9 @@ public class ArchivaVirtualDavResource

private static final String METHODS = "OPTIONS, GET, HEAD, POST, TRACE, PROPFIND, PROPPATCH, MKCOL";

private final List<File> localResources;
private final List<Path> localResources;

public ArchivaVirtualDavResource( List<File> localResources, String logicalResource, MimeTypes mimeTypes,
public ArchivaVirtualDavResource( List<Path> localResources, String logicalResource, MimeTypes mimeTypes,
ArchivaDavResourceLocator locator, DavResourceFactory factory )
{
this.localResources = localResources;
@ -91,11 +92,11 @@ public void spool( OutputContext outputContext )
if ( outputContext.hasStream() )
{
Collections.sort( localResources );
List<File> localResourceFiles = new ArrayList<>();
List<Path> localResourceFiles = new ArrayList<>();

for ( File resourceFile : localResources )
for ( Path resourceFile : localResources )
{
if ( resourceFile.exists() )
if ( Files.exists(resourceFile) )
{
localResourceFiles.add( resourceFile );
}
@ -49,8 +49,10 @@
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;

/**
@ -186,13 +188,16 @@ public synchronized void initServers( ServletConfig servletConfig )

for ( ManagedRepository repo : repositoryMap.values() )
{
File repoDir = new File( repo.getLocation() );
Path repoDir = Paths.get( repo.getLocation() );

if ( !repoDir.exists() )
if ( !Files.exists(repoDir) )
{
if ( !repoDir.mkdirs() )
try
{
Files.createDirectories( repoDir );
}
catch ( IOException e )
{
// Skip invalid directories.
log.info( "Unable to create missing directory for {}", repo.getLocation() );
continue;
}
@ -22,9 +22,14 @@
import org.apache.commons.lang.StringUtils;
import org.apache.jackrabbit.webdav.DavResource;
import org.apache.jackrabbit.webdav.io.OutputContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Arrays;
@ -34,18 +39,22 @@
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.stream.Collectors;

/**
*/
public class IndexWriter
{

private static final Logger log = LoggerFactory.getLogger( IndexWriter.class );

private final String logicalResource;

private final List<File> localResources;
private final List<Path> localResources;

private final boolean isVirtual;

public IndexWriter( DavResource resource, File localResource, String logicalResource )
public IndexWriter( DavResource resource, Path localResource, String logicalResource )
{
this.localResources = new ArrayList<>();
this.localResources.add( localResource );
@ -53,7 +62,7 @@ public IndexWriter( DavResource resource, File localResource, String logicalReso
this.isVirtual = false;
}

public IndexWriter( DavResource resource, List<File> localResources, String logicalResource )
public IndexWriter( DavResource resource, List<Path> localResources, String logicalResource )
{
this.logicalResource = logicalResource;
this.localResources = localResources;
@ -69,7 +78,14 @@ public void write( OutputContext outputContext )
{
PrintWriter writer = new PrintWriter( outputContext.getOutputStream() );
writeDocumentStart( writer );
writeHyperlinks( writer );
try
{
writeHyperlinks( writer );
}
catch ( IOException e )
{
log.error("Could not write hyperlinks {}", e.getMessage(), e);
}
writeDocumentEnd( writer );
writer.flush();
writer.close();
@ -111,8 +127,8 @@ private void writeDocumentStart( PrintWriter writer )
//Check if not root
if ( logicalResource != null && logicalResource.length() > 0 )
{
File file = new File( logicalResource );
String parentName = file.getParent() == null ? "/" : file.getParent();
Path file = Paths.get( logicalResource );
String parentName = file.getParent() == null ? "/" : file.getParent().toString();

//convert to unix path in case archiva is hosted on windows
parentName = StringUtils.replace( parentName, "\\", "/" );
@ -133,18 +149,19 @@ private void writeDocumentEnd( PrintWriter writer )
writer.println( "</html>" );
}

private void writeHyperlinks( PrintWriter writer )
private void writeHyperlinks( PrintWriter writer ) throws IOException
{
if ( !isVirtual )
{
for ( File localResource : localResources )
for ( Path localResource : localResources )
{
List<File> files = new ArrayList<>( Arrays.asList( localResource.listFiles() ) );
List<Path> files = Files.list(localResource).collect( Collectors.toList( ) );
Collections.sort( files );

for ( File file : files )
for ( Path file : files )
{
writeHyperlink( writer, file.getName(), file.lastModified(), file.length(), file.isDirectory() );
writeHyperlink( writer, file.getFileName().toString(), Files.getLastModifiedTime( file ).toMillis(), Files.size(file),
Files.isDirectory( file ) );
}
}
}
@ -153,26 +170,26 @@ private void writeHyperlinks( PrintWriter writer )
// virtual repository - filter unique directories
Map<String, List<String>> uniqueChildFiles = new HashMap<>();
List<String> sortedList = new ArrayList<>();
for ( File resource : localResources )
for ( Path resource : localResources )
{
List<File> files = new ArrayList<>( Arrays.asList( resource.listFiles() ) );
for ( File file : files )
List<Path> files = Files.list(resource).collect( Collectors.toList() );
for ( Path file : files )
{
List<String> mergedChildFiles = new ArrayList<>();
if ( uniqueChildFiles.get( file.getName() ) == null )
if ( uniqueChildFiles.get( file.getFileName() ) == null )
{
mergedChildFiles.add( file.getAbsolutePath() );
mergedChildFiles.add( file.toAbsolutePath().toString() );
}
else
{
mergedChildFiles = uniqueChildFiles.get( file.getName() );
if ( !mergedChildFiles.contains( file.getAbsolutePath() ) )
mergedChildFiles = uniqueChildFiles.get( file.getFileName() );
if ( !mergedChildFiles.contains( file.toAbsolutePath().toString() ) )
{
mergedChildFiles.add( file.getAbsolutePath() );
mergedChildFiles.add( file.toAbsolutePath().toString() );
}
}
uniqueChildFiles.put( file.getName(), mergedChildFiles );
sortedList.add( file.getName() );
uniqueChildFiles.put( file.getFileName().toString(), mergedChildFiles );
sortedList.add( file.getFileName().toString() );
}
}

@ -183,11 +200,12 @@ private void writeHyperlinks( PrintWriter writer )
List<String> childFilesFromMap = uniqueChildFiles.get( fileName );
for ( String childFilePath : childFilesFromMap )
{
File childFile = new File( childFilePath );
if ( !written.contains( childFile.getName() ) )
Path childFile = Paths.get( childFilePath );
if ( !written.contains( childFile.getFileName().toString() ) )
{
written.add( childFile.getName() );
writeHyperlink( writer, fileName, childFile.lastModified(), childFile.length(), childFile.isDirectory() );
written.add( childFile.getFileName().toString() );
writeHyperlink( writer, fileName, Files.getLastModifiedTime( childFile).toMillis(),
Files.size(childFile), Files.isDirectory( childFile) );
}
}
}
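One caveat worth noting about the Files.list calls introduced above: unlike File.listFiles, Files.list returns a Stream backed by an open directory handle, and the Javadoc asks callers to close it. A minimal sketch of the same listing with try-with-resources, assuming imports of java.nio.file.Files/Path, java.util.List and java.util.stream.Stream/Collectors (not part of this commit, just an illustration of the safer form):

    // Sketch only: close the directory stream once the snapshot has been collected.
    List<Path> files;
    try ( Stream<Path> stream = Files.list( localResource ) )
    {
        files = stream.sorted().collect( Collectors.toList() );
    }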
@ -39,8 +39,9 @@
import org.junit.Before;

import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;

import static org.assertj.core.api.Assertions.assertThat;
import org.junit.Rule;
@ -61,7 +62,7 @@ class RemoteRepoInfo

public Server server;

public File root;
public Path root;

public RemoteRepositoryConfiguration config;
}
@ -128,15 +129,15 @@ protected RemoteRepoInfo createServer( String id )
"temp" ).toFile();*/// new File( System.getProperty( "basedir" ) + "target/remote-repos/" + id + "/" );

// Remove existing root contents.
if ( repo.root.exists() )
if ( Files.exists(repo.root) )
{
FileUtils.deleteDirectory( repo.root );
org.apache.archiva.common.utils.FileUtils.deleteDirectory( repo.root );
}

// Establish root directory.
if ( !repo.root.exists() )
if ( !Files.exists(repo.root) )
{
repo.root.mkdirs();
Files.createDirectories( repo.root );
}

repo.server = new Server( );
@ -147,7 +148,7 @@ protected RemoteRepoInfo createServer( String id )

ServletContextHandler context = new ServletContextHandler();
context.setContextPath( repo.context );
context.setResourceBase( repo.root.getAbsolutePath() );
context.setResourceBase( repo.root.toAbsolutePath().toString() );
context.setAttribute( "dirAllowed", true );
context.setAttribute( "maxCacheSize", 0 );

@ -215,16 +216,16 @@ protected void shutdownServer( RemoteRepoInfo remoteRepo )
}
}

protected File populateRepo( RemoteRepoInfo remoteRepo, String path, String contents )
protected Path populateRepo( RemoteRepoInfo remoteRepo, String path, String contents )
throws Exception
{
File destFile = new File( remoteRepo.root, path );
if ( destFile.exists() )
Path destFile = remoteRepo.root.resolve( path );
if ( Files.exists(destFile) )
{
destFile.delete();
Files.delete(destFile);
}
destFile.getParentFile().mkdirs();
FileUtils.writeStringToFile( destFile, contents, Charset.defaultCharset() );
Files.createDirectories( destFile.getParent());
org.apache.archiva.common.utils.FileUtils.writeStringToFile( destFile, Charset.defaultCharset(), contents);
return destFile;
}

@ -66,13 +66,15 @@
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.lang.annotation.Annotation;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Locale;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
@ -88,9 +90,9 @@ public abstract class AbstractRepositoryServletTestCase
{
protected static final String REPOID_INTERNAL = "internal";

protected File repoRootInternal;
protected Path repoRootInternal;

protected File repoRootLegacy;
protected Path repoRootLegacy;

@Inject
protected ArchivaConfiguration archivaConfiguration;
@ -118,19 +120,19 @@ public void setUp()

super.setUp();

String appserverBase = new File( "target/appserver-base" ).getAbsolutePath();
String appserverBase = Paths.get( "target/appserver-base" ).toAbsolutePath().toString();
System.setProperty( "appserver.base", appserverBase );

File testConf = new File( "src/test/resources/repository-archiva.xml" );
File testConfDest = new File( appserverBase, "conf/archiva.xml" );
if ( testConfDest.exists() )
Path testConf = Paths.get( "src/test/resources/repository-archiva.xml" );
Path testConfDest = Paths.get(appserverBase, "conf/archiva.xml" );
if ( Files.exists(testConfDest) )
{
FileUtils.deleteQuietly( testConfDest );
org.apache.archiva.common.utils.FileUtils.deleteQuietly( testConfDest );
}
FileUtils.copyFile( testConf, testConfDest );
FileUtils.copyFile( testConf.toFile(), testConfDest.toFile() );

repoRootInternal = new File( appserverBase, "data/repositories/internal" );
repoRootLegacy = new File( appserverBase, "data/repositories/legacy" );
repoRootInternal = Paths.get( appserverBase, "data/repositories/internal" );
repoRootLegacy = Paths.get( appserverBase, "data/repositories/legacy" );
Configuration config = archivaConfiguration.getConfiguration();

config.getManagedRepositories().clear();
@ -674,29 +676,30 @@ public void tearDown()
throws Exception
{

if ( repoRootInternal.exists() )
if ( Files.exists(repoRootInternal) )
{
FileUtils.deleteDirectory( repoRootInternal );
org.apache.archiva.common.utils.FileUtils.deleteDirectory( repoRootInternal );
}

if ( repoRootLegacy.exists() )
if ( Files.exists(repoRootLegacy) )
{
FileUtils.deleteDirectory( repoRootLegacy );
org.apache.archiva.common.utils.FileUtils.deleteDirectory( repoRootLegacy );
}

}


protected void assertFileContents( String expectedContents, File repoRoot, String path )
protected void assertFileContents( String expectedContents, Path repoRoot, String subpath )
throws IOException
{
File actualFile = new File( repoRoot, path );
assertTrue( "File <" + actualFile.getAbsolutePath() + "> should exist.", actualFile.exists() );
assertTrue( "File <" + actualFile.getAbsolutePath() + "> should be a file (not a dir/link/device/etc).",
actualFile.isFile() );
String path = Paths.get(subpath).isAbsolute() ? subpath.substring( 1,subpath.length() ) : subpath;
Path actualFile = repoRoot.resolve( path );
assertTrue( "File <" + actualFile.toAbsolutePath() + "> should exist.", Files.exists(actualFile) );
assertTrue( "File <" + actualFile.toAbsolutePath() + "> should be a file (not a dir/link/device/etc).",
Files.isRegularFile( actualFile ) );

String actualContents = FileUtils.readFileToString( actualFile, Charset.defaultCharset() );
assertEquals( "File Contents of <" + actualFile.getAbsolutePath() + ">", expectedContents, actualContents );
String actualContents = org.apache.archiva.common.utils.FileUtils.readFileToString( actualFile, Charset.defaultCharset() );
assertEquals( "File Contents of <" + actualFile.toAbsolutePath() + ">", expectedContents, actualContents );
}

protected void assertRepositoryValid( RepositoryServlet servlet, String repoId )
@ -704,9 +707,9 @@ protected void assertRepositoryValid( RepositoryServlet servlet, String repoId )
{
ManagedRepository repository = servlet.getRepository( repoId );
assertNotNull( "Archiva Managed Repository id:<" + repoId + "> should exist.", repository );
File repoRoot = new File( repository.getLocation() );
Path repoRoot = Paths.get( repository.getLocation() );
assertTrue( "Archiva Managed Repository id:<" + repoId + "> should have a valid location on disk.",
repoRoot.exists() && repoRoot.isDirectory() );
Files.exists(repoRoot) && Files.isDirectory(repoRoot) );
}

protected void assertResponseOK( WebResponse response )
@ -745,19 +748,19 @@ protected void assertResponseConflictError( WebResponse response )
response.getStatusCode() );
}

protected ManagedRepositoryConfiguration createManagedRepository( String id, String name, File location,
protected ManagedRepositoryConfiguration createManagedRepository( String id, String name, Path location,
boolean blockRedeployments )
{
ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();
repo.setId( id );
repo.setName( name );
repo.setLocation( location.getAbsolutePath() );
repo.setLocation( location.toAbsolutePath().toString() );
repo.setBlockRedeployments( blockRedeployments );

return repo;
}

protected ManagedRepositoryConfiguration createManagedRepository( String id, String name, File location,
protected ManagedRepositoryConfiguration createManagedRepository( String id, String name, Path location,
String layout, boolean blockRedeployments )
{
ManagedRepositoryConfiguration repo = createManagedRepository( id, name, location, blockRedeployments );
@ -781,21 +784,21 @@ protected void saveConfiguration( ArchivaConfiguration archivaConfiguration )
}


protected void setupCleanRepo( File repoRootDir )
protected void setupCleanRepo( Path repoRootDir )
throws IOException
{
FileUtils.deleteDirectory( repoRootDir );
if ( !repoRootDir.exists() )
org.apache.archiva.common.utils.FileUtils.deleteDirectory( repoRootDir );
if ( !Files.exists(repoRootDir) )
{
repoRootDir.mkdirs();
Files.createDirectories( repoRootDir );
}
}

protected void assertManagedFileNotExists( File repoRootInternal, String resourcePath )
protected void assertManagedFileNotExists( Path repoRootInternal, String resourcePath )
{
File repoFile = new File( repoRootInternal, resourcePath );
assertFalse( "Managed Repository File <" + repoFile.getAbsolutePath() + "> should not exist.",
repoFile.exists() );
Path repoFile = repoRootInternal.resolve( resourcePath );
assertFalse( "Managed Repository File <" + repoFile.toAbsolutePath() + "> should not exist.",
Files.exists(repoFile) );
}

protected void setupCleanInternalRepo()
@ -804,12 +807,12 @@ protected void setupCleanInternalRepo()
setupCleanRepo( repoRootInternal );
}

protected File populateRepo( File repoRootManaged, String path, String contents )
protected Path populateRepo( Path repoRootManaged, String path, String contents )
throws Exception
{
File destFile = new File( repoRootManaged, path );
destFile.getParentFile().mkdirs();
FileUtils.writeStringToFile( destFile, contents, Charset.defaultCharset() );
Path destFile = repoRootManaged.resolve( path );
Files.createDirectories( destFile.getParent() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( destFile, Charset.defaultCharset(), contents );
return destFile;
}
}
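The populateRepo helper above delegates the actual write to the Archiva common-utils FileUtils. A minimal sketch of the same helper written with only java.nio, in case the project-specific helper is not available (illustrative, not part of this commit):

    // Sketch only: create parent directories and write the fixture content with java.nio.
    protected Path populateRepo( Path repoRootManaged, String path, String contents )
        throws IOException
    {
        Path destFile = repoRootManaged.resolve( path );
        Files.createDirectories( destFile.getParent() );
        Files.write( destFile, contents.getBytes( Charset.defaultCharset() ) );
        return destFile;
    }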
@ -44,7 +44,6 @@
import org.apache.jackrabbit.webdav.DavResourceLocator;
import org.apache.jackrabbit.webdav.DavServletRequest;
import org.apache.jackrabbit.webdav.DavServletResponse;

import org.easymock.IMocksControl;
import org.junit.After;
import org.junit.Before;
@ -54,8 +53,9 @@
import org.springframework.test.context.ContextConfiguration;

import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

@ -150,13 +150,13 @@ public void setUp()
if ( defaultManagedRepositoryAdmin.getManagedRepository( RELEASES_REPO ) == null )
{
defaultManagedRepositoryAdmin.addManagedRepository(
createManagedRepository( RELEASES_REPO, new File( "target/test-classes/" + RELEASES_REPO ).getPath(),
createManagedRepository( RELEASES_REPO, Paths.get( "target/test-classes/" + RELEASES_REPO ).toString(),
"default" ), false, null );
}
if ( defaultManagedRepositoryAdmin.getManagedRepository( INTERNAL_REPO ) == null )
{
defaultManagedRepositoryAdmin.addManagedRepository(
createManagedRepository( INTERNAL_REPO, new File( "target/test-classes/" + INTERNAL_REPO ).getPath(),
createManagedRepository( INTERNAL_REPO, Paths.get( "target/test-classes/" + INTERNAL_REPO ).toString(),
"default" ), false, null );
}
RepositoryGroup repoGroupConfig = new RepositoryGroup();
@ -255,8 +255,8 @@ public void testRepositoryGroupFirstRepositoryRequiresAuthentication()

expect( repoRequest.toNativePath( "org/apache/archiva/archiva/1.2-SNAPSHOT/archiva-1.2-SNAPSHOT.jar",
internalRepo ) ).andReturn(
new File( config.findManagedRepositoryById( INTERNAL_REPO ).getLocation(),
"target/test-classes/internal/org/apache/archiva/archiva/1.2-SNAPSHOT/archiva-1.2-SNAPSHOT.jar" ).getPath() );
Paths.get( config.findManagedRepositoryById( INTERNAL_REPO ).getLocation(),
"target/test-classes/internal/org/apache/archiva/archiva/1.2-SNAPSHOT/archiva-1.2-SNAPSHOT.jar" ).toString());

expect( repoFactory.getManagedRepositoryContent( INTERNAL_REPO ) ).andReturn( internalRepo );

@ -335,8 +335,8 @@ public void testRepositoryGroupLastRepositoryRequiresAuthentication()

expect( repoRequest.toNativePath( "org/apache/archiva/archiva/1.2-SNAPSHOT/archiva-1.2-SNAPSHOT.jar",
internalRepo ) ).andReturn(
new File( config.findManagedRepositoryById( INTERNAL_REPO ).getLocation(),
"target/test-classes/internal/org/apache/archiva/archiva/1.2-SNAPSHOT/archiva-1.2-SNAPSHOT.jar" ).getPath() );
Paths.get( config.findManagedRepositoryById( INTERNAL_REPO ).getLocation(),
"target/test-classes/internal/org/apache/archiva/archiva/1.2-SNAPSHOT/archiva-1.2-SNAPSHOT.jar" ).toString());


expect( repoRequest.isArchetypeCatalog(
@ -370,7 +370,7 @@ public void testRepositoryGroupArtifactDoesNotExistInAnyOfTheReposAuthentication
new ArchivaDavLocatorFactory() );

defaultManagedRepositoryAdmin.addManagedRepository(
createManagedRepository( LOCAL_MIRROR_REPO, new File( "target/test-classes/local-mirror" ).getPath(),
createManagedRepository( LOCAL_MIRROR_REPO, Paths.get( "target/test-classes/local-mirror" ).toString(),
"default" ), false, null );

List<RepositoryGroupConfiguration> repoGroups = new ArrayList<>();
@ -417,13 +417,13 @@ public void testRepositoryGroupArtifactDoesNotExistInAnyOfTheReposAuthentication

expect( repoRequest.toNativePath( "org/apache/archiva/archiva/1.2-SNAPSHOT/archiva-1.2-SNAPSHOT.jar",
internalRepo ) ).andReturn(
new File( config.findManagedRepositoryById( INTERNAL_REPO ).getLocation(),
"target/test-classes/internal/org/apache/archiva/archiva/1.2-SNAPSHOT/archiva-1.2-SNAPSHOT.jar" ).getPath() );
Paths.get( config.findManagedRepositoryById( INTERNAL_REPO ).getLocation(),
"target/test-classes/internal/org/apache/archiva/archiva/1.2-SNAPSHOT/archiva-1.2-SNAPSHOT.jar" ).toString() );

expect( repoRequest.toNativePath( "org/apache/archiva/archiva/1.2-SNAPSHOT/archiva-1.2-SNAPSHOT.jar",
localMirrorRepo ) )
.andReturn( new File( config.findManagedRepositoryById( LOCAL_MIRROR_REPO ).getLocation(),
"target/test-classes/internal/org/apache/archiva/archiva/1.2-SNAPSHOT/archiva-1.2-SNAPSHOT.jar" ).getPath() );
.andReturn( Paths.get( config.findManagedRepositoryById( LOCAL_MIRROR_REPO ).getLocation(),
"target/test-classes/internal/org/apache/archiva/archiva/1.2-SNAPSHOT/archiva-1.2-SNAPSHOT.jar" ).toString());

expect( repoRequest.isArchetypeCatalog( "org/apache/archiva/archiva/1.2-SNAPSHOT/archiva-1.2-SNAPSHOT.jar" ) ).andReturn( false ).times( 2 );
archivaConfigurationControl.replay();
@ -562,7 +562,7 @@ public void testRequestMetadataRepoIsLegacy()
throws Exception
{
defaultManagedRepositoryAdmin.addManagedRepository(
createManagedRepository( LEGACY_REPO, new File( "target/test-classes/" + LEGACY_REPO ).getPath(),
createManagedRepository( LEGACY_REPO, Paths.get( "target/test-classes/" + LEGACY_REPO ).toString(),
"legacy" ), false, null );
DavResourceLocator locator =
new ArchivaDavResourceLocator( "", "/repository/" + LEGACY_REPO + "/eclipse/maven-metadata.xml",
@ -647,17 +647,17 @@ class OverridingRepositoryProxyConnectors
@Override
public ProxyFetchResult fetchMetadataFromProxies( ManagedRepositoryContent repository, String logicalPath )
{
File target = new File( repository.getRepoRoot(), logicalPath );
Path target = Paths.get(repository.getRepoRoot(), logicalPath );
try
{
FileUtils.copyFile( new File( "target/test-classes/maven-metadata.xml" ), target );
FileUtils.copyFile( Paths.get( "target/test-classes/maven-metadata.xml" ).toFile(), target.toFile() );
}
catch ( IOException e )
{

}

return new ProxyFetchResult( target.toPath(), true );
return new ProxyFetchResult( target, true );
}
}
}
@ -15,37 +15,36 @@
*/
package org.apache.archiva.webdav;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import org.apache.commons.io.FileUtils;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;

import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

/**
* Rule to help creating folder for repository based on testmethod name
* @author Eric
*/
public class ArchivaTemporaryFolderRule implements TestRule {
private File d;
private Path d;
private Description desc = Description.EMPTY;

public void before() throws IOException {
// hard coded maven target file
File f1 = new File("target" + File.separator + "archivarepo" + File.separator + ArchivaTemporaryFolderRule.resumepackage(desc.getClassName()) + File.separator + desc.getMethodName());
f1.mkdirs();
Path p = Files.createDirectories(f1.toPath());
d = p.toFile();
Path f1 = Paths.get("target", "archivarepo", ArchivaTemporaryFolderRule.resumepackage(desc.getClassName()), desc.getMethodName());
d = Files.createDirectories( f1 );
}

public File getRoot() {
public Path getRoot() {
return d;
}

public void after() throws IOException {
FileUtils.deleteDirectory(getRoot());
org.apache.archiva.common.utils.FileUtils.deleteDirectory(getRoot());
}

@Override
@ -77,7 +76,7 @@ public static String resumepackage(String packagename) {
String[] p = packagename.split("\\.");
for (int i = 0; i < p.length - 2; i++)
{
sb.append(p[i].charAt(0)).append(File.separator);
sb.append(p[i].charAt(0)).append( FileSystems.getDefault( ).getSeparator());
}
sb.append(p[p.length - 1]);
return sb.toString();
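A side effect of this rule's migration is that the File-to-Path round trip disappears: Files.createDirectories both creates the missing directories and returns the deepest directory as a Path, so the field can be assigned directly. A one-line sketch of that behaviour, with hypothetical path segments:

    // Sketch only: createDirectories returns the created directory as a Path.
    Path root = Files.createDirectories( Paths.get( "target", "archivarepo", "a", "testMethod" ) );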
@ -21,9 +21,10 @@

import junit.framework.TestCase;
import org.apache.archiva.admin.model.beans.ManagedRepository;
import org.apache.archiva.repository.events.AuditListener;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.commons.io.FileUtils;
import org.apache.archiva.repository.events.AuditListener;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.archiva.webdav.util.MimeTypes;
import org.apache.jackrabbit.webdav.DavException;
import org.apache.jackrabbit.webdav.DavResource;
import org.apache.jackrabbit.webdav.DavResourceFactory;
@ -37,7 +38,6 @@
import org.apache.jackrabbit.webdav.lock.Scope;
import org.apache.jackrabbit.webdav.lock.SimpleLockManager;
import org.apache.jackrabbit.webdav.lock.Type;
import org.apache.archiva.webdav.util.MimeTypes;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -45,9 +45,10 @@
import org.springframework.test.context.ContextConfiguration;

import javax.inject.Inject;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;

@RunWith( ArchivaSpringJUnit4ClassRunner.class )
@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath*:/spring-context.xml" } )
@ -66,11 +67,11 @@ public class DavResourceTest

private DavResourceFactory resourceFactory;

private File baseDir;
private Path baseDir;

private final String REPOPATH = "myresource.jar";

private File myResource;
private Path myResource;

private DavResource resource;

@ -85,10 +86,10 @@ public void setUp()
{
super.setUp();
session = new ArchivaDavSession();
baseDir = new File( "target/DavResourceTest" );
baseDir.mkdirs();
myResource = new File( baseDir, "myresource.jar" );
assertTrue( "Could not create " + myResource.getAbsolutePath(), myResource.createNewFile() );
baseDir = Paths.get( "target/DavResourceTest" );
Files.createDirectories( baseDir );
myResource = baseDir.resolve( "myresource.jar" );
Files.createFile(myResource);
resourceFactory = new RootContextDavResourceFactory();

resourceLocator =
@ -104,12 +105,12 @@ public void tearDown()
throws Exception
{
super.tearDown();
FileUtils.deleteDirectory( baseDir );
org.apache.archiva.common.utils.FileUtils.deleteDirectory( baseDir );
}

private DavResource getDavResource( String logicalPath, File file )
private DavResource getDavResource( String logicalPath, Path file )
{
return new ArchivaDavResource( file.getAbsolutePath(), logicalPath, repository, session, resourceLocator,
return new ArchivaDavResource( file.toAbsolutePath().toString(), logicalPath, repository, session, resourceLocator,
resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(), null, fileLockManager );
}

@ -117,7 +118,7 @@ private DavResource getDavResource( String logicalPath, File file )
public void testDeleteNonExistantResourceShould404()
throws Exception
{
File dir = new File( baseDir, "testdir" );
Path dir = baseDir.resolve( "testdir" );
try
{
DavResource directoryResource = getDavResource( "/testdir", dir );
@ -134,17 +135,17 @@ public void testDeleteNonExistantResourceShould404()
public void testDeleteCollection()
throws Exception
{
File dir = new File( baseDir, "testdir" );
Path dir = baseDir.resolve( "testdir" );
try
{
assertTrue( dir.mkdir() );
assertNotNull( Files.createDirectories(dir) );
DavResource directoryResource = getDavResource( "/testdir", dir );
directoryResource.getCollection().removeMember( directoryResource );
assertFalse( dir.exists() );
assertFalse( Files.exists(dir) );
}
finally
{
FileUtils.deleteDirectory( dir );
org.apache.archiva.common.utils.FileUtils.deleteDirectory( dir );
}
}

@ -152,9 +153,9 @@ public void testDeleteCollection()
public void testDeleteResource()
throws Exception
{
assertTrue( myResource.exists() );
assertTrue( Files.exists(myResource) );
resource.getCollection().removeMember( resource );
assertFalse( myResource.exists() );
assertFalse( Files.exists(myResource) );
}

@Test
@ -329,7 +330,7 @@ public DavResource createResource( DavResourceLocator locator, DavServletRequest
public DavResource createResource( DavResourceLocator locator, DavSession session )
throws DavException
{
return new ArchivaDavResource( baseDir.getAbsolutePath(), "/", repository, session, resourceLocator,
return new ArchivaDavResource( baseDir.toAbsolutePath().toString(), "/", repository, session, resourceLocator,
resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(),
null, fileLockManager );
}
@ -31,7 +31,7 @@
import org.junit.Test;

import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.List;

@ -48,10 +48,10 @@ public void setUp()
{
super.setUp();

new File( repoRootInternal, "org/apache/archiva" ).mkdirs();
new File( repoRootInternal, "org/codehaus/mojo/" ).mkdirs();
new File( repoRootInternal, "net/sourceforge" ).mkdirs();
new File( repoRootInternal, "commons-lang" ).mkdirs();
Files.createDirectories( repoRootInternal.resolve( "org/apache/archiva" ));
Files.createDirectories( repoRootInternal.resolve( "org/codehaus/mojo/" ));
Files.createDirectories( repoRootInternal.resolve("net/sourceforge" ));
Files.createDirectories( repoRootInternal.resolve("commons-lang" ));

startRepository();
}
@ -94,8 +94,8 @@ public void testBrowseSubdirectory()
public void testGetDirectoryWhichHasMatchingFile() //MRM-893
throws Exception
{
new File( repoRootInternal, "org/apache/archiva/artifactId/1.0" ).mkdirs();
new File( repoRootInternal, "org/apache/archiva/artifactId/1.0/artifactId-1.0.jar" ).createNewFile();
Files.createDirectories( repoRootInternal.resolve("org/apache/archiva/artifactId/1.0" ));
Files.createFile( repoRootInternal.resolve("org/apache/archiva/artifactId/1.0/artifactId-1.0.jar" ));

WebRequest request =
new GetMethodWebRequest( "http://machine.com/repository/internal/org/apache/archiva/artifactId" );
@ -19,8 +19,10 @@
* under the License.
*/

import java.io.File;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

import javax.servlet.http.HttpServletResponse;

@ -237,8 +239,8 @@ public void testMkColWithMissingParentCollectionFails()

assertEquals(HttpServletResponse.SC_CONFLICT, response.getStatusCode());

File mkColLocalPath = new File(repoRootInternal, "path/to/");
assertFalse(mkColLocalPath.exists());
Path mkColLocalPath = repoRootInternal.resolve( "path/to/");
assertFalse( Files.exists(mkColLocalPath));
}

@Test
@ -21,12 +21,12 @@

import com.gargoylesoftware.htmlunit.WebRequest;
import com.gargoylesoftware.htmlunit.WebResponse;
import org.apache.commons.io.FileUtils;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;

/**
* RepositoryServletTest
@ -50,10 +50,10 @@ public void testGetVersionMetadataDefaultLayout()
String commonsLangMetadata = "commons-lang/commons-lang/2.1/maven-metadata.xml";
String expectedMetadataContents = "metadata-for-commons-lang-version-2.1";

File checksumFile = new File( repoRootInternal, commonsLangMetadata );
checksumFile.getParentFile().mkdirs();
Path checksumFile = repoRootInternal.resolve(commonsLangMetadata);
Files.createDirectories(checksumFile.getParent());

FileUtils.writeStringToFile( checksumFile, expectedMetadataContents, Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( checksumFile, Charset.defaultCharset(), expectedMetadataContents );

WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + commonsLangMetadata );
WebResponse response = getServletUnitClient().getResponse( request );
@ -69,10 +69,10 @@ public void testGetProjectMetadataDefaultLayout()
String commonsLangMetadata = "commons-lang/commons-lang/maven-metadata.xml";
String expectedMetadataContents = "metadata-for-commons-lang-version-for-project";

File checksumFile = new File( repoRootInternal, commonsLangMetadata );
checksumFile.getParentFile().mkdirs();
Path checksumFile = repoRootInternal.resolve(commonsLangMetadata);
Files.createDirectories(checksumFile.getParent());

FileUtils.writeStringToFile( checksumFile, expectedMetadataContents, Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( checksumFile, Charset.defaultCharset(), expectedMetadataContents );

WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + commonsLangMetadata );
WebResponse response = getServletUnitClient().getResponse( request );
@ -88,10 +88,10 @@ public void testGetGroupMetadataDefaultLayout()
String commonsLangMetadata = "commons-lang/maven-metadata.xml";
String expectedMetadataContents = "metadata-for-commons-lang-group";

File checksumFile = new File( repoRootInternal, commonsLangMetadata );
checksumFile.getParentFile().mkdirs();
Path checksumFile = repoRootInternal.resolve(commonsLangMetadata);
Files.createDirectories(checksumFile.getParent());

FileUtils.writeStringToFile( checksumFile, expectedMetadataContents, Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( checksumFile, Charset.defaultCharset() , expectedMetadataContents);

WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + commonsLangMetadata );
WebResponse response = getServletUnitClient().getResponse( request );
@ -108,10 +108,10 @@ public void testGetSnapshotVersionMetadataDefaultLayout()
"org/apache/maven/plugins/maven-assembly-plugin/2.2-beta-2-SNAPSHOT/maven-metadata.xml";
String expectedMetadataContents = "metadata-for-assembly-plugin-version-2.2-beta-2-SNAPSHOT";

File checksumFile = new File( repoRootInternal, assemblyPluginMetadata );
checksumFile.getParentFile().mkdirs();
Path checksumFile = repoRootInternal.resolve(assemblyPluginMetadata);
Files.createDirectories(checksumFile.getParent());

FileUtils.writeStringToFile( checksumFile, expectedMetadataContents, Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( checksumFile, Charset.defaultCharset(), expectedMetadataContents );

WebRequest request =
new GetMethodWebRequest( "http://machine.com/repository/internal/" + assemblyPluginMetadata );
@ -23,17 +23,17 @@
import com.gargoylesoftware.htmlunit.WebRequest;
import com.gargoylesoftware.htmlunit.WebResponse;
import org.apache.archiva.configuration.ProxyConnectorConfiguration;
import org.apache.commons.io.FileUtils;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.nio.charset.Charset;
import java.util.ArrayList;
import javax.servlet.http.HttpServletResponse;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;

import javax.servlet.http.HttpServletResponse;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;

/**
* RepositoryServletTest
*/
@ -57,10 +57,10 @@ public void testLastModifiedHeaderExists()
{
String commonsLangSha1 = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar.sha1";

File checksumFile = new File( repoRootInternal, commonsLangSha1 );
checksumFile.getParentFile().mkdirs();
Path checksumFile = repoRootInternal.resolve( commonsLangSha1 );
Files.createDirectories(checksumFile.getParent());

FileUtils.writeStringToFile( checksumFile, "dummy-checksum", Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( checksumFile, Charset.defaultCharset(), "dummy-checksum" );

//WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + commonsLangSha1 );
WebResponse response = getWebResponse( "/repository/internal/" + commonsLangSha1 );
@ -73,10 +73,10 @@ public void testGetNoProxyChecksumDefaultLayout()
{
String commonsLangSha1 = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar.sha1";

File checksumFile = new File( repoRootInternal, commonsLangSha1 );
checksumFile.getParentFile().mkdirs();
Path checksumFile = repoRootInternal.resolve(commonsLangSha1);
Files.createDirectories(checksumFile.getParent());

FileUtils.writeStringToFile( checksumFile, "dummy-checksum", Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( checksumFile, Charset.defaultCharset(), "dummy-checksum");

//WebRequest request = new WebRequest( "http://machine.com/repository/internal/" + commonsLangSha1 );
WebResponse response = getWebResponse( "/repository/internal/" + commonsLangSha1 );
@ -91,10 +91,10 @@ public void testGetNoProxyChecksumLegacyLayout()
{
String commonsLangSha1 = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar.sha1";

File checksumFile = new File( repoRootInternal, commonsLangSha1 );
checksumFile.getParentFile().mkdirs();
Path checksumFile = repoRootInternal.resolve(commonsLangSha1);
Files.createDirectories(checksumFile.getParent());

FileUtils.writeStringToFile( checksumFile, "dummy-checksum", Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( checksumFile, Charset.defaultCharset() , "dummy-checksum");

//WebRequest request = new GetMethodWebRequest(
// "http://machine.com/repository/internal/" + "commons-lang/jars/commons-lang-2.1.jar.sha1" );
@ -110,10 +110,10 @@ public void testGetNoProxyVersionedMetadataDefaultLayout()
String commonsLangMetadata = "commons-lang/commons-lang/2.1/maven-metadata.xml";
String expectedMetadataContents = "dummy-versioned-metadata";

File metadataFile = new File( repoRootInternal, commonsLangMetadata );
metadataFile.getParentFile().mkdirs();
Path metadataFile = repoRootInternal.resolve(commonsLangMetadata);
Files.createDirectories(metadataFile.getParent());

FileUtils.writeStringToFile( metadataFile, expectedMetadataContents, Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( metadataFile, Charset.defaultCharset(), expectedMetadataContents );

//WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + commonsLangMetadata );
WebResponse response = getWebResponse( "/repository/internal/" + commonsLangMetadata );
@ -129,10 +129,10 @@ public void testGetNoProxyProjectMetadataDefaultLayout()
String commonsLangMetadata = "commons-lang/commons-lang/maven-metadata.xml";
String expectedMetadataContents = "dummy-project-metadata";

File metadataFile = new File( repoRootInternal, commonsLangMetadata );
metadataFile.getParentFile().mkdirs();
Path metadataFile = repoRootInternal.resolve(commonsLangMetadata);
Files.createDirectories(metadataFile.getParent());

FileUtils.writeStringToFile( metadataFile, expectedMetadataContents, Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( metadataFile, Charset.defaultCharset(), expectedMetadataContents );

//WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + commonsLangMetadata );
WebResponse response = getWebResponse( "/repository/internal/" + commonsLangMetadata );
@ -148,10 +148,10 @@ public void testGetNoProxyGroupMetadataDefaultLayout()
String commonsLangMetadata = "commons-lang/maven-metadata.xml";
String expectedMetadataContents = "dummy-group-metadata";

File metadataFile = new File( repoRootInternal, commonsLangMetadata );
metadataFile.getParentFile().mkdirs();
Path metadataFile = repoRootInternal.resolve(commonsLangMetadata);
Files.createDirectories(metadataFile.getParent());

FileUtils.writeStringToFile( metadataFile, expectedMetadataContents, Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( metadataFile, Charset.defaultCharset(), expectedMetadataContents );

//WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + commonsLangMetadata );
WebResponse response = getWebResponse( "/repository/internal/" + commonsLangMetadata );
@ -167,10 +167,10 @@ public void testGetNoProxyArtifactDefaultLayout()
String commonsLangJar = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar";
String expectedArtifactContents = "dummy-commons-lang-artifact";

File artifactFile = new File( repoRootInternal, commonsLangJar );
artifactFile.getParentFile().mkdirs();
Path artifactFile = repoRootInternal.resolve(commonsLangJar);
Files.createDirectories(artifactFile.getParent());

FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( artifactFile, Charset.defaultCharset(), expectedArtifactContents );

//WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + commonsLangJar );
WebResponse response = getWebResponse( "/repository/internal/" + commonsLangJar );
@ -186,10 +186,10 @@ public void testGetNoProxyArtifactLegacyLayout()
String commonsLangJar = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar";
String expectedArtifactContents = "dummy-commons-lang-artifact";

File artifactFile = new File( repoRootInternal, commonsLangJar );
artifactFile.getParentFile().mkdirs();
Path artifactFile = repoRootInternal.resolve(commonsLangJar);
Files.createDirectories(artifactFile.getParent());

FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( artifactFile, Charset.defaultCharset(), expectedArtifactContents );

//WebRequest request = new GetMethodWebRequest(
// "http://machine.com/repository/internal/" + "commons-lang/jars/commons-lang-2.1.jar" );
@ -205,10 +205,10 @@ public void testGetNoProxySnapshotArtifactDefaultLayout()
String commonsLangJar = "commons-lang/commons-lang/2.1-SNAPSHOT/commons-lang-2.1-SNAPSHOT.jar";
String expectedArtifactContents = "dummy-commons-lang-snapshot-artifact";

File artifactFile = new File( repoRootInternal, commonsLangJar );
artifactFile.getParentFile().mkdirs();
Path artifactFile = repoRootInternal.resolve(commonsLangJar);
Files.createDirectories(artifactFile.getParent());

FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( artifactFile, Charset.defaultCharset() , expectedArtifactContents);

//WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + commonsLangJar );
WebResponse response = getWebResponse( "/repository/internal/" + commonsLangJar );
@ -224,10 +224,10 @@ public void testGetNoProxySnapshotArtifactLegacyLayout()
String commonsLangJar = "commons-lang/commons-lang/2.1-SNAPSHOT/commons-lang-2.1-SNAPSHOT.jar";
|
||||
String expectedArtifactContents = "dummy-commons-lang-snapshot-artifact";
|
||||
|
||||
File artifactFile = new File( repoRootInternal, commonsLangJar );
|
||||
artifactFile.getParentFile().mkdirs();
|
||||
Path artifactFile = repoRootInternal.resolve(commonsLangJar);
|
||||
Files.createDirectories(artifactFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile( artifactFile, Charset.defaultCharset() , expectedArtifactContents);
|
||||
|
||||
//WebRequest request = new GetMethodWebRequest(
|
||||
// "http://machine.com/repository/internal/" + "commons-lang/jars/commons-lang-2.1-SNAPSHOT.jar" );
|
||||
@ -242,10 +242,10 @@ public void testGetNoProxyTimestampedSnapshotArtifactDefaultLayout()
|
||||
String commonsLangJar = "commons-lang/commons-lang/2.1-SNAPSHOT/commons-lang-2.1-20050821.023400-1.jar";
|
||||
String expectedArtifactContents = "dummy-commons-lang-snapshot-artifact";
|
||||
|
||||
File artifactFile = new File( repoRootInternal, commonsLangJar );
|
||||
artifactFile.getParentFile().mkdirs();
|
||||
Path artifactFile = repoRootInternal.resolve(commonsLangJar);
|
||||
Files.createDirectories(artifactFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(artifactFile, Charset.defaultCharset(), expectedArtifactContents);
|
||||
|
||||
//WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + commonsLangJar );
|
||||
WebResponse response = getWebResponse( "/repository/internal/" + commonsLangJar );
|
||||
@ -261,10 +261,10 @@ public void testGetNoProxyTimestampedSnapshotArtifactLegacyLayout()
|
||||
String commonsLangJar = "commons-lang/commons-lang/2.1-SNAPSHOT/commons-lang-2.1-20050821.023400-1.jar";
|
||||
String expectedArtifactContents = "dummy-commons-lang-snapshot-artifact";
|
||||
|
||||
File artifactFile = new File( repoRootInternal, commonsLangJar );
|
||||
artifactFile.getParentFile().mkdirs();
|
||||
Path artifactFile = repoRootInternal.resolve(commonsLangJar);
|
||||
Files.createDirectories(artifactFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(artifactFile, Charset.defaultCharset(), expectedArtifactContents);
|
||||
|
||||
WebRequest request = new GetMethodWebRequest(
|
||||
"http://machine.com/repository/internal/" + "commons-lang/jars/commons-lang-2.1-20050821.023400-1.jar" );
|
||||
@ -282,10 +282,10 @@ public void testGetNoProxyDualExtensionDefaultLayout()
|
||||
String expectedContents = "the-contents-of-the-dual-extension";
|
||||
String dualExtensionPath = "org/project/example-presentation/3.2/example-presentation-3.2.xml.zip";
|
||||
|
||||
File checksumFile = new File( repoRootInternal, dualExtensionPath );
|
||||
checksumFile.getParentFile().mkdirs();
|
||||
Path checksumFile = repoRootInternal.resolve(dualExtensionPath);
|
||||
Files.createDirectories(checksumFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( checksumFile, expectedContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(checksumFile, Charset.defaultCharset(), expectedContents);
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + dualExtensionPath );
|
||||
WebResponse response = getServletUnitClient().getResponse( request );
|
||||
@ -301,10 +301,10 @@ public void testGetNoProxyDistributionLegacyLayout()
|
||||
String expectedContents = "the-contents-of-the-dual-extension";
|
||||
String dualExtensionPath = "org/project/example-presentation/3.2/example-presentation-3.2.zip";
|
||||
|
||||
File checksumFile = new File( repoRootInternal, dualExtensionPath );
|
||||
checksumFile.getParentFile().mkdirs();
|
||||
Path checksumFile = repoRootInternal.resolve(dualExtensionPath);
|
||||
Files.createDirectories(checksumFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( checksumFile, expectedContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(checksumFile, Charset.defaultCharset(), expectedContents);
|
||||
|
||||
WebRequest request = new GetMethodWebRequest(
|
||||
"http://machine.com/repository/internal/" + "org.project/distributions/example-presentation-3.2.zip" );
|
||||
@ -319,10 +319,10 @@ public void testGetNoProxyChecksumDefaultLayoutManagedLegacy()
|
||||
{
|
||||
String commonsLangSha1 = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar.sha1";
|
||||
|
||||
File checksumFile = new File( repoRootLegacy, "commons-lang/jars/commons-lang-2.1.jar.sha1" );
|
||||
checksumFile.getParentFile().mkdirs();
|
||||
Path checksumFile = repoRootLegacy.resolve( "commons-lang/jars/commons-lang-2.1.jar.sha1" );
|
||||
Files.createDirectories(checksumFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( checksumFile, "dummy-checksum", Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile( checksumFile, Charset.defaultCharset(), "dummy-checksum" );
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/legacy/" + commonsLangSha1 );
|
||||
WebResponse response = getServletUnitClient().getResponse( request );
|
||||
@ -334,10 +334,10 @@ public void testGetNoProxyChecksumLegacyLayoutManagedLegacy()
|
||||
throws Exception
|
||||
{
|
||||
String commonsLangSha1 = "commons-lang/jars/commons-lang-2.1.jar.sha1";
|
||||
File checksumFile = new File( repoRootLegacy, commonsLangSha1 );
|
||||
checksumFile.getParentFile().mkdirs();
|
||||
Path checksumFile = repoRootLegacy.resolve(commonsLangSha1);
|
||||
Files.createDirectories(checksumFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( checksumFile, "dummy-checksum", Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(checksumFile, Charset.defaultCharset(), "dummy-checksum");
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/legacy/" + commonsLangSha1 );
|
||||
WebResponse response = getServletUnitClient().getResponse( request );
|
||||
@ -352,10 +352,10 @@ public void testGetNoProxyVersionedMetadataDefaultLayoutManagedLegacy()
|
||||
String expectedMetadataContents = "dummy-versioned-metadata";
|
||||
|
||||
// TODO: find out what this should be from maven-artifact
|
||||
File metadataFile = new File( repoRootLegacy, commonsLangMetadata );
|
||||
metadataFile.getParentFile().mkdirs();
|
||||
Path metadataFile = repoRootLegacy.resolve(commonsLangMetadata);
|
||||
Files.createDirectories(metadataFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( metadataFile, expectedMetadataContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(metadataFile, Charset.defaultCharset(), expectedMetadataContents);
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/legacy/" + commonsLangMetadata );
|
||||
WebResponse response = getServletUnitClient().getResponse( request );
|
||||
@ -370,10 +370,10 @@ public void testGetNoProxyProjectMetadataDefaultLayoutManagedLegacy()
|
||||
String commonsLangMetadata = "commons-lang/commons-lang/maven-metadata.xml";
|
||||
String expectedMetadataContents = "dummy-project-metadata";
|
||||
|
||||
File metadataFile = new File( repoRootLegacy, commonsLangMetadata );
|
||||
metadataFile.getParentFile().mkdirs();
|
||||
Path metadataFile = repoRootLegacy.resolve(commonsLangMetadata);
|
||||
Files.createDirectories(metadataFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( metadataFile, expectedMetadataContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(metadataFile, Charset.defaultCharset(), expectedMetadataContents);
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/legacy/" + commonsLangMetadata );
|
||||
WebResponse response = getServletUnitClient().getResponse( request );
|
||||
@ -387,10 +387,10 @@ public void testGetNoProxyGroupMetadataDefaultLayoutManagedLegacy()
|
||||
String commonsLangMetadata = "commons-lang/maven-metadata.xml";
|
||||
String expectedMetadataContents = "dummy-group-metadata";
|
||||
|
||||
File metadataFile = new File( repoRootLegacy, commonsLangMetadata );
|
||||
metadataFile.getParentFile().mkdirs();
|
||||
Path metadataFile = repoRootLegacy.resolve(commonsLangMetadata);
|
||||
Files.createDirectories(metadataFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( metadataFile, expectedMetadataContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(metadataFile, Charset.defaultCharset(), expectedMetadataContents);
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/legacy/" + commonsLangMetadata );
|
||||
WebResponse response = getServletUnitClient().getResponse( request );
|
||||
@ -404,10 +404,10 @@ public void testGetNoProxyArtifactDefaultLayoutManagedLegacy()
|
||||
String commonsLangJar = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar";
|
||||
String expectedArtifactContents = "dummy-commons-lang-artifact";
|
||||
|
||||
File artifactFile = new File( repoRootLegacy, "commons-lang/jars/commons-lang-2.1.jar" );
|
||||
artifactFile.getParentFile().mkdirs();
|
||||
Path artifactFile = repoRootLegacy.resolve("commons-lang/jars/commons-lang-2.1.jar" );
|
||||
Files.createDirectories(artifactFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(artifactFile, Charset.defaultCharset(), expectedArtifactContents);
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/legacy/" + commonsLangJar );
|
||||
WebResponse response = getServletUnitClient().getResponse( request );
|
||||
@ -421,10 +421,10 @@ public void testGetNoProxyArtifactLegacyLayoutManagedLegacy()
|
||||
String commonsLangJar = "commons-lang/jars/commons-lang-2.1.jar";
|
||||
String expectedArtifactContents = "dummy-commons-lang-artifact";
|
||||
|
||||
File artifactFile = new File( repoRootLegacy, commonsLangJar );
|
||||
artifactFile.getParentFile().mkdirs();
|
||||
Path artifactFile = repoRootLegacy.resolve(commonsLangJar);
|
||||
Files.createDirectories(artifactFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(artifactFile, Charset.defaultCharset(), expectedArtifactContents);
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/legacy/" + commonsLangJar );
|
||||
WebResponse response = getServletUnitClient().getResponse( request );
|
||||
@ -438,10 +438,10 @@ public void testGetNoProxySnapshotArtifactDefaultLayoutManagedLegacy()
|
||||
String commonsLangJar = "commons-lang/commons-lang/2.1-SNAPSHOT/commons-lang-2.1-SNAPSHOT.jar";
|
||||
String expectedArtifactContents = "dummy-commons-lang-snapshot-artifact";
|
||||
|
||||
File artifactFile = new File( repoRootLegacy, "commons-lang/jars/commons-lang-2.1-SNAPSHOT.jar" );
|
||||
artifactFile.getParentFile().mkdirs();
|
||||
Path artifactFile = repoRootLegacy.resolve( "commons-lang/jars/commons-lang-2.1-SNAPSHOT.jar" );
|
||||
Files.createDirectories(artifactFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(artifactFile, Charset.defaultCharset(), expectedArtifactContents);
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/legacy/" + commonsLangJar );
|
||||
WebResponse response = getServletUnitClient().getResponse( request );
|
||||
@ -455,10 +455,10 @@ public void testGetNoProxySnapshotArtifactLegacyLayoutManagedLegacy()
|
||||
String commonsLangJar = "commons-lang/jars/commons-lang-2.1-SNAPSHOT.jar";
|
||||
String expectedArtifactContents = "dummy-commons-lang-snapshot-artifact";
|
||||
|
||||
File artifactFile = new File( repoRootLegacy, commonsLangJar );
|
||||
artifactFile.getParentFile().mkdirs();
|
||||
Path artifactFile = repoRootLegacy.resolve(commonsLangJar);
|
||||
Files.createDirectories(artifactFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(artifactFile, Charset.defaultCharset(), expectedArtifactContents);
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/legacy/" + commonsLangJar );
|
||||
WebResponse response = getServletUnitClient().getResponse( request );
|
||||
@ -473,10 +473,10 @@ public void testGetNoProxyTimestampedSnapshotArtifactDefaultLayoutManagedLegacy(
|
||||
String commonsLangJar = "commons-lang/commons-lang/2.1-SNAPSHOT/" + filename;
|
||||
String expectedArtifactContents = "dummy-commons-lang-snapshot-artifact";
|
||||
|
||||
File artifactFile = new File( repoRootLegacy, "commons-lang/jars/" + filename );
|
||||
artifactFile.getParentFile().mkdirs();
|
||||
Path artifactFile = repoRootLegacy.resolve( "commons-lang/jars/" + filename );
|
||||
Files.createDirectories(artifactFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(artifactFile, Charset.defaultCharset(), expectedArtifactContents);
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/legacy/" + commonsLangJar );
|
||||
WebResponse response = getServletUnitClient().getResponse( request );
|
||||
@ -490,10 +490,10 @@ public void testGetNoProxyTimestampedSnapshotArtifactLegacyLayoutManagedLegacy()
|
||||
String commonsLangJar = "commons-lang/jars/commons-lang-2.1-20050821.023400-1.jar";
|
||||
String expectedArtifactContents = "dummy-commons-lang-snapshot-artifact";
|
||||
|
||||
File artifactFile = new File( repoRootLegacy, commonsLangJar );
|
||||
artifactFile.getParentFile().mkdirs();
|
||||
Path artifactFile = repoRootLegacy.resolve(commonsLangJar);
|
||||
Files.createDirectories(artifactFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(artifactFile, Charset.defaultCharset(), expectedArtifactContents);
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/legacy/" + commonsLangJar );
|
||||
WebResponse response = getServletUnitClient().getResponse( request );
|
||||
@ -510,10 +510,10 @@ public void testGetNoProxyDualExtensionDefaultLayoutManagedLegacy()
|
||||
String expectedContents = "the-contents-of-the-dual-extension";
|
||||
String dualExtensionPath = "org/project/example-presentation/3.2/example-presentation-3.2.xml.zip";
|
||||
|
||||
File checksumFile = new File( repoRootLegacy, "org.project/distributions/example-presentation-3.2.xml.zip" );
|
||||
checksumFile.getParentFile().mkdirs();
|
||||
Path checksumFile = repoRootLegacy.resolve( "org.project/distributions/example-presentation-3.2.xml.zip" );
|
||||
Files.createDirectories(checksumFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( checksumFile, expectedContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(checksumFile, Charset.defaultCharset(), expectedContents);
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/legacy/" + dualExtensionPath );
|
||||
WebResponse response = getServletUnitClient().getResponse( request );
|
||||
@ -527,10 +527,10 @@ public void testGetNoProxyDistributionLegacyLayoutManagedLegacy()
|
||||
String expectedContents = "the-contents-of-the-dual-extension";
|
||||
String dualExtensionPath = "org.project/distributions/example-presentation-3.2.zip";
|
||||
|
||||
File checksumFile = new File( repoRootLegacy, dualExtensionPath );
|
||||
checksumFile.getParentFile().mkdirs();
|
||||
Path checksumFile = repoRootLegacy.resolve(dualExtensionPath);
|
||||
Files.createDirectories(checksumFile.getParent());
|
||||
|
||||
FileUtils.writeStringToFile( checksumFile, expectedContents, Charset.defaultCharset() );
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(checksumFile, Charset.defaultCharset(), expectedContents);
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/legacy/" + dualExtensionPath );
|
||||
WebResponse response = getServletUnitClient().getResponse( request );
|
||||
@ -548,13 +548,13 @@ public void testGetNoProxySnapshotRedirectToTimestampedSnapshot()
|
||||
|
||||
archivaConfiguration.getConfiguration().getWebapp().getUi().setApplicationUrl("http://localhost");
|
||||
|
||||
File artifactFile = new File( repoRootInternal, commonsLangJar );
|
||||
artifactFile.getParentFile().mkdirs();
|
||||
FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
|
||||
Path artifactFile = repoRootInternal.resolve(commonsLangJar);
|
||||
Files.createDirectories(artifactFile.getParent());
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile(artifactFile, Charset.defaultCharset(), expectedArtifactContents);
|
||||
|
||||
File metadataFile = new File( repoRootInternal, commonsLangMetadata );
|
||||
metadataFile.getParentFile().mkdirs();
|
||||
FileUtils.writeStringToFile( metadataFile, createVersionMetadata("commons-lang", "commons-lang",
|
||||
Path metadataFile = repoRootInternal.resolve(commonsLangMetadata);
|
||||
Files.createDirectories(metadataFile.getParent());
|
||||
org.apache.archiva.common.utils.FileUtils.writeStringToFile( metadataFile, Charset.defaultCharset(), createVersionMetadata("commons-lang", "commons-lang",
|
||||
"2.1-SNAPSHOT", "20050821.023400", "1", "20050821.023400"));
|
||||
|
||||
WebRequest webRequest = new GetMethodWebRequest(
|
||||
|
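The hunks above all follow one java.nio pattern: build the target with Path.resolve(), create missing parents with Files.createDirectories(), then write through the project's FileUtils helper, whose Path-based overload takes the Charset before the content (the reverse of the commons-io argument order). A minimal standalone sketch of the same idiom using only the JDK follows; the repository root and artifact path are illustrative only, not taken from the Archiva sources.

import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class NioWriteSketch
{
    public static void main( String[] args ) throws Exception
    {
        // Illustrative locations only; the real tests resolve against repoRootInternal / repoRootLegacy.
        Path repoRoot = Paths.get( "target/example-repo" );
        Path artifactFile = repoRoot.resolve( "commons-lang/commons-lang/2.1/commons-lang-2.1.jar" );

        // Creates every missing parent directory; does nothing if they already exist.
        Files.createDirectories( artifactFile.getParent() );

        // Plain-JDK stand-in for the write helper used in the tests: bytes with an explicit charset.
        Files.write( artifactFile, "dummy-commons-lang-artifact".getBytes( Charset.defaultCharset() ) );

        System.out.println( "wrote " + Files.size( artifactFile ) + " bytes" );
    }
}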
@ -19,7 +19,6 @@
* under the License.
*/

import java.io.File;

import com.gargoylesoftware.htmlunit.WebRequest;
import com.gargoylesoftware.htmlunit.WebResponse;
@ -27,6 +26,10 @@
import org.junit.Before;
import org.junit.Test;

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;

/**
* RepositoryServlet Tests, Proxied, Get of resources that are not artifacts or metadata, with varying policy settings.
*
@ -136,13 +139,13 @@ private void assertGetProxiedResource( int expectation, boolean hasManagedCopy,

String expectedRemoteContents = contents;
String expectedManagedContents = null;
File remoteFile = populateRepo( remoteCentral, path, expectedRemoteContents );
Path remoteFile = populateRepo( remoteCentral, path, expectedRemoteContents );

if ( hasManagedCopy )
{
expectedManagedContents = contents;
File managedFile = populateRepo( repoRootInternal, path, expectedManagedContents );
managedFile.setLastModified( remoteFile.lastModified() + deltaManagedToRemoteTimestamp );
Path managedFile = populateRepo( repoRootInternal, path, expectedManagedContents );
Files.setLastModifiedTime( managedFile, FileTime.fromMillis( Files.getLastModifiedTime( remoteFile ).toMillis() + deltaManagedToRemoteTimestamp ));
}

setupConnector( REPOID_INTERNAL, remoteCentral );

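In this hunk and the ones that follow, File.setLastModified()/lastModified() become Files.setLastModifiedTime()/Files.getLastModifiedTime() with an explicit FileTime. A small self-contained sketch of that conversion, assuming two scratch files created on the fly (names and the offset are illustrative only):

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;

public class NioTimestampSketch
{
    public static void main( String[] args ) throws Exception
    {
        // Two temporary files standing in for the remote and managed copies.
        Path remoteFile = Files.createTempFile( "remote", ".jar" );
        Path managedFile = Files.createTempFile( "managed", ".jar" );

        long deltaManagedToRemoteTimestamp = 55000; // illustrative offset in milliseconds

        // Read the remote timestamp, shift it, and stamp the managed copy with it.
        FileTime shifted = FileTime.fromMillis(
            Files.getLastModifiedTime( remoteFile ).toMillis() + deltaManagedToRemoteTimestamp );
        Files.setLastModifiedTime( managedFile, shifted );

        System.out.println( "managed copy stamped at " + Files.getLastModifiedTime( managedFile ) );
    }
}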
@ -26,7 +26,9 @@
import org.apache.archiva.policies.SnapshotsPolicy;
import org.junit.Test;

import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;

/**
@ -169,13 +171,13 @@ private void assertGetProxiedSnapshotsArtifactWithPolicy( int expectation, Strin
String resourcePath = "org/apache/archiva/archivatest-maven-plugin/4.0-alpha-1-SNAPSHOT/archivatest-maven-plugin-4.0-alpha-1-20070822.033400-42.jar";
String expectedRemoteContents = "archivatest-maven-plugin-4.0-alpha-1-20070822.033400-42|jar-remote-contents";
String expectedManagedContents = null;
File remoteFile = populateRepo( remoteSnapshots, resourcePath, expectedRemoteContents );
Path remoteFile = populateRepo( remoteSnapshots, resourcePath, expectedRemoteContents );

if ( hasManagedCopy )
{
expectedManagedContents = "archivatest-maven-plugin-4.0-alpha-1-20070822.033400-42|jar-managed-contents";
File managedFile = populateRepo( repoRootInternal, resourcePath, expectedManagedContents );
managedFile.setLastModified( remoteFile.lastModified() + deltaManagedToRemoteTimestamp );
Path managedFile = populateRepo( repoRootInternal, resourcePath, expectedManagedContents );
Files.setLastModifiedTime( managedFile, FileTime.fromMillis( Files.getLastModifiedTime( remoteFile).toMillis() + deltaManagedToRemoteTimestamp ));
}

archivaConfiguration.getConfiguration().setProxyConnectors( new ArrayList<ProxyConnectorConfiguration>( 0 ) );

@ -25,7 +25,9 @@
import org.apache.archiva.policies.ReleasesPolicy;
import org.junit.Test;

import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;

/**
@ -165,13 +167,14 @@ private void assertGetProxiedReleaseArtifactWithPolicy( int expectation, String
String resourcePath = "org/apache/archiva/test/1.0/test-1.0.jar";
String expectedRemoteContents = "archiva-test-1.0|jar-remote-contents";
String expectedManagedContents = null;
File remoteFile = populateRepo( remoteCentral, resourcePath, expectedRemoteContents );
Path remoteFile = populateRepo( remoteCentral, resourcePath, expectedRemoteContents );

if ( hasManagedCopy )
{
expectedManagedContents = "archiva-test-1.0|jar-managed-contents";
File managedFile = populateRepo( repoRootInternal, resourcePath, expectedManagedContents );
managedFile.setLastModified( remoteFile.lastModified() + deltaManagedToRemoteTimestamp );
Path managedFile = populateRepo( repoRootInternal, resourcePath, expectedManagedContents );
Files.setLastModifiedTime(
managedFile, FileTime.fromMillis( Files.getLastModifiedTime( remoteFile ).toMillis() + deltaManagedToRemoteTimestamp ));
}

archivaConfiguration.getConfiguration().setProxyConnectors( new ArrayList<ProxyConnectorConfiguration>() );

@ -28,7 +28,9 @@
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;

/**
@ -191,13 +193,13 @@ private void assertGetProxiedSnapshotsArtifactWithPolicy( int expectation, Strin
String resourcePath = "org/apache/archiva/test/2.0-SNAPSHOT/test-2.0-SNAPSHOT.jar";
String expectedRemoteContents = "archiva-test-2.0-SNAPSHOT|jar-remote-contents";
String expectedManagedContents = null;
File remoteFile = populateRepo( remoteSnapshots, resourcePath, expectedRemoteContents );
Path remoteFile = populateRepo( remoteSnapshots, resourcePath, expectedRemoteContents );

if ( hasManagedCopy )
{
expectedManagedContents = "archiva-test-2.0-SNAPSHOT|jar-managed-contents";
File managedFile = populateRepo( repoRootInternal, resourcePath, expectedManagedContents );
managedFile.setLastModified( remoteFile.lastModified() + deltaManagedToRemoteTimestamp );
Path managedFile = populateRepo( repoRootInternal, resourcePath, expectedManagedContents );
Files.setLastModifiedTime( managedFile, FileTime.fromMillis(Files.getLastModifiedTime( remoteFile ).toMillis() + deltaManagedToRemoteTimestamp ));
}

setupSnapshotConnector( REPOID_INTERNAL, remoteSnapshots, snapshotsPolicy );

@ -27,7 +27,9 @@
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;

/**
@ -187,13 +189,14 @@ private void assertGetProxiedSnapshotsArtifactWithPolicy( int expectation, Strin
String resourcePath = "org/apache/archiva/test/3.0-SNAPSHOT/test-3.0-20070822.033400-42.jar";
String expectedRemoteContents = "archiva-test-3.0-20070822.033400-42|jar-remote-contents";
String expectedManagedContents = null;
File remoteFile = populateRepo( remoteSnapshots, resourcePath, expectedRemoteContents );
Path remoteFile = populateRepo( remoteSnapshots, resourcePath, expectedRemoteContents );

if ( hasManagedCopy )
{
expectedManagedContents = "archiva-test-3.0-20070822.033400-42|jar-managed-contents";
File managedFile = populateRepo( repoRootInternal, resourcePath, expectedManagedContents );
managedFile.setLastModified( remoteFile.lastModified() + deltaManagedToRemoteTimestamp );
Path managedFile = populateRepo( repoRootInternal, resourcePath, expectedManagedContents );
Files.setLastModifiedTime( managedFile, FileTime.fromMillis(
Files.getLastModifiedTime( remoteFile ).toMillis() + deltaManagedToRemoteTimestamp ));
}

setupSnapshotConnector( REPOID_INTERNAL, remoteSnapshots, snapshotsPolicy );
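The policy test classes above keep their java.io.File import while individual statements move to Path, so both APIs coexist during the migration. Where they meet, the JDK bridges them with File.toPath() and Path.toFile(); a short interop sketch (the temp-file name is illustrative only):

import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;

public class FilePathInteropSketch
{
    public static void main( String[] args ) throws Exception
    {
        // A legacy API hands out a java.io.File ...
        File legacy = File.createTempFile( "interop", ".tmp" );

        // ... which converts to a Path for the java.nio.file API ...
        Path asPath = legacy.toPath();
        System.out.println( "exists via NIO: " + Files.exists( asPath ) );

        // ... and back to a File for code that has not been migrated yet.
        File roundTrip = asPath.toFile();
        System.out.println( "same location: " + roundTrip.equals( legacy ) );
    }
}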

@ -27,19 +27,21 @@
import org.apache.archiva.configuration.RepositoryGroupConfiguration;
import org.apache.archiva.maven2.metadata.MavenMetadataReader;
import org.apache.archiva.model.ArchivaRepositoryMetadata;
import org.apache.commons.io.FileUtils;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

import static org.assertj.core.api.Assertions.assertThat;


/**
* RepositoryServletRepositoryGroupTest
@ -59,11 +61,11 @@
public class RepositoryServletRepositoryGroupTest
extends AbstractRepositoryServletTestCase
{
protected File repoRootFirst;
protected Path repoRootFirst;

protected File repoRootLast;
protected Path repoRootLast;

protected File repoRootInvalid;
protected Path repoRootInvalid;

protected static final String MANAGED_REPO_FIRST = "first";

@ -87,8 +89,8 @@ public void setUp()

Configuration configuration = archivaConfiguration.getConfiguration();

repoRootFirst = new File( appserverBase, "data/repositories/" + MANAGED_REPO_FIRST );
repoRootLast = new File( appserverBase, "data/repositories/" + MANAGED_REPO_LAST );
repoRootFirst = Paths.get( appserverBase, "data/repositories/" + MANAGED_REPO_FIRST );
repoRootLast = Paths.get( appserverBase, "data/repositories/" + MANAGED_REPO_LAST );

configuration.addManagedRepository(
createManagedRepository( MANAGED_REPO_FIRST, "First Test Repo", repoRootFirst, true ) );
@ -102,7 +104,7 @@ public void setUp()
configuration.addRepositoryGroup( createRepositoryGroup( REPO_GROUP_WITH_VALID_REPOS, managedRepoIds ) );

// Create the repository group with an invalid managed repository
repoRootInvalid = new File( appserverBase, "data/repositories/" + MANAGED_REPO_INVALID );
repoRootInvalid = Paths.get( appserverBase, "data/repositories/" + MANAGED_REPO_INVALID );
ManagedRepositoryConfiguration managedRepositoryConfiguration =
createManagedRepository( MANAGED_REPO_INVALID, "Invalid Test Repo", repoRootInvalid, true );

@ -121,7 +123,7 @@ public void setUp()
createRepositoryGroup( REPO_GROUP_WITH_INVALID_REPOS, invalidManagedRepoIds ) );

configuration.removeManagedRepository( managedRepositoryConfiguration );
FileUtils.deleteDirectory( repoRootInvalid );
org.apache.archiva.common.utils.FileUtils.deleteDirectory( repoRootInvalid );

saveConfiguration( archivaConfiguration );

@ -148,9 +150,9 @@ public void testGetFromFirstManagedRepositoryReturnOk()
{
String resourceName = "dummy/dummy-first-resource/1.0/dummy-first-resource-1.0.txt";

File dummyInternalResourceFile = new File( repoRootFirst, resourceName );
dummyInternalResourceFile.getParentFile().mkdirs();
FileUtils.writeStringToFile( dummyInternalResourceFile, "first", Charset.defaultCharset() );
Path dummyInternalResourceFile = repoRootFirst.resolve( resourceName );
Files.createDirectories( dummyInternalResourceFile.getParent() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( dummyInternalResourceFile, Charset.defaultCharset(), "first" );

WebRequest request = new GetMethodWebRequest(
"http://machine.com/repository/" + REPO_GROUP_WITH_VALID_REPOS + "/" + resourceName );
@ -169,9 +171,9 @@ public void testGetFromLastManagedRepositoryReturnOk()
{
String resourceName = "dummy/dummy-last-resource/1.0/dummy-last-resource-1.0.txt";

File dummyReleasesResourceFile = new File( repoRootLast, resourceName );
dummyReleasesResourceFile.getParentFile().mkdirs();
FileUtils.writeStringToFile( dummyReleasesResourceFile, "last", Charset.defaultCharset() );
Path dummyReleasesResourceFile = repoRootLast.resolve( resourceName );
Files.createDirectories(dummyReleasesResourceFile.getParent());
org.apache.archiva.common.utils.FileUtils.writeStringToFile(dummyReleasesResourceFile, Charset.defaultCharset(), "last");

WebRequest request = new GetMethodWebRequest(
"http://machine.com/repository/" + REPO_GROUP_WITH_VALID_REPOS + "/" + resourceName );
@ -239,32 +241,32 @@ public void testGetMergedMetadata()
// first metadata file
String resourceName = "dummy/dummy-merged-metadata-resource/maven-metadata.xml";

File dummyInternalResourceFile = new File( repoRootFirst, resourceName );
dummyInternalResourceFile.getParentFile().mkdirs();
FileUtils.writeStringToFile( dummyInternalResourceFile, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
Path dummyInternalResourceFile = repoRootFirst.resolve( resourceName );
Files.createDirectories(dummyInternalResourceFile.getParent());
org.apache.archiva.common.utils.FileUtils.writeStringToFile( dummyInternalResourceFile,
Charset.defaultCharset(), "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
+ "<metadata>\n<groupId>dummy</groupId>\n<artifactId>dummy-merged-metadata-resource</artifactId>\n"
+ "<versioning>\n<latest>1.0</latest>\n<release>1.0</release>\n<versions>\n<version>1.0</version>\n"
+ "<version>2.5</version>\n</versions>\n<lastUpdated>20080708095554</lastUpdated>\n</versioning>\n</metadata>",
Charset.defaultCharset() );
+ "<version>2.5</version>\n</versions>\n<lastUpdated>20080708095554</lastUpdated>\n</versioning>\n</metadata>" );

//second metadata file
resourceName = "dummy/dummy-merged-metadata-resource/maven-metadata.xml";
dummyInternalResourceFile = new File( repoRootLast, resourceName );
dummyInternalResourceFile.getParentFile().mkdirs();
FileUtils.writeStringToFile( dummyInternalResourceFile, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
dummyInternalResourceFile = repoRootLast.resolve( resourceName );
Files.createDirectories(dummyInternalResourceFile.getParent());
org.apache.archiva.common.utils.FileUtils.writeStringToFile( dummyInternalResourceFile, Charset.defaultCharset(), "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ "<metadata><groupId>dummy</groupId><artifactId>dummy-merged-metadata-resource</artifactId>"
+ "<versioning><latest>2.0</latest><release>2.0</release><versions><version>1.0</version>"
+ "<version>1.5</version><version>2.0</version></versions><lastUpdated>20080709095554</lastUpdated>"
+ "</versioning></metadata>", Charset.defaultCharset() );
+ "</versioning></metadata>" );

WebRequest request = new GetMethodWebRequest(
"http://machine.com/repository/" + REPO_GROUP_WITH_VALID_REPOS + "/dummy/"
+ "dummy-merged-metadata-resource/maven-metadata.xml" );
WebResponse response = getServletUnitClient().getResource( request );

File returnedMetadata = new File( "target/test-classes/retrievedMetadataFile.xml" );
FileUtils.writeStringToFile( returnedMetadata, response.getContentAsString() );
ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( returnedMetadata.toPath() );
Path returnedMetadata = Paths.get( "target/test-classes/retrievedMetadataFile.xml" );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( returnedMetadata, Charset.defaultCharset(), response.getContentAsString() );
ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( returnedMetadata );

assertResponseOK( response );

@ -273,13 +275,13 @@ public void testGetMergedMetadata()

//check if the checksum files were generated
File checksumFileSha1 = new File( repoRootFirst, resourceName + ".sha1" );
checksumFileSha1.getParentFile().mkdirs();
FileUtils.writeStringToFile( checksumFileSha1, "3290853214d3687134", Charset.defaultCharset() );
Path checksumFileSha1 = repoRootFirst.resolve( resourceName + ".sha1" );
Files.createDirectories(checksumFileSha1.getParent());
org.apache.archiva.common.utils.FileUtils.writeStringToFile(checksumFileSha1, Charset.defaultCharset(), "3290853214d3687134");

File checksumFileMd5 = new File( repoRootFirst, resourceName + ".md5" );
checksumFileMd5.getParentFile().mkdirs();
FileUtils.writeStringToFile( checksumFileMd5, "98745897234eda12836423", Charset.defaultCharset() );
Path checksumFileMd5 = repoRootFirst.resolve( resourceName + ".md5" );
Files.createDirectories(checksumFileMd5.getParent());
org.apache.archiva.common.utils.FileUtils.writeStringToFile(checksumFileMd5, Charset.defaultCharset(), "98745897234eda12836423");

// request the sha1 checksum of the metadata
request = new GetMethodWebRequest( "http://machine.com/repository/" + REPO_GROUP_WITH_VALID_REPOS + "/dummy/"
@ -309,15 +311,15 @@ public void testBrowseWithTwoArtifactsWithSameGroupIdInRepos()
{
String resourceName = "dummy/dummy-artifact/1.0/dummy-artifact-1.0.txt";

File dummyInternalResourceFile = new File( repoRootFirst, resourceName );
dummyInternalResourceFile.getParentFile().mkdirs();
FileUtils.writeStringToFile( dummyInternalResourceFile, "first", Charset.defaultCharset() );
Path dummyInternalResourceFile = repoRootFirst.resolve( resourceName );
Files.createDirectories(dummyInternalResourceFile.getParent());
org.apache.archiva.common.utils.FileUtils.writeStringToFile(dummyInternalResourceFile, Charset.defaultCharset(), "first");

resourceName = "dummy/dummy-artifact/2.0/dummy-artifact-2.0.txt";

File dummyReleasesResourceFile = new File( repoRootLast, resourceName );
dummyReleasesResourceFile.getParentFile().mkdirs();
FileUtils.writeStringToFile( dummyReleasesResourceFile, "last", Charset.defaultCharset() );
Path dummyReleasesResourceFile = repoRootLast.resolve( resourceName );
Files.createDirectories(dummyReleasesResourceFile.getParent());
org.apache.archiva.common.utils.FileUtils.writeStringToFile(dummyReleasesResourceFile, Charset.defaultCharset(), "last");

WebRequest request = new GetMethodWebRequest(
"http://machine.com/repository/" + REPO_GROUP_WITH_VALID_REPOS + "/dummy/dummy-artifact/" );

@ -45,6 +45,7 @@
import org.easymock.IMocksControl;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.context.ApplicationContext;
@ -60,15 +61,16 @@
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.File;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.eq;
import org.junit.Rule;

/**
* RepositoryServletSecurityTest Test the flow of the authentication and authorization checks. This does not necessarily
@ -112,11 +114,11 @@ public void setUp()
super.setUp();

String appserverBase =
System.getProperty( "appserver.base", new File( "target/appserver-base" ).getAbsolutePath() );
System.getProperty( "appserver.base", Paths.get( "target/appserver-base" ).toAbsolutePath().toString() );

File testConf = new File( "src/test/resources/repository-archiva.xml" );
File testConfDest = new File( appserverBase, "conf/archiva.xml" );
FileUtils.copyFile( testConf, testConfDest );
Path testConf = Paths.get( "src/test/resources/repository-archiva.xml" );
Path testConfDest = Paths.get(appserverBase, "conf/archiva.xml" );
FileUtils.copyFile( testConf.toFile(), testConfDest.toFile() );


@ -168,12 +170,12 @@ public ServletContext getServletContext()
servlet.init( mockServletConfig );
}

protected ManagedRepositoryConfiguration createManagedRepository( String id, String name, File location )
protected ManagedRepositoryConfiguration createManagedRepository( String id, String name, Path location )
{
ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();
repo.setId( id );
repo.setName( name );
repo.setLocation( location.getAbsolutePath() );
repo.setLocation( location.toAbsolutePath().toString() );
return repo;
}

@ -397,7 +399,7 @@ public void testPutWithValidUserWithNoWriteAccess()
public void testPutWithValidUserWithWriteAccess()
throws Exception
{
assertTrue( repoRootInternal.getRoot().exists() );
assertTrue( Files.exists(repoRootInternal.getRoot()) );

MockHttpServletRequest mockHttpServletRequest = new MockHttpServletRequest();
String putUrl = "http://machine.com/repository/internal/path/to/artifact.jar";
@ -473,10 +475,10 @@ public void testGetWithInvalidUserAndGuestHasReadAccess()
String commonsLangJar = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar";
String expectedArtifactContents = "dummy-commons-lang-artifact";

File artifactFile = new File( repoRootInternal.getRoot(), commonsLangJar );
artifactFile.getParentFile().mkdirs();
Path artifactFile = repoRootInternal.getRoot().resolve( commonsLangJar );
Files.createDirectories(artifactFile.getParent());

FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( artifactFile, Charset.defaultCharset() , expectedArtifactContents);

servlet.setDavSessionProvider( davSessionProvider );

@ -545,10 +547,10 @@ public void testGetWithInvalidUserAndGuestHasNoReadAccess()
String commonsLangJar = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar";
String expectedArtifactContents = "dummy-commons-lang-artifact";

File artifactFile = new File( repoRootInternal.getRoot(), commonsLangJar );
artifactFile.getParentFile().mkdirs();
Path artifactFile = repoRootInternal.getRoot().resolve( commonsLangJar );
Files.createDirectories(artifactFile.getParent());

FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( artifactFile, Charset.defaultCharset() , expectedArtifactContents);

servlet.setDavSessionProvider( davSessionProvider );

@ -591,10 +593,10 @@ public void testGetWithAValidUserWithReadAccess()
String commonsLangJar = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar";
String expectedArtifactContents = "dummy-commons-lang-artifact";

File artifactFile = new File( repoRootInternal.getRoot(), commonsLangJar );
artifactFile.getParentFile().mkdirs();
Path artifactFile = repoRootInternal.getRoot().resolve( commonsLangJar );
Files.createDirectories(artifactFile.getParent());

FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( artifactFile, Charset.defaultCharset() , expectedArtifactContents);

servlet.setDavSessionProvider( davSessionProvider );

@ -657,10 +659,10 @@ public void testGetWithAValidUserWithNoReadAccess()
String commonsLangJar = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar";
String expectedArtifactContents = "dummy-commons-lang-artifact";

File artifactFile = new File( repoRootInternal.getRoot(), commonsLangJar );
artifactFile.getParentFile().mkdirs();
Path artifactFile = repoRootInternal.getRoot().resolve( commonsLangJar );
Files.createDirectories(artifactFile.getParent());

FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, Charset.defaultCharset() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( artifactFile, Charset.defaultCharset() , expectedArtifactContents);

servlet.setDavSessionProvider( davSessionProvider );


@ -28,7 +28,10 @@
import org.junit.Before;
import org.junit.Test;

import java.io.File;

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

import static org.assertj.core.api.Assertions.assertThat;

@ -94,12 +97,12 @@ public void testGetRepositoryAfterAdd()
ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();
repo.setId( NEW_REPOSITORY_ID );
repo.setName( NEW_REPOSITORY_NAME );
File repoRoot = new File( "target/test-repository-root" );
if ( !repoRoot.exists() )
Path repoRoot = Paths.get( "target/test-repository-root" );
if ( !Files.exists(repoRoot) )
{
repoRoot.mkdirs();
Files.createDirectories( repoRoot );
}
repo.setLocation( repoRoot.getAbsolutePath() );
repo.setLocation( repoRoot.toAbsolutePath().toString() );
c.addManagedRepository( repo );
saveConfiguration( archivaConfiguration );

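In the last hunk the exists() guard is kept around the new directory creation, although Files.createDirectories() already succeeds silently when the directory is present; only an existing non-directory at that name makes it throw. A tiny sketch of that behaviour, using an illustrative path:

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class CreateDirectoriesSketch
{
    public static void main( String[] args ) throws Exception
    {
        Path repoRoot = Paths.get( "target/example-repository-root" ); // illustrative location

        // Safe to call unconditionally: creates missing parents, does nothing if already a directory.
        Files.createDirectories( repoRoot );
        Files.createDirectories( repoRoot ); // second call is a no-op, no exception

        System.out.println( Files.isDirectory( repoRoot ) + " " + repoRoot.toAbsolutePath() );
    }
}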
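One more idiom from the repository-group test above: commons-io FileUtils.deleteDirectory( File ) is swapped for the project's Path-based deleteDirectory helper. A standard-library way to remove a directory tree recursively, not necessarily how the Archiva helper is implemented, is a FileVisitor walk; a minimal sketch with an illustrative directory name:

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;

public class DeleteTreeSketch
{
    public static void main( String[] args ) throws IOException
    {
        Path repoRootInvalid = Paths.get( "target/example-invalid-repo" ); // illustrative location
        Files.createDirectories( repoRootInvalid.resolve( "some/nested/dir" ) );

        // Depth-first walk: delete files on the way down, directories on the way back up.
        Files.walkFileTree( repoRootInvalid, new SimpleFileVisitor<Path>()
        {
            @Override
            public FileVisitResult visitFile( Path file, BasicFileAttributes attrs ) throws IOException
            {
                Files.delete( file );
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory( Path dir, IOException exc ) throws IOException
            {
                Files.delete( dir );
                return FileVisitResult.CONTINUE;
            }
        } );

        System.out.println( "deleted: " + !Files.exists( repoRootInvalid ) );
    }
}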