Refactoring to StorageAsset access

This commit is contained in:
Martin Stockhammer 2019-07-28 15:24:13 +02:00
parent 8e4acdc82a
commit bb3b074aaf
117 changed files with 1685 additions and 1211 deletions

View File

@ -33,6 +33,10 @@
</properties>
<dependencies>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-policies</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva.redback.components.registry</groupId>
<artifactId>spring-registry-api</artifactId>

View File

@ -31,6 +31,8 @@ import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.repository.ContentNotFoundException;
import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -211,22 +213,22 @@ public abstract class AbstractRepositoryPurge
log.error( "Error during metadata retrieval {}: {}", metaBaseId, e.getMessage( ) );
}
}
Path artifactFile = repository.toFile( reference );
StorageAsset artifactFile = repository.toFile( reference );
for ( RepositoryListener listener : listeners )
{
listener.deleteArtifact( metadataRepository, repository.getId( ), reference.getGroupId( ),
reference.getArtifactId( ), reference.getVersion( ),
artifactFile.getFileName( ).toString( ) );
artifactFile.getName( ));
}
try
{
Files.delete( artifactFile );
log.debug( "File deleted: {}", artifactFile.toAbsolutePath( ) );
artifactFile.getStorage().removeAsset(artifactFile);
log.debug( "File deleted: {}", artifactFile );
}
catch ( IOException e )
{
log.error( "Could not delete file {}: {}", artifactFile.toAbsolutePath( ), e.getMessage( ), e );
log.error( "Could not delete file {}: {}", artifactFile.toString(), e.getMessage( ), e );
continue;
}
try
@ -364,11 +366,11 @@ public abstract class AbstractRepositoryPurge
}
}
private void deleteSilently( Path path )
private void deleteSilently( StorageAsset path )
{
try
{
Files.deleteIfExists( path );
path.getStorage().removeAsset(path);
triggerAuditEvent( repository.getRepository( ).getId( ), path.toString( ), AuditEvent.PURGE_FILE );
}
catch ( IOException e )
@ -387,22 +389,23 @@ public abstract class AbstractRepositoryPurge
*
* @param artifactFile the file to base off of.
*/
private void purgeSupportFiles( Path artifactFile )
private void purgeSupportFiles( StorageAsset artifactFile )
{
Path parentDir = artifactFile.getParent( );
StorageAsset parentDir = artifactFile.getParent( );
if ( !Files.exists( parentDir ) )
if ( !parentDir.exists() )
{
return;
}
final String artifactName = artifactFile.getFileName( ).toString( );
final String artifactName = artifactFile.getName( );
try
{
Files.find( parentDir, 3,
( path, basicFileAttributes ) -> path.getFileName( ).toString( ).startsWith( artifactName )
&& Files.isRegularFile( path ) ).forEach( this::deleteSilently );
StorageUtil.recurse(parentDir, a -> {
if (!artifactFile.isContainer() && artifactFile.getName().startsWith(artifactName)) deleteSilently(a);
}, true, 3 );
}
catch ( IOException e )
{

View File

@ -28,6 +28,7 @@ import org.apache.archiva.repository.ContentNotFoundException;
import org.apache.archiva.repository.LayoutException;
import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.time.DateUtils;
import java.io.IOException;
@ -115,12 +116,12 @@ public class DaysOldRepositoryPurge
artifactFile.toAbsolutePath( ).toString() );
newArtifactReference.setVersion( version );
Path newArtifactFile = repository.toFile( newArtifactReference );
StorageAsset newArtifactFile = repository.toFile( newArtifactReference );
// Is this a generic snapshot "1.0-SNAPSHOT" ?
if ( VersionUtil.isGenericSnapshot( newArtifactReference.getVersion( ) ) )
{
if ( Files.getLastModifiedTime( newArtifactFile ).toMillis() < olderThanThisDate.getTimeInMillis( ) )
if ( newArtifactFile.getModificationTime().toEpochMilli() < olderThanThisDate.getTimeInMillis( ) )
{
artifactsToDelete.addAll( repository.getRelatedArtifacts( newArtifactReference ) );
}
@ -138,7 +139,7 @@ public class DaysOldRepositoryPurge
}
purge( artifactsToDelete );
}
catch ( ContentNotFoundException | IOException e )
catch ( ContentNotFoundException e )
{
throw new RepositoryPurgeException( e.getMessage( ), e );
}

View File

@ -38,6 +38,10 @@
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-checksum</artifactId>
@ -81,6 +85,11 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-fs</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-test-utils</artifactId>

View File

@ -20,16 +20,13 @@ package org.apache.archiva.policies;
*/
import org.apache.archiva.common.utils.VersionUtil;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Properties;

View File

@ -20,7 +20,7 @@ package org.apache.archiva.policies;
*/
import org.apache.archiva.policies.urlcache.UrlFailureCache;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -22,7 +22,7 @@ package org.apache.archiva.policies;
import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile;
import org.apache.archiva.checksum.UpdateStatus;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -19,7 +19,7 @@ package org.apache.archiva.policies;
* under the License.
*/
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import java.util.Map;
import java.util.Properties;

View File

@ -19,7 +19,7 @@ package org.apache.archiva.policies;
* under the License.
*/
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import java.util.Properties;

View File

@ -19,7 +19,7 @@ package org.apache.archiva.policies;
* under the License.
*/
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -19,11 +19,10 @@ package org.apache.archiva.policies;
* under the License.
*/
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils;
import org.springframework.stereotype.Service;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

View File

@ -20,7 +20,10 @@ package org.apache.archiva.policies;
*/
import junit.framework.TestCase;
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.policies.urlcache.UrlFailureCache;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -28,6 +31,7 @@ import org.springframework.test.context.ContextConfiguration;
import javax.inject.Inject;
import javax.inject.Named;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Properties;
@ -47,6 +51,8 @@ public class CachedFailuresPolicyTest
@Inject
private UrlFailureCache urlFailureCache;
private FilesystemStorage filesystemStorage;
@Inject
@Named( value = "preDownloadPolicy#cache-failures" )
DownloadPolicy downloadPolicy;
@ -57,9 +63,11 @@ public class CachedFailuresPolicyTest
return downloadPolicy;
}
private Path getFile()
{
return Paths.get( "target/cache-failures/" + getName() + ".txt" );
private StorageAsset getFile() throws IOException {
if (filesystemStorage==null) {
filesystemStorage = new FilesystemStorage(Paths.get("target/cache-failures"), new DefaultFileLockManager());
}
return filesystemStorage.getAsset( getName() + ".txt" );
}
private Properties createRequest()
@ -74,7 +82,7 @@ public class CachedFailuresPolicyTest
throws Exception
{
DownloadPolicy policy = lookupPolicy();
Path localFile = getFile();
StorageAsset localFile = getFile();
Properties request = createRequest();
request.setProperty( "url", "http://a.bad.hostname.maven.org/path/to/resource.txt" );
@ -88,7 +96,7 @@ public class CachedFailuresPolicyTest
{
DownloadPolicy policy = lookupPolicy();
Path localFile = getFile();
StorageAsset localFile = getFile();
Properties request = createRequest();
// make unique name
String url = "http://a.bad.hostname.maven.org/path/to/resource"+ System.currentTimeMillis() +".txt";

View File

@ -19,6 +19,9 @@ package org.apache.archiva.policies;
* under the License.
*/
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.commons.io.FileUtils;
import org.junit.Rule;
@ -31,6 +34,7 @@ import javax.inject.Inject;
import javax.inject.Named;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
@ -51,6 +55,8 @@ public class ChecksumPolicyTest
private static final String BAD = "bad";
private static FilesystemStorage filesystemStorage;
@Inject
@Named( value = "postDownloadPolicy#checksum" )
PostDownloadPolicy downloadPolicy;
@ -195,7 +201,7 @@ public class ChecksumPolicyTest
throws Exception
{
PostDownloadPolicy policy = lookupPolicy();
Path localFile = createTestableFiles( null, null );
StorageAsset localFile = createTestableFiles( null, null );
Properties request = createRequest();
policy.applyPolicy( ChecksumPolicy.IGNORE, request, localFile );
@ -205,7 +211,7 @@ public class ChecksumPolicyTest
throws Exception
{
PostDownloadPolicy policy = lookupPolicy();
Path localFile = createTestableFiles( md5State, sha1State );
StorageAsset localFile = createTestableFiles( md5State, sha1State );
Properties request = createRequest();
boolean actualResult;
@ -220,9 +226,9 @@ public class ChecksumPolicyTest
actualResult = false;
String msg = createMessage( ChecksumPolicy.FAIL, md5State, sha1State );
assertFalse( msg + " local file should not exist:", Files.exists(localFile) );
Path md5File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".sha1" );
Path sha1File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".md5" );
assertFalse( msg + " local file should not exist:", localFile.exists() );
Path md5File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".sha1" );
Path sha1File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".md5" );
assertFalse( msg + " local md5 file should not exist:", Files.exists(md5File) );
assertFalse( msg + " local sha1 file should not exist:", Files.exists(sha1File) );
}
@ -234,7 +240,7 @@ public class ChecksumPolicyTest
throws Exception
{
PostDownloadPolicy policy = lookupPolicy();
Path localFile = createTestableFiles( md5State, sha1State );
StorageAsset localFile = createTestableFiles( md5State, sha1State );
Properties request = createRequest();
boolean actualResult;
@ -252,8 +258,8 @@ public class ChecksumPolicyTest
assertEquals( createMessage( ChecksumPolicy.FIX, md5State, sha1State ), expectedResult, actualResult );
// End result should be legitimate SHA1 and MD5 files.
Path md5File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".md5" );
Path sha1File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".sha1" );
Path md5File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".md5" );
Path sha1File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".sha1" );
assertTrue( "ChecksumPolicy.apply(FIX) md5 should exist.", Files.exists(md5File) && Files.isRegularFile(md5File) );
assertTrue( "ChecksumPolicy.apply(FIX) sha1 should exist.", Files.exists(sha1File) && Files.isRegularFile(sha1File) );
@ -336,37 +342,41 @@ public class ChecksumPolicyTest
return request;
}
private Path createTestableFiles( String md5State, String sha1State )
private StorageAsset createTestableFiles(String md5State, String sha1State )
throws Exception
{
Path sourceDir = getTestFile( "src/test/resources/checksums/" );
Path destDir = getTestFile( "target/checksum-tests/" + name.getMethodName() + "/" );
FilesystemStorage fs = new FilesystemStorage(Paths.get("target/checksum-tests"), new DefaultFileLockManager());
StorageAsset sourceDir = getTestFile( "src/test/resources/checksums/" );
StorageAsset destDir = getTestFile( "target/checksum-tests/" + name.getMethodName() + "/" );
FileUtils.copyFileToDirectory( sourceDir.resolve("artifact.jar" ).toFile(), destDir.toFile() );
FileUtils.copyFileToDirectory( sourceDir.getFilePath().resolve("artifact.jar" ).toFile(), destDir.getFilePath().toFile() );
if ( md5State != null )
{
Path md5File = sourceDir.resolve("artifact.jar.md5-" + md5State );
Path md5File = sourceDir.getFilePath().resolve("artifact.jar.md5-" + md5State );
assertTrue( "Testable file exists: " + md5File.getFileName() + ":", Files.exists(md5File) && Files.isRegularFile(md5File) );
Path destFile = destDir.resolve("artifact.jar.md5" );
Path destFile = destDir.getFilePath().resolve("artifact.jar.md5" );
FileUtils.copyFile( md5File.toFile(), destFile.toFile() );
}
if ( sha1State != null )
{
Path sha1File = sourceDir.resolve("artifact.jar.sha1-" + sha1State );
Path sha1File = sourceDir.getFilePath().resolve("artifact.jar.sha1-" + sha1State );
assertTrue( "Testable file exists: " + sha1File.getFileName() + ":", Files.exists(sha1File) && Files.isRegularFile(sha1File) );
Path destFile = destDir.resolve("artifact.jar.sha1" );
Path destFile = destDir.getFilePath().resolve("artifact.jar.sha1" );
FileUtils.copyFile( sha1File.toFile(), destFile.toFile() );
}
Path localFile = destDir.resolve("artifact.jar" );
return localFile;
StorageAsset localAsset = fs.getAsset("artifact.jar");
return localAsset;
}
public static Path getTestFile( String path )
{
return Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), path );
public static StorageAsset getTestFile( String path ) throws IOException {
if (filesystemStorage==null) {
filesystemStorage = new FilesystemStorage(Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir()), new DefaultFileLockManager());
}
return filesystemStorage.getAsset( path );
}
}

View File

@ -20,6 +20,7 @@ package org.apache.archiva.policies;
*/
import junit.framework.TestCase;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.junit.Before;
import org.junit.Test;
@ -338,17 +339,17 @@ public class ReleasePolicyTest
request.setProperty( "version", "2.0" );
}
Path targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" );
Path localFile = targetDir.resolve( path );
StorageAsset targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" );
StorageAsset localFile = targetDir.resolve( path );
Files.deleteIfExists( localFile );
Files.deleteIfExists( localFile.getFilePath() );
if ( createLocalFile )
{
Files.createDirectories( localFile.getParent());
org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile, FILE_ENCODING, "random-junk" );
Files.setLastModifiedTime( localFile,
FileTime.fromMillis(Files.getLastModifiedTime(localFile).toMillis() - generatedLocalFileUpdateDelta));
Files.createDirectories( localFile.getParent().getFilePath());
org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile.getFilePath(), FILE_ENCODING, "random-junk" );
Files.setLastModifiedTime( localFile.getFilePath(),
FileTime.fromMillis(Files.getLastModifiedTime(localFile.getFilePath()).toMillis() - generatedLocalFileUpdateDelta));
}
policy.applyPolicy( setting, request, localFile );

View File

@ -20,6 +20,8 @@ package org.apache.archiva.policies;
*/
import junit.framework.TestCase;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.junit.Before;
import org.junit.Test;
@ -80,6 +82,8 @@ public class SnapshotsPolicyTest
@Inject @Named(value="preDownloadPolicy#snapshots")
PreDownloadPolicy policy;
private FilesystemStorage filesystemStorage;
private PreDownloadPolicy lookupPolicy()
throws Exception
{
@ -337,17 +341,17 @@ public class SnapshotsPolicyTest
request.setProperty( "version", "2.0" );
}
Path targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" );
Path localFile = targetDir.resolve( path );
StorageAsset targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" );
StorageAsset localFile = targetDir.resolve( path );
Files.deleteIfExists( localFile );
Files.deleteIfExists( localFile.getFilePath() );
if ( createLocalFile )
{
Files.createDirectories( localFile.getParent());
org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile, FILE_ENCODING, "random-junk" );
Files.setLastModifiedTime( localFile,
FileTime.fromMillis( Files.getLastModifiedTime( localFile ).toMillis() - generatedLocalFileUpdateDelta ));
Files.createDirectories( localFile.getParent().getFilePath() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile.getFilePath(), FILE_ENCODING, "random-junk" );
Files.setLastModifiedTime( localFile.getFilePath(),
FileTime.fromMillis( Files.getLastModifiedTime( localFile.getFilePath() ).toMillis() - generatedLocalFileUpdateDelta ));
}
policy.applyPolicy( setting, request, localFile );

View File

@ -20,9 +20,7 @@ package org.apache.archiva.proxy.model;
*/
import org.apache.archiva.repository.content.StorageAsset;
import java.nio.file.Path;
import org.apache.archiva.repository.storage.StorageAsset;
/**
* A result from a proxy fetch operation.

View File

@ -23,7 +23,7 @@ import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.policies.ProxyDownloadException;
import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import java.util.List;
import java.util.Map;

View File

@ -22,10 +22,7 @@ package org.apache.archiva.proxy;
import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksumUtil;
import org.apache.archiva.proxy.model.ProxyConnectorRuleType;
import org.apache.archiva.common.filelock.FileLockException;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.filelock.FileLockTimeoutException;
import org.apache.archiva.common.filelock.Lock;
import org.apache.archiva.configuration.*;
import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.model.Keys;
@ -39,9 +36,9 @@ import org.apache.archiva.redback.components.registry.Registry;
import org.apache.archiva.redback.components.registry.RegistryListener;
import org.apache.archiva.redback.components.taskqueue.TaskQueueException;
import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.content.StorageUtil;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.archiva.repository.metadata.MetadataTools;
import org.apache.archiva.repository.metadata.RepositoryMetadataException;
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
@ -61,7 +58,6 @@ import javax.inject.Named;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

View File

@ -21,8 +21,8 @@ package org.apache.archiva.admin.model.group;
import org.apache.archiva.admin.model.AuditInformation;
import org.apache.archiva.admin.model.RepositoryAdminException;
import org.apache.archiva.admin.model.beans.RepositoryGroup;
import org.apache.archiva.repository.storage.StorageAsset;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
@ -75,5 +75,5 @@ public interface RepositoryGroupAdmin
Map<String, List<String>> getRepositoryToGroupMap()
throws RepositoryAdminException;
Path getMergedIndexDirectory(String repositoryGroupId );
StorageAsset getMergedIndexDirectory(String repositoryGroupId );
}

View File

@ -29,11 +29,11 @@ import org.apache.archiva.configuration.Configuration;
import org.apache.archiva.configuration.RepositoryGroupConfiguration;
import org.apache.archiva.metadata.model.facets.AuditEvent;
import org.apache.archiva.indexer.merger.MergedRemoteIndexesScheduler;
import org.apache.archiva.repository.EditableRepository;
import org.apache.archiva.repository.EditableRepositoryGroup;
import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -47,9 +47,6 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -112,9 +109,14 @@ public class DefaultRepositoryGroupAdmin
@Override
public Path getMergedIndexDirectory( String repositoryGroupId )
public StorageAsset getMergedIndexDirectory(String repositoryGroupId )
{
return groupsDirectory.resolve( repositoryGroupId );
org.apache.archiva.repository.RepositoryGroup group = repositoryRegistry.getRepositoryGroup(repositoryGroupId);
if (group!=null) {
return group.getFeature(IndexCreationFeature.class).get().getLocalIndexPath();
} else {
return null;
}
}
@Override

View File

@ -19,7 +19,7 @@ package org.apache.archiva.admin.mock;
* under the License.
*/
import org.apache.archiva.admin.model.RepositoryAdminException;
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.configuration.ArchivaConfiguration;
@ -40,10 +40,12 @@ import org.apache.archiva.repository.RemoteRepository;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
import org.apache.archiva.repository.content.FilesystemAsset;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.FilesystemAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.index.ArtifactContext;
import org.apache.maven.index.ArtifactContextProducer;
@ -143,7 +145,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
private Path getIndexPath( ArchivaIndexingContext ctx )
{
return PathUtil.getPathFromUri( ctx.getPath( ) );
return ctx.getPath( ).getFilePath();
}
@FunctionalInterface
@ -378,9 +380,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
@Override
public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
{
final URI ctxUri = context.getPath();
final StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try {
indexer.addArtifactsToIndex(artifacts, indexingContext);
} catch (IOException e) {
@ -394,9 +396,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
@Override
public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
{
final URI ctxUri = context.getPath();
final StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try {
indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
} catch (IOException e) {
@ -455,7 +457,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
log.warn("Index close failed");
}
try {
FileUtils.deleteDirectory(Paths.get(context.getPath()));
StorageUtil.deleteRecursively(context.getPath());
} catch (IOException e) {
throw new IndexUpdateFailedException("Could not delete index files");
}
@ -530,6 +532,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
URI indexDir = icf.getIndexPath();
String indexPath = indexDir.getPath();
Path indexDirectory = null;
FilesystemStorage filesystemStorage = (FilesystemStorage) repo.getAsset("").getStorage();
if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
{
@ -538,6 +541,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
if ( indexDirectory.isAbsolute( ) )
{
indexPath = indexDirectory.getFileName().toString();
filesystemStorage = new FilesystemStorage(indexDirectory.getParent(), new DefaultFileLockManager());
}
else
{
@ -554,7 +558,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
{
Files.createDirectories( indexDirectory );
}
return new FilesystemAsset( indexPath, indexDirectory);
return new FilesystemAsset( filesystemStorage, indexPath, indexDirectory);
}
private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException

View File

@ -19,8 +19,12 @@ package org.apache.archiva.admin.mock;
* under the License.
*/
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.storage.FilesystemAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.maven.index.context.IndexingContext;
import java.io.IOException;
@ -38,10 +42,16 @@ public class MavenIndexContextMock implements ArchivaIndexingContext {
private IndexingContext delegate;
private Repository repository;
private FilesystemStorage filesystemStorage;
MavenIndexContextMock(Repository repository, IndexingContext delegate) {
this.delegate = delegate;
this.repository = repository;
try {
this.filesystemStorage = new FilesystemStorage(delegate.getIndexDirectoryFile().toPath(), new DefaultFileLockManager());
} catch (IOException e) {
e.printStackTrace();
}
}
@ -56,8 +66,9 @@ public class MavenIndexContextMock implements ArchivaIndexingContext {
}
@Override
public URI getPath() {
return delegate.getIndexDirectoryFile().toURI();
public StorageAsset getPath() {
return
new FilesystemAsset(filesystemStorage, "", delegate.getIndexDirectoryFile().toPath());
}
@Override

View File

@ -21,10 +21,9 @@ package org.apache.archiva.admin.mock;
import org.apache.archiva.indexer.merger.MergedRemoteIndexesScheduler;
import org.apache.archiva.repository.RepositoryGroup;
import org.apache.archiva.repository.storage.StorageAsset;
import org.springframework.stereotype.Service;
import java.nio.file.Path;
/**
* @author Olivier Lamy
*/
@ -34,7 +33,7 @@ public class MockMergedRemoteIndexesScheduler
{
@Override
public void schedule( RepositoryGroup repositoryGroup, Path directory )
public void schedule(RepositoryGroup repositoryGroup, StorageAsset directory )
{
// no op
}

View File

@ -38,6 +38,10 @@
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-api</artifactId>
</dependency>
<dependency>
<groupId>commons-lang</groupId>

View File

@ -21,10 +21,8 @@ package org.apache.archiva.indexer;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.content.StorageAsset;
import java.net.URI;
import java.nio.file.Path;
import java.util.Collection;
import java.util.List;

View File

@ -20,6 +20,7 @@ package org.apache.archiva.indexer;
*/
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.storage.StorageAsset;
import java.io.IOException;
import java.net.URI;
@ -48,7 +49,7 @@ public interface ArchivaIndexingContext {
* The path where the index is stored.
* @return
*/
URI getPath();
StorageAsset getPath();
/**
* Returns true, if the index has no entries or is not initialized.

View File

@ -18,6 +18,8 @@ package org.apache.archiva.indexer.merger;
* under the License.
*/
import org.apache.archiva.repository.storage.StorageAsset;
import java.nio.file.Path;
import java.util.Collection;
@ -47,7 +49,7 @@ public class IndexMergerRequest
private int mergedIndexTtl;
private Path mergedIndexDirectory;
private StorageAsset mergedIndexDirectory;
private boolean temporary;
@ -121,17 +123,17 @@ public class IndexMergerRequest
this.mergedIndexTtl = mergedIndexTtl;
}
public Path getMergedIndexDirectory()
public StorageAsset getMergedIndexDirectory()
{
return mergedIndexDirectory;
}
public void setMergedIndexDirectory( Path mergedIndexDirectory )
public void setMergedIndexDirectory( StorageAsset mergedIndexDirectory )
{
this.mergedIndexDirectory = mergedIndexDirectory;
}
public IndexMergerRequest mergedIndexDirectory( Path mergedIndexDirectory )
public IndexMergerRequest mergedIndexDirectory( StorageAsset mergedIndexDirectory )
{
this.mergedIndexDirectory = mergedIndexDirectory;
return this;

View File

@ -21,8 +21,7 @@ package org.apache.archiva.indexer.merger;
import org.apache.archiva.repository.RepositoryGroup;
import java.nio.file.Path;
import org.apache.archiva.repository.storage.StorageAsset;
/**
* @author Olivier Lamy
@ -35,8 +34,9 @@ public interface MergedRemoteIndexesScheduler
* will check if this repository group need to a schedule a cron to download/merge
* remote indexes
* @param repositoryGroup
* @param directory
*/
void schedule(RepositoryGroup repositoryGroup, Path directory );
void schedule(RepositoryGroup repositoryGroup, StorageAsset directory );
void unschedule( RepositoryGroup repositoryGroup );

View File

@ -18,6 +18,8 @@ package org.apache.archiva.indexer.merger;
* under the License.
*/
import org.apache.archiva.repository.storage.StorageAsset;
import java.io.Serializable;
import java.nio.file.Path;
import java.util.Date;
@ -30,7 +32,7 @@ public class TemporaryGroupIndex
{
private long creationTime = new Date().getTime();
private Path directory;
private StorageAsset directory;
private String indexId;
@ -38,7 +40,7 @@ public class TemporaryGroupIndex
private int mergedIndexTtl;
public TemporaryGroupIndex(Path directory, String indexId, String groupId, int mergedIndexTtl)
public TemporaryGroupIndex(StorageAsset directory, String indexId, String groupId, int mergedIndexTtl)
{
this.directory = directory;
this.indexId = indexId;
@ -57,12 +59,12 @@ public class TemporaryGroupIndex
return this;
}
public Path getDirectory()
public StorageAsset getDirectory()
{
return directory;
}
public TemporaryGroupIndex setDirectory( Path directory )
public TemporaryGroupIndex setDirectory( StorageAsset directory )
{
this.directory = directory;
return this;

View File

@ -20,8 +20,6 @@ package org.apache.archiva.repository;
*/
import org.apache.archiva.repository.content.RepositoryStorage;
import java.util.Set;
/**

View File

@ -23,7 +23,7 @@ import org.apache.archiva.model.ArchivaArtifact;
import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.model.ProjectReference;
import org.apache.archiva.model.VersionedReference;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import java.util.Set;

View File

@ -20,7 +20,7 @@ package org.apache.archiva.repository;
*/
import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.features.RepositoryFeature;
import java.net.URI;

View File

@ -19,8 +19,7 @@ package org.apache.archiva.repository;
* under the License.
*/
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.RepositoryStorage;
import java.util.List;

View File

@ -22,12 +22,11 @@ package org.apache.archiva.repository.features;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryEventListener;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.Path;
import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH;
import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_PACKED_INDEX_PATH;

View File

@ -41,6 +41,10 @@
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-model</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-fs</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-checksum</artifactId>

View File

@ -28,6 +28,8 @@ import org.apache.archiva.indexer.merger.IndexMergerRequest;
import org.apache.archiva.indexer.merger.TemporaryGroupIndex;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.commons.lang.time.StopWatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -87,7 +89,7 @@ public class DefaultIndexMerger
stopWatch.reset();
stopWatch.start();
Path mergedIndexDirectory = indexMergerRequest.getMergedIndexDirectory();
StorageAsset mergedIndexDirectory = indexMergerRequest.getMergedIndexDirectory();
Repository destinationRepository = repositoryRegistry.getRepository(indexMergerRequest.getGroupId());
ArchivaIndexManager idxManager = repositoryRegistry.getIndexManager(destinationRepository.getType());
@ -131,10 +133,10 @@ public class DefaultIndexMerger
ctx.close(true);
temporaryGroupIndexes.remove( temporaryGroupIndex );
temporaryContextes.remove( ctx );
Path directory = temporaryGroupIndex.getDirectory();
if ( directory != null && Files.exists(directory) )
StorageAsset directory = temporaryGroupIndex.getDirectory();
if ( directory != null && directory.exists() )
{
FileUtils.deleteDirectory( directory );
StorageUtil.deleteRecursively( directory );
}
}
}

View File

@ -22,6 +22,7 @@ package org.apache.archiva.indexer.merger;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.RepositoryGroup;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -59,7 +60,7 @@ public class DefaultMergedRemoteIndexesScheduler
private Map<String, ScheduledFuture> scheduledFutureMap = new ConcurrentHashMap<>();
@Override
public void schedule(RepositoryGroup repositoryGroup, Path directory )
public void schedule(RepositoryGroup repositoryGroup, StorageAsset directory )
{
if ( StringUtils.isEmpty( repositoryGroup.getSchedulingDefinition() ) )
{

View File

@ -20,17 +20,12 @@ package org.apache.archiva.repository;
*/
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.RepositoryStorage;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
import java.util.function.Consumer;
/**
* Simple implementation of a managed repository.

View File

@ -20,7 +20,7 @@ package org.apache.archiva.repository;
*/
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.storage.RepositoryStorage;
import java.nio.file.Path;
import java.time.Duration;

View File

@ -23,10 +23,9 @@ import com.cronutils.model.CronType;
import com.cronutils.model.definition.CronDefinition;
import com.cronutils.model.definition.CronDefinitionBuilder;
import com.cronutils.parser.CronParser;
import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.features.RepositoryFeature;
import org.apache.archiva.repository.features.StagingRepositoryFeature;
import org.apache.commons.lang.StringUtils;

View File

@ -19,18 +19,13 @@ package org.apache.archiva.repository;
* under the License.
*/
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.commons.collections4.map.ListOrderedMap;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Consumer;
/**
* Abstract repository group implementation.

View File

@ -21,9 +21,8 @@ package org.apache.archiva.repository;
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.features.ArtifactCleanupFeature;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.StagingRepositoryFeature;
@ -31,10 +30,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.util.Locale;
import java.util.function.Consumer;
/**
*

View File

@ -21,8 +21,8 @@ package org.apache.archiva.repository;
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature;
import org.slf4j.Logger;

View File

@ -26,8 +26,6 @@ import org.apache.archiva.indexer.IndexCreationFailedException;
import org.apache.archiva.indexer.IndexManagerFactory;
import org.apache.archiva.indexer.IndexUpdateFailedException;
import org.apache.archiva.redback.components.registry.RegistryException;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.features.IndexCreationEvent;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.StagingRepositoryFeature;
@ -44,11 +42,9 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;
import java.util.stream.Stream;

View File

@ -23,6 +23,7 @@ import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RepositoryContentFactory;
import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.storage.StorageAsset;
import org.springframework.stereotype.Service;
import javax.inject.Inject;
@ -53,4 +54,19 @@ public class ArtifactUtil {
return Paths.get(repository.getLocation()).resolve(artifactPath);
}
/**
 * Resolves the storage asset that represents the given artifact inside the repository.
 * No check is performed whether the underlying storage entry actually exists.
 *
 * @param repository The repository, where the artifact is stored.
 * @param artifactReference The artifact reference.
 * @return The asset representation of the artifact.
 * @throws RepositoryException if the repository content could not be determined
 */
public StorageAsset getArtifactAsset(ManagedRepository repository, ArtifactReference artifactReference) throws RepositoryException {
    // Translate the artifact coordinates into a repository-relative path via the
    // layout-aware content instance, then look the asset up on the repository itself.
    ManagedRepositoryContent layout = repositoryContentFactory.getManagedRepositoryContent(repository);
    return repository.getAsset(layout.toPath(artifactReference));
}
}

View File

@ -1,192 +0,0 @@
package org.apache.archiva.repository.content;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.common.filelock.FileLockException;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.filelock.FileLockTimeoutException;
import org.apache.archiva.common.filelock.Lock;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.CopyOption;
import java.nio.file.Files;
import java.nio.file.Path;
/**
* @author Martin Stockhammer <martin_s@apache.org>
*/
/**
 * Helper operations for copying and moving {@link StorageAsset} content, including
 * transfers between assets that belong to different {@link RepositoryStorage}
 * implementations.
 *
 * @author Martin Stockhammer &lt;martin_s@apache.org&gt;
 */
public class StorageUtil
{
    // Buffer size used by the generic channel-to-channel copy loop below.
    private static final int DEFAULT_BUFFER_SIZE = 4096;

    /**
     * Copies the source asset to the target. The assets may be from different RepositoryStorage instances.
     *
     * @param source The source asset
     * @param target The target asset
     * @param locked If true, a readlock is set on the source and a write lock is set on the target.
     * @param copyOptions Copy options
     * @throws IOException if the copy fails, a lock cannot be obtained, or the lock times out
     */
    public static final void copyAsset( final StorageAsset source,
                                        final StorageAsset target,
                                        boolean locked,
                                        final CopyOption... copyOptions ) throws IOException
    {
        if (source.isFileBased() && target.isFileBased()) {
            // Short cut for FS operations
            final Path sourcePath = source.getFilePath();
            final Path targetPath = target.getFilePath( );
            if (locked) {
                // NOTE(review): assumes file-based assets are always backed by a
                // FilesystemStorage — the cast fails otherwise; confirm.
                final FileLockManager lmSource = ((FilesystemStorage)source.getStorage()).getFileLockManager();
                final FileLockManager lmTarget = ((FilesystemStorage)target.getStorage()).getFileLockManager();
                // Read lock on the source first, then write lock on the target; both are
                // released by try-with-resources in reverse order.
                try (Lock lockRead = lmSource.readFileLock( sourcePath ); Lock lockWrite = lmTarget.writeFileLock( targetPath ) )
                {
                    Files.copy( sourcePath, targetPath, copyOptions );
                }
                catch ( FileLockException e )
                {
                    // Surface locking failures as the declared IOException.
                    throw new IOException( e );
                }
                catch ( FileLockTimeoutException e )
                {
                    throw new IOException( e );
                }
            } else
            {
                Files.copy( sourcePath, targetPath, copyOptions );
            }
        } else {
            // Generic path: stream the content through the channels offered by the
            // two (possibly different) storage implementations.
            try {
                final RepositoryStorage sourceStorage = source.getStorage();
                final RepositoryStorage targetStorage = target.getStorage();
                sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked);
            } catch (IOException e) {
                throw e;
            } catch (Throwable e) {
                // wrapWriteFunction wraps failures in a RuntimeException; unwrap an
                // IOException cause so callers see the declared exception type.
                Throwable cause = e.getCause();
                if (cause instanceof IOException) {
                    throw (IOException)cause;
                } else
                {
                    throw new IOException( e );
                }
            }
        }
    }

    /**
     * Moves the source asset to the target, which may live in a different
     * RepositoryStorage. For the cross-storage case this is copy-then-delete,
     * i.e. not atomic.
     *
     * @param source the asset to move
     * @param target the destination asset
     * @param locked lock flag passed through to the channel transfer; note that the
     *               file-system shortcut below does not use it (Files.move is atomic)
     * @param copyOptions options forwarded to the underlying move/copy
     * @throws IOException if the transfer or the removal of the source fails
     */
    public static void moveAsset(StorageAsset source, StorageAsset target, boolean locked, CopyOption... copyOptions) throws IOException
    {
        if (source.isFileBased() && target.isFileBased()) {
            // Short cut for FS operations
            // Move is atomic operation
            Files.move( source.getFilePath(), target.getFilePath(), copyOptions );
        } else {
            try {
                final RepositoryStorage sourceStorage = source.getStorage();
                final RepositoryStorage targetStorage = target.getStorage();
                sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked);
                // Only remove the source after the data has been written to the target.
                sourceStorage.removeAsset( source );
            } catch (IOException e) {
                throw e;
            } catch (Throwable e) {
                // Same unwrapping as in copyAsset: recover an IOException cause if present.
                Throwable cause = e.getCause();
                if (cause instanceof IOException) {
                    throw (IOException)cause;
                } else
                {
                    throw new IOException( e );
                }
            }
        }
    }

    // Adapter used as the consumer callback above: pumps the read channel into the
    // target storage's write channel. Checked exceptions are wrapped in a
    // RuntimeException because the consumer signature cannot declare them.
    private static void wrapWriteFunction(ReadableByteChannel is, RepositoryStorage targetStorage, StorageAsset target, boolean locked) {
        try {
            targetStorage.writeDataToChannel( target, os -> copy(is, os), locked );
        } catch (Exception e) {
            throw new RuntimeException( e );
        }
    }

    // Channel copy dispatcher: prefers the FileChannel bulk-transfer overloads when
    // either side is a FileChannel, otherwise falls back to a buffered loop.
    private static void copy( final ReadableByteChannel is, final WritableByteChannel os ) {
        if (is instanceof FileChannel) {
            copy( (FileChannel) is, os );
        } else if (os instanceof FileChannel) {
            copy(is, (FileChannel)os);
        } else
        {
            try
            {
                // Generic loop: read into the buffer, flip, drain completely, clear, repeat
                // until the source channel reaches end-of-stream (read returns -1).
                ByteBuffer buffer = ByteBuffer.allocate( DEFAULT_BUFFER_SIZE );
                while ( is.read( buffer ) != -1 )
                {
                    buffer.flip( );
                    while ( buffer.hasRemaining( ) )
                    {
                        os.write( buffer );
                    }
                    buffer.clear( );
                }
            }
            catch ( IOException e )
            {
                // Wrapped unchecked; callers of copyAsset/moveAsset unwrap the cause.
                throw new RuntimeException( e );
            }
        }
    }

    // Source is a FileChannel: let the OS transfer the whole file to the sink.
    private static void copy( final FileChannel is, final WritableByteChannel os ) {
        try
        {
            is.transferTo( 0, is.size( ), os );
        }
        catch ( IOException e )
        {
            throw new RuntimeException( e );
        }
    }

    // Target is a FileChannel: transfer from the source until end-of-stream.
    private static void copy( final ReadableByteChannel is, final FileChannel os ) {
        try
        {
            os.transferFrom( is, 0, Long.MAX_VALUE );
        }
        catch ( IOException e )
        {
            throw new RuntimeException( e );
        }
    }
}

View File

@ -42,6 +42,7 @@ import org.apache.archiva.repository.ContentNotFoundException;
import org.apache.archiva.repository.LayoutException;
import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RemoteRepositoryContent;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.xml.XMLException;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang.StringUtils;
@ -369,9 +370,9 @@ public class MetadataTools
ProjectReference reference, String proxyId )
{
String metadataPath = getRepositorySpecificName( proxyId, toPath( reference ) );
Path metadataFile = Paths.get( managedRepository.getRepoRoot(), metadataPath );
StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );
if ( !Files.exists(metadataFile) || !Files.isRegularFile( metadataFile ))
if ( !metadataFile.exists() || metadataFile.isContainer())
{
// Nothing to do. return null.
return null;
@ -381,11 +382,11 @@ public class MetadataTools
{
return MavenMetadataReader.read( metadataFile );
}
catch ( XMLException e )
catch (XMLException | IOException e )
{
// TODO: [monitor] consider a monitor for this event.
// TODO: consider a read-redo on monitor return code?
log.warn( "Unable to read metadata: {}", metadataFile.toAbsolutePath(), e );
log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
return null;
}
}
@ -394,9 +395,9 @@ public class MetadataTools
String logicalResource, String proxyId )
{
String metadataPath = getRepositorySpecificName( proxyId, logicalResource );
Path metadataFile = Paths.get( managedRepository.getRepoRoot(), metadataPath );
StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );
if ( !Files.exists(metadataFile) || !Files.isRegularFile( metadataFile))
if ( !metadataFile.exists() || metadataFile.isContainer())
{
// Nothing to do. return null.
return null;
@ -406,11 +407,11 @@ public class MetadataTools
{
return MavenMetadataReader.read( metadataFile );
}
catch ( XMLException e )
catch (XMLException | IOException e )
{
// TODO: [monitor] consider a monitor for this event.
// TODO: consider a read-redo on monitor return code?
log.warn( "Unable to read metadata: {}", metadataFile.toAbsolutePath(), e );
log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
return null;
}
}
@ -419,9 +420,9 @@ public class MetadataTools
VersionedReference reference, String proxyId )
{
String metadataPath = getRepositorySpecificName( proxyId, toPath( reference ) );
Path metadataFile = Paths.get( managedRepository.getRepoRoot(), metadataPath );
StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );
if ( !Files.exists(metadataFile) || !Files.isRegularFile(metadataFile))
if ( !metadataFile.exists() || metadataFile.isContainer())
{
// Nothing to do. return null.
return null;
@ -431,11 +432,11 @@ public class MetadataTools
{
return MavenMetadataReader.read( metadataFile );
}
catch ( XMLException e )
catch (XMLException | IOException e )
{
// TODO: [monitor] consider a monitor for this event.
// TODO: consider a read-redo on monitor return code?
log.warn( "Unable to read metadata: {}", metadataFile.toAbsolutePath(), e );
log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
return null;
}
}
@ -443,7 +444,7 @@ public class MetadataTools
public void updateMetadata( ManagedRepositoryContent managedRepository, String logicalResource )
throws RepositoryMetadataException
{
final Path metadataFile = Paths.get( managedRepository.getRepoRoot(), logicalResource );
final StorageAsset metadataFile = managedRepository.getRepository().getAsset( logicalResource );
ArchivaRepositoryMetadata metadata = null;
//Gather and merge all metadata available
@ -480,7 +481,7 @@ public class MetadataTools
RepositoryMetadataWriter.write( metadata, metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
checksum.fixChecksums( algorithms );
}
@ -491,30 +492,17 @@ public class MetadataTools
* @param metadataParentDirectory
* @return origional set plus newly found versions
*/
private Set<String> findPossibleVersions( Set<String> versions, Path metadataParentDirectory )
private Set<String> findPossibleVersions( Set<String> versions, StorageAsset metadataParentDirectory )
{
Set<String> result = new HashSet<String>( versions );
try (Stream<Path> stream = Files.list( metadataParentDirectory )) {
stream.filter( Files::isDirectory ).filter(
p ->
{
try(Stream<Path> substream = Files.list(p))
{
return substream.anyMatch( f -> Files.isRegularFile( f ) && f.toString().endsWith( ".pom" ));
}
catch ( IOException e )
{
return false;
}
metadataParentDirectory.list().stream().filter(asset ->
asset.isContainer()).filter(asset -> {
return asset.list().stream().anyMatch(f -> !f.isContainer() && f.getName().endsWith(".pom"));
}
).forEach(
p -> result.add(p.getFileName().toString())
);
} catch (IOException e) {
//
}
).forEach( p -> result.add(p.getName()));
return result;
}
@ -522,8 +510,9 @@ public class MetadataTools
ManagedRepositoryContent managedRepository, String logicalResource )
{
List<ArchivaRepositoryMetadata> metadatas = new ArrayList<>();
Path file = Paths.get( managedRepository.getRepoRoot(), logicalResource );
if ( Files.exists(file) )
StorageAsset file = managedRepository.getRepository().getAsset( logicalResource );
if ( file.exists() )
{
try
{
@ -533,10 +522,14 @@ public class MetadataTools
metadatas.add( existingMetadata );
}
}
catch ( XMLException e )
catch (XMLException | IOException e )
{
log.debug( "Could not read metadata at {}. Metadata will be removed.", file.toAbsolutePath() );
FileUtils.deleteQuietly( file );
log.debug( "Could not read metadata at {}. Metadata will be removed.", file.getPath() );
try {
file.getStorage().removeAsset(file);
} catch (IOException ex) {
log.error("Could not remove asset {}", file.getPath());
}
}
}
@ -578,7 +571,8 @@ public class MetadataTools
public void updateMetadata( ManagedRepositoryContent managedRepository, ProjectReference reference )
throws LayoutException, RepositoryMetadataException, IOException, ContentNotFoundException
{
Path metadataFile = Paths.get( managedRepository.getRepoRoot(), toPath( reference ) );
StorageAsset metadataFile = managedRepository.getRepository().getAsset( toPath( reference ) );
long lastUpdated = getExistingLastUpdated( metadataFile );
@ -593,7 +587,7 @@ public class MetadataTools
// TODO: do we know this information instead?
// Set<Plugin> allPlugins = managedRepository.getPlugins( reference );
Set<Plugin> allPlugins;
if ( Files.exists(metadataFile))
if ( metadataFile.exists())
{
try
{
@ -653,7 +647,7 @@ public class MetadataTools
// Save the metadata model to disk.
RepositoryMetadataWriter.write( metadata, metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
checksum.fixChecksums( algorithms );
}
@ -748,9 +742,9 @@ public class MetadataTools
}
}
private long getExistingLastUpdated( Path metadataFile )
private long getExistingLastUpdated( StorageAsset metadataFile )
{
if ( !Files.exists(metadataFile) )
if ( !metadataFile.exists() )
{
// Doesn't exist.
return 0;
@ -762,7 +756,7 @@ public class MetadataTools
return getLastUpdated( metadata );
}
catch ( XMLException e )
catch (XMLException | IOException e )
{
// Error.
return 0;
@ -788,7 +782,7 @@ public class MetadataTools
public void updateMetadata( ManagedRepositoryContent managedRepository, VersionedReference reference )
throws LayoutException, RepositoryMetadataException, IOException, ContentNotFoundException
{
Path metadataFile = Paths.get( managedRepository.getRepoRoot(), toPath( reference ) );
StorageAsset metadataFile = managedRepository.getRepository().getAsset( toPath( reference ) );
long lastUpdated = getExistingLastUpdated( metadataFile );
@ -893,7 +887,7 @@ public class MetadataTools
// Save the metadata model to disk.
RepositoryMetadataWriter.write( metadata, metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
checksum.fixChecksums( algorithms );
}

View File

@ -22,6 +22,7 @@ package org.apache.archiva.repository.metadata;
import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.model.ArchivaRepositoryMetadata;
import org.apache.archiva.model.Plugin;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.xml.XMLException;
import org.apache.archiva.xml.XMLWriter;
import org.apache.commons.collections4.CollectionUtils;
@ -29,9 +30,12 @@ import org.apache.commons.lang.StringUtils;
import org.dom4j.Document;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.file.Path;
import java.util.Collections;
@ -44,11 +48,13 @@ import java.util.List;
*/
public class RepositoryMetadataWriter
{
public static void write( ArchivaRepositoryMetadata metadata, Path outputFile )
private static final Logger log = LoggerFactory.getLogger(RepositoryMetadataWriter.class);
public static void write( ArchivaRepositoryMetadata metadata, StorageAsset outputFile )
throws RepositoryMetadataException
{
boolean thrown = false;
try (FileWriter writer = new FileWriter( outputFile.toFile() ))
try (OutputStreamWriter writer = new OutputStreamWriter( outputFile.getWriteStream(true)))
{
write( metadata, writer );
writer.flush();
@ -57,13 +63,17 @@ public class RepositoryMetadataWriter
{
thrown = true;
throw new RepositoryMetadataException(
"Unable to write metadata file: " + outputFile.toAbsolutePath() + " - " + e.getMessage(), e );
"Unable to write metadata file: " + outputFile.getPath() + " - " + e.getMessage(), e );
}
finally
{
if ( thrown )
{
FileUtils.deleteQuietly( outputFile );
try {
outputFile.getStorage().removeAsset(outputFile);
} catch (IOException e) {
log.error("Could not remove asset {}", outputFile);
}
}
}
}

View File

@ -1,202 +0,0 @@
package org.apache.archiva.repository.content;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import static org.junit.Assert.*;
public class FilesystemAssetTest {
Path assetPathFile;
Path assetPathDir;
@Before
public void init() throws IOException {
assetPathFile = Files.createTempFile("assetFile", "dat");
assetPathDir = Files.createTempDirectory("assetDir");
}
@After
public void cleanup() {
try {
Files.deleteIfExists(assetPathFile);
} catch (IOException e) {
e.printStackTrace();
}
try {
Files.deleteIfExists(assetPathDir);
} catch (IOException e) {
e.printStackTrace();
}
}
@Test
public void getPath() {
FilesystemAsset asset = new FilesystemAsset("/"+assetPathFile.getFileName().toString(), assetPathFile);
assertEquals("/"+assetPathFile.getFileName().toString(), asset.getPath());
}
@Test
public void getName() {
FilesystemAsset asset = new FilesystemAsset("/"+assetPathFile.getFileName().toString(), assetPathFile);
assertEquals(assetPathFile.getFileName().toString(), asset.getName());
}
@Test
public void getModificationTime() throws IOException {
Instant modTime = Files.getLastModifiedTime(assetPathFile).toInstant();
FilesystemAsset asset = new FilesystemAsset("/test123", assetPathFile);
assertTrue(modTime.equals(asset.getModificationTime()));
}
@Test
public void isContainer() {
FilesystemAsset asset = new FilesystemAsset("/test1323", assetPathFile);
assertFalse(asset.isContainer());
FilesystemAsset asset2 = new FilesystemAsset("/test1234", assetPathDir);
assertTrue(asset2.isContainer());
}
@Test
public void list() throws IOException {
FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
assertEquals(0, asset.list().size());
FilesystemAsset asset2 = new FilesystemAsset("/test1235", assetPathDir);
assertEquals(0, asset2.list().size());
Path f1 = Files.createTempFile(assetPathDir, "testfile", "dat");
Path f2 = Files.createTempFile(assetPathDir, "testfile", "dat");
Path d1 = Files.createTempDirectory(assetPathDir, "testdir");
assertEquals(3, asset2.list().size());
assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(f1.getFileName().toString())));
assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(f2.getFileName().toString())));
assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(d1.getFileName().toString())));
Files.deleteIfExists(f1);
Files.deleteIfExists(f2);
Files.deleteIfExists(d1);
}
@Test
public void getSize() throws IOException {
FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
assertEquals(0, asset.getSize());
Files.write(assetPathFile, new String("abcdef").getBytes("ASCII"));
assertTrue(asset.getSize()>=6);
}
@Test
public void getData() throws IOException {
FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
try(InputStream is = asset.getReadStream()) {
assertEquals("abcdef", IOUtils.toString(is, "ASCII"));
}
}
@Test
public void getDataExceptionOnDir() throws IOException {
FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathDir);
Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
try {
InputStream is = asset.getReadStream();
assertFalse("Exception expected for data on dir", true);
} catch (IOException e) {
// fine
}
}
@Test
public void writeData() throws IOException {
    FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
    Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
    // Writing with replace=true must overwrite the previous content completely.
    try (OutputStream out = asset.getWriteStream(true)) {
        IOUtils.write("test12345", out, "ASCII");
    }
    assertEquals("test12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
}
@Test
public void writeDataAppend() throws IOException {
    FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
    Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
    // Writing with replace=false must append to the existing content.
    try (OutputStream out = asset.getWriteStream(false)) {
        IOUtils.write("test12345", out, "ASCII");
    }
    assertEquals("abcdeftest12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
}
@Test
public void writeDataExceptionOnDir() throws IOException {
    FilesystemAsset dirAsset = new FilesystemAsset("/test1234", assetPathDir);
    boolean thrown = false;
    try {
        dirAsset.getWriteStream(true);
    } catch (IOException e) {
        // expected: a directory cannot be written as data
        thrown = true;
    }
    assertTrue("Writing to a directory should throw a IOException", thrown);
}
@Test
public void storeDataFile() throws IOException {
    FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
    // Prepare a temporary source file with known content.
    Path dataFile = Files.createTempFile("testdata", "dat");
    try (OutputStream out = Files.newOutputStream(dataFile)) {
        IOUtils.write("testkdkdkd", out, "ASCII");
    }
    // Replacing the asset data must copy the source file content over.
    asset.replaceDataFromFile(dataFile);
    assertEquals("testkdkdkd", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
}
@Test
public void exists() {
    // The backing file was created in setup, so the asset exists.
    FilesystemAsset existing = new FilesystemAsset("/test1234", assetPathFile);
    assertTrue(existing.exists());
    // An asset pointing to a non-existing path must report false.
    FilesystemAsset missing = new FilesystemAsset("/test1234", Paths.get("abcdefgkdkdk"));
    assertFalse(missing.exists());
}
@Test
public void getFilePath() {
    // The asset must expose the backing filesystem path unchanged.
    FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
    assertEquals(assetPathFile, asset.getFilePath());
}
}

View File

@ -28,7 +28,7 @@ import org.apache.archiva.repository.LayoutException;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.springframework.stereotype.Service;
import java.util.Set;

View File

@ -25,7 +25,7 @@ import org.apache.archiva.consumers.InvalidRepositoryContentConsumer;
import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.archiva.consumers.RepositoryContentConsumer;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.collections4.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -29,7 +29,7 @@ import org.apache.archiva.repository.BasicRemoteRepository;
import org.apache.archiva.repository.EditableManagedRepository;
import org.apache.archiva.repository.EditableRemoteRepository;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.scanner.mock.ManagedRepositoryContentMock;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.commons.io.FileUtils;

View File

@ -19,6 +19,7 @@ package org.apache.archiva.repository.scanner.mock;
* under the License.
*/
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.utils.VersionUtil;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
@ -27,9 +28,11 @@ import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.model.ProjectReference;
import org.apache.archiva.model.VersionedReference;
import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
@ -48,6 +51,7 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent
private ManagedRepository repository;
private FilesystemStorage fsStorage;
public ManagedRepositoryContentMock(ManagedRepository repo) {
this.repository = repo;
@ -92,7 +96,18 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent
@Override
public String getRepoRoot( )
{
return Paths.get("", "target", "test-repository", "managed").toString();
return getRepoRootAsset().getFilePath().toString();
}
private StorageAsset getRepoRootAsset() {
if (fsStorage==null) {
try {
fsStorage = new FilesystemStorage(Paths.get("", "target", "test-repository", "managed"), new DefaultFileLockManager());
} catch (IOException e) {
e.printStackTrace();
}
}
return fsStorage.getAsset("");
}
@Override
@ -329,7 +344,7 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent
@Override
public StorageAsset toFile( ArtifactReference reference )
{
return Paths.get(getRepoRoot(), refs.get(reference));
return getRepoRootAsset().resolve(refs.get(reference));
}
@Override

View File

@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>archiva-base</artifactId>
<groupId>org.apache.archiva</groupId>
<version>3.0.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>archiva-storage-api</artifactId>
<name>Archiva Base :: Storage API</name>
<properties>
<site.staging.base>${project.parent.parent.basedir}</site.staging.base>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<basedir>${basedir}</basedir>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@ -1,4 +1,4 @@
package org.apache.archiva.repository.content;
package org.apache.archiva.repository.storage;
/*
* Licensed to the Apache Software Foundation (ASF) under one
@ -28,15 +28,20 @@ import java.nio.file.CopyOption;
import java.util.function.Consumer;
/**
* Repository storage gives access to the files and directories on the storage.
* The storage may be on a filesystem but can be any other storage system.
*
* This API is low level repository access. If you use this API you must
* either have knowledge about the specific repository layout or use the structure
* This is the low level API to access artifacts in a repository. Each artifact is represented
* by one storage asset. Each asset can be accessed by a path that is independent on the underlying storage
* implementation. Paths always use '/' as path separator. The path is local to the repository and
* is unique for each asset.
* The storage API knows nothing about the repository layout or repository specific metadata.
* If you use this API you must either have knowledge about the specific repository layout or use the structure
* as it is, e.g. for browsing.
*
* It is the decision of the implementation, if this API provides access to all elements, or
* just a selected view.
* The base implementation for the storage uses a directory structure on the local filesystem.
*
*
* It is the decision of the repository type specific implementation, if this API provides access to all elements, that
* is really stored or just a selected view.
*
* Checking access is not part of this API.
*/
@ -104,7 +109,9 @@ public interface RepositoryStorage {
void removeAsset(StorageAsset asset) throws IOException;
/**
* Moves the asset to the given location and returns the asset object for the destination.
* Moves the asset to the given location and returns the asset object for the destination. Moves only assets that
* belong to the same storage instance. It will throw an IOException if the assets are from different storage
* instances.
*
* @param origin The original asset
* @param destination The destination path pointing to the new asset.
@ -114,17 +121,20 @@ public interface RepositoryStorage {
StorageAsset moveAsset(StorageAsset origin, String destination, CopyOption... copyOptions) throws IOException;
/**
* Moves the asset to the new path.
*
* Moves the asset to the given location and returns the asset object for the destination. Moves only assets that
* belong to the same storage instance. It will throw an IOException if the assets are from different storage
* instances.
*
* @param origin The original asset
* @param destination The destination asset.
* @param destination The destination path.
* @param copyOptions The copy options (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING}
* @throws IOException If it was not possible to copy the asset.
*/
void moveAsset(StorageAsset origin, StorageAsset destination, CopyOption... copyOptions) throws IOException;
/**
* Copies the given asset to the new destination.
* Copies the given asset to the new destination. Copies only assets that belong to the same storage instance.
* It will throw an IOException if the assets are from different storage instances.
*
* @param origin The original asset
* @param destination The path to the new asset
@ -135,7 +145,8 @@ public interface RepositoryStorage {
StorageAsset copyAsset(StorageAsset origin, String destination, CopyOption... copyOptions) throws IOException;
/**
* Copies the given asset to the new destination.
* Copies the given asset to the new destination. Copies only assets that belong to the same storage instance.
* It will throw an IOException if the assets are from different storage instances.
*
* @param origin The original asset
* @param destination The path to the new asset

View File

@ -1,4 +1,4 @@
package org.apache.archiva.repository.content;
package org.apache.archiva.repository.storage;
/*
* Licensed to the Apache Software Foundation (ASF) under one
@ -27,10 +27,9 @@ import java.nio.channels.WritableByteChannel;
import java.nio.file.Path;
import java.time.Instant;
import java.util.List;
import java.util.function.Consumer;
/**
* A instance of this interface represents information about an specific asset in a repository.
* An instance of this interface represents information about a specific asset in a repository.
* The asset may be an real artifact, a directory, or a virtual asset.
*
* Each asset has a unique path relative to the repository.
@ -177,4 +176,11 @@ public interface StorageAsset
* @return The asset, or <code>null</code>, if it does not exist.
*/
StorageAsset getParent();
/**
* Returns the asset relative to the given path
* @param toPath
* @return
*/
StorageAsset resolve(String toPath);
}

View File

@ -0,0 +1,58 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>archiva-base</artifactId>
<groupId>org.apache.archiva</groupId>
<version>3.0.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>archiva-storage-fs</artifactId>
<name>Archiva Base :: Storage Filesystem Based</name>
<properties>
<site.staging.base>${project.parent.parent.basedir}</site.staging.base>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-filelock</artifactId>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<basedir>${basedir}</basedir>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@ -1,4 +1,4 @@
package org.apache.archiva.repository.content;
package org.apache.archiva.repository.storage;
/*
* Licensed to the Apache Software Foundation (ASF) under one
@ -98,7 +98,7 @@ public class FilesystemAsset implements StorageAsset {
FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath) {
this.assetPath = assetPath;
this.relativePath = path;
this.relativePath = normalizePath(path);
this.setPermissionsForNew=false;
this.basePath = basePath;
this.storage = storage;
@ -114,7 +114,7 @@ public class FilesystemAsset implements StorageAsset {
*/
public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath) {
this.assetPath = assetPath;
this.relativePath = path;
this.relativePath = normalizePath(path);
this.setPermissionsForNew = false;
this.basePath = null;
this.storage = storage;
@ -132,7 +132,7 @@ public class FilesystemAsset implements StorageAsset {
*/
public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory) {
this.assetPath = assetPath;
this.relativePath = path;
this.relativePath = normalizePath(path);
this.directoryHint = directory;
this.setPermissionsForNew = false;
this.basePath = basePath;
@ -151,7 +151,7 @@ public class FilesystemAsset implements StorageAsset {
*/
public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory, boolean setPermissionsForNew) {
this.assetPath = assetPath;
this.relativePath = path;
this.relativePath = normalizePath(path);
this.directoryHint = directory;
this.setPermissionsForNew = setPermissionsForNew;
this.basePath = basePath;
@ -159,6 +159,14 @@ public class FilesystemAsset implements StorageAsset {
init();
}
private String normalizePath(String path) {
if (!path.startsWith("/")) {
return "/"+path;
} else {
return path;
}
}
private void init() {
if (setPermissionsForNew) {
@ -423,6 +431,11 @@ public class FilesystemAsset implements StorageAsset {
}
}
@Override
public StorageAsset resolve(String toPath) {
return storage.getAsset(this.getPath()+"/"+toPath);
}
public void setDefaultFileAcls(List<AclEntry> acl) {
defaultFileAcls = acl;

View File

@ -1,4 +1,4 @@
package org.apache.archiva.repository.content;
package org.apache.archiva.repository.storage;
/*
* Licensed to the Apache Software Foundation (ASF) under one
@ -81,7 +81,7 @@ public class FilesystemStorage implements RepositoryStorage {
}
@Override
public void consumeData( StorageAsset asset, Consumer<InputStream> consumerFunction, boolean readLock ) throws IOException
public void consumeData(StorageAsset asset, Consumer<InputStream> consumerFunction, boolean readLock ) throws IOException
{
final Path path = asset.getFilePath();
try {
@ -333,6 +333,12 @@ public class FilesystemStorage implements RepositoryStorage {
@Override
public void moveAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException
{
if (origin.getStorage()!=this) {
throw new IOException("The origin asset does not belong to this storage instance. Cannot copy between different storage instances.");
}
if (destination.getStorage()!=this) {
throw new IOException("The destination asset does not belong to this storage instance. Cannot copy between different storage instances.");
}
Files.move(origin.getFilePath(), destination.getFilePath(), copyOptions);
}
@ -348,6 +354,12 @@ public class FilesystemStorage implements RepositoryStorage {
@Override
public void copyAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException
{
if (origin.getStorage()!=this) {
throw new IOException("The origin asset does not belong to this storage instance. Cannot copy between different storage instances.");
}
if (destination.getStorage()!=this) {
throw new IOException("The destination asset does not belong to this storage instance. Cannot copy between different storage instances.");
}
Path destinationPath = destination.getFilePath();
boolean overwrite = false;
for (int i=0; i<copyOptions.length; i++) {

View File

@ -0,0 +1,346 @@
package org.apache.archiva.repository.storage;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.common.filelock.FileLockException;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.filelock.FileLockTimeoutException;
import org.apache.archiva.common.filelock.Lock;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.*;
import java.util.HashSet;
import java.util.function.Consumer;
/**
*
* Utility class for assets. Allows to copy, move between different storage instances and
* recursively consume the tree.
*
* @author Martin Stockhammer <martin_s@apache.org>
*/
public class StorageUtil
{
private static final int DEFAULT_BUFFER_SIZE = 4096;
private static final Logger log = LoggerFactory.getLogger(StorageUtil.class);
/**
* Copies the source asset to the target. The assets may be from different RepositoryStorage instances.
* If you know that source and asset are from the same storage instance, the copy method of the storage
* instance may be faster.
*
* @param source The source asset
* @param target The target asset
* @param locked If true, a readlock is set on the source and a write lock is set on the target.
* @param copyOptions Copy options
* @throws IOException
*/
public static final void copyAsset( final StorageAsset source,
final StorageAsset target,
boolean locked,
final CopyOption... copyOptions ) throws IOException
{
if (source.isFileBased() && target.isFileBased()) {
// Short cut for FS operations
final Path sourcePath = source.getFilePath();
final Path targetPath = target.getFilePath( );
if (locked) {
final FileLockManager lmSource = ((FilesystemStorage)source.getStorage()).getFileLockManager();
final FileLockManager lmTarget = ((FilesystemStorage)target.getStorage()).getFileLockManager();
try (Lock lockRead = lmSource.readFileLock( sourcePath ); Lock lockWrite = lmTarget.writeFileLock( targetPath ) )
{
Files.copy( sourcePath, targetPath, copyOptions );
}
catch ( FileLockException e )
{
throw new IOException( e );
}
catch ( FileLockTimeoutException e )
{
throw new IOException( e );
}
} else
{
Files.copy( sourcePath, targetPath, copyOptions );
}
} else {
try {
final RepositoryStorage sourceStorage = source.getStorage();
final RepositoryStorage targetStorage = target.getStorage();
sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked);
} catch (IOException e) {
throw e;
} catch (Throwable e) {
Throwable cause = e.getCause();
if (cause instanceof IOException) {
throw (IOException)cause;
} else
{
throw new IOException( e );
}
}
}
}
/**
* Moves a asset between different storage instances.
* If you know that source and asset are from the same storage instance, the move method of the storage
* instance may be faster.
*
* @param source The source asset
* @param target The target asset
* @param locked If true, a lock is used for the move operation.
* @param copyOptions Options for copying
* @throws IOException If the move fails
*/
public static final void moveAsset(StorageAsset source, StorageAsset target, boolean locked, CopyOption... copyOptions) throws IOException
{
if (source.isFileBased() && target.isFileBased()) {
// Short cut for FS operations
// Move is atomic operation
Files.move( source.getFilePath(), target.getFilePath(), copyOptions );
} else {
try {
final RepositoryStorage sourceStorage = source.getStorage();
final RepositoryStorage targetStorage = target.getStorage();
sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked);
sourceStorage.removeAsset( source );
} catch (IOException e) {
throw e;
} catch (Throwable e) {
Throwable cause = e.getCause();
if (cause instanceof IOException) {
throw (IOException)cause;
} else
{
throw new IOException( e );
}
}
}
}
private static final void wrapWriteFunction(ReadableByteChannel is, RepositoryStorage targetStorage, StorageAsset target, boolean locked) {
try {
targetStorage.writeDataToChannel( target, os -> copy(is, os), locked );
} catch (Exception e) {
throw new RuntimeException( e );
}
}
private static final void copy( final ReadableByteChannel is, final WritableByteChannel os ) {
if (is instanceof FileChannel) {
copy( (FileChannel) is, os );
} else if (os instanceof FileChannel) {
copy(is, (FileChannel)os);
} else
{
try
{
ByteBuffer buffer = ByteBuffer.allocate( DEFAULT_BUFFER_SIZE );
while ( is.read( buffer ) != -1 )
{
buffer.flip( );
while ( buffer.hasRemaining( ) )
{
os.write( buffer );
}
buffer.clear( );
}
}
catch ( IOException e )
{
throw new RuntimeException( e );
}
}
}
private static final void copy( final FileChannel is, final WritableByteChannel os ) {
try
{
is.transferTo( 0, is.size( ), os );
}
catch ( IOException e )
{
throw new RuntimeException( e );
}
}
private static final void copy( final ReadableByteChannel is, final FileChannel os ) {
try
{
os.transferFrom( is, 0, Long.MAX_VALUE );
}
catch ( IOException e )
{
throw new RuntimeException( e );
}
}
/**
* Runs the consumer function recursively on each asset found starting at the base path
* @param baseAsset The base path where to start search
* @param consumer The consumer function applied to each found asset
* @param depthFirst If true, the deepest elements are consumed first.
* @param maxDepth The maximum depth to recurse into. 0 means, only the baseAsset is consumed, 1 the base asset and its children and so forth.
*/
public static final void recurse(final StorageAsset baseAsset, final Consumer<StorageAsset> consumer, final boolean depthFirst, final int maxDepth) throws IOException {
recurse(baseAsset, consumer, depthFirst, maxDepth, 0);
}
/**
* Runs the consumer function recursively on each asset found starting at the base path. The function descends into
* maximum depth.
*
* @param baseAsset The base path where to start search
* @param consumer The consumer function applied to each found asset
* @param depthFirst If true, the deepest elements are consumed first.
*/
public static final void recurse(final StorageAsset baseAsset, final Consumer<StorageAsset> consumer, final boolean depthFirst) throws IOException {
recurse(baseAsset, consumer, depthFirst, Integer.MAX_VALUE, 0);
}
/**
* Runs the consumer function recursively on each asset found starting at the base path. It does not recurse with
* depth first and stops only if there are no more children available.
*
* @param baseAsset The base path where to start search
* @param consumer The consumer function applied to each found asset
*/
public static final void recurse(final StorageAsset baseAsset, final Consumer<StorageAsset> consumer) throws IOException {
recurse(baseAsset, consumer, false, Integer.MAX_VALUE, 0);
}
private static final void recurse(final StorageAsset baseAsset, final Consumer<StorageAsset> consumer, final boolean depthFirst, final int maxDepth, final int currentDepth)
throws IOException {
if (!depthFirst) {
consumer.accept(baseAsset);
}
if (currentDepth<maxDepth && baseAsset.isContainer()) {
for(StorageAsset asset : baseAsset.list() ) {
recurse(asset, consumer, depthFirst, maxDepth, currentDepth+1);
}
}
if (depthFirst) {
consumer.accept(baseAsset);
}
}
/**
* Deletes the given asset and all child assets recursively.
* @param baseDir The base asset to remove.
* @throws IOException
*/
public static final void deleteRecursively(StorageAsset baseDir) throws IOException {
recurse(baseDir, a -> {
try {
a.getStorage().removeAsset(a);
} catch (IOException e) {
log.error("Could not delete asset {}", a.getPath());
}
},true);
}
/**
* Returns the extension of the name of a given asset. Extension is the substring after the last occurence of '.' in the
* string. If no '.' is found, the empty string is returned.
*
* @param asset The asset from which to return the extension string.
* @return The extension.
*/
public static final String getExtension(StorageAsset asset) {
return StringUtils.substringAfterLast(asset.getName(),".");
}
public static final void copyToLocalFile(StorageAsset asset, Path destination, CopyOption... copyOptions) throws IOException {
if (asset.isFileBased()) {
Files.copy(asset.getFilePath(), destination, copyOptions);
} else {
try {
HashSet<OpenOption> openOptions = new HashSet<>();
for (CopyOption option : copyOptions) {
if (option == StandardCopyOption.REPLACE_EXISTING) {
openOptions.add(StandardOpenOption.CREATE);
openOptions.add(StandardOpenOption.TRUNCATE_EXISTING);
openOptions.add(StandardOpenOption.WRITE);
} else {
openOptions.add(StandardOpenOption.WRITE);
openOptions.add(StandardOpenOption.CREATE_NEW);
}
}
asset.getStorage().consumeDataFromChannel(asset, channel -> {
try {
FileChannel.open(destination, openOptions).transferFrom(channel, 0, Long.MAX_VALUE);
} catch (IOException e) {
throw new RuntimeException(e);
}
}, false);
} catch (Throwable e) {
if (e.getCause() instanceof IOException) {
throw (IOException)e.getCause();
} else {
throw new IOException(e);
}
}
}
}
public static class PathInformation {
final Path path ;
final boolean tmpFile;
PathInformation(Path path, boolean tmpFile) {
this.path = path;
this.tmpFile = tmpFile;
}
public Path getPath() {
return path;
}
public boolean isTmpFile() {
return tmpFile;
}
}
public static final PathInformation getAssetDataAsPath(StorageAsset asset) throws IOException {
if (!asset.exists()) {
throw new IOException("Asset does not exist");
}
if (asset.isFileBased()) {
return new PathInformation(asset.getFilePath(), false);
} else {
Path tmpFile = Files.createTempFile(asset.getName(), getExtension(asset));
copyToLocalFile(asset, tmpFile, StandardCopyOption.REPLACE_EXISTING);
return new PathInformation(tmpFile, true);
}
}
}

View File

@ -0,0 +1,203 @@
package org.apache.archiva.repository.storage;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
public class FilesystemAssetTest {
Path assetPathFile;
Path assetPathDir;
FilesystemStorage filesystemStorage;
// Creates a fresh temporary directory containing one file, and a filesystem
// storage rooted at that directory. Runs before each test.
@Before
public void init() throws IOException {
assetPathDir = Files.createTempDirectory("assetDir");
// note: the file lives inside assetPathDir, so directory listings include it
assetPathFile = Files.createTempFile(assetPathDir,"assetFile", "dat");
filesystemStorage = new FilesystemStorage(assetPathDir, new DefaultFileLockManager());
}
// Best-effort removal of the temporary file and directory after each test.
@After
public void cleanup() {
try {
Files.deleteIfExists(assetPathFile);
} catch (IOException e) {
// deletion failure is not fatal for the test run; just report it
e.printStackTrace();
}
FileUtils.deleteQuietly(assetPathDir.toFile());
}
@Test
public void getPath() {
    // A path given without leading slash must be normalized to start with '/'.
    String fileName = assetPathFile.getFileName().toString();
    FilesystemAsset asset = new FilesystemAsset(filesystemStorage, fileName, assetPathFile);
    Assert.assertEquals("/" + fileName, asset.getPath());
}
@Test
public void getName() {
    // The name is the last segment of the asset path.
    String fileName = assetPathFile.getFileName().toString();
    FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/" + fileName, assetPathFile);
    Assert.assertEquals(fileName, asset.getName());
}
@Test
public void getModificationTime() throws IOException {
    // The asset must report the modification time of the backing file.
    Instant expected = Files.getLastModifiedTime(assetPathFile).toInstant();
    FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test123", assetPathFile);
    Assert.assertTrue(asset.getModificationTime().equals(expected));
}
@Test
public void isContainer() {
    // A plain file must not be reported as a container, a directory must be.
    FilesystemAsset fileAsset = new FilesystemAsset(filesystemStorage, "/test1323", assetPathFile);
    Assert.assertFalse(fileAsset.isContainer());
    FilesystemAsset dirAsset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathDir);
    Assert.assertTrue(dirAsset.isContainer());
}
@Test
public void list() throws IOException {
    // A file asset has no children.
    FilesystemAsset fileAsset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
    Assert.assertEquals(0, fileAsset.list().size());
    // The directory already contains the single file created in init().
    FilesystemAsset dirAsset = new FilesystemAsset(filesystemStorage, "/test1235", assetPathDir);
    Assert.assertEquals(1, dirAsset.list().size());
    // Two additional files and one sub directory -> four children.
    Path file1 = Files.createTempFile(assetPathDir, "testfile", "dat");
    Path file2 = Files.createTempFile(assetPathDir, "testfile", "dat");
    Path dir1 = Files.createTempDirectory(assetPathDir, "testdir");
    Assert.assertEquals(4, dirAsset.list().size());
    for (Path created : new Path[]{file1, file2, dir1}) {
        String expectedName = created.getFileName().toString();
        Assert.assertTrue(dirAsset.list().stream().anyMatch(child -> child.getName().equals(expectedName)));
    }
    Files.deleteIfExists(file1);
    Files.deleteIfExists(file2);
    Files.deleteIfExists(dir1);
}
@Test
public void getSize() throws IOException {
    FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
    // A freshly created temp file is empty.
    Assert.assertEquals(0, asset.getSize());
    // After writing six ASCII bytes the reported size must be at least six.
    Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
    Assert.assertTrue(asset.getSize() >= 6);
}
@Test
public void getData() throws IOException {
    FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
    Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
    // The read stream must yield exactly the file content.
    try (InputStream in = asset.getReadStream()) {
        Assert.assertEquals("abcdef", IOUtils.toString(in, "ASCII"));
    }
}
@Test
public void getDataExceptionOnDir() throws IOException {
    FilesystemAsset dirAsset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathDir);
    Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
    boolean thrown = false;
    try {
        dirAsset.getReadStream();
    } catch (IOException e) {
        // expected: a directory cannot be read as data
        thrown = true;
    }
    Assert.assertTrue("Exception expected for data on dir", thrown);
}
@Test
public void writeData() throws IOException {
    FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
    Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
    // Writing with replace=true must overwrite the previous content completely.
    try (OutputStream out = asset.getWriteStream(true)) {
        IOUtils.write("test12345", out, "ASCII");
    }
    Assert.assertEquals("test12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
}
@Test
public void writeDataAppend() throws IOException {
    // Writing with getWriteStream(false) appends to the existing content
    // (false appears to mean "append" — behavior taken from the assertions).
    final FilesystemAsset fileAsset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
    Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
    try (OutputStream appendStream = fileAsset.getWriteStream(false)) {
        IOUtils.write("test12345", appendStream, "ASCII");
    }
    final String actual = IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII");
    Assert.assertEquals("abcdeftest12345", actual);
}
@Test
public void writeDataExceptionOnDir() throws IOException {
    // Opening a write stream on a directory-backed asset must fail with IOException.
    FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathDir);
    // try-with-resources closes the stream if — unexpectedly — one is returned;
    // Assert.fail replaces the awkward assertTrue("…", false) idiom.
    try (OutputStream os = asset.getWriteStream(true)) {
        Assert.fail("Writing to a directory should throw a IOException");
    } catch (IOException e) {
        // expected
    }
}
@Test
public void storeDataFile() throws IOException {
    // replaceDataFromFile must copy the given file's content into the asset.
    FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
    Path dataFile = Files.createTempFile("testdata", "dat");
    try {
        try (OutputStream os = Files.newOutputStream(dataFile)) {
            IOUtils.write("testkdkdkd", os, "ASCII");
        }
        asset.replaceDataFromFile(dataFile);
        Assert.assertEquals("testkdkdkd", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
    } finally {
        // Fix: the temp file was previously leaked after the test run.
        Files.deleteIfExists(dataFile);
    }
}
@Test
public void exists() {
    // exists() is true for an asset backed by a real file, false otherwise.
    final FilesystemAsset presentAsset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
    Assert.assertTrue(presentAsset.exists());
    final FilesystemAsset missingAsset = new FilesystemAsset(filesystemStorage, "/test1234", Paths.get("abcdefgkdkdk"));
    Assert.assertFalse(missingAsset.exists());
}
@Test
public void getFilePath() {
// getFilePath() must return exactly the Path the asset was constructed with.
FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
Assert.assertEquals(assetPathFile, asset.getFilePath());
}
}

View File

@ -1,4 +1,4 @@
package org.apache.archiva.repository.content;
package org.apache.archiva.repository.storage;
/*
* Licensed to the Apache Software Foundation (ASF) under one
@ -20,8 +20,13 @@ package org.apache.archiva.repository.content;
*/
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.repository.storage.FilesystemAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@ -30,6 +35,7 @@ import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import static org.junit.Assert.*;
@ -51,8 +57,8 @@ public class FilesystemStorageTest {
Files.createDirectories(baseDir.resolve("dir2"));
file1 = Files.createFile(baseDir.resolve("dir1/testfile1.dat"));
dir1 = Files.createDirectories(baseDir.resolve("dir1/testdir"));
file1Asset = new FilesystemAsset("/dir1/testfile1.dat", file1);
dir1Asset = new FilesystemAsset("/dir1/testdir", dir1);
file1Asset = new FilesystemAsset(fsStorage, "/dir1/testfile1.dat", file1);
dir1Asset = new FilesystemAsset(fsStorage, "/dir1/testdir", dir1);
}
private class StringResult {
@ -70,31 +76,11 @@ public class FilesystemStorageTest {
@After
public void cleanup() {
try {
Files.deleteIfExists(file1);
} catch (IOException e) {
e.printStackTrace();
}
try {
Files.deleteIfExists(dir1);
} catch (IOException e) {
e.printStackTrace();
}
try {
Files.deleteIfExists(baseDir.resolve("dir1"));
} catch (IOException e) {
e.printStackTrace();
}
try {
Files.deleteIfExists(baseDir.resolve("dir2"));
} catch (IOException e) {
e.printStackTrace();
}
try {
Files.deleteIfExists(baseDir);
} catch (IOException e) {
e.printStackTrace();
}
FileUtils.deleteQuietly(file1.toFile());
FileUtils.deleteQuietly(dir1.toFile());
FileUtils.deleteQuietly(baseDir.resolve("dir1").toFile());
FileUtils.deleteQuietly(baseDir.resolve("dir2").toFile());
FileUtils.deleteQuietly(baseDir.toFile());
}
@ -107,7 +93,7 @@ public class FilesystemStorageTest {
}
StringResult result = new StringResult();
fsStorage.consumeData(file1Asset, is -> consume(is, result), false );
assertEquals("abcdefghijkl" ,result.getData());
Assert.assertEquals("abcdefghijkl" ,result.getData());
}
private void consume(InputStream is, StringResult result) {
@ -122,31 +108,31 @@ public class FilesystemStorageTest {
@Test
public void getAsset() {
StorageAsset asset = fsStorage.getAsset("/dir1/testfile1.dat");
assertEquals(file1, asset.getFilePath());
Assert.assertEquals(file1, asset.getFilePath());
}
@Test
public void addAsset() {
StorageAsset newAsset = fsStorage.addAsset("dir2/test", false);
assertNotNull(newAsset);
assertFalse(newAsset.isContainer());
assertFalse(newAsset.exists());
Assert.assertNotNull(newAsset);
Assert.assertFalse(newAsset.isContainer());
Assert.assertFalse(newAsset.exists());
StorageAsset newDirAsset = fsStorage.addAsset("/dir2/testdir2", true);
assertNotNull(newDirAsset);
assertTrue(newDirAsset.isContainer());
assertFalse(newDirAsset.exists());
Assert.assertNotNull(newDirAsset);
Assert.assertTrue(newDirAsset.isContainer());
Assert.assertFalse(newDirAsset.exists());
}
@Test
public void removeAsset() throws IOException {
assertTrue(Files.exists(file1));
Assert.assertTrue(Files.exists(file1));
fsStorage.removeAsset(file1Asset);
assertFalse(Files.exists(file1));
Assert.assertFalse(Files.exists(file1));
assertTrue(Files.exists(dir1));
Assert.assertTrue(Files.exists(dir1));
fsStorage.removeAsset(dir1Asset);
assertFalse(Files.exists(dir1));
Assert.assertFalse(Files.exists(dir1));
}
@Test
@ -154,23 +140,23 @@ public class FilesystemStorageTest {
Path newFile=null;
Path newDir=null;
try {
assertTrue(Files.exists(file1));
Assert.assertTrue(Files.exists(file1));
try (OutputStream os = Files.newOutputStream(file1)) {
IOUtils.write("testakdkkdkdkdk", os, "ASCII");
}
long fileSize = Files.size(file1);
fsStorage.moveAsset(file1Asset, "/dir2/testfile2.dat");
assertFalse(Files.exists(file1));
Assert.assertFalse(Files.exists(file1));
newFile = baseDir.resolve("dir2/testfile2.dat");
assertTrue(Files.exists(newFile));
assertEquals(fileSize, Files.size(newFile));
Assert.assertTrue(Files.exists(newFile));
Assert.assertEquals(fileSize, Files.size(newFile));
assertTrue(Files.exists(dir1));
Assert.assertTrue(Files.exists(dir1));
newDir = baseDir.resolve("dir2/testdir2");
fsStorage.moveAsset(dir1Asset, "dir2/testdir2");
assertFalse(Files.exists(dir1));
assertTrue(Files.exists(newDir));
Assert.assertFalse(Files.exists(dir1));
Assert.assertTrue(Files.exists(newDir));
} finally {
if (newFile!=null) Files.deleteIfExists(newFile);
if (newDir!=null) Files.deleteIfExists(newDir);
@ -182,27 +168,33 @@ public class FilesystemStorageTest {
Path newFile=null;
Path newDir=null;
try {
assertTrue(Files.exists(file1));
Assert.assertTrue(Files.exists(file1));
try (OutputStream os = Files.newOutputStream(file1)) {
IOUtils.write("testakdkkdkdkdk", os, "ASCII");
}
long fileSize = Files.size(file1);
fsStorage.copyAsset(file1Asset, "/dir2/testfile2.dat");
assertTrue(Files.exists(file1));
assertEquals(fileSize, Files.size(file1));
fsStorage.copyAsset(file1Asset, "/dir2/testfile2.dat", StandardCopyOption.REPLACE_EXISTING);
Assert.assertTrue(Files.exists(file1));
Assert.assertEquals(fileSize, Files.size(file1));
newFile = baseDir.resolve("dir2/testfile2.dat");
assertTrue(Files.exists(newFile));
assertEquals(fileSize, Files.size(newFile));
Assert.assertTrue(Files.exists(newFile));
Assert.assertEquals(fileSize, Files.size(newFile));
try {
fsStorage.copyAsset(file1Asset, "/dir2/testfile2.dat");
Assert.assertTrue("IOException should be thrown (File exists)", false);
} catch (IOException ex) {
Assert.assertTrue("Exception must contain 'file exists'", ex.getMessage().contains("file exists"));
}
assertTrue(Files.exists(dir1));
Assert.assertTrue(Files.exists(dir1));
newDir = baseDir.resolve("dir2/testdir2");
fsStorage.copyAsset(dir1Asset, "dir2/testdir2");
assertTrue(Files.exists(dir1));
assertTrue(Files.exists(newDir));
Assert.assertTrue(Files.exists(dir1));
Assert.assertTrue(Files.exists(newDir));
} finally {
if (newFile!=null) Files.deleteIfExists(newFile);
if (newDir!=null) Files.deleteIfExists(newDir);
if (newDir!=null) FileUtils.deleteQuietly(newDir.toFile());
}
}
}

View File

@ -52,5 +52,7 @@
<module>archiva-repository-scanner</module>
<module>archiva-repository-admin</module>
<module>archiva-security-common</module>
<module>archiva-storage-api</module>
<module>archiva-storage-fs</module>
</modules>
</project>

View File

@ -28,12 +28,11 @@ import org.apache.archiva.consumers.InvalidRepositoryContentConsumer;
import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.archiva.converter.RepositoryConversionException;
import org.apache.archiva.repository.BasicManagedRepository;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.content.maven2.ManagedDefaultRepositoryContent;
import org.apache.archiva.repository.scanner.RepositoryScanner;
import org.apache.archiva.repository.scanner.RepositoryScannerException;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.artifact.repository.MavenArtifactRepository;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
import org.springframework.stereotype.Service;

View File

@ -19,14 +19,21 @@ package org.apache.archiva.indexer.maven;
* under the License.
*/
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.maven.index.context.IndexingContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.sql.Date;
import java.time.ZonedDateTime;
import java.util.Set;
@ -36,8 +43,11 @@ import java.util.Set;
*/
public class MavenIndexContext implements ArchivaIndexingContext {
private static final Logger log = LoggerFactory.getLogger(ArchivaIndexingContext.class);
private IndexingContext delegate;
private Repository repository;
private StorageAsset dir = null;
protected MavenIndexContext(Repository repository, IndexingContext delegate) {
this.delegate = delegate;
@ -56,8 +66,23 @@ public class MavenIndexContext implements ArchivaIndexingContext {
}
@Override
public URI getPath() {
return delegate.getIndexDirectoryFile().toURI();
public StorageAsset getPath() {
if (dir==null) {
StorageAsset repositoryDirAsset = repository.getAsset("");
Path repositoryDir = repositoryDirAsset.getFilePath().toAbsolutePath();
Path indexDir = delegate.getIndexDirectoryFile().toPath();
if (indexDir.startsWith(repositoryDir)) {
dir = repository.getAsset(repositoryDir.relativize(indexDir).toString());
} else {
try {
FilesystemStorage storage = new FilesystemStorage(indexDir, new DefaultFileLockManager());
dir = storage.getAsset("");
} catch (IOException e) {
log.error("Error occured while creating storage for index dir");
}
}
}
return dir;
}
@Override

View File

@ -19,7 +19,6 @@ package org.apache.archiva.indexer.maven;
* under the License.
*/
import org.apache.archiva.admin.model.RepositoryAdminException;
import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.configuration.ArchivaConfiguration;
@ -28,8 +27,6 @@ import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.indexer.IndexCreationFailedException;
import org.apache.archiva.indexer.IndexUpdateFailedException;
import org.apache.archiva.indexer.UnsupportedBaseContextException;
import org.apache.archiva.indexer.merger.IndexMergerException;
import org.apache.archiva.indexer.merger.TemporaryGroupIndex;
import org.apache.archiva.proxy.ProxyRegistry;
import org.apache.archiva.proxy.maven.WagonFactory;
import org.apache.archiva.proxy.maven.WagonFactoryException;
@ -42,10 +39,12 @@ import org.apache.archiva.repository.RemoteRepository;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
import org.apache.archiva.repository.content.FilesystemAsset;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.index.ArtifactContext;
import org.apache.maven.index.ArtifactContextProducer;
@ -142,7 +141,7 @@ public class MavenIndexManager implements ArchivaIndexManager {
private ProxyRegistry proxyRegistry;
private ConcurrentSkipListSet<Path> activeContexts = new ConcurrentSkipListSet<>( );
private ConcurrentSkipListSet<StorageAsset> activeContexts = new ConcurrentSkipListSet<>( );
private static final int WAIT_TIME = 100;
private static final int MAX_WAIT = 10;
@ -158,9 +157,9 @@ public class MavenIndexManager implements ArchivaIndexManager {
return context.getBaseContext( IndexingContext.class );
}
private Path getIndexPath( ArchivaIndexingContext ctx )
private StorageAsset getIndexPath( ArchivaIndexingContext ctx )
{
return PathUtil.getPathFromUri( ctx.getPath( ) );
return ctx.getPath( );
}
@FunctionalInterface
@ -185,7 +184,7 @@ public class MavenIndexManager implements ArchivaIndexManager {
{
throw new IndexUpdateFailedException( "Maven index is not supported by this context", e );
}
final Path ctxPath = getIndexPath( context );
final StorageAsset ctxPath = getIndexPath( context );
int loop = MAX_WAIT;
boolean active = false;
while ( loop-- > 0 && !active )
@ -395,9 +394,9 @@ public class MavenIndexManager implements ArchivaIndexManager {
@Override
public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
{
final URI ctxUri = context.getPath();
final StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try {
indexer.addArtifactsToIndex(artifacts, indexingContext);
} catch (IOException e) {
@ -411,9 +410,9 @@ public class MavenIndexManager implements ArchivaIndexManager {
@Override
public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
{
final URI ctxUri = context.getPath();
final StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try {
indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
} catch (IOException e) {
@ -457,9 +456,8 @@ public class MavenIndexManager implements ArchivaIndexManager {
throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( )
+ ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
}
MavenIndexContext context = new MavenIndexContext( repository, mvnCtx );
return context;
return new MavenIndexContext( repository, mvnCtx );
}
@Override
@ -472,7 +470,7 @@ public class MavenIndexManager implements ArchivaIndexManager {
log.warn("Index close failed");
}
try {
FileUtils.deleteDirectory(Paths.get(context.getPath()));
StorageUtil.deleteRecursively(context.getPath());
} catch (IOException e) {
throw new IndexUpdateFailedException("Could not delete index files");
}
@ -593,51 +591,57 @@ public class MavenIndexManager implements ArchivaIndexManager {
}
}
private StorageAsset getIndexPath(URI indexDir, Path repoDir, String defaultDir) throws IOException
private StorageAsset getIndexPath(URI indexDirUri, RepositoryStorage storage, String defaultDir) throws IOException
{
String indexPath = indexDir.getPath();
Path indexDirectory = null;
if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
Path indexDirectory;
Path repositoryPath = storage.getAsset("").getFilePath().toAbsolutePath();
StorageAsset indexDir;
if ( ! StringUtils.isEmpty(indexDirUri.toString( ) ) )
{
indexDirectory = PathUtil.getPathFromUri( indexDir );
indexDirectory = PathUtil.getPathFromUri( indexDirUri );
// not absolute so create it in repository directory
if ( indexDirectory.isAbsolute( ) )
if ( indexDirectory.isAbsolute( ) && !indexDirectory.startsWith(repositoryPath))
{
indexPath = indexDirectory.getFileName().toString();
if (storage instanceof FilesystemStorage) {
FilesystemStorage fsStorage = (FilesystemStorage) storage;
FilesystemStorage indexStorage = new FilesystemStorage(indexDirectory.getParent(), fsStorage.getFileLockManager());
indexDir = indexStorage.getAsset(indexDirectory.getFileName().toString());
} else {
throw new IOException("The given storage is not file based.");
}
} else if (indexDirectory.isAbsolute()) {
indexDir = storage.getAsset(repositoryPath.relativize(indexDirectory).toString());
}
else
{
indexDirectory = repoDir.resolve( indexDirectory );
indexDir = storage.getAsset(indexDirectory.toString());
}
}
else
{
indexDirectory = repoDir.resolve( defaultDir );
indexPath = defaultDir;
indexDir = storage.getAsset( defaultDir );
}
if ( !Files.exists( indexDirectory ) )
if ( !indexDir.exists() )
{
Files.createDirectories( indexDirectory );
indexDir.create();
}
return new FilesystemAsset( indexPath, indexDirectory);
return indexDir;
}
private StorageAsset getIndexPath( Repository repo) throws IOException {
IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
return getIndexPath( icf.getIndexPath(), repo.getAsset( "" ).getFilePath(), DEFAULT_INDEX_PATH);
return getIndexPath( icf.getIndexPath(), repo, DEFAULT_INDEX_PATH);
}
private StorageAsset getPackedIndexPath(Repository repo) throws IOException {
IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
return getIndexPath(icf.getPackedIndexPath(), repo.getAsset( "" ).getFilePath(), DEFAULT_PACKED_INDEX_PATH);
return getIndexPath(icf.getPackedIndexPath(), repo, DEFAULT_PACKED_INDEX_PATH);
}
private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException
{
Path appServerBase = archivaConfiguration.getAppServerBaseDir( );
String contextKey = "remote-" + remoteRepository.getId( );
@ -648,7 +652,7 @@ public class MavenIndexManager implements ArchivaIndexManager {
Files.createDirectories( repoDir );
}
StorageAsset indexDirectory = null;
StorageAsset indexDirectory;
// is there configured indexDirectory ?
if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
@ -715,7 +719,7 @@ public class MavenIndexManager implements ArchivaIndexManager {
}
}
StorageAsset indexDirectory = null;
StorageAsset indexDirectory;
if ( repository.supportsFeature( IndexCreationFeature.class ) )
{
@ -837,23 +841,18 @@ public class MavenIndexManager implements ArchivaIndexManager {
}
@Override
public void connect( String id, String url )
throws IOException
{
public void connect( String id, String url ) {
//no op
}
@Override
public void disconnect( )
throws IOException
{
public void disconnect( ) {
// no op
}
@Override
public InputStream retrieve( String name )
throws IOException, FileNotFoundException
{
throws IOException {
try
{
log.info( "index update retrieve file, name:{}", name );

View File

@ -208,7 +208,7 @@ public class MavenIndexManagerTest {
assertNotNull(ctx);
assertEquals(repository, ctx.getRepository());
assertEquals("test-repo", ctx.getId());
assertEquals(indexPath.toAbsolutePath(), Paths.get(ctx.getPath()).toAbsolutePath());
assertEquals(indexPath.toAbsolutePath(), ctx.getPath().getFilePath().toAbsolutePath());
assertTrue(Files.exists(indexPath));
List<Path> li = Files.list(indexPath).collect(Collectors.toList());
assertTrue(li.size()>0);

View File

@ -39,6 +39,10 @@
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-model</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-xml-tools</artifactId>

View File

@ -21,6 +21,7 @@ package org.apache.archiva.maven2.metadata;
import org.apache.archiva.model.ArchivaRepositoryMetadata;
import org.apache.archiva.model.Plugin;
import org.apache.archiva.model.SnapshotVersion;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.xml.XMLException;
import org.apache.archiva.xml.XMLReader;
import org.apache.commons.lang.math.NumberUtils;
@ -64,6 +65,14 @@ public class MavenMetadataReader
private static final Logger log = LoggerFactory.getLogger( MavenMetadataReader.class );
public static ArchivaRepositoryMetadata read(StorageAsset metadataFile) throws XMLException, IOException {
if (metadataFile.isFileBased()) {
return read(metadataFile.getFilePath());
} else {
throw new IOException("StorageAsset is not file based");
}
}
/**
* Read and return the {@link org.apache.archiva.model.ArchivaRepositoryMetadata} object from the provided xml file.
*
@ -72,8 +81,7 @@ public class MavenMetadataReader
* @throws XMLException
*/
public static ArchivaRepositoryMetadata read( Path metadataFile )
throws XMLException
{
throws XMLException, IOException {
XMLReader xml = new XMLReader( "metadata", metadataFile );
// invoke this to remove namespaces, see MRM-1136
@ -85,25 +93,9 @@ public class MavenMetadataReader
metadata.setArtifactId( xml.getElementText( "//metadata/artifactId" ) );
metadata.setVersion( xml.getElementText( "//metadata/version" ) );
Date modTime;
try
{
modTime = new Date(Files.getLastModifiedTime( metadataFile ).toMillis( ));
}
catch ( IOException e )
{
modTime = new Date();
log.error("Could not read modification time of {}", metadataFile);
}
modTime = new Date(Files.getLastModifiedTime(metadataFile).toMillis());
metadata.setFileLastModified( modTime );
try
{
metadata.setFileSize( Files.size( metadataFile ) );
}
catch ( IOException e )
{
metadata.setFileSize( 0 );
log.error("Could not read file size of {}", metadataFile);
}
metadata.setFileSize( Files.size(metadataFile) );
metadata.setLastUpdated( xml.getElementText( "//metadata/versioning/lastUpdated" ) );
metadata.setLatestVersion( xml.getElementText( "//metadata/versioning/latest" ) );

View File

@ -28,7 +28,7 @@ import org.apache.archiva.proxy.ProxyException;
import org.apache.archiva.proxy.model.NetworkProxy;
import org.apache.archiva.proxy.model.ProxyConnector;
import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.wagon.ConnectionException;
import org.apache.maven.wagon.ResourceDoesNotExistException;
@ -113,7 +113,7 @@ public class MavenRepositoryProxyHandler extends DefaultRepositoryProxyHandler {
* @throws NotModifiedException
*/
protected void transferResources( ProxyConnector connector, RemoteRepositoryContent remoteRepository,
Path tmpResource, Path[] checksumFiles, String url, String remotePath, StorageAsset resource,
StorageAsset tmpResource, StorageAsset[] checksumFiles, String url, String remotePath, StorageAsset resource,
Path workingDirectory, ManagedRepositoryContent repository )
throws ProxyException, NotModifiedException {
Wagon wagon = null;
@ -153,9 +153,9 @@ public class MavenRepositoryProxyHandler extends DefaultRepositoryProxyHandler {
// to
// save on connections since md5 is rarely used
for (int i=0; i<checksumFiles.length; i++) {
String ext = "."+StringUtils.substringAfterLast( checksumFiles[i].getFileName( ).toString( ), "." );
String ext = "."+StringUtils.substringAfterLast(checksumFiles[i].getName( ), "." );
transferChecksum(wagon, remoteRepository, remotePath, repository, resource.getFilePath(), ext,
checksumFiles[i]);
checksumFiles[i].getFilePath());
}
}
} catch (NotFoundException e) {
@ -182,9 +182,9 @@ public class MavenRepositoryProxyHandler extends DefaultRepositoryProxyHandler {
protected void transferArtifact(Wagon wagon, RemoteRepositoryContent remoteRepository, String remotePath,
ManagedRepositoryContent repository, Path resource, Path tmpDirectory,
Path destFile)
StorageAsset destFile)
throws ProxyException {
transferSimpleFile(wagon, remoteRepository, remotePath, repository, resource, destFile);
transferSimpleFile(wagon, remoteRepository, remotePath, repository, resource, destFile.getFilePath());
}
/**

View File

@ -26,6 +26,7 @@ import org.apache.archiva.policies.ChecksumPolicy;
import org.apache.archiva.policies.ReleasesPolicy;
import org.apache.archiva.policies.SnapshotsPolicy;
import org.apache.archiva.policies.urlcache.UrlFailureCache;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.easymock.EasyMock;
import org.junit.Test;
@ -82,7 +83,7 @@ public class CacheFailuresTransferTest
wagonMockControl.replay();
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
wagonMockControl.verify();
@ -92,7 +93,7 @@ public class CacheFailuresTransferTest
downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
wagonMockControl.verify();
assertNotDownloaded( downloadedFile);
assertNotDownloaded( downloadedFile.getFilePath());
assertNoTempFiles( expectedFile );
}
@ -124,7 +125,7 @@ public class CacheFailuresTransferTest
wagonMockControl.replay();
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
wagonMockControl.verify();
@ -140,7 +141,7 @@ public class CacheFailuresTransferTest
wagonMockControl.verify();
assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertNoTempFiles( expectedFile );
}
@ -168,11 +169,11 @@ public class CacheFailuresTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied2", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.YES, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
// Validate that file actually came from proxied2 (as intended).
Path proxied2File = Paths.get( REPOPATH_PROXIED2, path );
assertFileEquals( expectedFile, downloadedFile, proxied2File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied2File );
assertNoTempFiles( expectedFile );
}

View File

@ -25,6 +25,7 @@ import org.apache.archiva.policies.CachedFailuresPolicy;
import org.apache.archiva.policies.ChecksumPolicy;
import org.apache.archiva.policies.ReleasesPolicy;
import org.apache.archiva.policies.SnapshotsPolicy;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.easymock.EasyMock;
import org.junit.Test;
@ -63,7 +64,7 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, true );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNull( downloadedFile );
}
@ -85,10 +86,10 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-both-right-1.0.jar",
"e58f30c6a150a2e843552438d18e15cb *get-checksum-both-right-1.0.jar" );
@ -111,10 +112,10 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar",
null );
@ -137,10 +138,10 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, null, "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" );
}
@ -162,10 +163,10 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, null, null );
}
@ -187,10 +188,10 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "invalid checksum file", "invalid checksum file" );
}
@ -212,9 +213,9 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertChecksums( expectedFile, null, null );
}
@ -235,10 +236,10 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "4ec20a12dc91557330bd0b39d1805be5e329ae56 get-checksum-both-bad-1.0.jar",
"a292491a35925465e693a44809a078b5 get-checksum-both-bad-1.0.jar" );
@ -261,9 +262,9 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertChecksums( expectedFile, null, null );
}
@ -284,11 +285,11 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
// This is a success situation. No SHA1 with a Good MD5.
Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, null, "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" );
}
@ -310,9 +311,9 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertChecksums( expectedFile, null, null );
}
@ -333,10 +334,10 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar",
"invalid checksum file" );
@ -360,10 +361,10 @@ public class ChecksumTransferTest
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar",
"c35f3b76268b73a4ba617f6f275c49ab get-checksum-sha1-bad-md5-1.0.jar" );
@ -386,10 +387,10 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "71f7dc3f72053a3f2d9fdd6fef9db055ef957ffb get-checksum-md5-only-1.0.jar",
"f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" );
@ -412,10 +413,10 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "1f12821c5e43e1a0b76b9564a6ddb0548ccb9486 get-default-layout-1.0.jar",
"3f7341545f21226b6f49a3c2704cb9be get-default-layout-1.0.jar" );
@ -452,7 +453,7 @@ public class ChecksumTransferTest
wagonMockControl.replay();
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
wagonMockControl.verify();
@ -465,7 +466,7 @@ public class ChecksumTransferTest
// Test results.
Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar",
null );
@ -489,10 +490,10 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
// There are no hashcodes on the proxy side to download, hence the local ones should remain invalid.
assertChecksums( expectedFile, "invalid checksum file", "invalid checksum file" );
@ -516,9 +517,9 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertNoTempFiles( expectedFile );
// There are no hashcodes on the proxy side to download.
// The FAIL policy will delete the checksums as bad.
@ -544,10 +545,10 @@ public class ChecksumTransferTest
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "96a08dc80a108cba8efd3b20aec91b32a0b2cbd4 get-bad-local-checksum-1.0.jar",
"46fdd6ca55bf1d7a7eb0c858f41e0ccd get-bad-local-checksum-1.0.jar" );

View File

@ -27,6 +27,7 @@ import org.apache.archiva.policies.ProxyDownloadException;
import org.apache.archiva.policies.ReleasesPolicy;
import org.apache.archiva.policies.SnapshotsPolicy;
import org.apache.archiva.repository.LayoutException;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.apache.maven.wagon.TransferFailedException;
import org.apache.maven.wagon.authorization.AuthorizationException;
@ -598,7 +599,7 @@ public class ErrorHandlingTest
wagonMockControl.replay();
// Attempt the proxy fetch.
Path downloadedFile = null;
StorageAsset downloadedFile = null;
try
{
downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository,
@ -616,33 +617,33 @@ public class ErrorHandlingTest
wagonMockControl.verify();
assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
}
private void confirmSuccess( String path, Path expectedFile, String basedir )
throws Exception
{
Path downloadedFile = performDownload( path );
StorageAsset downloadedFile = performDownload( path );
Path proxied1File = Paths.get( basedir, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
}
private void confirmNotDownloadedNoError( String path )
throws Exception
{
Path downloadedFile = performDownload( path );
StorageAsset downloadedFile = performDownload( path );
assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
}
private Path performDownload( String path )
private StorageAsset performDownload( String path )
throws ProxyDownloadException, LayoutException
{
wagonMockControl.replay();
// Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository,
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository,
managedDefaultRepository.toArtifactReference( path ) );
wagonMockControl.verify();

View File

@ -32,6 +32,7 @@ import org.apache.archiva.policies.ReleasesPolicy;
import org.apache.archiva.policies.SnapshotsPolicy;
import org.apache.archiva.proxy.model.RepositoryProxyHandler;
import org.apache.archiva.repository.*;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.commons.io.FileUtils;
import org.assertj.core.api.Assertions;
@ -208,18 +209,18 @@ public class HttpProxyTransferTest
ArtifactReference artifact = managedDefaultRepository.toArtifactReference( path );
// Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path sourceFile = Paths.get( PROXIED_BASEDIR, path );
assertNotNull( "Expected File should not be null.", expectedFile );
assertNotNull( "Actual File should not be null.", downloadedFile );
assertTrue( "Check actual file exists.", Files.exists(downloadedFile));
assertTrue( "Check filename path is appropriate.", Files.isSameFile( expectedFile, downloadedFile));
assertTrue( "Check file path matches.", Files.isSameFile( expectedFile, downloadedFile));
assertTrue( "Check actual file exists.", Files.exists(downloadedFile.getFilePath()));
assertTrue( "Check filename path is appropriate.", Files.isSameFile( expectedFile, downloadedFile.getFilePath()));
assertTrue( "Check file path matches.", Files.isSameFile( expectedFile, downloadedFile.getFilePath()));
String expectedContents = FileUtils.readFileToString( sourceFile.toFile(), Charset.defaultCharset() );
String actualContents = FileUtils.readFileToString( downloadedFile.toFile(), Charset.defaultCharset() );
String actualContents = FileUtils.readFileToString( downloadedFile.getFilePath().toFile(), Charset.defaultCharset() );
assertEquals( "Check file contents.", expectedContents, actualContents );
Assertions.assertThat( System.getProperty( "http.proxyHost" , "") ).isEmpty();

View File

@ -24,6 +24,7 @@ import org.apache.archiva.policies.CachedFailuresPolicy;
import org.apache.archiva.policies.ChecksumPolicy;
import org.apache.archiva.policies.ReleasesPolicy;
import org.apache.archiva.policies.SnapshotsPolicy;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.wagon.ResourceDoesNotExistException;
@ -64,7 +65,7 @@ public class ManagedDefaultTransferTest
CachedFailuresPolicy.NO, true );
// Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNull( "File should not have been downloaded", downloadedFile );
}
@ -86,10 +87,10 @@ public class ManagedDefaultTransferTest
CachedFailuresPolicy.NO, false );
// Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path sourceFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, sourceFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), sourceFile );
assertNoTempFiles( expectedFile );
}
@ -110,13 +111,13 @@ public class ManagedDefaultTransferTest
CachedFailuresPolicy.NO, false );
// Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, path );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, path );
Path sourceFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, sourceFile );
assertFalse( Files.exists( downloadedFile.getParent().resolve(downloadedFile.getFileName() + ".sha1" )) );
assertFalse( Files.exists(downloadedFile.getParent().resolve(downloadedFile.getFileName() + ".md5" ) ));
assertFalse( Files.exists( downloadedFile.getParent().resolve(downloadedFile.getFileName() + ".asc" ) ));
assertFileEquals( expectedFile, downloadedFile.getFilePath(), sourceFile );
assertFalse( Files.exists( downloadedFile.getParent().getFilePath().resolve(downloadedFile.getName() + ".sha1" )) );
assertFalse( Files.exists(downloadedFile.getParent().getFilePath().resolve(downloadedFile.getName() + ".md5" ) ));
assertFalse( Files.exists( downloadedFile.getParent().getFilePath().resolve(downloadedFile.getName() + ".asc" ) ));
assertNoTempFiles( expectedFile );
}
@ -145,9 +146,9 @@ public class ManagedDefaultTransferTest
CachedFailuresPolicy.NO, false );
// Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertFileEquals( expectedFile, downloadedFile, expectedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), expectedFile );
assertNoTempFiles( expectedFile );
}
@ -179,9 +180,9 @@ public class ManagedDefaultTransferTest
CachedFailuresPolicy.NO, false );
// Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, path );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, path );
assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertNotModified( expectedFile, originalModificationTime );
assertNoTempFiles( expectedFile );
}
@ -225,9 +226,9 @@ public class ManagedDefaultTransferTest
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
// Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertNotModified( expectedFile, originalModificationTime );
assertNoTempFiles( expectedFile );
}
@ -270,10 +271,10 @@ public class ManagedDefaultTransferTest
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
// Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile );
}
@ -302,10 +303,10 @@ public class ManagedDefaultTransferTest
CachedFailuresPolicy.NO, false );
// Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile );
}
@ -326,15 +327,15 @@ public class ManagedDefaultTransferTest
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false );
// Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
Path proxied2File = Paths.get(REPOPATH_PROXIED2, path);
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile );
// TODO: is this check even needed if it passes above?
String actualContents = FileUtils.readFileToString( downloadedFile.toFile(), Charset.defaultCharset() );
String actualContents = FileUtils.readFileToString( downloadedFile.getFilePath().toFile(), Charset.defaultCharset() );
String badContents = FileUtils.readFileToString( proxied2File.toFile(), Charset.defaultCharset() );
assertFalse( "Downloaded file contents should not be that of proxy 2",
StringUtils.equals( actualContents, badContents ) );
@ -357,10 +358,10 @@ public class ManagedDefaultTransferTest
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false );
// Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxied2File = Paths.get(REPOPATH_PROXIED2, path);
assertFileEquals( expectedFile, downloadedFile, proxied2File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied2File );
assertNoTempFiles( expectedFile );
}
@ -381,7 +382,7 @@ public class ManagedDefaultTransferTest
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false );
// Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNull( "File returned was: " + downloadedFile + "; should have got a not found exception",
downloadedFile );
@ -412,12 +413,12 @@ public class ManagedDefaultTransferTest
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false );
// Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
wagonMockControl.verify();
Path proxied2File = Paths.get(REPOPATH_PROXIED2, path);
assertFileEquals( expectedFile, downloadedFile, proxied2File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied2File );
assertNoTempFiles( expectedFile );
}
@ -451,9 +452,9 @@ public class ManagedDefaultTransferTest
wagonMockControl.replay();
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
wagonMockControl.verify();
assertNoTempFiles( expectedFile );

View File

@ -19,6 +19,7 @@ package org.apache.archiva.proxy;
* under the License.
*/
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.utils.VersionUtil;
import org.apache.archiva.configuration.ProxyConnectorConfiguration;
import org.apache.archiva.maven2.metadata.MavenMetadataReader;
@ -34,6 +35,8 @@ import org.apache.archiva.policies.SnapshotsPolicy;
import org.apache.archiva.repository.metadata.MetadataTools;
import org.apache.archiva.repository.metadata.RepositoryMetadataException;
import org.apache.archiva.repository.metadata.RepositoryMetadataWriter;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.wagon.TransferFailedException;
import org.custommonkey.xmlunit.DetailedDiff;
@ -125,7 +128,7 @@ public class MetadataTransferTest
ProjectReference metadata = createProjectReference( requestedResource );
Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
managedDefaultRepository.toMetadataPath(
metadata ) ).getFile();
@ -988,7 +991,7 @@ public class MetadataTransferTest
ProjectReference metadata = createProjectReference( requestedResource );
Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
managedDefaultRepository.toMetadataPath(
metadata ) ).getFile();
@ -1014,7 +1017,7 @@ public class MetadataTransferTest
Path expectedFile = managedDefaultDir.resolve(requestedResource);
ProjectReference metadata = createProjectReference( requestedResource );
Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
managedDefaultRepository.toMetadataPath(
metadata ) ).getFile();
@ -1035,7 +1038,7 @@ public class MetadataTransferTest
VersionedReference metadata = createVersionedReference( requestedResource );
Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
managedDefaultRepository.toMetadataPath(
metadata ) ).getFile();
@ -1061,7 +1064,7 @@ public class MetadataTransferTest
Path expectedFile = managedDefaultDir.resolve(requestedResource);
VersionedReference metadata = createVersionedReference( requestedResource );
Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
managedDefaultRepository.toMetadataPath(
metadata ) ).getFile();
@ -1090,7 +1093,9 @@ public class MetadataTransferTest
assertTrue( "Actual file exists.", Files.exists(actualFile) );
StringWriter actualContents = new StringWriter();
ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( actualFile );
FilesystemStorage fsStorage = new FilesystemStorage(actualFile.getParent(), new DefaultFileLockManager());
StorageAsset actualFileAsset = fsStorage.getAsset(actualFile.getFileName().toString());
ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( actualFileAsset );
RepositoryMetadataWriter.write( metadata, actualContents );
DetailedDiff detailedDiff = new DetailedDiff( new Diff( expectedMetadataXml, actualContents.toString() ) );

View File

@ -24,6 +24,7 @@ import org.apache.archiva.policies.CachedFailuresPolicy;
import org.apache.archiva.policies.ChecksumPolicy;
import org.apache.archiva.policies.ReleasesPolicy;
import org.apache.archiva.policies.SnapshotsPolicy;
import org.apache.archiva.repository.storage.StorageAsset;
import org.junit.Test;
import java.nio.file.Files;
@ -59,8 +60,8 @@ public class SnapshotTransferTest
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile.getFilePath() );
assertNoTempFiles( expectedFile );
}
@ -80,10 +81,10 @@ public class SnapshotTransferTest
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile );
}
@ -103,10 +104,10 @@ public class SnapshotTransferTest
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile );
}
@ -128,10 +129,10 @@ public class SnapshotTransferTest
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false );
// Attempt to download.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
// Should not have downloaded as managed is newer than remote.
assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertNoTempFiles( expectedFile );
}
@ -220,9 +221,9 @@ public class SnapshotTransferTest
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile );
}
@ -244,9 +245,9 @@ public class SnapshotTransferTest
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertNotModified( expectedFile, expectedTimestamp );
assertNoTempFiles( expectedFile );
}
@ -270,10 +271,10 @@ public class SnapshotTransferTest
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.YES , false);
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile );
}
@ -293,10 +294,10 @@ public class SnapshotTransferTest
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile );
}
@ -320,10 +321,10 @@ public class SnapshotTransferTest
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile );
}
}

View File

@ -19,6 +19,7 @@ package org.apache.archiva.repository.mock;
* under the License.
*/
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.utils.VersionUtil;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
@ -27,10 +28,12 @@ import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.model.ProjectReference;
import org.apache.archiva.model.VersionedReference;
import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
@ -50,6 +53,7 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent
private ManagedRepository repository;
private FilesystemStorage fsStorage;
ManagedRepositoryContentMock(ManagedRepository repo) {
this.repository = repo;
@ -94,7 +98,18 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent
@Override
public String getRepoRoot( )
{
return Paths.get("", "target", "test-repository", "managed").toString();
return getRepoRootAsset().getFilePath().toString();
}
private StorageAsset getRepoRootAsset() {
if (fsStorage==null) {
try {
fsStorage = new FilesystemStorage(Paths.get("", "target", "test-repository", "managed"), new DefaultFileLockManager());
} catch (IOException e) {
e.printStackTrace();
}
}
return fsStorage.getAsset("");
}
@Override
@ -331,7 +346,7 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent
@Override
public StorageAsset toFile( ArtifactReference reference )
{
return Paths.get(getRepoRoot(), refs.get(reference));
return getRepoRootAsset().resolve( refs.get(reference));
}
@Override

View File

@ -36,6 +36,7 @@ import org.apache.archiva.repository.RemoteRepository;
import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.maven2.MavenSystemManager;
import org.apache.archiva.repository.metadata.MetadataTools;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.xml.XMLException;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.artifact.Artifact;
@ -55,8 +56,7 @@ import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.inject.Named;
import java.nio.file.Files;
import java.nio.file.Path;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -233,22 +233,22 @@ public class Maven3DependencyTreeBuilder
for ( String repoId : repositoryIds )
{
ManagedRepository managedRepo = repositoryRegistry.getManagedRepository(repoId);
Path repoDir = managedRepo.getAsset("").getFilePath();
StorageAsset repoDir = managedRepo.getAsset("");
Path file = pathTranslator.toFile( repoDir, projectArtifact.getGroupId(), projectArtifact.getArtifactId(),
StorageAsset file = pathTranslator.toFile( repoDir, projectArtifact.getGroupId(), projectArtifact.getArtifactId(),
projectArtifact.getBaseVersion(),
projectArtifact.getArtifactId() + "-" + projectArtifact.getVersion()
+ ".pom" );
if ( Files.exists(file) )
if ( file.exists() )
{
return managedRepo;
}
// try with snapshot version
if ( StringUtils.endsWith( projectArtifact.getBaseVersion(), VersionUtil.SNAPSHOT ) )
{
Path metadataFile = file.getParent().resolve( MetadataTools.MAVEN_METADATA );
if ( Files.exists(metadataFile) )
StorageAsset metadataFile = file.getParent().resolve( MetadataTools.MAVEN_METADATA );
if ( metadataFile.exists() )
{
try
{
@ -262,14 +262,14 @@ public class Maven3DependencyTreeBuilder
"-" + VersionUtil.SNAPSHOT ) ).append( '-' ).append(
timeStamp ).append( '-' ).append( Integer.toString( buildNumber ) ).append(
".pom" ).toString();
Path timeStampFile = file.getParent().resolve( timeStampFileName );
StorageAsset timeStampFile = file.getParent().resolve( timeStampFileName );
log.debug( "try to find timestamped snapshot version file: {}", timeStampFile);
if ( Files.exists(timeStampFile) )
if ( timeStampFile.exists() )
{
return managedRepo;
}
}
catch ( XMLException e )
catch (XMLException | IOException e )
{
log.warn( "skip fail to find timestamped snapshot pom: {}", e.getMessage() );
}

View File

@ -23,6 +23,7 @@ import org.apache.archiva.common.utils.VersionUtil;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;
import org.apache.archiva.repository.storage.StorageAsset;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
@ -78,13 +79,13 @@ public class Maven2RepositoryPathTranslator
}
@Override
public Path toFile(Path basedir, String namespace, String projectId, String projectVersion, String filename )
public StorageAsset toFile(StorageAsset basedir, String namespace, String projectId, String projectVersion, String filename )
{
return basedir.resolve( toPath( namespace, projectId, projectVersion, filename ) );
}
@Override
public Path toFile( Path basedir, String namespace, String projectId, String projectVersion )
public StorageAsset toFile( StorageAsset basedir, String namespace, String projectId, String projectVersion )
{
return basedir.resolve( toPath( namespace, projectId, projectVersion ) );
}
@ -148,13 +149,13 @@ public class Maven2RepositoryPathTranslator
}
@Override
public Path toFile( Path basedir, String namespace, String projectId )
public StorageAsset toFile( StorageAsset basedir, String namespace, String projectId )
{
return basedir.resolve( toPath( namespace, projectId ) );
}
@Override
public Path toFile( Path basedir, String namespace )
public StorageAsset toFile( StorageAsset basedir, String namespace )
{
return basedir.resolve( toPath( namespace ) );
}

View File

@ -48,6 +48,7 @@ import org.apache.archiva.proxy.model.RepositoryProxyHandler;
import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.PathParser;
import org.apache.archiva.repository.maven2.MavenSystemManager;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.xml.XMLException;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
@ -80,11 +81,9 @@ import javax.inject.Named;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.Reader;
import java.nio.channels.Channels;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@ -95,7 +94,6 @@ import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
// import java.io.FileNotFoundException;
@ -187,9 +185,9 @@ public class Maven2RepositoryStorage
}
}
}
Path basedir = Paths.get(managedRepository.getLocation());
StorageAsset basedir = managedRepository.getAsset("");
if (VersionUtil.isSnapshot(artifactVersion)) {
Path metadataFile = pathTranslator.toFile(basedir, readMetadataRequest.getNamespace(),
StorageAsset metadataFile = pathTranslator.toFile(basedir, readMetadataRequest.getNamespace(),
readMetadataRequest.getProjectId(), artifactVersion,
METADATA_FILENAME);
try {
@ -203,7 +201,7 @@ public class Maven2RepositoryStorage
artifactVersion =
artifactVersion + snapshotVersion.getTimestamp() + "-" + snapshotVersion.getBuildNumber();
}
} catch (XMLException e) {
} catch (XMLException | IOException e) {
// unable to parse metadata - LOGGER it, and continue with the version as the original SNAPSHOT version
LOGGER.warn("Invalid metadata: {} - {}", metadataFile, e.getMessage());
}
@ -211,14 +209,14 @@ public class Maven2RepositoryStorage
// TODO: won't work well with some other layouts, might need to convert artifact parts to ID by path translator
String id = readMetadataRequest.getProjectId() + "-" + artifactVersion + ".pom";
Path file =
StorageAsset file =
pathTranslator.toFile(basedir, readMetadataRequest.getNamespace(), readMetadataRequest.getProjectId(),
readMetadataRequest.getProjectVersion(), id);
if (!Files.exists(file)) {
if (!file.exists()) {
// metadata could not be resolved
throw new RepositoryStorageMetadataNotFoundException(
"The artifact's POM file '" + file.toAbsolutePath() + "' was missing");
"The artifact's POM file '" + file.getPath() + "' was missing");
}
// TODO: this is a workaround until we can properly resolve using proxies as well - this doesn't cache
@ -254,7 +252,7 @@ public class Maven2RepositoryStorage
}
ModelBuildingRequest req =
new DefaultModelBuildingRequest().setProcessPlugins(false).setPomFile(file.toFile()).setTwoPhaseBuilding(
new DefaultModelBuildingRequest().setProcessPlugins(false).setPomFile(file.getFilePath().toFile()).setTwoPhaseBuilding(
false).setValidationLevel(ModelBuildingRequest.VALIDATION_LEVEL_MINIMAL);
//MRM-1607. olamy this will resolve jdk profiles on the current running archiva jvm
@ -460,67 +458,52 @@ public class Maven2RepositoryStorage
@Override
public Collection<String> listRootNamespaces(String repoId, Filter<String> filter)
throws RepositoryStorageRuntimeException {
Path dir = getRepositoryBasedir(repoId);
StorageAsset dir = getRepositoryBasedir(repoId);
return getSortedFiles(dir, filter);
}
private static Collection<String> getSortedFiles(Path dir, Filter<String> filter) {
private static Collection<String> getSortedFiles(StorageAsset dir, Filter<String> filter) {
try (Stream<Path> stream = Files.list(dir)) {
final Predicate<Path> dFilter = new DirectoryFilter(filter);
return stream.filter(Files::isDirectory)
final Predicate<StorageAsset> dFilter = new DirectoryFilter(filter);
return dir.list().stream().filter(f -> f.isContainer())
.filter(dFilter)
.map(path -> path.getFileName().toString())
.map(path -> path.getName().toString())
.sorted().collect(Collectors.toList());
} catch (IOException e) {
LOGGER.error("Could not read directory list {}: {}", dir, e.getMessage(), e);
return Collections.emptyList();
}
}
private Path getRepositoryBasedir(String repoId)
private StorageAsset getRepositoryBasedir(String repoId)
throws RepositoryStorageRuntimeException {
ManagedRepository repositoryConfiguration = repositoryRegistry.getManagedRepository(repoId);
return Paths.get(repositoryConfiguration.getLocation());
return repositoryConfiguration.getAsset("");
}
@Override
public Collection<String> listNamespaces(String repoId, String namespace, Filter<String> filter)
throws RepositoryStorageRuntimeException {
Path dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace);
if (!(Files.exists(dir) && Files.isDirectory(dir))) {
StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace);
if (!(dir.exists()) && !dir.isContainer()) {
return Collections.emptyList();
}
// scan all the directories which are potential namespaces. Any directories known to be projects are excluded
Predicate<Path> dFilter = new DirectoryFilter(filter);
try (Stream<Path> stream = Files.list(dir)) {
return stream.filter(dFilter).filter(path -> !isProject(path, filter)).map(path -> path.getFileName().toString())
Predicate<StorageAsset> dFilter = new DirectoryFilter(filter);
return dir.list().stream().filter(dFilter).filter(path -> !isProject(path, filter)).map(path -> path.getName().toString())
.sorted().collect(Collectors.toList());
} catch (IOException e) {
LOGGER.error("Could not read directory {}: {}", dir, e.getMessage(), e);
return Collections.emptyList();
}
}
@Override
public Collection<String> listProjects(String repoId, String namespace, Filter<String> filter)
throws RepositoryStorageRuntimeException {
Path dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace);
if (!(Files.exists(dir) && Files.isDirectory(dir))) {
StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace);
if (!(dir.exists() && dir.isContainer())) {
return Collections.emptyList();
}
// scan all directories in the namespace, and only include those that are known to be projects
final Predicate<Path> dFilter = new DirectoryFilter(filter);
try (Stream<Path> stream = Files.list(dir)) {
return stream.filter(dFilter).filter(path -> isProject(path, filter)).map(path -> path.getFileName().toString())
final Predicate<StorageAsset> dFilter = new DirectoryFilter(filter);
return dir.list().stream().filter(dFilter).filter(path -> isProject(path, filter)).map(path -> path.getName().toString())
.sorted().collect(Collectors.toList());
} catch (IOException e) {
LOGGER.error("Could not read directory {}: {}", dir, e.getMessage(), e);
return Collections.emptyList();
}
}
@ -528,8 +511,8 @@ public class Maven2RepositoryStorage
public Collection<String> listProjectVersions(String repoId, String namespace, String projectId,
Filter<String> filter)
throws RepositoryStorageRuntimeException {
Path dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace, projectId);
if (!(Files.exists(dir) && Files.isDirectory(dir))) {
StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace, projectId);
if (!(dir.exists() && dir.isContainer())) {
return Collections.emptyList();
}
@ -540,18 +523,17 @@ public class Maven2RepositoryStorage
@Override
public Collection<ArtifactMetadata> readArtifactsMetadata(ReadMetadataRequest readMetadataRequest)
throws RepositoryStorageRuntimeException {
Path dir = pathTranslator.toFile(getRepositoryBasedir(readMetadataRequest.getRepositoryId()),
StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(readMetadataRequest.getRepositoryId()),
readMetadataRequest.getNamespace(), readMetadataRequest.getProjectId(),
readMetadataRequest.getProjectVersion());
if (!(Files.exists(dir) && Files.isDirectory(dir))) {
if (!(dir.exists() && dir.isContainer())) {
return Collections.emptyList();
}
// all files that are not metadata and not a checksum / signature are considered artifacts
final Predicate<Path> dFilter = new ArtifactDirectoryFilter(readMetadataRequest.getFilter());
try (Stream<Path> stream = Files.list(dir)) {
final Predicate<StorageAsset> dFilter = new ArtifactDirectoryFilter(readMetadataRequest.getFilter());
// Returns a map TRUE -> (success values), FALSE -> (Exceptions)
Map<Boolean, List<Try<ArtifactMetadata>>> result = stream.filter(dFilter).map(path -> {
Map<Boolean, List<Try<ArtifactMetadata>>> result = dir.list().stream().filter(dFilter).map(path -> {
try {
return Try.success(getArtifactFromFile(readMetadataRequest.getRepositoryId(), readMetadataRequest.getNamespace(),
readMetadataRequest.getProjectId(), readMetadataRequest.getProjectVersion(),
@ -573,10 +555,6 @@ public class Maven2RepositoryStorage
}
return result.get(Boolean.TRUE).stream().map(tr -> tr.get()).collect(Collectors.toList());
}
} catch (IOException e) {
LOGGER.error("Could not read directory {}: {}", dir, e.getMessage(), e);
}
return Collections.emptyList();
}
@ -595,9 +573,9 @@ public class Maven2RepositoryStorage
}
private ArtifactMetadata getArtifactFromFile(String repoId, String namespace, String projectId,
String projectVersion, Path file) throws IOException {
String projectVersion, StorageAsset file) throws IOException {
ArtifactMetadata metadata =
pathTranslator.getArtifactFromId(repoId, namespace, projectId, projectVersion, file.getFileName().toString());
pathTranslator.getArtifactFromId(repoId, namespace, projectId, projectVersion, file.getName());
populateArtifactMetadataFromFile(metadata, file);
@ -629,17 +607,17 @@ public class Maven2RepositoryStorage
proxyHandler.fetchFromProxies(managedRepository, pomReference);
// Open and read the POM from the managed repo
Path pom = managedRepository.toFile(pomReference);
StorageAsset pom = managedRepository.toFile(pomReference);
if (!Files.exists(pom)) {
if (!pom.exists()) {
return;
}
try {
// MavenXpp3Reader leaves the file open, so we need to close it ourselves.
Model model = null;
try (Reader reader = Files.newBufferedReader(pom, Charset.defaultCharset())) {
Model model;
try (Reader reader = Channels.newReader(pom.getReadChannel(), Charset.defaultCharset().name())) {
model = MAVEN_XPP_3_READER.read(reader);
}
@ -708,7 +686,7 @@ public class Maven2RepositoryStorage
@Override
public String getFilePathWithVersion(final String requestPath, ManagedRepositoryContent managedRepositoryContent)
throws XMLException, RelocationException {
throws RelocationException, XMLException, IOException {
if (StringUtils.endsWith(requestPath, METADATA_FILENAME)) {
return getFilePath(requestPath, managedRepositoryContent.getRepository());
@ -725,12 +703,12 @@ public class Maven2RepositoryStorage
if (StringUtils.endsWith(artifactReference.getVersion(), VersionUtil.SNAPSHOT)) {
// read maven metadata to get last timestamp
Path metadataDir = Paths.get(managedRepositoryContent.getRepoRoot(), filePath).getParent();
if (!Files.exists(metadataDir)) {
StorageAsset metadataDir = managedRepositoryContent.getRepository().getAsset( filePath).getParent();
if (!metadataDir.exists()) {
return filePath;
}
Path metadataFile = metadataDir.resolve(METADATA_FILENAME);
if (!Files.exists(metadataFile)) {
StorageAsset metadataFile = metadataDir.resolve(METADATA_FILENAME);
if (!metadataFile.exists()) {
return filePath;
}
ArchivaRepositoryMetadata archivaRepositoryMetadata = MavenMetadataReader.read(metadataFile);
@ -788,10 +766,10 @@ public class Maven2RepositoryStorage
return joinedString;
}
private static void populateArtifactMetadataFromFile(ArtifactMetadata metadata, Path file) throws IOException {
private static void populateArtifactMetadataFromFile(ArtifactMetadata metadata, StorageAsset file) throws IOException {
metadata.setWhenGathered(new Date());
metadata.setFileLastModified(Files.getLastModifiedTime(file).toMillis());
ChecksummedFile checksummedFile = new ChecksummedFile(file);
metadata.setFileLastModified(file.getModificationTime().toEpochMilli());
ChecksummedFile checksummedFile = new ChecksummedFile(file.getFilePath());
try {
metadata.setMd5(checksummedFile.calculateChecksum(ChecksumAlgorithm.MD5));
} catch (IOException e) {
@ -802,52 +780,43 @@ public class Maven2RepositoryStorage
} catch (IOException e) {
LOGGER.error("Unable to checksum file {}: {},SHA1", file, e.getMessage());
}
metadata.setSize(Files.size(file));
metadata.setSize(file.getSize());
}
private boolean isProject(Path dir, Filter<String> filter) {
private boolean isProject(StorageAsset dir, Filter<String> filter) {
// scan directories for a valid project version subdirectory, meaning this must be a project directory
final Predicate<Path> dFilter = new DirectoryFilter(filter);
try (Stream<Path> stream = Files.list(dir)) {
boolean projFound = stream.filter(dFilter)
final Predicate<StorageAsset> dFilter = new DirectoryFilter(filter);
boolean projFound = dir.list().stream().filter(dFilter)
.anyMatch(path -> isProjectVersion(path));
if (projFound) {
return true;
}
} catch (IOException e) {
LOGGER.error("Could not read directory list {}: {}", dir, e.getMessage(), e);
}
// if a metadata file is present, check if this is the "artifactId" directory, marking it as a project
ArchivaRepositoryMetadata metadata = readMetadata(dir);
if (metadata != null && dir.getFileName().toString().equals(metadata.getArtifactId())) {
if (metadata != null && dir.getName().toString().equals(metadata.getArtifactId())) {
return true;
}
return false;
}
private boolean isProjectVersion(Path dir) {
final String artifactId = dir.getParent().getFileName().toString();
final String projectVersion = dir.getFileName().toString();
private boolean isProjectVersion(StorageAsset dir) {
final String artifactId = dir.getParent().getName();
final String projectVersion = dir.getName();
// check if there is a POM artifact file to ensure it is a version directory
Predicate<Path> filter;
Predicate<StorageAsset> filter;
if (VersionUtil.isSnapshot(projectVersion)) {
filter = new PomFilenameFilter(artifactId, projectVersion);
} else {
final String pomFile = artifactId + "-" + projectVersion + ".pom";
filter = new PomFileFilter(pomFile);
}
try (Stream<Path> stream = Files.list(dir)) {
if (stream.filter(Files::isRegularFile).anyMatch(filter)) {
if (dir.list().stream().filter(f -> !f.isContainer()).anyMatch(filter)) {
return true;
}
} catch (IOException e) {
LOGGER.error("Could not list directory {}: {}", dir, e.getMessage(), e);
}
// if a metadata file is present, check if this is the "version" directory, marking it as a project version
ArchivaRepositoryMetadata metadata = readMetadata(dir);
if (metadata != null && projectVersion.equals(metadata.getVersion())) {
@ -857,13 +826,13 @@ public class Maven2RepositoryStorage
return false;
}
private ArchivaRepositoryMetadata readMetadata(Path directory) {
private ArchivaRepositoryMetadata readMetadata(StorageAsset directory) {
ArchivaRepositoryMetadata metadata = null;
Path metadataFile = directory.resolve(METADATA_FILENAME);
if (Files.exists(metadataFile)) {
StorageAsset metadataFile = directory.resolve(METADATA_FILENAME);
if (metadataFile.exists()) {
try {
metadata = MavenMetadataReader.read(metadataFile);
} catch (XMLException e) {
} catch (XMLException | IOException e) {
// ignore missing or invalid metadata
}
}
@ -871,7 +840,7 @@ public class Maven2RepositoryStorage
}
private static class DirectoryFilter
implements Predicate<Path> {
implements Predicate<StorageAsset> {
private final Filter<String> filter;
public DirectoryFilter(Filter<String> filter) {
@ -879,13 +848,13 @@ public class Maven2RepositoryStorage
}
@Override
public boolean test(Path dir) {
final String name = dir.getFileName().toString();
public boolean test(StorageAsset dir) {
final String name = dir.getName();
if (!filter.accept(name)) {
return false;
} else if (name.startsWith(".")) {
return false;
} else if (!Files.isDirectory(dir)) {
} else if (!dir.isContainer()) {
return false;
}
return true;
@ -893,7 +862,7 @@ public class Maven2RepositoryStorage
}
private static class ArtifactDirectoryFilter
implements Predicate<Path> {
implements Predicate<StorageAsset> {
private final Filter<String> filter;
private ArtifactDirectoryFilter(Filter<String> filter) {
@ -901,8 +870,8 @@ public class Maven2RepositoryStorage
}
@Override
public boolean test(Path dir) {
final String name = dir.getFileName().toString();
public boolean test(StorageAsset dir) {
final String name = dir.getName().toString();
// TODO compare to logic in maven-repository-layer
if (!filter.accept(name)) {
return false;
@ -912,7 +881,7 @@ public class Maven2RepositoryStorage
return false;
} else if (Arrays.binarySearch(IGNORED_FILES, name) >= 0) {
return false;
} else if (Files.isDirectory(dir)) {
} else if (dir.isContainer()) {
return false;
}
// some files from remote repositories can have name like maven-metadata-archiva-vm-all-public.xml
@ -927,7 +896,7 @@ public class Maven2RepositoryStorage
private static final class PomFilenameFilter
implements Predicate<Path> {
implements Predicate<StorageAsset> {
private final String artifactId, projectVersion;
@ -937,8 +906,8 @@ public class Maven2RepositoryStorage
}
@Override
public boolean test(Path dir) {
final String name = dir.getFileName().toString();
public boolean test(StorageAsset dir) {
final String name = dir.getName();
if (name.startsWith(artifactId + "-") && name.endsWith(".pom")) {
String v = name.substring(artifactId.length() + 1, name.length() - 4);
v = VersionUtil.getBaseVersion(v);
@ -952,7 +921,7 @@ public class Maven2RepositoryStorage
}
private static class PomFileFilter
implements Predicate<Path> {
implements Predicate<StorageAsset> {
private final String pomFile;
private PomFileFilter(String pomFile) {
@ -960,8 +929,8 @@ public class Maven2RepositoryStorage
}
@Override
public boolean test(Path dir) {
return pomFile.equals(dir.getFileName().toString());
public boolean test(StorageAsset dir) {
return pomFile.equals(dir.getName());
}
}

View File

@ -32,6 +32,7 @@ import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.RemoteRepository;
import org.apache.archiva.repository.RepositoryCredentials;
import org.apache.archiva.repository.maven2.MavenSystemManager;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.xml.XMLException;
import org.apache.commons.lang.StringUtils;
import org.apache.http.auth.UsernamePasswordCredentials;
@ -78,7 +79,7 @@ public class RepositoryModelResolver
private RepositorySystemSession session;
private VersionRangeResolver versionRangeResolver;
private Path basedir;
private StorageAsset basedir;
private RepositoryPathTranslator pathTranslator;
@ -98,7 +99,7 @@ public class RepositoryModelResolver
private ManagedRepository managedRepository;
public RepositoryModelResolver( Path basedir, RepositoryPathTranslator pathTranslator )
public RepositoryModelResolver(StorageAsset basedir, RepositoryPathTranslator pathTranslator )
{
this.basedir = basedir;
@ -110,7 +111,7 @@ public class RepositoryModelResolver
Map<String, NetworkProxy> networkProxiesMap, ManagedRepository targetRepository,
MavenSystemManager mavenSystemManager)
{
this( Paths.get( managedRepository.getLocation() ), pathTranslator );
this( managedRepository.getAsset(""), pathTranslator );
this.managedRepository = managedRepository;
@ -138,9 +139,9 @@ public class RepositoryModelResolver
String filename = artifactId + "-" + version + ".pom";
// TODO: we need to convert 1.0-20091120.112233-1 type paths to baseVersion for the below call - add a test
Path model = pathTranslator.toFile( basedir, groupId, artifactId, version, filename );
StorageAsset model = pathTranslator.toFile( basedir, groupId, artifactId, version, filename );
if ( !Files.exists(model) )
if ( !model.exists() )
{
/**
*
@ -161,10 +162,10 @@ public class RepositoryModelResolver
try
{
boolean success = getModelFromProxy( remoteRepository, groupId, artifactId, version, filename );
if ( success && Files.exists(model) )
if ( success && model.exists() )
{
log.info( "Model '{}' successfully retrieved from remote repository '{}'",
model.toAbsolutePath(), remoteRepository.getId() );
model.getPath(), remoteRepository.getId() );
break;
}
}
@ -172,20 +173,20 @@ public class RepositoryModelResolver
{
log.info(
"An exception was caught while attempting to retrieve model '{}' from remote repository '{}'.Reason:{}",
model.toAbsolutePath(), remoteRepository.getId(), e.getMessage() );
model.getPath(), remoteRepository.getId(), e.getMessage() );
}
catch ( Exception e )
{
log.warn(
"An exception was caught while attempting to retrieve model '{}' from remote repository '{}'.Reason:{}",
model.toAbsolutePath(), remoteRepository.getId(), e.getMessage() );
model.getPath(), remoteRepository.getId(), e.getMessage() );
continue;
}
}
}
return new FileModelSource( model.toFile() );
return new FileModelSource( model.getFilePath().toFile() );
}
public ModelSource resolveModel(Parent parent) throws UnresolvableModelException {
@ -249,15 +250,15 @@ public class RepositoryModelResolver
log.debug( "use snapshot path {} for maven coordinate {}:{}:{}", snapshotPath, groupId, artifactId,
version );
Path model = basedir.resolve( snapshotPath );
StorageAsset model = basedir.resolve( snapshotPath );
//model = pathTranslator.toFile( basedir, groupId, artifactId, lastVersion, filename );
if ( Files.exists(model) )
if ( model.exists() )
{
return model;
return model.getFilePath();
}
}
}
catch ( XMLException e )
catch (XMLException | IOException e )
{
log.warn( "fail to read {}, {}", mavenMetadata.toAbsolutePath(), e.getCause() );
}

View File

@ -34,7 +34,7 @@ import org.apache.archiva.repository.LayoutException;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils;
import java.io.IOException;

View File

@ -23,8 +23,7 @@ import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.content.maven2.MavenRepositoryRequestInfo;
import org.apache.archiva.repository.features.ArtifactCleanupFeature;
import org.apache.archiva.repository.features.IndexCreationFeature;
@ -38,7 +37,6 @@ import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Locale;
import java.util.function.Function;
/**
* Maven2 managed repository implementation.

View File

@ -9,7 +9,7 @@ import org.apache.archiva.repository.RepositoryCapabilities;
import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.StandardCapabilities;
import org.apache.archiva.repository.UnsupportedFeatureException;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature;
import org.apache.archiva.repository.features.RepositoryFeature;

View File

@ -22,13 +22,12 @@ package org.apache.archiva.repository.maven2;
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Locale;

View File

@ -22,8 +22,7 @@ package org.apache.archiva.repository.maven2;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.configuration.*;
import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.FilesystemAsset;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.features.ArtifactCleanupFeature;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature;
@ -298,11 +297,19 @@ public class MavenRepositoryProvider implements RepositoryProvider {
IndexCreationFeature indexCreationFeature = repositoryGroup.getFeature( IndexCreationFeature.class ).get();
indexCreationFeature.setIndexPath( getURIFromString(configuration.getMergedIndexPath()) );
Path localPath = Paths.get(configuration.getMergedIndexPath());
if (localPath.isAbsolute()) {
indexCreationFeature.setLocalIndexPath( new FilesystemAsset(localPath.getFileName().toString(), localPath) );
Path repoGroupPath = repositoryGroup.getAsset("").getFilePath().toAbsolutePath();
if (localPath.isAbsolute() && !localPath.startsWith(repoGroupPath)) {
try {
FilesystemStorage storage = new FilesystemStorage(localPath.getParent(), fileLockManager);
indexCreationFeature.setLocalIndexPath(storage.getAsset(localPath.getFileName().toString()));
} catch (IOException e) {
throw new RepositoryException("Could not initialize storage for index path "+localPath);
}
} else if (localPath.isAbsolute()) {
indexCreationFeature.setLocalIndexPath(repositoryGroup.getAsset(repoGroupPath.relativize(localPath).toString()));
} else
{
indexCreationFeature.setLocalIndexPath( new FilesystemAsset(localPath.toString(), archivaConfiguration.getRepositoryGroupBaseDir( ).resolve( localPath )));
indexCreationFeature.setLocalIndexPath(repositoryGroup.getAsset(localPath.toString()));
}
}
// References to other repositories are set filled by the registry

View File

@ -29,6 +29,7 @@ import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
@ -46,8 +47,7 @@ public class MavenRepositoryMetadataReaderTest
@Test
public void testGroupMetadata()
throws XMLException
{
throws XMLException, IOException {
Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/plugins/maven-metadata.xml" );
ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile );
@ -81,8 +81,7 @@ public class MavenRepositoryMetadataReaderTest
@Test
public void testProjectMetadata()
throws XMLException
{
throws XMLException, IOException {
Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/shared/maven-downloader/maven-metadata.xml" );
ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile);
@ -99,8 +98,7 @@ public class MavenRepositoryMetadataReaderTest
@Test
public void testProjectVersionMetadata()
throws XMLException
{
throws XMLException, IOException {
Path metadataFile = defaultRepoDir.resolve( "org/apache/apache/5-SNAPSHOT/maven-metadata.xml" );
ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile );

View File

@ -19,7 +19,7 @@ package org.apache.archiva.repository.index.mock;
* under the License.
*/
import org.apache.archiva.admin.model.RepositoryAdminException;
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.configuration.ArchivaConfiguration;
@ -40,8 +40,9 @@ import org.apache.archiva.repository.RemoteRepository;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
import org.apache.archiva.repository.content.FilesystemAsset;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.FilesystemAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature;
import org.apache.commons.lang.StringUtils;
@ -145,7 +146,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
private Path getIndexPath( ArchivaIndexingContext ctx )
{
return PathUtil.getPathFromUri( ctx.getPath( ) );
return ctx.getPath( ).getFilePath();
}
@FunctionalInterface
@ -380,9 +381,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
@Override
public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
{
final URI ctxUri = context.getPath();
final StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try {
indexer.addArtifactsToIndex(artifacts, indexingContext);
} catch (IOException e) {
@ -396,9 +397,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
@Override
public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
{
final URI ctxUri = context.getPath();
final StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try {
indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
} catch (IOException e) {
@ -442,7 +443,12 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( )
+ ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
}
MavenIndexContextMock context = new MavenIndexContextMock( repository, mvnCtx );
MavenIndexContextMock context = null;
try {
context = new MavenIndexContextMock( repository, mvnCtx );
} catch (IOException e) {
throw new IndexCreationFailedException(e);
}
return context;
}
@ -457,7 +463,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
log.warn("Index close failed");
}
try {
FileUtils.deleteDirectory(Paths.get(context.getPath()));
FileUtils.deleteDirectory(context.getPath().getFilePath());
} catch (IOException e) {
throw new IndexUpdateFailedException("Could not delete index files");
}
@ -527,12 +533,14 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
}
private StorageAsset getIndexPath( Repository repo) throws IOException {
IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
Path repoDir = repo.getAsset( "" ).getFilePath();
URI indexDir = icf.getIndexPath();
String indexPath = indexDir.getPath();
Path indexDirectory = null;
FilesystemStorage fsStorage = (FilesystemStorage) repo.getAsset("").getStorage();
if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
{
@ -541,6 +549,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
if ( indexDirectory.isAbsolute( ) )
{
indexPath = indexDirectory.getFileName().toString();
fsStorage = new FilesystemStorage(indexDirectory.getParent(), new DefaultFileLockManager());
}
else
{
@ -557,7 +566,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
{
Files.createDirectories( indexDirectory );
}
return new FilesystemAsset( indexPath, indexDirectory );
return new FilesystemAsset( fsStorage, indexPath, indexDirectory );
}
private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException

View File

@ -19,12 +19,14 @@ package org.apache.archiva.repository.index.mock;
* under the License.
*/
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.maven.index.context.IndexingContext;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.sql.Date;
@ -38,10 +40,12 @@ public class MavenIndexContextMock implements ArchivaIndexingContext {
private IndexingContext delegate;
private Repository repository;
private FilesystemStorage indexStorage;
MavenIndexContextMock(Repository repository, IndexingContext delegate) {
MavenIndexContextMock(Repository repository, IndexingContext delegate) throws IOException {
this.delegate = delegate;
this.repository = repository;
indexStorage = new FilesystemStorage(delegate.getIndexDirectoryFile().toPath(), new DefaultFileLockManager());
}
@ -56,8 +60,8 @@ public class MavenIndexContextMock implements ArchivaIndexingContext {
}
@Override
public URI getPath() {
return delegate.getIndexDirectoryFile().toURI();
public StorageAsset getPath() {
return indexStorage.getAsset("");
}
@Override

View File

@ -27,6 +27,7 @@ import org.apache.archiva.xml.XMLException;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
@ -41,8 +42,7 @@ public class RepositoryMetadataReaderTest
{
@Test
public void testLoadSimple()
throws XMLException
{
throws XMLException, IOException {
Path defaultRepoDir = Paths.get( "src/test/repositories/default-repository" );
Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/shared/maven-downloader/maven-metadata.xml" );
@ -59,8 +59,7 @@ public class RepositoryMetadataReaderTest
@Test
public void testLoadComplex()
throws XMLException
{
throws XMLException, IOException {
Path defaultRepoDir = Paths.get( "src/test/repositories/default-repository" );
Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/samplejar/maven-metadata.xml" );

View File

@ -26,7 +26,7 @@ import org.apache.archiva.repository.BasicManagedRepository;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.ReleaseScheme;
import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.scheduler.indexing.ArtifactIndexingTask;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;

View File

@ -1,9 +1,7 @@
package org.apache.archiva.scheduler.repository.model;
import org.apache.archiva.redback.components.taskqueue.Task;
import org.apache.archiva.repository.content.StorageAsset;
import java.nio.file.Path;
import org.apache.archiva.repository.storage.StorageAsset;
/*

View File

@ -19,6 +19,7 @@ package org.apache.archiva.mock;
* under the License.
*/
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.configuration.ArchivaConfiguration;
@ -27,7 +28,6 @@ import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.indexer.IndexCreationFailedException;
import org.apache.archiva.indexer.IndexUpdateFailedException;
import org.apache.archiva.indexer.UnsupportedBaseContextException;
import org.apache.archiva.proxy.ProxyRegistry;
import org.apache.archiva.proxy.maven.WagonFactory;
import org.apache.archiva.proxy.maven.WagonFactoryException;
import org.apache.archiva.proxy.maven.WagonFactoryRequest;
@ -39,10 +39,12 @@ import org.apache.archiva.repository.RemoteRepository;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
import org.apache.archiva.repository.content.FilesystemAsset;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.FilesystemAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.index.ArtifactContext;
import org.apache.maven.index.ArtifactContextProducer;
@ -140,7 +142,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
private Path getIndexPath( ArchivaIndexingContext ctx )
{
return PathUtil.getPathFromUri( ctx.getPath( ) );
return ctx.getPath().getFilePath();
}
@FunctionalInterface
@ -365,9 +367,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
@Override
public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
{
final URI ctxUri = context.getPath();
StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try {
indexer.addArtifactsToIndex(artifacts, indexingContext);
} catch (IOException e) {
@ -381,9 +383,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
@Override
public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
{
final URI ctxUri = context.getPath();
final StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try {
indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
} catch (IOException e) {
@ -442,7 +444,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
log.warn("Index close failed");
}
try {
FileUtils.deleteDirectory(Paths.get(context.getPath()));
StorageUtil.deleteRecursively(context.getPath());
} catch (IOException e) {
throw new IndexUpdateFailedException("Could not delete index files");
}
@ -517,6 +519,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
URI indexDir = icf.getIndexPath();
String indexPath = indexDir.getPath();
Path indexDirectory = null;
FilesystemStorage filesystemStorage = (FilesystemStorage) repo.getAsset("").getStorage();
if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
{
@ -525,6 +528,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
if ( indexDirectory.isAbsolute( ) )
{
indexPath = indexDirectory.getFileName().toString();
filesystemStorage = new FilesystemStorage(indexDirectory, new DefaultFileLockManager());
}
else
{
@ -541,7 +545,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
{
Files.createDirectories( indexDirectory );
}
return new FilesystemAsset( indexPath, indexDirectory);
return new FilesystemAsset( filesystemStorage, indexPath, indexDirectory);
}
private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException

View File

@ -19,8 +19,12 @@ package org.apache.archiva.mock;
* under the License.
*/
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.storage.FilesystemAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.maven.index.context.IndexingContext;
import java.io.IOException;
import java.io.UncheckedIOException;
@ -38,10 +42,16 @@ public class MavenIndexContextMock implements ArchivaIndexingContext {
private IndexingContext delegate;
private Repository repository;
private FilesystemStorage filesystemStorage;
MavenIndexContextMock( Repository repository, IndexingContext delegate) {
this.delegate = delegate;
this.repository = repository;
try {
filesystemStorage = new FilesystemStorage(delegate.getIndexDirectoryFile().toPath().getParent(), new DefaultFileLockManager());
} catch (IOException e) {
e.printStackTrace();
}
}
@ -56,8 +66,9 @@ public class MavenIndexContextMock implements ArchivaIndexingContext {
}
@Override
public URI getPath() {
return delegate.getIndexDirectoryFile().toURI();
public StorageAsset getPath() {
return new FilesystemAsset(filesystemStorage, delegate.getIndexDirectoryFile().toPath().getFileName().toString(), delegate.getIndexDirectoryFile().toPath());
}
@Override

View File

@ -44,6 +44,10 @@
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-security</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-admin-api</artifactId>

View File

@ -43,6 +43,8 @@ import org.apache.archiva.repository.ReleaseScheme;
import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.RepositoryNotFoundException;
import org.apache.archiva.repository.metadata.MetadataTools;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.archiva.rest.api.model.*;
import org.apache.archiva.rest.api.services.ArchivaRestServiceException;
import org.apache.archiva.rest.api.services.BrowseService;
@ -62,6 +64,8 @@ import java.io.InputStream;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.*;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
@ -696,8 +700,8 @@ public class DefaultBrowseService
ArchivaArtifact archivaArtifact = new ArchivaArtifact( groupId, artifactId, version, classifier,
StringUtils.isEmpty( type ) ? "jar" : type,
repoId );
Path file = managedRepositoryContent.toFile( archivaArtifact );
if ( Files.exists(file) )
StorageAsset file = managedRepositoryContent.toFile( archivaArtifact );
if ( file.exists() )
{
return readFileEntries( file, path, repoId );
}
@ -781,8 +785,8 @@ public class DefaultBrowseService
ArchivaArtifact archivaArtifact = new ArchivaArtifact( groupId, artifactId, version, classifier,
StringUtils.isEmpty( type ) ? "jar" : type,
repoId );
Path file = managedRepositoryContent.toFile( archivaArtifact );
if ( !Files.exists(file) )
StorageAsset file = managedRepositoryContent.toFile( archivaArtifact );
if ( !file.exists() )
{
log.debug( "file: {} not exists for repository: {} try next repository", file, repoId );
continue;
@ -790,7 +794,8 @@ public class DefaultBrowseService
if ( StringUtils.isNotBlank( path ) )
{
// zip entry of the path -> path must a real file entry of the archive
JarFile jarFile = new JarFile( file.toFile() );
StorageUtil.PathInformation pathInfo = StorageUtil.getAssetDataAsPath(file);
JarFile jarFile = new JarFile( pathInfo.getPath().toFile());
ZipEntry zipEntry = jarFile.getEntry( path );
try (InputStream inputStream = jarFile.getInputStream( zipEntry ))
{
@ -799,9 +804,14 @@ public class DefaultBrowseService
finally
{
closeQuietly( jarFile );
if (pathInfo.isTmpFile()) {
Files.deleteIfExists(pathInfo.getPath());
}
}
}
return new ArtifactContent( new String(Files.readAllBytes( file ), ARTIFACT_CONTENT_ENCODING), repoId );
try(InputStream readStream = file.getReadStream()) {
return new ArtifactContent(IOUtils.toString(readStream, ARTIFACT_CONTENT_ENCODING), repoId);
}
}
}
catch ( IOException e )
@ -846,9 +856,9 @@ public class DefaultBrowseService
StringUtils.isEmpty( classifier )
? ""
: classifier, "jar", repoId );
Path file = managedRepositoryContent.toFile( archivaArtifact );
StorageAsset file = managedRepositoryContent.toFile( archivaArtifact );
if ( file != null && Files.exists(file) )
if ( file != null && file.exists() )
{
return true;
}
@ -856,8 +866,8 @@ public class DefaultBrowseService
// in case of SNAPSHOT we can have timestamped version locally !
if ( StringUtils.endsWith( version, VersionUtil.SNAPSHOT ) )
{
Path metadataFile = file.getParent().resolve(MetadataTools.MAVEN_METADATA );
if ( Files.exists(metadataFile) )
StorageAsset metadataFile = file.getStorage().getAsset(file.getParent().getPath()+"/"+MetadataTools.MAVEN_METADATA );
if ( metadataFile.exists() )
{
try
{
@ -873,14 +883,14 @@ public class DefaultBrowseService
.append( ( StringUtils.isEmpty( classifier ) ? "" : "-" + classifier ) ) //
.append( ".jar" ).toString();
Path timeStampFile = file.getParent().resolve( timeStampFileName );
log.debug( "try to find timestamped snapshot version file: {}", timeStampFile.toAbsolutePath() );
if ( Files.exists(timeStampFile) )
StorageAsset timeStampFile = file.getStorage().getAsset(file.getParent().getPath() + "/" + timeStampFileName );
log.debug( "try to find timestamped snapshot version file: {}", timeStampFile.getPath() );
if ( timeStampFile.exists() )
{
return true;
}
}
catch ( XMLException e )
catch (XMLException | IOException e )
{
log.warn( "skip fail to find timestamped snapshot file: {}", e.getMessage() );
}
@ -891,7 +901,7 @@ public class DefaultBrowseService
file = proxyHandler.fetchFromProxies( managedRepositoryContent, path );
if ( file != null && Files.exists(file) )
if ( file != null && file.exists() )
{
// download pom now
String pomPath = StringUtils.substringBeforeLast( path, ".jar" ) + ".pom";
@ -1075,7 +1085,7 @@ public class DefaultBrowseService
}
}
protected List<ArtifactContentEntry> readFileEntries(final Path file, final String filterPath, final String repoId )
protected List<ArtifactContentEntry> readFileEntries(final StorageAsset file, final String filterPath, final String repoId )
throws IOException
{
String cleanedfilterPath = filterPath==null ? "" : (StringUtils.startsWith(filterPath, "/") ?
@ -1085,7 +1095,9 @@ public class DefaultBrowseService
if (!StringUtils.endsWith(cleanedfilterPath,"/") && !StringUtils.isEmpty(cleanedfilterPath)) {
filterDepth++;
}
JarFile jarFile = new JarFile( file.toFile() );
StorageUtil.PathInformation pathInfo = StorageUtil.getAssetDataAsPath(file);
JarFile jarFile = new JarFile(pathInfo.getPath().toFile());
try
{
Enumeration<JarEntry> jarEntryEnumeration = jarFile.entries();
@ -1141,6 +1153,9 @@ public class DefaultBrowseService
{
jarFile.close();
}
if (pathInfo.isTmpFile()) {
Files.deleteIfExists(pathInfo.getPath());
}
}
List<ArtifactContentEntry> sorted = new ArrayList<>( artifactContentEntryMap.values() );
Collections.sort( sorted, ArtifactContentEntryComparator.INSTANCE );

View File

@ -54,9 +54,9 @@ import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.RepositoryNotFoundException;
import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.content.StorageUtil;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.archiva.repository.metadata.MetadataTools;
import org.apache.archiva.repository.metadata.RepositoryMetadataException;
@ -89,11 +89,10 @@ import javax.inject.Inject;
import javax.inject.Named;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
@ -522,7 +521,7 @@ public class DefaultRepositoriesService
{
metadata = MavenMetadataReader.read( metadataFile.getFilePath() );
}
catch ( XMLException e )
catch (XMLException | IOException e )
{
throw new RepositoryMetadataException( e.getMessage(), e );
}
@ -543,7 +542,7 @@ public class DefaultRepositoriesService
throws IOException
{
StorageUtil.copyAsset( sourceStorage, sourceFile, targetStorage, targetPath, true );
StorageUtil.copyAsset( sourceFile, targetPath, true );
if ( fixChecksums )
{
fixChecksums( targetPath );
@ -612,7 +611,11 @@ public class DefaultRepositoriesService
projectMetadata.setReleasedVersion( latestVersion );
}
RepositoryMetadataWriter.write( projectMetadata, projectMetadataFile.getFilePath());
try(OutputStreamWriter writer = new OutputStreamWriter(projectMetadataFile.getWriteStream(true))) {
RepositoryMetadataWriter.write(projectMetadata, writer);
} catch (IOException e) {
throw new RepositoryMetadataException(e);
}
if ( fixChecksums )
{
@ -1177,7 +1180,11 @@ public class DefaultRepositoriesService
metadata.setLastUpdatedTimestamp( lastUpdatedTimestamp );
metadata.setAvailableVersions( availableVersions );
RepositoryMetadataWriter.write( metadata, metadataFile.getFilePath());
try (OutputStreamWriter writer = new OutputStreamWriter(metadataFile.getWriteStream(true))) {
RepositoryMetadataWriter.write(metadata, writer);
} catch (IOException e) {
throw new RepositoryMetadataException(e);
}
ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
checksum.fixChecksums( algorithms );
}

View File

@ -23,6 +23,8 @@ import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.commons.io.FilenameUtils;
import java.nio.file.Path;
@ -79,7 +81,7 @@ public class ArtifactBuilder
ref.setClassifier( classifier );
ref.setType( type );
Path file = managedRepositoryContent.toFile( ref );
StorageAsset file = managedRepositoryContent.toFile( ref );
String extension = getExtensionFromFile(file);
@ -124,10 +126,10 @@ public class ArtifactBuilder
/**
* Extract file extension
*/
String getExtensionFromFile( Path file )
String getExtensionFromFile( StorageAsset file )
{
// we are just interested in the section after the last -
String[] parts = file.getFileName().toString().split( "-" );
String[] parts = file.getName().split( "-" );
if ( parts.length > 0 )
{
// get anything after a dot followed by a letter a-z, including other dots
@ -139,7 +141,7 @@ public class ArtifactBuilder
}
}
// just in case
return FilenameUtils.getExtension( file.toFile().getName() );
return StorageUtil.getExtension( file );
}
}

View File

@ -19,6 +19,9 @@ package org.apache.archiva.rest.services;
*/
import junit.framework.TestCase;
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.repository.storage.FilesystemAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.rest.api.model.ArtifactContentEntry;
import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
import org.junit.Test;
@ -56,10 +59,11 @@ public class ArtifactContentEntriesTests
throws Exception
{
FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager());
Path file = Paths.get( getBasedir(),
"src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" );
List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries( file, null, "foo" );
List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(), file), null, "foo" );
log.info( "artifactContentEntries: {}", artifactContentEntries );
@ -74,10 +78,12 @@ public class ArtifactContentEntriesTests
throws Exception
{
FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager());
Path file = Paths.get( getBasedir(),
"src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" );
List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries( file, "", "foo" );
List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries(
new FilesystemAsset(filesystemStorage, file.toString(), file), "", "foo" );
log.info( "artifactContentEntries: {}", artifactContentEntries );
@ -92,10 +98,12 @@ public class ArtifactContentEntriesTests
throws Exception
{
FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager());
Path file = Paths.get( getBasedir(),
"src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" );
List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries( file, "/", "foo" );
List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(),file), "/", "foo" );
log.info( "artifactContentEntries: {}", artifactContentEntries );
@ -110,10 +118,12 @@ public class ArtifactContentEntriesTests
throws Exception
{
FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager());
Path file = Paths.get( getBasedir(),
"src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" );
List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries( file, "org", "foo" );
List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(), file), "org", "foo" );
log.info( "artifactContentEntries: {}", artifactContentEntries );
@ -127,11 +137,13 @@ public class ArtifactContentEntriesTests
throws Exception
{
FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager());
Path file = Paths.get( getBasedir(),
"src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" );
List<ArtifactContentEntry> artifactContentEntries =
browseService.readFileEntries( file, "org/apache/commons/logging/impl/", "foo" );
browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(), file), "org/apache/commons/logging/impl/", "foo" );
log.info( "artifactContentEntries: {}", artifactContentEntries );
@ -145,11 +157,13 @@ public class ArtifactContentEntriesTests
throws Exception
{
FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager());
Path file = Paths.get( getBasedir(),
"src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" );
List<ArtifactContentEntry> artifactContentEntries =
browseService.readFileEntries( file, "org/apache/commons/logging/", "foo" );
browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(), file), "org/apache/commons/logging/", "foo" );
log.info( "artifactContentEntries: {}", artifactContentEntries );

Some files were not shown because too many files have changed in this diff Show More