Improving storage handling with assets

Martin Stockhammer 2021-05-30 19:14:03 +02:00
parent fe117fcc4b
commit 79c61a7c6a
23 changed files with 1366 additions and 236 deletions

View File

@ -90,6 +90,30 @@ public interface ManagedRepositoryContent extends RepositoryContent
*/
void deleteItem( ContentItem item ) throws ItemNotFoundException, ContentAccessException;
/**
* Copies the given item to the destination repository. The destination repository must be of the same type
* as this repository. Metadata is updated only if no metadata scan consumer is active.
*
* @param item the item to copy
* @param destinationRepository the destination repository
* @throws ItemNotFoundException if the given item does not exist
* @throws ContentAccessException if an error occurred during the copy process
*/
void copyItem(ContentItem item, ManagedRepository destinationRepository) throws ItemNotFoundException, ContentAccessException;
/**
* Copies the given item to the destination repository. The destination repository must be of the same type
* as this repository.
*
* @param item the item to copy
* @param destinationRepository the destination repository
* @param updateMetadata <code>true</code>, if the metadata will be updated immediately after copying; <code>false</code>,
* if the metadata is not updated after copying, but it may still be updated by the metadata scan consumer, if one is configured.
* @throws ItemNotFoundException if the given item does not exist
* @throws ContentAccessException if an error occurred during the copy process
*/
void copyItem(ContentItem item, ManagedRepository destinationRepository, boolean updateMetadata) throws ItemNotFoundException, ContentAccessException;
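A minimal usage sketch of the copyItem variants above; the source ManagedRepositoryContent, the ItemSelector and the target ManagedRepository are assumed to be resolved elsewhere:

try
{
    ContentItem item = sourceContent.getItem( selector );
    // Copy without an immediate metadata update; a configured metadata scan consumer may update it later
    sourceContent.copyItem( item, targetRepository, false );
}
catch ( ItemNotFoundException | ContentAccessException e )
{
    // handle the failed copy, e.g. log it and report the error to the caller
}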
/**
* Returns an item for the given selector. The type of the returned item depends on the
* selector.

View File

@ -114,6 +114,18 @@ public class ManagedRepositoryContentMock implements BaseRepositoryContentLayout
}
@Override
public void copyItem( ContentItem item, ManagedRepository destinationRepository ) throws ItemNotFoundException, ContentAccessException
{
}
@Override
public void copyItem( ContentItem item, ManagedRepository destinationRepository, boolean updateMetadata ) throws ItemNotFoundException, ContentAccessException
{
}
@Override
public ContentItem getItem( ItemSelector selector ) throws ContentAccessException, IllegalArgumentException
{

View File

@ -125,7 +125,19 @@ public class ManagedRepositoryContentMock implements BaseRepositoryContentLayout
}
@Override
public void deleteItem( ContentItem item ) throws ItemNotFoundException, ContentAccessException
public void deleteItem( ContentItem item ) throws ContentAccessException
{
}
@Override
public void copyItem( ContentItem item, ManagedRepository destinationRepository ) throws ContentAccessException
{
}
@Override
public void copyItem( ContentItem item, ManagedRepository destinationRepository, boolean updateMetadata ) throws ContentAccessException
{
}
@ -161,7 +173,7 @@ public class ManagedRepositoryContentMock implements BaseRepositoryContentLayout
}
@Override
public Artifact getArtifact( String path ) throws LayoutException, ContentAccessException
public Artifact getArtifact( String path ) throws ContentAccessException
{
return null;
}
@ -252,13 +264,13 @@ public class ManagedRepositoryContentMock implements BaseRepositoryContentLayout
}
@Override
public <T extends ContentItem> T applyCharacteristic( Class<T> clazz, ContentItem item ) throws LayoutException
public <T extends ContentItem> T applyCharacteristic( Class<T> clazz, ContentItem item )
{
return null;
}
@Override
public <T extends ManagedRepositoryContentLayout> T getLayout( Class<T> clazz ) throws LayoutException
public <T extends ManagedRepositoryContentLayout> T getLayout( Class<T> clazz )
{
return null;
}
@ -296,7 +308,7 @@ public class ManagedRepositoryContentMock implements BaseRepositoryContentLayout
@Override
public ContentItem toItem( String path ) throws LayoutException
public ContentItem toItem( String path )
{
return null;
}

View File

@ -49,11 +49,39 @@
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<!-- We need the abstract test class in other projects, so we create a jar file with the test classifier -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>

View File

@ -0,0 +1,29 @@
package org.apache.archiva.repository.storage;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* The asset type is only used for creating new assets.
* The FILE type contains data and provides input and output streams.
* The CONTAINER type contains only further assets and no data.
* @author Martin Stockhammer <martin_s@apache.org>
*/
public enum AssetType
{
FILE, CONTAINER
}
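A minimal sketch of how the two asset types are meant to be used when creating new assets; the storage instance and the paths are assumptions:

static void createLayout( RepositoryStorage storage ) throws IOException
{
    StorageAsset dir = storage.getRoot( ).resolve( "org/example" );
    dir.create( AssetType.CONTAINER );       // container asset: holds further assets, no data
    StorageAsset file = dir.resolve( "example-1.0.jar" );
    file.create( AssetType.FILE );           // file asset: carries data and provides streams
}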

View File

@ -43,13 +43,14 @@ public interface StorageAsset
{
/**
* Returns the storage this asset belongs to.
* @return
* Returns the storage this asset belongs to. Each asset belongs to exactly one storage instance.
*
* @return the storage instance
*/
RepositoryStorage getStorage();
/**
* Returns the complete path relative to the repository to the given asset.
* Returns the complete path of the given asset relative to the base path.
*
* @return A path starting with '/' that uniquely identifies the asset in the repository.
*/
@ -81,7 +82,7 @@ public interface StorageAsset
boolean isLeaf();
/**
* List the child assets.
* List the child assets. Implementations should return an ordered list of children.
*
* @return The list of children. If there are no children and if the asset is not a container, an empty list will be returned.
*/
@ -96,40 +97,55 @@ public interface StorageAsset
/**
* Returns the input stream of the artifact content.
* It will throw a IOException, if the stream could not be created.
* This method will throw an IOException, if the stream could not be created.
* Assets of type {@link AssetType#CONTAINER} will throw an IOException because they have no data attached.
* Implementations should create a new stream instance for each invocation and make sure that the
* stream is properly closed after usage.
*
* @return The InputStream representing the content of the artifact.
* @throws IOException
* @throws IOException if the stream could not be created, either because of a problem accessing the storage, or because
* the asset is not capable of providing a data stream
*/
InputStream getReadStream() throws IOException;
/**
* Returns a NIO representation of the data.
*
* This method will throw an IOException, if the channel could not be created.
* Assets of type {@link AssetType#CONTAINER} will throw an IOException because they have no data attached.
* Implementations should create a new channel instance for each invocation and make sure that the
* channel is properly closed after usage.
* @return A channel to the asset data.
* @throws IOException
* @throws IOException if the channel could not be created, either because of a problem accessing the storage, or because
* the asset is not capable of providing a data stream
*/
ReadableByteChannel getReadChannel() throws IOException;
/**
*
* Returns an output stream where you can write data to the asset. The operation is not locked or synchronized.
* This method will throw an IOException, if the stream could not be created.
* Assets of type {@link AssetType#CONTAINER} will throw an IOException because they have no data attached.
* Users of this method have to make sure that the stream is properly closed after usage.
*
* @param replace If true, the original data will be replaced, otherwise the data will be appended.
* @return The OutputStream where the data can be written.
* @throws IOException
* @throws IOException if the stream could not be created, either because of a problem accessing the storage, or because
* the asset is not capable of providing a data stream
*/
OutputStream getWriteStream( boolean replace) throws IOException;
/**
* Returns a NIO representation of the asset where you can write the data.
* This method will throw an IOException, if the channel could not be created.
* Assets of type {@link AssetType#CONTAINER} will throw an IOException because they have no data attached.
* Implementations should create a new channel instance for each invocation and make sure that the
* channel is properly closed after usage.
*
* @param replace True, if the content should be replaced by the data written to the stream.
* @return The Channel for writing the data.
* @throws IOException
* @throws IOException if the channel could not be created, either because of a problem accessing the storage, or because
* the asset is not capable of providing a data stream
*/
WritableByteChannel getWriteChannel( boolean replace) throws IOException;
@ -140,6 +156,7 @@ public interface StorageAsset
* The original file may be deleted, if the storage was successful.
*
* @param newData Replaces the data by the content of the given file.
* @throws IOException if the access to the storage failed
*/
boolean replaceDataFromFile( Path newData) throws IOException;
@ -155,6 +172,13 @@ public interface StorageAsset
*/
void create() throws IOException;
/**
* Creates the asset as the given type
* @param type the type to create, if the asset does not exist
* @throws IOException if the asset could not be created
*/
void create(AssetType type) throws IOException;
/**
* Returns the real path to the asset, if it exists. Not all implementations may implement this method.
* The method throws {@link UnsupportedOperationException}, if and only if {@link #isFileBased()} returns false.
@ -166,7 +190,7 @@ public interface StorageAsset
/**
* Returns true, if the asset can return a file path for the given asset. If this is true, the {@link #getFilePath()}
* will not throw a {@link UnsupportedOperationException}
* must not throw a {@link UnsupportedOperationException}
*
* @return
*/
@ -174,20 +198,33 @@ public interface StorageAsset
/**
* Returns true, if there is a parent to this asset.
* @return
* @return True, if this asset is a descendant of a parent. False, if this is the root asset of the storage.
*/
boolean hasParent();
/**
* Returns the parent of this asset.
* Returns the parent of this asset. If this is the root asset of the underlying storage,
* <code>null</code> will be returned.
*
* @return The asset, or <code>null</code>, if it does not exist.
*/
StorageAsset getParent();
/**
* Returns the asset relative to the given path
* @param toPath
* @return
* Returns the asset instance relative to the given path. The returned asset may not be persisted yet.
*
* @param toPath the path relative to the current asset
* @return the asset representing the given path
*/
StorageAsset resolve(String toPath);
/**
* Returns the relative path from <code>this</code> asset to the given asset.
* If the given asset is from a different storage implementation than this asset, the
* result is undefined.
*
* @param asset the asset that should be a descendant of <code>this</code>
* @return the relative path
*/
String relativize(StorageAsset asset );
}
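A minimal sketch of the stream and path contract described above, assuming two already resolved, non-root file assets:

static void copyContent( StorageAsset source, StorageAsset target ) throws IOException
{
    // New stream instances per invocation; close them properly after usage
    try ( InputStream in = source.getReadStream( );
          OutputStream out = target.getWriteStream( true ) )   // true: replace existing content
    {
        byte[] buffer = new byte[8192];
        int read;
        while ( ( read = in.read( buffer ) ) != -1 )
        {
            out.write( buffer, 0, read );
        }
    }
    // resolve() walks down from an asset, relativize() computes the relative path back
    String relativePath = source.getParent( ).relativize( source );
    StorageAsset sameAsset = source.getParent( ).resolve( relativePath );
}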

View File

@ -18,6 +18,7 @@ package org.apache.archiva.repository.storage.util;
* under the License.
*/
import org.apache.archiva.repository.storage.AssetType;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang3.StringUtils;
@ -31,8 +32,14 @@ import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.CopyOption;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.List;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
@ -135,45 +142,121 @@ public class StorageUtil
final RepositoryStorage storage = baseDir.getStorage( );
try(Stream<StorageAsset> stream = newAssetStream( baseDir ))
{
if ( stopOnError )
try
{
// Return true, if no exception occurred
// anyMatch is short-circuiting, that means it stops if the condition matches
return !stream.map( a -> {
try
{
storage.removeAsset( a );
// Returning false, if OK
return Boolean.FALSE;
}
catch ( IOException e )
{
LOG.error( "Could not delete asset {}: {}", a.getPath( ), e.getMessage( ), e );
// Returning true, if exception
return Boolean.TRUE;
}
} ).anyMatch( r -> r );
} else {
// Return true, if all removals were OK
// We want to consume all, so we use allMatch
return stream.map( a -> {
try
{
storage.removeAsset( a );
// Returning true, if OK
return Boolean.TRUE;
}
catch ( IOException e )
{
LOG.error( "Could not delete asset {}: {}", a.getPath( ), e.getMessage( ), e );
// Returning false, if exception
return Boolean.FALSE;
if (stopOnError) {
throw new RuntimeException( e );
} else
{
return Boolean.FALSE;
}
}
} ).allMatch( r -> r );
} ).reduce( (a,b)->Boolean.logicalAnd( a,b ) ).orElse( Boolean.FALSE );
} catch ( RuntimeException e ) {
return false;
}
}
}
/**
* Copies the given asset and all child assets recursively to the destination asset.
* @param srcAsset The source directory
* @param destAsset The destination directory
* @param stopOnError if <code>true</code>, the traversal stops when an exception is encountered
* @return <code>true</code>, if every item was copied. If an IOException was encountered during
* traversal it returns <code>false</code>
*/
public static final boolean copyRecursively(final StorageAsset srcAsset, final StorageAsset destAsset, final boolean stopOnError) throws IOException
{
try
{
if ( srcAsset.isFileBased( ) && destAsset.isFileBased( ) )
{
Path src = srcAsset.getFilePath( );
Path dest = destAsset.getFilePath( );
return Files.walk( src )
.map( source -> {
try
{
Files.copy( source, dest.resolve( src.relativize( source ) ),
StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.COPY_ATTRIBUTES );
return Boolean.TRUE;
}
catch ( IOException e )
{
if ( stopOnError )
{
throw new RuntimeException( e );
}
else
{
return Boolean.FALSE;
}
}
} ).reduce( ( a, b ) -> Boolean.logicalAnd( a, b ) ).get();
}
else
{
try ( Stream<StorageAsset> stream = newAssetStream( srcAsset ) )
{
if (!destAsset.exists() && srcAsset.isContainer()) {
destAsset.create( AssetType.CONTAINER );
}
return stream.map( a -> {
try
{
String relativePath = destAsset.relativize( a );
System.out.println( "Destination relative: " + relativePath );
StorageAsset destFile = destAsset.resolve( relativePath );
assert destFile != null;
System.out.println( "Destination " + destFile.getPath( ) + " " + a.isContainer() );
if (a.isContainer()) {
destFile.create( AssetType.CONTAINER);
} else {
if (!destFile.getParent( ).exists() ) {
System.out.println( "Creating parent " + destFile.getParent( ) );
destFile.getParent().create( AssetType.CONTAINER );
}
System.out.println( "Copying " + a.getPath( ) + "->" + destFile.getPath( ) );
copy( a.getReadChannel( ), destFile.getWriteChannel( true ) );
}
return Boolean.TRUE;
}
catch ( IOException e )
{
LOG.error( "Could not copy asset {}: {}", a.getPath( ), e.getMessage( ), e );
// Abort or continue, depending on stopOnError
if ( stopOnError )
{
throw new RuntimeException( e );
}
else
{
return Boolean.FALSE;
}
}
} ).reduce( ( a, b ) -> Boolean.logicalAnd( a, b ) ).orElse(Boolean.FALSE);
}
}
} catch (RuntimeException e) {
System.err.println( "Exception " + e.getMessage( ) );
e.printStackTrace( );
return false;
}
}
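A minimal usage sketch of copyRecursively with two filesystem backed storages; the directory names are assumptions:

static boolean mirror( ) throws IOException
{
    FilesystemStorage source = new FilesystemStorage( Paths.get( "repo-a" ), new DefaultFileLockManager( ) );
    FilesystemStorage target = new FilesystemStorage( Paths.get( "repo-b" ), new DefaultFileLockManager( ) );
    // stopOnError=false: continue after individual failures; the result tells whether every asset was copied
    return StorageUtil.copyRecursively( source.getRoot( ), target.getRoot( ), false );
}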
/**
* Moves an asset between different storage instances.
* If you know that source and asset are from the same storage instance, the move method of the storage

View File

@ -19,21 +19,29 @@ package org.apache.archiva.repository.storage.mock;
* under the License.
*/
import org.apache.archiva.repository.storage.AssetType;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.util.StorageUtil;
import org.apache.commons.lang3.StringUtils;
import org.mockito.ArgumentMatcher;
import org.mockito.Mock;
import org.mockito.Mockito;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import static org.mockito.Matchers.any;
public class MockAsset implements StorageAsset
{
@ -43,24 +51,37 @@ public class MockAsset implements StorageAsset
private LinkedHashMap<String, MockAsset> children = new LinkedHashMap<>( );
private boolean container = false;
private RepositoryStorage storage;
private boolean exists = false;
private boolean throwException;
public MockAsset( String name ) {
this.parent = null;
this.name = name;
this.path = "/";
}
public MockAsset( MockAsset parent, String name ) {
if (parent!=null && "".equals(name)) {
throw new RuntimeException( "Bad asset creation with empty name and parent" );
}
this.parent = parent;
this.path = (parent.hasParent()?parent.getPath( ):"") + "/" + name;
this.path = getPath( parent, name );
this.name = name;
this.storage = parent.getStorage( );
parent.registerChild( this );
}
private String getPath(MockAsset parent, String name) {
if (parent.hasParent() && !parent.getPath( ).equals( "/" )) {
return parent.getPath( ) + "/" + name;
} else {
return "/" + name;
}
}
public void registerChild(MockAsset child) {
children.putIfAbsent( child.getName(), child );
this.container = true;
@ -136,25 +157,28 @@ public class MockAsset implements StorageAsset
@Override
public InputStream getReadStream( ) throws IOException
{
return null;
return Mockito.mock( InputStream.class );
}
@Override
public ReadableByteChannel getReadChannel( ) throws IOException
{
return null;
ReadableByteChannel channel = Mockito.mock( ReadableByteChannel.class );
Mockito.when( channel.read( any( ByteBuffer.class ) ) ).thenReturn( -1 );
return channel;
}
@Override
public OutputStream getWriteStream( boolean replace ) throws IOException
{
return null;
return Mockito.mock( OutputStream.class );
}
@Override
public WritableByteChannel getWriteChannel( boolean replace ) throws IOException
{
return null;
this.exists = true;
return Mockito.mock( WritableByteChannel.class );
}
@Override
@ -166,13 +190,22 @@ public class MockAsset implements StorageAsset
@Override
public boolean exists( )
{
return false;
return exists;
}
@Override
public void create( ) throws IOException
{
this.exists = true;
}
@Override
public void create( AssetType type ) throws IOException
{
if (type.equals( AssetType.CONTAINER )) {
this.container = true;
}
this.exists = true;
}
@Override
@ -205,7 +238,39 @@ public class MockAsset implements StorageAsset
if (children.containsKey( toPath )) {
return children.get( toPath );
} else {
return null;
if (toPath.startsWith( "/" )) {
toPath = StringUtils.removeStart( toPath, "/" );
}
if ( "".equals( toPath ) )
{
return this;
}
String[] destPath = toPath.split( "/" );
StringBuilder destPathStr = new StringBuilder( );
MockAsset destParent = this;
for (int i=0; i<destPath.length; i++) {
destPathStr.append( "/" ).append( destPath[i] );
StorageAsset child = storage.getAsset( destPathStr.toString( ) );
if (child!=null) {
destParent = (MockAsset) child;
} else
{
System.out.println( "Resolve " + destParent.getPath( ) + " -- " + destPath[i] );
destParent = new MockAsset( destParent, destPath[i] );
}
}
return destParent;
}
}
@Override
public String relativize( StorageAsset asset )
{
System.out.println( "relativize this " + this.getPath( ) + " -> other " + asset.getPath( ) );
if (asset.isFileBased()) {
return Paths.get( getPath( ) ).relativize( asset.getFilePath( ) ).toString();
} else {
return StringUtils.removeStart( asset.getPath( ), this.getPath( ) );
}
}

View File

@ -18,6 +18,7 @@ package org.apache.archiva.repository.storage.mock;
* under the License.
*/
import org.apache.archiva.repository.storage.AssetType;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.util.VisitStatus;
@ -48,12 +49,30 @@ public class MockStorage implements RepositoryStorage
public MockStorage( MockAsset root )
{
this.root = root;
try
{
this.root.create( AssetType.CONTAINER );
}
catch ( IOException e )
{
e.printStackTrace( );
}
root.setStorage( this );
assets.put( "/", root );
}
public MockStorage() {
this.root = new MockAsset( "" );
this.root = new MockAsset( "/" );
try
{
this.root.create( AssetType.CONTAINER );
}
catch ( IOException e )
{
e.printStackTrace( );
}
this.root.setStorage( this );
assets.put( "/", this.root );
}
public VisitStatus getStatus() {
@ -85,8 +104,9 @@ public class MockStorage implements RepositoryStorage
}
@Override
public StorageAsset getAsset( String path )
public StorageAsset getAsset( final String requestedPath )
{
String path = requestedPath.startsWith( "/" ) ? requestedPath : "/"+requestedPath;
if (assets.containsKey( path )) {
return assets.get( path );
}

View File

@ -0,0 +1,234 @@
package org.apache.archiva.repository.storage.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.repository.storage.AssetType;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.mock.MockStorage;
import org.apache.archiva.repository.storage.util.StorageUtil;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.junit.jupiter.api.Assertions.*;
/**
* Abstract test class for the storage utility to test for different storage implementations.
*
* @author Martin Stockhammer <martin_s@apache.org>
*/
public abstract class AbstractStorageUtilTest
{
private static final int LEVEL1 = 12;
private static final int LEVEL2 = 13;
private static final int LEVEL3 = 6;
/**
* A subclass must override this method. This method returns a new asset instance with the given parent.
*
* @param parent the parent asset for the newly created asset
* @param name the name of the new asset
* @return the asset
*/
protected abstract StorageAsset createAsset( StorageAsset parent, String name, AssetType type );
protected StorageAsset createAsset(StorageAsset parent, String name) {
return createAsset( parent, name, AssetType.FILE );
}
/**
* A subclass must override this method. This method returns a new root asset instance without parent.
* @return the newly created asset instance
*/
protected abstract StorageAsset createRootAsset( );
/**
* Activates an exception on a certain asset in the storage
* @param root the root asset
*/
protected abstract void activateException( StorageAsset root );
/**
* A subclass should override this method. This method creates a new storage instance with the given root element.
*
* @param root the root asset
* @return the storage instance
*/
protected abstract RepositoryStorage createStorage( StorageAsset root );
protected StorageAsset createTree( )
{
return createTree( LEVEL1, LEVEL2, LEVEL3 );
}
protected StorageAsset createTree( int... levelElements )
{
StorageAsset root = createRootAsset( );
recurseSubTree( root, 0, levelElements );
return root;
}
private void recurseSubTree( StorageAsset parent, int level, int[] levelElements )
{
if ( level < levelElements.length )
{
AssetType type = ( level == levelElements.length - 1 ) ? AssetType.FILE : AssetType.CONTAINER;
for ( int k = 0; k < levelElements[level]; k++ )
{
String name = parent.getName( ) + String.format( "%03d", k );
StorageAsset asset = createAsset( parent, name, type );
recurseSubTree( asset, level + 1, levelElements );
}
}
}
@Test
void testWalkFromRoot( )
{
StorageAsset root = createTree( );
ConsumeVisitStatus status = new ConsumeVisitStatus( );
StorageUtil.walk( root, status );
int expected = LEVEL1 * LEVEL2 * LEVEL3 + LEVEL1 * LEVEL2 + LEVEL1 + 1;
Assertions.assertEquals( expected, status.size( ) );
StorageAsset first = root.list( ).get( 0 ).list( ).get( 0 ).list( ).get( 0 );
Assertions.assertEquals( first, status.getFirst( ) );
Assertions.assertEquals( root, status.getLast( ) );
}
@Test
void testWalkFromChild( )
{
StorageAsset root = createTree( );
ConsumeVisitStatus status = new ConsumeVisitStatus( );
StorageAsset testRoot = root.list( ).get( 3 );
StorageUtil.walk( testRoot, status );
int expected = LEVEL2 * LEVEL3 + LEVEL2 + 1;
Assertions.assertEquals( expected, status.size( ) );
StorageAsset first = root.list( ).get( 3 ).list( ).get( 0 ).list( ).get( 0 );
Assertions.assertEquals( first, status.getFirst( ) );
Assertions.assertEquals( testRoot, status.getLast( ) );
}
@Test
void testWalkFromRootWithCondition( )
{
StorageAsset root = createTree( );
StopVisitStatus status = new StopVisitStatus( );
status.setStopCondition( a -> a.getName( ).equals( "001002003" ) );
StorageUtil.walk( root, status );
Assertions.assertEquals( "001002003", status.getLast( ).getName( ) );
int expected = LEVEL2 * LEVEL3 + LEVEL2 + 2 * LEVEL3 + 1 + 1 + 1 + 4;
Assertions.assertEquals( expected, status.size( ) );
}
@Test
void testStream( )
{
StorageAsset root = createTree( );
List<StorageAsset> result;
try ( Stream<StorageAsset> stream = StorageUtil.newAssetStream( root, false ) )
{
result = stream.filter( a -> a.getName( ).startsWith( "001" ) ).collect( Collectors.toList( ) );
}
int expected = LEVEL2 * LEVEL3 + LEVEL2 + 1;
Assertions.assertEquals( expected, result.size( ) );
Assertions.assertEquals( "001", result.get( result.size( ) - 1 ).getName( ) );
Assertions.assertEquals( "001012", result.get( result.size( ) - 2 ).getName( ) );
}
@Test
void testStreamParallel( )
{
StorageAsset root = createTree( );
List<StorageAsset> result;
try ( Stream<StorageAsset> stream = StorageUtil.newAssetStream( root, true ) )
{
result = stream.filter( a -> a.getName( ).startsWith( "001" ) ).collect( Collectors.toList( ) );
}
int expected = LEVEL2 * LEVEL3 + LEVEL2 + 1;
Assertions.assertEquals( expected, result.size( ) );
}
@Test
void testDelete( )
{
StorageAsset root = createTree( );
RepositoryStorage storage = createStorage( root );
StorageUtil.deleteRecursively( root );
int expected = LEVEL1 * LEVEL2 * LEVEL3 + LEVEL1 * LEVEL2 + LEVEL1 + 1;
testDeletionStatus( expected, storage );
}
protected abstract void testDeletionStatus( int expected, RepositoryStorage storage );
@Test
void testDeleteWithException( )
{
StorageAsset root = createTree( );
RepositoryStorage storage = createStorage( root );
activateException( root );
StorageUtil.deleteRecursively( root );
int expected = LEVEL1 * LEVEL2 * LEVEL3 + LEVEL1 * LEVEL2 + LEVEL1 + 1;
testDeletionStatus( expected, storage );
}
@Test
void testDeleteWithExceptionFailFast( )
{
StorageAsset root = createTree( );
RepositoryStorage storage = createStorage( root );
activateException( root );
StorageUtil.deleteRecursively( root, true );
int expected = 113;
testDeletionStatus( expected, storage );
}
@Test
void testCopyRecursive( ) throws IOException
{
StorageAsset root = createTree( );
createStorage( root );
StorageAsset destinationRoot = createRootAsset( );
RepositoryStorage destinationStorage = createStorage( destinationRoot );
StorageAsset destination = destinationStorage.getAsset( "" );
boolean result = StorageUtil.copyRecursively( root, destination, false );
Assertions.assertTrue( result );
Assertions.assertTrue( destination.exists( ) );
Assertions.assertTrue( destination.resolve( "000/000000/000000000" ).exists( ) );
Assertions.assertTrue( destination.resolve( "011/011000/011000000" ).exists( ) );
Assertions.assertTrue( destination.resolve( "010/010000/010000000" ).exists( ) );
Assertions.assertTrue( destination.resolve( "000/000000/000000000" ).exists( ) );
}
}

View File

@ -19,157 +19,71 @@ package org.apache.archiva.repository.storage.util;
*/
import org.apache.archiva.repository.storage.AssetType;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.mock.MockAsset;
import org.apache.archiva.repository.storage.mock.MockStorage;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.junit.jupiter.api.Assertions.*;
import org.junit.jupiter.api.Assertions;
/**
* @author Martin Stockhammer <martin_s@apache.org>
*/
class StorageUtilTest
class StorageUtilTest extends AbstractStorageUtilTest
{
private static int LEVEL1 = 12;
private static int LEVEL2 = 13;
private static int LEVEL3 = 6;
private MockAsset createTree() {
return createTree( LEVEL1, LEVEL2, LEVEL3 );
private MockStorage createStorage(MockAsset root) {
return new MockStorage( root );
}
private MockAsset createTree(int... levelElements) {
MockAsset root = new MockAsset( "" );
recurseSubTree( root, 0, levelElements );
return root;
}
private void recurseSubTree(MockAsset parent, int level, int[] levelElements) {
if (level < levelElements.length)
private MockAsset createAsset( MockAsset parent, String name, AssetType type ) {
if (parent==null) {
return new MockAsset( name );
} else
{
for ( int k = 0; k < levelElements[level]; k++ )
{
String name = parent.getName( ) + String.format( "%03d", k );
MockAsset asset = new MockAsset( parent, name );
recurseSubTree( asset, level + 1, levelElements );
}
return new MockAsset( parent, name );
}
}
@Test
void testWalkFromRoot() {
StorageAsset root = createTree( );
ConsumeVisitStatus status = new ConsumeVisitStatus( );
StorageUtil.walk( root, status );
int expected = LEVEL1 * LEVEL2 * LEVEL3 + LEVEL1 * LEVEL2 + LEVEL1 + 1;
assertEquals( expected, status.size() );
StorageAsset first = root.list( ).get( 0 ).list( ).get( 0 ).list().get(0);
assertEquals( first, status.getFirst( ) );
assertEquals( root, status.getLast( ) );
}
@Test
void testWalkFromChild() {
StorageAsset root = createTree( );
ConsumeVisitStatus status = new ConsumeVisitStatus( );
StorageAsset testRoot = root.list( ).get( 3 );
StorageUtil.walk( testRoot, status );
int expected = LEVEL2 * LEVEL3 + LEVEL2 + 1;
assertEquals( expected, status.size() );
StorageAsset first = root.list( ).get( 3 ).list( ).get( 0 ).list().get(0);
assertEquals( first, status.getFirst( ) );
assertEquals( testRoot, status.getLast( ) );
}
@Test
void testWalkFromRootWithCondition() {
StorageAsset root = createTree( );
StopVisitStatus status = new StopVisitStatus( );
status.setStopCondition( a -> a.getName().equals("001002003") );
StorageUtil.walk( root, status );
assertEquals( "001002003", status.getLast( ).getName() );
int expected = LEVEL2 * LEVEL3 + LEVEL2 + 2 * LEVEL3 + 1 + 1 + 1 + 4;
assertEquals( expected, status.size() );
}
@Test
void testStream() {
StorageAsset root = createTree( );
ConsumeVisitStatus status = new ConsumeVisitStatus( );
List<StorageAsset> result;
try ( Stream<StorageAsset> stream = StorageUtil.newAssetStream( root, false ) )
{
result = stream.filter( a -> a.getName( ).startsWith( "001" ) ).collect( Collectors.toList());
}
int expected = LEVEL2 * LEVEL3 + LEVEL2 + 1;
assertEquals( expected, result.size( ) );
assertEquals( "001", result.get( result.size( ) - 1 ).getName() );
assertEquals( "001012", result.get( result.size( ) - 2 ).getName() );
}
@Test
void testStreamParallel() {
StorageAsset root = createTree( );
ConsumeVisitStatus status = new ConsumeVisitStatus( );
List<StorageAsset> result;
try ( Stream<StorageAsset> stream = StorageUtil.newAssetStream( root, true ) )
{
result = stream.filter( a -> a.getName( ).startsWith( "001" ) ).collect( Collectors.toList());
}
int expected = LEVEL2 * LEVEL3 + LEVEL2 + 1;
assertEquals( expected, result.size( ) );
}
@Test
void testDelete() throws IOException
{
MockAsset root = createTree( );
MockStorage storage = new MockStorage( root );
StorageUtil.deleteRecursively( root );
int expected = LEVEL1 * LEVEL2 * LEVEL3 + LEVEL1 * LEVEL2 + LEVEL1 + 1;
assertEquals( expected, storage.getStatus( ).size( MockStorage.REMOVE ) );
}
@Test
void testDeleteWithException() throws IOException
{
MockAsset root = createTree( );
MockStorage storage = new MockStorage( root );
protected void activateException(MockAsset root) {
root.list( ).get( 1 ).list( ).get( 2 ).setThrowException( true );
StorageUtil.deleteRecursively( root );
int expected = LEVEL1 * LEVEL2 * LEVEL3 + LEVEL1 * LEVEL2 + LEVEL1 + 1;
assertEquals( expected, storage.getStatus( ).size( MockStorage.REMOVE ) );
}
@Test
void testDeleteWithExceptionFailFast() throws IOException
@Override
protected StorageAsset createAsset( StorageAsset parent, String name, AssetType type )
{
MockAsset root = createTree( );
MockStorage storage = new MockStorage( root );
root.list( ).get( 1 ).list( ).get( 2 ).setThrowException( true );
return createAsset( (MockAsset) parent, name, type);
}
StorageUtil.deleteRecursively( root, true );
int expected = 113;
assertEquals( expected, storage.getStatus( ).size( MockStorage.REMOVE ) );
@Override
protected StorageAsset createRootAsset( )
{
return new MockAsset( "" );
}
@Override
protected void activateException( StorageAsset root )
{
activateException( (MockAsset)root );
}
@Override
protected RepositoryStorage createStorage( StorageAsset root )
{
return new MockStorage( (MockAsset) root );
}
protected void testDeletionStatus( int expected, RepositoryStorage storage )
{
if ( storage instanceof MockStorage )
{
Assertions.assertEquals( expected, ( (MockStorage) storage ).getStatus( ).size( MockStorage.REMOVE ) );
}
else
{
Assertions.fail( "Deletion status not implemented for this storage " + storage.getClass( ).getName( ) );
}
}
}

View File

@ -65,6 +65,15 @@
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-api</artifactId>
<classifier>tests</classifier>
<type>test-jar</type>
<scope>test</scope>
<version>${project.version}</version>
</dependency>
</dependencies>
<build>

View File

@ -18,12 +18,14 @@ package org.apache.archiva.repository.storage.fs;
* under the License.
*/
import org.apache.archiva.repository.storage.AssetType;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@ -89,7 +91,7 @@ public class FilesystemAsset implements StorageAsset, Comparable {
boolean supportsAcl = false;
boolean supportsPosix = false;
final boolean setPermissionsForNew;
final RepositoryStorage storage;
final FilesystemStorage storage;
boolean directoryHint = false;
@ -97,7 +99,7 @@ public class FilesystemAsset implements StorageAsset, Comparable {
private static final OpenOption[] APPEND_OPTIONS = new OpenOption[]{StandardOpenOption.APPEND};
FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath) {
FilesystemAsset(FilesystemStorage storage, String path, Path assetPath, Path basePath) {
this.assetPath = assetPath;
this.relativePath = normalizePath(path);
this.setPermissionsForNew=false;
@ -113,7 +115,7 @@ public class FilesystemAsset implements StorageAsset, Comparable {
* @param path The logical path for the asset relative to the repository.
* @param assetPath The asset path.
*/
public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath) {
public FilesystemAsset(FilesystemStorage storage, String path, Path assetPath) {
this.assetPath = assetPath;
this.relativePath = normalizePath(path);
this.setPermissionsForNew = false;
@ -135,7 +137,7 @@ public class FilesystemAsset implements StorageAsset, Comparable {
* @param directory This is only relevant, if the represented file or directory does not exist yet and
* is a hint.
*/
public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory) {
public FilesystemAsset(FilesystemStorage storage, String path, Path assetPath, Path basePath, boolean directory) {
this.assetPath = assetPath;
this.relativePath = normalizePath(path);
this.directoryHint = directory;
@ -154,7 +156,7 @@ public class FilesystemAsset implements StorageAsset, Comparable {
* @param directory This is only relevant, if the represented file or directory does not exist yet and
* is a hint.
*/
public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory, boolean setPermissionsForNew) {
public FilesystemAsset(FilesystemStorage storage, String path, Path assetPath, Path basePath, boolean directory, boolean setPermissionsForNew) {
this.assetPath = assetPath;
this.relativePath = normalizePath(path);
this.directoryHint = directory;
@ -231,7 +233,7 @@ public class FilesystemAsset implements StorageAsset, Comparable {
@Override
public String getName() {
return assetPath.getFileName().toString();
return hasParent( ) ? assetPath.getFileName( ).toString( ) : "";
}
@Override
@ -277,7 +279,9 @@ public class FilesystemAsset implements StorageAsset, Comparable {
@Override
public List<StorageAsset> list() {
try {
return Files.list(assetPath).map(p -> new FilesystemAsset(storage, relativePath + "/" + p.getFileName().toString(), assetPath.resolve(p), this.basePath))
return Files.list(assetPath)
.sorted()
.map(p -> new FilesystemAsset(storage, relativePath + "/" + p.getFileName().toString(), assetPath.resolve(p), this.basePath))
.collect(Collectors.toList());
} catch (IOException e) {
return Collections.EMPTY_LIST;
@ -505,6 +509,37 @@ public class FilesystemAsset implements StorageAsset, Comparable {
}
}
@Override
public void create( AssetType type ) throws IOException {
if (!Files.exists(assetPath)) {
if (type.equals( AssetType.CONTAINER ) || directoryHint) {
Files.createDirectories(assetPath);
} else {
if (!Files.exists( assetPath.getParent() )) {
Files.createDirectories( assetPath.getParent( ) );
}
Files.createFile(assetPath);
}
if (setPermissionsForNew) {
applyDefaultPermissions(assetPath);
}
}
}
@Override
public String relativize( StorageAsset asset )
{
if (asset instanceof FilesystemAsset ) {
return this.relativize( (FilesystemAsset) asset );
} else {
return StringUtils.removeStart( asset.getPath( ), this.getPath( ) );
}
}
public String relativize( FilesystemAsset asset) {
return this.getFilePath( ).relativize( asset.getFilePath( ) ).toString();
}
@Override
public String toString() {
return relativePath+":"+assetPath;

View File

@ -0,0 +1,120 @@
package org.apache.archiva.repository.storage.fs;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.repository.storage.AssetType;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.util.AbstractStorageUtilTest;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.TestInstance;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.fail;
/**
* @author Martin Stockhammer <martin_s@apache.org>
*/
@TestInstance( TestInstance.Lifecycle.PER_CLASS)
public class FileSystemStorageUtilTest extends AbstractStorageUtilTest
{
List<Path> tmpDirs = new ArrayList<>( );
@Override
protected StorageAsset createAsset( StorageAsset parent, String name, AssetType type )
{
if (parent instanceof FilesystemAsset) {
return createAsset( (FilesystemAsset) parent, name, type );
} else {
fail( "Bad asset instance type" );
return null;
}
}
@AfterAll
void cleanup() {
for( Path dir : tmpDirs) {
if (Files.exists( dir )) {
FileUtils.deleteQuietly( dir.toFile( ) );
}
}
}
private FilesystemAsset createAsset(FilesystemAsset parent, String name, AssetType type) {
FilesystemAsset asset = (FilesystemAsset) parent.resolve( name );
try
{
asset.create(type);
return asset;
}
catch ( IOException e )
{
fail( "Could not create asset " + e.getMessage( ) );
return null;
}
}
@Override
protected StorageAsset createRootAsset( )
{
try
{
Path tmpDir = Files.createTempDirectory( "testfs" );
tmpDirs.add( tmpDir );
FilesystemStorage storage = new FilesystemStorage( tmpDir, new DefaultFileLockManager( ) );
return storage.getRoot( );
}
catch ( IOException e )
{
fail( "Could not create storage" );
return null;
}
}
@Override
protected void activateException( StorageAsset root )
{
// Not done here
}
@Override
protected RepositoryStorage createStorage( StorageAsset root )
{
if (root instanceof FilesystemAsset) {
return root.getStorage( );
} else {
fail( "Wrong asset implementation " + root.getClass( ).getName( ) );
return null;
}
}
@Override
protected void testDeletionStatus( int expected, RepositoryStorage storage )
{
assertFalse( Files.exists( storage.getRoot( ).getFilePath( ) ) );
}
}

View File

@ -135,6 +135,18 @@ public class ManagedRepositoryContentMock implements BaseRepositoryContentLayout
}
@Override
public void copyItem( ContentItem item, ManagedRepository destinationRepository ) throws ItemNotFoundException, ContentAccessException
{
}
@Override
public void copyItem( ContentItem item, ManagedRepository destinationRepository, boolean updateMetadata ) throws ItemNotFoundException, ContentAccessException
{
}
@Override
public ContentItem getItem( ItemSelector selector ) throws ContentAccessException, IllegalArgumentException
{

View File

@ -286,6 +286,18 @@ public class ManagedDefaultRepositoryContent
}
}
@Override
public void copyItem( ContentItem item, ManagedRepository destinationRepository ) throws ItemNotFoundException, ContentAccessException
{
}
@Override
public void copyItem( ContentItem item, ManagedRepository destinationRepository, boolean updateMetadata ) throws ItemNotFoundException, ContentAccessException
{
}
@Override
public ContentItem getItem( ItemSelector selector ) throws ContentAccessException, IllegalArgumentException
{

View File

@ -35,9 +35,11 @@ package org.apache.archiva.rest.api.model.v2;/*
*/
import io.swagger.v3.oas.annotations.media.Schema;
import org.apache.archiva.repository.storage.StorageAsset;
import java.io.Serializable;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
/**
* @author Martin Stockhammer <martin_s@apache.org>
@ -50,7 +52,19 @@ public class FileInfo implements Serializable
private String fileName;
private String path;
@Schema(description = "Time when the file was last modified")
public FileInfo( )
{
}
public static FileInfo of( StorageAsset asset ) {
FileInfo fileInfo = new FileInfo( );
fileInfo.setFileName( asset.getName() );
fileInfo.setPath( asset.getPath() );
fileInfo.setModified( asset.getModificationTime( ).atOffset( ZoneOffset.UTC ) );
return fileInfo;
}
@Schema(description = "Time when the file was last modified")
public OffsetDateTime getModified( )
{
return modified;

View File

@ -35,9 +35,17 @@ package org.apache.archiva.rest.api.model.v2;/*
*/
import io.swagger.v3.oas.annotations.media.Schema;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.features.ArtifactCleanupFeature;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.StagingRepositoryFeature;
import java.time.Period;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/**
* @author Martin Stockhammer <martin_s@apache.org>
@ -49,6 +57,52 @@ public class MavenManagedRepository extends Repository
boolean blocksRedeployments;
List<String> releaseSchemes = new ArrayList<>( );
boolean deleteSnapshotsOfRelease = false;
private Period retentionPeriod;
private int retentionCount;
private String indexPath;
private String packedIndexPath;
private boolean skipPackedIndexCreation;
private boolean hasStagingRepository;
private String stagingRepository;
public MavenManagedRepository( )
{
super.setCharacteristic( Repository.CHARACTERISTIC_MANAGED );
super.setType( RepositoryType.MAVEN.name( ) );
}
protected static void update(MavenManagedRepository repo, ManagedRepository beanRepo) {
repo.setDescription( beanRepo.getDescription() );
repo.setId( beanRepo.getId() );
repo.setIndex( true );
repo.setLayout( beanRepo.getLayout() );
repo.setBlocksRedeployments( beanRepo.blocksRedeployments() );
repo.setReleaseSchemes( beanRepo.getActiveReleaseSchemes().stream().map( Objects::toString).collect( Collectors.toList()) );
repo.setLocation( beanRepo.getLocation().toString() );
repo.setName( beanRepo.getName());
repo.setScanned( beanRepo.isScanned() );
repo.setSchedulingDefinition( beanRepo.getSchedulingDefinition() );
ArtifactCleanupFeature artifactCleanupFeature = beanRepo.getFeature( ArtifactCleanupFeature.class ).get( );
repo.setDeleteSnapshotsOfRelease( artifactCleanupFeature.isDeleteReleasedSnapshots());
repo.setRetentionCount( artifactCleanupFeature.getRetentionCount());
repo.setRetentionPeriod( artifactCleanupFeature.getRetentionPeriod() );
IndexCreationFeature icf = beanRepo.getFeature( IndexCreationFeature.class ).get( );
repo.setIndex( icf.hasIndex( ) );
repo.setIndexPath( icf.getIndexPath( ).getPath( ) );
repo.setPackedIndexPath( icf.getPackedIndexPath( ).getPath( ) );
repo.setSkipPackedIndexCreation( icf.isSkipPackedIndexCreation() );
StagingRepositoryFeature srf = beanRepo.getFeature( StagingRepositoryFeature.class ).get( );
repo.setHasStagingRepository( srf.isStageRepoNeeded( ) );
repo.setStagingRepository( srf.getStagingRepository()!=null?srf.getStagingRepository().getId():"" );
}
public static MavenManagedRepository of( ManagedRepository beanRepo ) {
MavenManagedRepository repo = new MavenManagedRepository( );
update( repo, beanRepo );
return repo;
}
@Schema(name="blocks_redeployments",description = "True, if redeployments to this repository are not allowed")
public boolean isBlocksRedeployments( )
@ -72,6 +126,101 @@ public class MavenManagedRepository extends Repository
this.releaseSchemes = new ArrayList<>( releaseSchemes );
}
public void addReleaseScheme(String scheme) {
if (!this.releaseSchemes.contains( scheme ))
{
this.releaseSchemes.add( scheme );
}
}
@Schema(name="delete_snaphots_of_release", description = "True, if snapshots are deleted, after a version is released")
public boolean isDeleteSnapshotsOfRelease( )
{
return deleteSnapshotsOfRelease;
}
public void setDeleteSnapshotsOfRelease( boolean deleteSnapshotsOfRelease )
{
this.deleteSnapshotsOfRelease = deleteSnapshotsOfRelease;
}
@Schema(name="retention_period", description = "The period after which snapshots are deleted.")
public Period getRetentionPeriod( )
{
return retentionPeriod;
}
public void setRetentionPeriod( Period retentionPeriod )
{
this.retentionPeriod = retentionPeriod;
}
@Schema(name="retention_count", description = "Number of snapshot artifacts to keep.")
public int getRetentionCount( )
{
return retentionCount;
}
public void setRetentionCount( int retentionCount )
{
this.retentionCount = retentionCount;
}
@Schema( name = "index_path", description = "Path to the directory that contains the index, relative to the repository base directory" )
public String getIndexPath( )
{
return indexPath;
}
public void setIndexPath( String indexPath )
{
this.indexPath = indexPath;
}
@Schema( name = "packed_index_path", description = "Path to the directory that contains the packed index, relative to the repository base directory" )
public String getPackedIndexPath( )
{
return packedIndexPath;
}
public void setPackedIndexPath( String packedIndexPath )
{
this.packedIndexPath = packedIndexPath;
}
@Schema(name="skip_packed_index_creation", description = "True, if packed index is not created during index update")
public boolean isSkipPackedIndexCreation( )
{
return skipPackedIndexCreation;
}
public void setSkipPackedIndexCreation( boolean skipPackedIndexCreation )
{
this.skipPackedIndexCreation = skipPackedIndexCreation;
}
@Schema(name="has_staging_repository", description = "True, if this repository has a staging repository assigned")
public boolean isHasStagingRepository( )
{
return hasStagingRepository;
}
public void setHasStagingRepository( boolean hasStagingRepository )
{
this.hasStagingRepository = hasStagingRepository;
}
@Schema(name="staging_repository", description = "The id of the assigned staging repository")
public String getStagingRepository( )
{
return stagingRepository;
}
public void setStagingRepository( String stagingRepository )
{
this.stagingRepository = stagingRepository;
}
@Override
public boolean equals( Object o )
{

View File

@ -0,0 +1,47 @@
package org.apache.archiva.rest.api.model.v2;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.repository.ManagedRepository;
import java.io.Serializable;
/**
* @author Martin Stockhammer <martin_s@apache.org>
*/
public class MavenManagedRepositoryUpdate extends MavenManagedRepository implements Serializable
{
private static final long serialVersionUID = -9181643343284109862L;
private boolean resetStats = false;
public static MavenManagedRepositoryUpdate of( ManagedRepository repository ) {
MavenManagedRepositoryUpdate repo = new MavenManagedRepositoryUpdate( );
update( repo, repository );
return repo;
}
public boolean isResetStats( )
{
return resetStats;
}
public void setResetStats( boolean resetStats )
{
this.resetStats = resetStats;
}
}

View File

@ -78,8 +78,48 @@ public interface ErrorKeys
String TASK_QUEUE_FAILED = PREFIX + "task.queue_failed";
String REPOSITORY_SCAN_FAILED = REPOSITORY_PREFIX + "scan.failed";
String ARTIFACT_EXISTS_AT_DEST = REPOSITORY_PREFIX + "artifact.dest.exists";
String REPOSITORY_REMOTE_INDEX_DOWNLOAD_FAILED = REPOSITORY_PREFIX + "remote.index.download_failed";
String REPOSITORY_WRONG_TYPE = REPOSITORY_PREFIX + "wrong_type";
String REPOSITORY_DELETE_FAILED = REPOSITORY_PREFIX + "delete.failed";
String REPOSITORY_INVALID_ID = REPOSITORY_PREFIX + "invalid.id";
String REPOSITORY_ID_EXISTS = REPOSITORY_PREFIX + "id.exists";
String REPOSITORY_UPDATE_FAILED = REPOSITORY_PREFIX + "update.failed";
String ARTIFACT_NOT_FOUND = REPOSITORY_PREFIX + "artifact.notfound";
String REPOSITORY_LAYOUT_ERROR = REPOSITORY_PREFIX + "layout.error";
String ARTIFACT_COPY_ERROR = REPOSITORY_PREFIX + "artifact.copy.error";
/**
* The given user was not found
* Parameters:
* - User Id
*/
String USER_NOT_FOUND = PREFIX+"user.not_found";
/**
* Error from UserManager
* Parameters:
* - Error Message
*/
String USER_MANAGER_ERROR = PREFIX+"user_manager.error";
/**
* Permission to the repository denied.
* Parameters:
* - Repository Id
* - Permission ID
*/
String PERMISSION_REPOSITORY_DENIED = PREFIX + "permission.repository.denied";
/**
* A generic authorization error thrown during the authorization check.
* Parameters:
* - Error message
*/
String AUTHORIZATION_ERROR = PREFIX + "authorization.error";
/**
* When the operation needs authentication, but no authenticated user was found in the request context.
*/
String NOT_AUTHENTICATED = PREFIX + "user.not_authenticated";
}

View File

@ -23,13 +23,14 @@ import io.swagger.v3.oas.annotations.headers.Header;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.security.OAuthScope;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;
import org.apache.archiva.components.rest.model.PagedResult;
import org.apache.archiva.redback.authorization.RedbackAuthorization;
import org.apache.archiva.rest.api.model.v2.Artifact;
import org.apache.archiva.rest.api.model.v2.FileInfo;
import org.apache.archiva.rest.api.model.v2.MavenManagedRepository;
import org.apache.archiva.rest.api.model.v2.MavenManagedRepositoryUpdate;
import org.apache.archiva.security.common.ArchivaRoleConstants;
import javax.ws.rs.Consumes;
@ -199,14 +200,14 @@ public interface MavenManagedRepositoryService
content = @Content( mediaType = APPLICATION_JSON, schema = @Schema( implementation = ArchivaRestError.class ) ) )
}
)
MavenManagedRepository updateManagedRepository( MavenManagedRepository managedRepository )
MavenManagedRepository updateManagedRepository( @PathParam( "id" ) String repositoryId, MavenManagedRepositoryUpdate managedRepository )
throws ArchivaRestServiceException;
@Path( "{id}/a/{filePath: .+}" )
@Path( "{id}/path/{filePath: .+}" )
@GET
@Produces( {MediaType.APPLICATION_JSON} )
@RedbackAuthorization( permissions = ArchivaRoleConstants.OPERATION_MANAGE_CONFIGURATION )
@RedbackAuthorization( permissions = ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS, resource = "{id}")
@Operation( summary = "Returns the status of a given file in the repository",
security = {
@SecurityRequirement(
@ -229,18 +230,28 @@ public interface MavenManagedRepositoryService
/**
* permissions are checked in impl
* Permissions are checked in impl
* will copy an artifact from the source repository to the target repository
*/
@Path ("{srcId}/a/{path: .+}/copyto/{dstId}")
@Path ("{srcId}/path/{path: .+}/copyto/{dstId}")
@POST
@Produces({APPLICATION_JSON})
@RedbackAuthorization (noPermission = true)
@Operation( summary = "Copies a artifact from the source repository to the destination repository",
security = {
@SecurityRequirement(
name = ArchivaRoleConstants.OPERATION_RUN_INDEXER
name = ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS,
scopes = {
"{srcId}"
}
),
@SecurityRequirement(
name= ArchivaRoleConstants.OPERATION_REPOSITORY_UPLOAD,
scopes = {
"{dstId}"
}
)
},
responses = {
@ApiResponse( responseCode = "200",
@@ -257,7 +268,7 @@ public interface MavenManagedRepositoryService
throws ArchivaRestServiceException;
@Path ("{id}/a/{path: .+}")
@Path ("{id}/path/{path: .+}")
@DELETE
@Consumes ({ APPLICATION_JSON })
@RedbackAuthorization (noPermission = true)
@@ -280,7 +291,7 @@ public interface MavenManagedRepositoryService
Response deleteArtifact( @PathParam( "id" ) String repositoryId, @PathParam( "path" ) String path )
throws ArchivaRestServiceException;
@Path ("{id}/c/{namespace}/{projectId}/{version}")
@Path ( "{id}/co/{group}/{project}/{version}" )
@DELETE
@Produces ({ MediaType.APPLICATION_JSON })
@RedbackAuthorization (noPermission = true)
@@ -301,12 +312,12 @@ public interface MavenManagedRepositoryService
}
)
Response removeProjectVersion( @PathParam ( "id" ) String repositoryId,
@PathParam ( "namespace" ) String namespace, @PathParam ( "projectId" ) String projectId,
@PathParam ( "version" ) String version )
@PathParam ( "group" ) String namespace, @PathParam ( "project" ) String projectId,
@PathParam ( "version" ) String version )
throws org.apache.archiva.rest.api.services.ArchivaRestServiceException;
@Path ( "{id}/c/{namespace}/{projectId}" )
@Path ( "{id}/co/{group}/{project}" )
@DELETE
@Produces ({ MediaType.APPLICATION_JSON })
@RedbackAuthorization (noPermission = true)
@@ -326,10 +337,10 @@ public interface MavenManagedRepositoryService
content = @Content( mediaType = APPLICATION_JSON, schema = @Schema( implementation = ArchivaRestError.class ) ) )
}
)
Response deleteProject( @PathParam ("id") String repositoryId, @PathParam ( "namespace" ) String namespace, @PathParam ("projectId") String projectId )
Response deleteProject( @PathParam ("id") String repositoryId, @PathParam ( "group" ) String namespace, @PathParam ( "project" ) String projectId )
throws org.apache.archiva.rest.api.services.ArchivaRestServiceException;
@Path ( "{id}/c/{namespace}" )
@Path ( "{id}/co/{namespace}" )
@DELETE
@Produces ({ MediaType.APPLICATION_JSON })
@RedbackAuthorization (noPermission = true)
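For illustration, a client-side sketch of the renamed copy endpoint using the standard JAX-RS 2 client API; the service base URL, repository ids, artifact path and bearer token are hypothetical placeholders, not values taken from this change:

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
public class CopyArtifactClientSketch
{
    public static void main( String[] args )
    {
        // Hypothetical deployment root and token; adjust to the real installation.
        String serviceBase = "http://localhost:8080/archiva/api/v2";
        String token = "<bearer token>";
        Client client = ClientBuilder.newClient( );
        try
        {
            // POST {srcId}/path/{path}/copyto/{dstId} copies a single artifact between managed repositories.
            Response response = client.target( serviceBase )
                .path( "repositories/maven/managed/snapshots/path/org/example/demo/1.0/demo-1.0.jar/copyto/internal" )
                .request( MediaType.APPLICATION_JSON )
                .header( "Authorization", "Bearer " + token )
                .post( Entity.json( "" ) );
            System.out.println( "Copy request returned HTTP " + response.getStatus( ) );
        }
        finally
        {
            client.close( );
        }
    }
}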

View File

@@ -17,27 +17,62 @@ package org.apache.archiva.rest.services.v2;
* under the License.
*/
import org.apache.archiva.admin.model.AuditInformation;
import org.apache.archiva.admin.model.RepositoryAdminException;
import org.apache.archiva.admin.model.managed.ManagedRepositoryAdmin;
import org.apache.archiva.components.rest.model.PagedResult;
import org.apache.archiva.components.rest.util.QueryHelper;
import org.apache.archiva.redback.authentication.AuthenticationResult;
import org.apache.archiva.redback.authorization.AuthorizationException;
import org.apache.archiva.redback.rest.services.RedbackAuthenticationThreadLocal;
import org.apache.archiva.redback.rest.services.RedbackRequestInformation;
import org.apache.archiva.redback.system.DefaultSecuritySession;
import org.apache.archiva.redback.system.SecuritySession;
import org.apache.archiva.redback.system.SecuritySystem;
import org.apache.archiva.redback.users.User;
import org.apache.archiva.redback.users.UserManagerException;
import org.apache.archiva.redback.users.UserNotFoundException;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.ReleaseScheme;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.content.ContentItem;
import org.apache.archiva.repository.content.LayoutException;
import org.apache.archiva.repository.storage.fs.FilesystemStorage;
import org.apache.archiva.repository.storage.fs.FsStorageUtil;
import org.apache.archiva.repository.storage.util.StorageUtil;
import org.apache.archiva.rest.api.model.v2.Artifact;
import org.apache.archiva.rest.api.model.v2.FileInfo;
import org.apache.archiva.rest.api.model.v2.MavenManagedRepository;
import org.apache.archiva.rest.api.model.v2.MavenManagedRepositoryUpdate;
import org.apache.archiva.rest.api.services.v2.ArchivaRestServiceException;
import org.apache.archiva.rest.api.services.v2.ErrorKeys;
import org.apache.archiva.rest.api.services.v2.ErrorMessage;
import org.apache.archiva.rest.api.services.v2.MavenManagedRepositoryService;
import org.apache.archiva.security.common.ArchivaRoleConstants;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import javax.inject.Inject;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static org.apache.archiva.security.common.ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS;
import static org.apache.archiva.security.common.ArchivaRoleConstants.OPERATION_REPOSITORY_UPLOAD;
/**
* @author Martin Stockhammer <martin_s@apache.org>
@@ -45,81 +80,196 @@ import java.util.List;
@Service("v2.managedMavenRepositoryService#rest")
public class DefaultMavenManagedRepositoryService implements MavenManagedRepositoryService
{
@Context
HttpServletResponse httpServletResponse;
@Context
UriInfo uriInfo;
private static final Logger log = LoggerFactory.getLogger( DefaultMavenManagedRepositoryService.class );
private static final QueryHelper<org.apache.archiva.admin.model.beans.ManagedRepository> QUERY_HELPER = new QueryHelper<>( new String[]{"id", "name"} );
private static final QueryHelper<ManagedRepository> QUERY_HELPER = new QueryHelper<>( new String[]{"id", "name"} );
static
{
QUERY_HELPER.addStringFilter( "id", org.apache.archiva.admin.model.beans.ManagedRepository::getId );
QUERY_HELPER.addStringFilter( "name", org.apache.archiva.admin.model.beans.ManagedRepository::getName );
QUERY_HELPER.addStringFilter( "location", org.apache.archiva.admin.model.beans.ManagedRepository::getName );
QUERY_HELPER.addBooleanFilter( "snapshot", org.apache.archiva.admin.model.beans.ManagedRepository::isSnapshots );
QUERY_HELPER.addBooleanFilter( "release", org.apache.archiva.admin.model.beans.ManagedRepository::isReleases );
QUERY_HELPER.addNullsafeFieldComparator( "id", org.apache.archiva.admin.model.beans.ManagedRepository::getId );
QUERY_HELPER.addNullsafeFieldComparator( "name", org.apache.archiva.admin.model.beans.ManagedRepository::getName );
QUERY_HELPER.addStringFilter( "id", ManagedRepository::getId );
QUERY_HELPER.addStringFilter( "name", ManagedRepository::getName );
QUERY_HELPER.addStringFilter( "location", (r) -> r.getLocation().toString() );
QUERY_HELPER.addBooleanFilter( "snapshot", (r) -> r.getActiveReleaseSchemes( ).contains( ReleaseScheme.SNAPSHOT ) );
QUERY_HELPER.addBooleanFilter( "release", (r) -> r.getActiveReleaseSchemes().contains( ReleaseScheme.RELEASE ));
QUERY_HELPER.addNullsafeFieldComparator( "id", ManagedRepository::getId );
QUERY_HELPER.addNullsafeFieldComparator( "name", ManagedRepository::getName );
}
private ManagedRepositoryAdmin managedRepositoryAdmin;
private RepositoryRegistry repositoryRegistry;
private SecuritySystem securitySystem;
public DefaultMavenManagedRepositoryService( RepositoryRegistry repositoryRegistry, ManagedRepositoryAdmin managedRepositoryAdmin )
public DefaultMavenManagedRepositoryService( SecuritySystem securitySystem, RepositoryRegistry repositoryRegistry, ManagedRepositoryAdmin managedRepositoryAdmin )
{
this.securitySystem = securitySystem;
this.repositoryRegistry = repositoryRegistry;
this.managedRepositoryAdmin = managedRepositoryAdmin;
}
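/**
 * Builds the audit information from the Redback request thread local. Both the user and the
 * remote address may be null when the request is not authenticated.
 */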
protected AuditInformation getAuditInformation( )
{
RedbackRequestInformation redbackRequestInformation = RedbackAuthenticationThreadLocal.get( );
User user = redbackRequestInformation == null ? null : redbackRequestInformation.getUser( );
String remoteAddr = redbackRequestInformation == null ? null : redbackRequestInformation.getRemoteAddr( );
return new AuditInformation( user, remoteAddr );
}
@Override
public PagedResult<MavenManagedRepository> getManagedRepositories( String searchTerm, Integer offset, Integer limit, List<String> orderBy, String order ) throws ArchivaRestServiceException
public PagedResult<MavenManagedRepository> getManagedRepositories( final String searchTerm, final Integer offset,
final Integer limit, final List<String> orderBy,
final String order ) throws ArchivaRestServiceException
{
try
{
List<org.apache.archiva.admin.model.beans.ManagedRepository> result = managedRepositoryAdmin.getManagedRepositories( );
int totalCount = Math.toIntExact( result.stream( ).count( ) );
Collection<ManagedRepository> repos = repositoryRegistry.getManagedRepositories( );
final Predicate<ManagedRepository> queryFilter = QUERY_HELPER.getQueryFilter( searchTerm ).and( r -> r.getType() == RepositoryType.MAVEN );
final Comparator<ManagedRepository> comparator = QUERY_HELPER.getComparator( orderBy, order );
int totalCount = Math.toIntExact( repos.stream( ).filter( queryFilter ).count( ) );
return PagedResult.of( totalCount, offset, limit, repos.stream( ).filter( queryFilter ).sorted( comparator )
.map(mr -> MavenManagedRepository.of(mr)).skip( offset ).limit( limit ).collect( Collectors.toList( ) ) );
}
catch (ArithmeticException e) {
log.error( "Invalid number of repositories detected." );
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.INVALID_RESULT_SET_ERROR ) );
}
catch ( RepositoryAdminException e )
{
e.printStackTrace( );
}
return null;
}
@Override
public MavenManagedRepository getManagedRepository( String repositoryId ) throws ArchivaRestServiceException
{
return null;
ManagedRepository repo = repositoryRegistry.getManagedRepository( repositoryId );
if (repo==null) {
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_NOT_FOUND, repositoryId ), 404 );
}
if (repo.getType()!=RepositoryType.MAVEN) {
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_WRONG_TYPE, repositoryId, repo.getType().name() ), 404 );
}
return MavenManagedRepository.of( repo );
}
@Override
public Response deleteManagedRepository( String repositoryId, boolean deleteContent ) throws ArchivaRestServiceException
{
return null;
ManagedRepository repo = repositoryRegistry.getManagedRepository( repositoryId );
if (repo==null) {
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_NOT_FOUND, repositoryId ), 404 );
}
if (repo.getType()!=RepositoryType.MAVEN) {
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_WRONG_TYPE, repositoryId, repo.getType().name() ), 404 );
}
try
{
managedRepositoryAdmin.deleteManagedRepository( repositoryId, getAuditInformation( ), deleteContent );
return Response.ok( ).build( );
}
catch ( RepositoryAdminException e )
{
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_DELETE_FAILED, e.getMessage( ) ) );
}
}
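/**
 * Maps the incoming v2 REST model to the admin bean used by ManagedRepositoryAdmin.
 * The repository type is always set to MAVEN; the release and snapshot flags are derived
 * from the release schemes of the REST model.
 */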
private org.apache.archiva.admin.model.beans.ManagedRepository convert(MavenManagedRepository repository) {
org.apache.archiva.admin.model.beans.ManagedRepository repoBean = new org.apache.archiva.admin.model.beans.ManagedRepository( );
repoBean.setId( repository.getId( ) );
repoBean.setName( repository.getName() );
repoBean.setDescription( repository.getDescription() );
repoBean.setBlockRedeployments( repository.isBlocksRedeployments() );
repoBean.setCronExpression( repository.getSchedulingDefinition() );
repoBean.setLocation( repository.getLocation() );
repoBean.setReleases( repository.getReleaseSchemes().contains( ReleaseScheme.RELEASE.name() ) );
repoBean.setSnapshots( repository.getReleaseSchemes().contains( ReleaseScheme.SNAPSHOT.name() ) );
repoBean.setScanned( repository.isScanned() );
repoBean.setDeleteReleasedSnapshots( repository.isDeleteSnapshotsOfRelease() );
repoBean.setSkipPackedIndexCreation( repository.isSkipPackedIndexCreation() );
repoBean.setRetentionCount( repository.getRetentionCount() );
repoBean.setRetentionPeriod( repository.getRetentionPeriod().getDays() );
repoBean.setIndexDirectory( repository.getIndexPath() );
repoBean.setPackedIndexDirectory( repository.getPackedIndexPath() );
repoBean.setLayout( repository.getLayout() );
repoBean.setType( RepositoryType.MAVEN.name( ) );
return repoBean;
}
@Override
public MavenManagedRepository addManagedRepository( MavenManagedRepository managedRepository ) throws ArchivaRestServiceException
{
return null;
final String repoId = managedRepository.getId( );
if ( StringUtils.isEmpty( repoId ) ) {
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_INVALID_ID, repoId ), 422 );
}
Repository repo = repositoryRegistry.getRepository( repoId );
if (repo!=null) {
httpServletResponse.setHeader( "Location", uriInfo.getAbsolutePathBuilder( ).path( repoId ).build( ).toString( ) );
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_ID_EXISTS, repoId ), 303 );
}
try
{
managedRepositoryAdmin.addManagedRepository( convert( managedRepository ), managedRepository.isHasStagingRepository(), getAuditInformation() );
httpServletResponse.setStatus( 201 );
return MavenManagedRepository.of( repositoryRegistry.getManagedRepository( repoId ) );
}
catch ( RepositoryAdminException e )
{
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_ADMIN_ERROR, e.getMessage( ) ) );
}
}
@Override
public MavenManagedRepository updateManagedRepository( MavenManagedRepository managedRepository ) throws ArchivaRestServiceException
public MavenManagedRepository updateManagedRepository( final String repositoryId, final MavenManagedRepositoryUpdate managedRepository ) throws ArchivaRestServiceException
{
return null;
org.apache.archiva.admin.model.beans.ManagedRepository repo = convert( managedRepository );
try
{
managedRepositoryAdmin.updateManagedRepository( repo, managedRepository.isHasStagingRepository( ), getAuditInformation( ), managedRepository.isResetStats( ) );
ManagedRepository newRepo = repositoryRegistry.getManagedRepository( repositoryId );
if (newRepo==null) {
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_UPDATE_FAILED, repositoryId ) );
}
return MavenManagedRepository.of( newRepo );
}
catch ( RepositoryAdminException e )
{
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_ADMIN_ERROR, e.getMessage( ) ) );
}
}
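/**
 * Resolves the given location against the repository layout and returns the file information,
 * if the underlying asset exists; otherwise a 404 style error is raised.
 */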
@Override
public FileInfo getFileStatus( String repositoryId, String fileLocation ) throws ArchivaRestServiceException
{
return null;
ManagedRepository repo = repositoryRegistry.getManagedRepository( repositoryId );
if (repo==null) {
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_NOT_FOUND, repositoryId ), 404 );
}
try
{
ContentItem contentItem = repo.getContent( ).toItem( fileLocation );
if (contentItem.getAsset( ).exists( )) {
return FileInfo.of( contentItem.getAsset( ) );
} else {
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.ARTIFACT_NOT_FOUND, repositoryId, fileLocation ), 404 );
}
}
catch ( LayoutException e )
{
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_LAYOUT_ERROR, e.getMessage( ) ) );
}
}
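/**
 * Copies a single asset from the source to the destination repository. The caller must be
 * authenticated, must have read access on the source and upload permission on the destination
 * (both verified in checkAuthority). The copy is rejected if the source path does not exist
 * or the destination already contains it.
 */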
@Override
public Response copyArtifact( String srcRepositoryId, String dstRepositoryId,
String path ) throws ArchivaRestServiceException
{
final AuditInformation auditInformation = getAuditInformation( );
final String userName = auditInformation.getUser( ) == null ? null : auditInformation.getUser( ).getUsername( );
if ( StringUtils.isEmpty( userName ) )
{
httpServletResponse.setHeader( "WWW-Authenticate", "Bearer realm=\"archiva\"" );
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.NOT_AUTHENTICATED ), 401 );
}
ManagedRepository srcRepo = repositoryRegistry.getManagedRepository( srcRepositoryId );
if (srcRepo==null) {
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_NOT_FOUND, srcRepositoryId ), 404 );
@@ -128,17 +278,89 @@ public class DefaultMavenManagedRepositoryService implements MavenManagedReposit
if (dstRepo==null) {
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_NOT_FOUND, dstRepositoryId ), 404 );
}
if (dstRepo.getAsset( path ).exists()) {
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.ARTIFACT_EXISTS_AT_DEST, path ) );
checkAuthority( userName, srcRepositoryId, dstRepositoryId );
try
{
ContentItem srcItem = srcRepo.getContent( ).toItem( path );
ContentItem dstItem = dstRepo.getContent( ).toItem( path );
if (!srcItem.getAsset().exists()){
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.ARTIFACT_NOT_FOUND, srcRepositoryId, path ), 404 );
}
if (dstItem.getAsset().exists()) {
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.ARTIFACT_EXISTS_AT_DEST, dstRepositoryId, path ), 400 );
}
FsStorageUtil.copyAsset( srcItem.getAsset( ), dstItem.getAsset( ), true );
}
catch ( LayoutException e )
{
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.REPOSITORY_LAYOUT_ERROR, e.getMessage() ) );
}
catch ( IOException e )
{
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.ARTIFACT_COPY_ERROR, e.getMessage() ) );
}
return Response.ok( ).build();
}
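/**
 * Verifies that the given user is allowed to read from the source repository
 * (OPERATION_REPOSITORY_ACCESS) and to deploy to the destination repository
 * (OPERATION_REPOSITORY_UPLOAD). Authentication problems map to 401, missing permissions to 403.
 */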
private void checkAuthority(final String userName, final String srcRepositoryId, final String dstRepositoryId ) throws ArchivaRestServiceException {
User user = null;
try
{
user = securitySystem.getUserManager().findUser( userName );
}
catch ( UserNotFoundException e )
{
httpServletResponse.setHeader( "WWW-Authenticate", "Bearer realm=\"archiva\"" );
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.USER_NOT_FOUND, userName ), 401 );
}
catch ( UserManagerException e )
{
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.USER_MANAGER_ERROR, e.getMessage( ) ) );
}
// check karma on source : read
AuthenticationResult authn = new AuthenticationResult( true, userName, null );
SecuritySession securitySession = new DefaultSecuritySession( authn, user );
try
{
boolean authz =
securitySystem.isAuthorized( securitySession, OPERATION_REPOSITORY_ACCESS,
srcRepositoryId );
if ( !authz )
{
throw new ArchivaRestServiceException(ErrorMessage.of( ErrorKeys.PERMISSION_REPOSITORY_DENIED, srcRepositoryId, OPERATION_REPOSITORY_ACCESS ), 403);
}
}
catch ( AuthorizationException e )
{
log.error( "Error reading permission: {}", e.getMessage(), e );
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.AUTHORIZATION_ERROR, e.getMessage() ), 403);
}
// check karma on target: write
try
{
boolean authz =
securitySystem.isAuthorized( securitySession, OPERATION_REPOSITORY_UPLOAD,
dstRepositoryId );
if ( !authz )
{
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.PERMISSION_REPOSITORY_DENIED, dstRepositoryId, OPERATION_REPOSITORY_UPLOAD ), 403 );
}
}
catch ( AuthorizationException e )
{
log.error( "Error reading permission: {}", e.getMessage(), e );
throw new ArchivaRestServiceException( ErrorMessage.of( ErrorKeys.AUTHORIZATION_ERROR, e.getMessage() ), 403);
}
return null;
}
@Override
public Response deleteArtifact( String repositoryId, String path ) throws ArchivaRestServiceException
{
return null;
}

View File

@@ -27,6 +27,7 @@ import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.storage.fs.FilesystemAsset;
import org.apache.archiva.metadata.audit.AuditListener;
import org.apache.archiva.repository.maven.MavenManagedRepository;
import org.apache.archiva.repository.storage.fs.FilesystemStorage;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.archiva.webdav.util.MimeTypes;
import org.apache.commons.lang3.StringUtils;
@@ -127,7 +128,7 @@ public class DavResourceTest
private DavResource getDavResource( String logicalPath, Path file ) throws LayoutException
{
return new ArchivaDavResource( new FilesystemAsset( repository, logicalPath, file.toAbsolutePath()) , logicalPath, repository, session, resourceLocator,
return new ArchivaDavResource( new FilesystemAsset( (FilesystemStorage) repository.getRoot().getStorage(), logicalPath, file.toAbsolutePath()) , logicalPath, repository, session, resourceLocator,
resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(), null);
}
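// The FilesystemAsset is now created from the repository's root FilesystemStorage rather than
// from the repository itself, as shown in the constructor call above.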
@@ -349,7 +350,7 @@ public class DavResourceTest
{
try
{
return new ArchivaDavResource( new FilesystemAsset(repository, "/" , baseDir.toAbsolutePath()), "/", repository, session, resourceLocator,
return new ArchivaDavResource( new FilesystemAsset( (FilesystemStorage) repository.getRoot().getStorage(), "/" , baseDir.toAbsolutePath()), "/", repository, session, resourceLocator,
resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(),
null );
}