Mirror of https://github.com/apache/archiva.git

commit 717cbf7c8a (parent 8e2309ee39)

    take care of some ArrayList size init

    git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@1229118 13f79535-47bb-0310-9956-ffa450edef68
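The change is mechanical: wherever the final element count is already known, the ArrayList is now created with that capacity instead of the default. A minimal sketch of the idiom, assuming illustrative class and variable names (nothing below is taken verbatim from the Archiva sources):

```java
import java.util.ArrayList;
import java.util.List;

public class PresizedLists
{
    // When the number of elements is known up front, passing it to the
    // constructor sizes the backing array once and avoids the incremental
    // growth that the default capacity (10) would otherwise trigger.
    static List<String> checksumIncludes( String[] extensions )
    {
        List<String> includes = new ArrayList<String>( extensions.length );
        for ( String ext : extensions )
        {
            includes.add( "**/*" + ext );
        }
        return includes;
    }
}
```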
@@ -22,10 +22,10 @@ package org.apache.archiva.converter.artifact;
import org.apache.archiva.common.plexusbridge.DigesterUtils;
import org.apache.archiva.common.plexusbridge.PlexusSisuBridge;
import org.apache.archiva.common.plexusbridge.PlexusSisuBridgeException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.archiva.transaction.FileTransaction;
import org.apache.archiva.transaction.TransactionException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.handler.manager.ArtifactHandlerManager;

@@ -75,7 +75,6 @@ public class LegacyToDefaultConverter
{
/**
* {@link List}<{@link Digester}
*
*/
private List<? extends Digester> digesters;

@@ -701,7 +700,7 @@ public class LegacyToDefaultConverter
List<String> messages = warnings.get( artifact );
if ( messages == null )
{
messages = new ArrayList<String>();
messages = new ArrayList<String>( 1 );
}
messages.add( message );
warnings.put( artifact, messages );

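The consumer classes further down in this diff apply a second, related idiom: instead of reusing a pre-allocated includes list via clear() followed by addAll(...), they rebuild it with the ArrayList copy constructor, which allocates the backing array at exactly the source collection's size. A hedged before/after sketch with illustrative names only:

```java
import java.util.ArrayList;
import java.util.List;

public class IncludeRefresh
{
    private List<String> includes = new ArrayList<String>( 0 );

    // Before: reuse the existing list, which keeps whatever capacity it had
    // and grows it again while addAll() copies the patterns in.
    void refreshOld( List<String> patterns )
    {
        includes.clear();
        includes.addAll( patterns );
    }

    // After: the copy constructor allocates the list at patterns.size()
    // in a single step, matching the change applied in this commit.
    void refreshNew( List<String> patterns )
    {
        includes = new ArrayList<String>( patterns );
    }
}
```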
@ -19,6 +19,12 @@ package org.apache.archiva.checksum;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
|
@ -27,24 +33,18 @@ import java.util.List;
|
|||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* ChecksummedFile
|
||||
*
|
||||
* <p/>
|
||||
* <dl>
|
||||
* <lh>Terminology:</lh>
|
||||
* <dt>Checksum File</dt>
|
||||
* <dd>The file that contains the previously calculated checksum value for the reference file.
|
||||
* This is a text file with the extension ".sha1" or ".md5", and contains a single entry
|
||||
* consisting of an optional reference filename, and a checksum string.
|
||||
* </dd>
|
||||
* <dt>Reference File</dt>
|
||||
* <dd>The file that is being referenced in the checksum file.</dd>
|
||||
* <lh>Terminology:</lh>
|
||||
* <dt>Checksum File</dt>
|
||||
* <dd>The file that contains the previously calculated checksum value for the reference file.
|
||||
* This is a text file with the extension ".sha1" or ".md5", and contains a single entry
|
||||
* consisting of an optional reference filename, and a checksum string.
|
||||
* </dd>
|
||||
* <dt>Reference File</dt>
|
||||
* <dd>The file that is being referenced in the checksum file.</dd>
|
||||
* </dl>
|
||||
*
|
||||
* @version $Id$
|
||||
|
@ -57,7 +57,7 @@ public class ChecksummedFile
|
|||
|
||||
/**
|
||||
* Construct a ChecksummedFile object.
|
||||
*
|
||||
*
|
||||
* @param referenceFile
|
||||
*/
|
||||
public ChecksummedFile( final File referenceFile )
|
||||
|
@ -67,7 +67,7 @@ public class ChecksummedFile
|
|||
|
||||
/**
|
||||
* Calculate the checksum based on a given checksum.
|
||||
*
|
||||
*
|
||||
* @param checksumAlgorithm the algorithm to use.
|
||||
* @return the checksum string for the file.
|
||||
* @throws IOException if unable to calculate the checksum.
|
||||
|
@ -91,8 +91,8 @@ public class ChecksummedFile
|
|||
|
||||
/**
|
||||
* Creates a checksum file of the provided referenceFile.
|
||||
*
|
||||
* @param checksumAlgorithm the hash to use.
|
||||
*
|
||||
* @return the checksum File that was created.
|
||||
* @throws IOException if there was a problem either reading the referenceFile, or writing the checksum file.
|
||||
*/
|
||||
|
@ -107,7 +107,7 @@ public class ChecksummedFile
|
|||
|
||||
/**
|
||||
* Get the checksum file for the reference file and hash.
|
||||
*
|
||||
*
|
||||
* @param checksumAlgorithm the hash that we are interested in.
|
||||
* @return the checksum file to return
|
||||
*/
|
||||
|
@ -120,11 +120,11 @@ public class ChecksummedFile
|
|||
* <p>
|
||||
* Given a checksum file, check to see if the file it represents is valid according to the checksum.
|
||||
* </p>
|
||||
*
|
||||
* <p/>
|
||||
* <p>
|
||||
* NOTE: Only supports single file checksums of type MD5 or SHA1.
|
||||
* </p>
|
||||
*
|
||||
*
|
||||
* @param checksumFile the algorithms to check for.
|
||||
* @return true if the checksum is valid for the file it represents. or if the checksum file does not exist.
|
||||
* @throws IOException if the reading of the checksumFile or the file it refers to fails.
|
||||
|
@ -132,13 +132,13 @@ public class ChecksummedFile
|
|||
public boolean isValidChecksum( ChecksumAlgorithm algorithm )
|
||||
throws IOException
|
||||
{
|
||||
return isValidChecksums( new ChecksumAlgorithm[] { algorithm } );
|
||||
return isValidChecksums( new ChecksumAlgorithm[]{ algorithm } );
|
||||
}
|
||||
|
||||
/**
|
||||
* Of any checksum files present, validate that the reference file conforms
|
||||
* the to the checksum.
|
||||
*
|
||||
* the to the checksum.
|
||||
*
|
||||
* @param algorithms the algorithms to check for.
|
||||
* @return true if the checksums report that the the reference file is valid, false if invalid.
|
||||
*/
|
||||
|
@ -147,7 +147,7 @@ public class ChecksummedFile
|
|||
FileInputStream fis = null;
|
||||
try
|
||||
{
|
||||
List<Checksum> checksums = new ArrayList<Checksum>();
|
||||
List<Checksum> checksums = new ArrayList<Checksum>( algorithms.length );
|
||||
// Create checksum object for each algorithm.
|
||||
for ( ChecksumAlgorithm checksumAlgorithm : algorithms )
|
||||
{
|
||||
|
@ -214,13 +214,13 @@ public class ChecksummedFile
|
|||
|
||||
/**
|
||||
* Fix or create checksum files for the reference file.
|
||||
*
|
||||
*
|
||||
* @param algorithms the hashes to check for.
|
||||
* @return true if checksums were created successfully.
|
||||
*/
|
||||
public boolean fixChecksums( ChecksumAlgorithm algorithms[] )
|
||||
public boolean fixChecksums( ChecksumAlgorithm[] algorithms )
|
||||
{
|
||||
List<Checksum> checksums = new ArrayList<Checksum>();
|
||||
List<Checksum> checksums = new ArrayList<Checksum>( algorithms.length );
|
||||
// Create checksum object for each algorithm.
|
||||
for ( ChecksumAlgorithm checksumAlgorithm : algorithms )
|
||||
{
|
||||
|
@ -267,7 +267,7 @@ public class ChecksummedFile
|
|||
String rawChecksum = FileUtils.readFileToString( checksumFile );
|
||||
String expectedChecksum = parseChecksum( rawChecksum, checksumAlgorithm, referenceFile.getName() );
|
||||
|
||||
if ( ! StringUtils.equalsIgnoreCase( expectedChecksum, actualChecksum ) )
|
||||
if ( !StringUtils.equalsIgnoreCase( expectedChecksum, actualChecksum ) )
|
||||
{
|
||||
// create checksum (again)
|
||||
FileUtils.writeStringToFile( checksumFile, actualChecksum + " " + referenceFile.getName() );
|
||||
|
@ -294,21 +294,20 @@ public class ChecksummedFile
|
|||
// check if it is a remote metadata file
|
||||
Pattern pattern = Pattern.compile( "maven-metadata-\\S*.xml" );
|
||||
Matcher m = pattern.matcher( path );
|
||||
if( m.matches() )
|
||||
if ( m.matches() )
|
||||
{
|
||||
return filename.endsWith( path ) || ( "-".equals( filename ) )
|
||||
|| filename.endsWith( "maven-metadata.xml" );
|
||||
return filename.endsWith( path ) || ( "-".equals( filename ) ) || filename.endsWith( "maven-metadata.xml" );
|
||||
}
|
||||
|
||||
|
||||
return filename.endsWith( path ) || ( "-".equals( filename ) );
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a checksum string.
|
||||
*
|
||||
* <p/>
|
||||
* Validate the expected path, and expected checksum algorithm, then return
|
||||
* the trimmed checksum hex string.
|
||||
*
|
||||
* the trimmed checksum hex string.
|
||||
*
|
||||
* @param rawChecksumString
|
||||
* @param expectedHash
|
||||
* @param expectedPath
|
||||
|
@ -328,8 +327,8 @@ public class ChecksummedFile
|
|||
String filename = m.group( 1 );
|
||||
if ( !isValidChecksumPattern( filename, expectedPath ) )
|
||||
{
|
||||
throw new IOException( "Supplied checksum file '" + filename + "' does not match expected file: '"
|
||||
+ expectedPath + "'" );
|
||||
throw new IOException(
|
||||
"Supplied checksum file '" + filename + "' does not match expected file: '" + expectedPath + "'" );
|
||||
}
|
||||
trimmedChecksum = m.group( 2 );
|
||||
}
|
||||
|
@ -342,8 +341,9 @@ public class ChecksummedFile
|
|||
String filename = m.group( 2 );
|
||||
if ( !isValidChecksumPattern( filename, expectedPath ) )
|
||||
{
|
||||
throw new IOException( "Supplied checksum file '" + filename + "' does not match expected file: '"
|
||||
+ expectedPath + "'" );
|
||||
throw new IOException(
|
||||
"Supplied checksum file '" + filename + "' does not match expected file: '" + expectedPath
|
||||
+ "'" );
|
||||
}
|
||||
trimmedChecksum = m.group( 1 );
|
||||
}
|
||||
|
|
|
@ -58,7 +58,7 @@ public class VersionComparator
|
|||
|
||||
public VersionComparator()
|
||||
{
|
||||
specialWords = new ArrayList<String>();
|
||||
specialWords = new ArrayList<String>( 23 );
|
||||
|
||||
// ids that refer to LATEST
|
||||
specialWords.add( "final" );
|
||||
|
|
|
@ -259,9 +259,9 @@ public class DefaultArchivaConfiguration
|
|||
{
|
||||
// Fix Proxy Connector Settings.
|
||||
|
||||
List<ProxyConnectorConfiguration> proxyConnectorList = new ArrayList<ProxyConnectorConfiguration>();
|
||||
// Create a copy of the list to read from (to prevent concurrent modification exceptions)
|
||||
proxyConnectorList.addAll( config.getProxyConnectors() );
|
||||
List<ProxyConnectorConfiguration> proxyConnectorList =
|
||||
new ArrayList<ProxyConnectorConfiguration>( config.getProxyConnectors() );
|
||||
// Remove the old connector list.
|
||||
config.getProxyConnectors().clear();
|
||||
|
||||
|
@ -709,7 +709,8 @@ public class DefaultArchivaConfiguration
|
|||
if ( !intersec.isEmpty() )
|
||||
{
|
||||
|
||||
List<String> knowContentConsumers = new ArrayList<String>();
|
||||
List<String> knowContentConsumers =
|
||||
new ArrayList<String>( configuration.getRepositoryScanning().getKnownContentConsumers().size() );
|
||||
for ( String knowContentConsumer : configuration.getRepositoryScanning().getKnownContentConsumers() )
|
||||
{
|
||||
if ( !dbConsumers.contains( knowContentConsumer ) )
|
||||
|
|
|
@ -227,7 +227,7 @@ public class FileTypes
|
|||
List<String> patterns = defaultTypeMap.get( filetype.getId() );
|
||||
if ( patterns == null )
|
||||
{
|
||||
patterns = new ArrayList<String>();
|
||||
patterns = new ArrayList<String>( filetype.getPatterns().size() );
|
||||
}
|
||||
patterns.addAll( filetype.getPatterns() );
|
||||
|
||||
|
|
|
@ -70,31 +70,30 @@ public class ArtifactMissingChecksumsConsumer
|
|||
|
||||
private File repositoryDir;
|
||||
|
||||
private List<String> includes = new ArrayList<String>( );
|
||||
private List<String> includes = new ArrayList<String>( 0 );
|
||||
|
||||
@Inject
|
||||
public ArtifactMissingChecksumsConsumer( ArchivaConfiguration configuration,
|
||||
FileTypes filetypes )
|
||||
public ArtifactMissingChecksumsConsumer( ArchivaConfiguration configuration, FileTypes filetypes )
|
||||
{
|
||||
this.configuration = configuration;
|
||||
this.filetypes = filetypes;
|
||||
|
||||
configuration.addChangeListener( this );
|
||||
|
||||
initIncludes( );
|
||||
initIncludes();
|
||||
}
|
||||
|
||||
public String getId( )
|
||||
public String getId()
|
||||
{
|
||||
return this.id;
|
||||
}
|
||||
|
||||
public String getDescription( )
|
||||
public String getDescription()
|
||||
{
|
||||
return this.description;
|
||||
}
|
||||
|
||||
public boolean isPermanent( )
|
||||
public boolean isPermanent()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -102,7 +101,7 @@ public class ArtifactMissingChecksumsConsumer
|
|||
public void beginScan( ManagedRepository repo, Date whenGathered )
|
||||
throws ConsumerException
|
||||
{
|
||||
this.repositoryDir = new File( repo.getLocation( ) );
|
||||
this.repositoryDir = new File( repo.getLocation() );
|
||||
}
|
||||
|
||||
public void beginScan( ManagedRepository repo, Date whenGathered, boolean executeOnEntireRepo )
|
||||
|
@ -111,22 +110,22 @@ public class ArtifactMissingChecksumsConsumer
|
|||
beginScan( repo, whenGathered );
|
||||
}
|
||||
|
||||
public void completeScan( )
|
||||
public void completeScan()
|
||||
{
|
||||
/* do nothing */
|
||||
}
|
||||
|
||||
public void completeScan( boolean executeOnEntireRepo )
|
||||
{
|
||||
completeScan( );
|
||||
completeScan();
|
||||
}
|
||||
|
||||
public List<String> getExcludes( )
|
||||
public List<String> getExcludes()
|
||||
{
|
||||
return getDefaultArtifactExclusions( );
|
||||
return getDefaultArtifactExclusions();
|
||||
}
|
||||
|
||||
public List<String> getIncludes( )
|
||||
public List<String> getIncludes()
|
||||
{
|
||||
return includes;
|
||||
}
|
||||
|
@ -147,9 +146,9 @@ public class ArtifactMissingChecksumsConsumer
|
|||
private void createFixChecksum( String path, ChecksumAlgorithm checksumAlgorithm[] )
|
||||
{
|
||||
File artifactFile = new File( this.repositoryDir, path );
|
||||
File checksumFile = new File( this.repositoryDir, path + checksumAlgorithm[0].getExt( ) );
|
||||
File checksumFile = new File( this.repositoryDir, path + checksumAlgorithm[0].getExt() );
|
||||
|
||||
if ( checksumFile.exists( ) )
|
||||
if ( checksumFile.exists() )
|
||||
{
|
||||
checksum = new ChecksummedFile( artifactFile );
|
||||
try
|
||||
|
@ -157,33 +156,33 @@ public class ArtifactMissingChecksumsConsumer
|
|||
if ( !checksum.isValidChecksum( checksumAlgorithm[0] ) )
|
||||
{
|
||||
checksum.fixChecksums( checksumAlgorithm );
|
||||
triggerConsumerInfo( "Fixed checksum file " + checksumFile.getAbsolutePath( ) );
|
||||
triggerConsumerInfo( "Fixed checksum file " + checksumFile.getAbsolutePath() );
|
||||
}
|
||||
}
|
||||
catch ( IOException e )
|
||||
{
|
||||
triggerConsumerError( TYPE_CHECKSUM_CANNOT_CALC, "Cannot calculate checksum for file " + checksumFile +
|
||||
": " + e.getMessage( ) );
|
||||
": " + e.getMessage() );
|
||||
}
|
||||
}
|
||||
else if ( !checksumFile.exists( ) )
|
||||
else if ( !checksumFile.exists() )
|
||||
{
|
||||
checksum = new ChecksummedFile( artifactFile );
|
||||
try
|
||||
{
|
||||
checksum.createChecksum( checksumAlgorithm[0] );
|
||||
triggerConsumerInfo( "Created missing checksum file " + checksumFile.getAbsolutePath( ) );
|
||||
triggerConsumerInfo( "Created missing checksum file " + checksumFile.getAbsolutePath() );
|
||||
}
|
||||
catch ( IOException e )
|
||||
{
|
||||
triggerConsumerError( TYPE_CHECKSUM_CANNOT_CREATE, "Cannot create checksum for file " + checksumFile +
|
||||
": " + e.getMessage( ) );
|
||||
": " + e.getMessage() );
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
triggerConsumerWarning( TYPE_CHECKSUM_NOT_FILE,
|
||||
"Checksum file " + checksumFile.getAbsolutePath( ) + " is not a file." );
|
||||
"Checksum file " + checksumFile.getAbsolutePath() + " is not a file." );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -191,7 +190,7 @@ public class ArtifactMissingChecksumsConsumer
|
|||
{
|
||||
if ( ConfigurationNames.isRepositoryScanning( propertyName ) )
|
||||
{
|
||||
initIncludes( );
|
||||
initIncludes();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -200,18 +199,17 @@ public class ArtifactMissingChecksumsConsumer
|
|||
/* do nothing */
|
||||
}
|
||||
|
||||
private void initIncludes( )
|
||||
private void initIncludes()
|
||||
{
|
||||
includes.clear( );
|
||||
includes = new ArrayList<String>( filetypes.getFileTypePatterns( FileTypes.ARTIFACTS ) );
|
||||
|
||||
includes.addAll( filetypes.getFileTypePatterns( FileTypes.ARTIFACTS ) );
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
public void initialize( )
|
||||
public void initialize()
|
||||
{
|
||||
configuration.addChangeListener( this );
|
||||
|
||||
initIncludes( );
|
||||
initIncludes();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -43,8 +43,8 @@ import java.util.List;
|
|||
*
|
||||
* @version $Id$
|
||||
*/
|
||||
@Service("knownRepositoryContentConsumer#auto-remove")
|
||||
@Scope("prototype")
|
||||
@Service( "knownRepositoryContentConsumer#auto-remove" )
|
||||
@Scope( "prototype" )
|
||||
public class AutoRemoveConsumer
|
||||
extends AbstractMonitoredConsumer
|
||||
implements KnownRepositoryContentConsumer, RegistryListener
|
||||
|
@ -72,8 +72,8 @@ public class AutoRemoveConsumer
|
|||
private FileTypes filetypes;
|
||||
|
||||
private File repositoryDir;
|
||||
|
||||
private List<String> includes = new ArrayList<String>();
|
||||
|
||||
private List<String> includes = new ArrayList<String>( 0 );
|
||||
|
||||
public String getId()
|
||||
{
|
||||
|
@ -136,11 +136,11 @@ public class AutoRemoveConsumer
|
|||
public void processFile( String path, boolean executeOnEntireRepo )
|
||||
throws ConsumerException
|
||||
{
|
||||
processFile( path );
|
||||
processFile( path );
|
||||
}
|
||||
|
||||
public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
|
||||
{
|
||||
{
|
||||
if ( ConfigurationNames.isRepositoryScanning( propertyName ) )
|
||||
{
|
||||
initIncludes();
|
||||
|
@ -154,9 +154,7 @@ public class AutoRemoveConsumer
|
|||
|
||||
private void initIncludes()
|
||||
{
|
||||
includes.clear();
|
||||
|
||||
includes.addAll( filetypes.getFileTypePatterns( FileTypes.AUTO_REMOVE ) );
|
||||
includes = new ArrayList<String>( filetypes.getFileTypePatterns( FileTypes.AUTO_REMOVE ) );
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
|
|
|
@ -20,10 +20,10 @@ package org.apache.archiva.consumers.core;
|
|||
*/
|
||||
|
||||
import org.apache.archiva.admin.model.beans.ManagedRepository;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.archiva.consumers.AbstractMonitoredConsumer;
|
||||
import org.apache.archiva.consumers.ConsumerException;
|
||||
import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.springframework.context.annotation.Scope;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
|
@ -41,8 +41,8 @@ import java.util.Map;
|
|||
*
|
||||
* @version $Id$
|
||||
*/
|
||||
@Service("knownRepositoryContentConsumer#auto-rename")
|
||||
@Scope("prototype")
|
||||
@Service( "knownRepositoryContentConsumer#auto-rename" )
|
||||
@Scope( "prototype" )
|
||||
public class AutoRenameConsumer
|
||||
extends AbstractMonitoredConsumer
|
||||
implements KnownRepositoryContentConsumer
|
||||
|
@ -61,7 +61,7 @@ public class AutoRenameConsumer
|
|||
|
||||
private File repositoryDir;
|
||||
|
||||
private List<String> includes = new ArrayList<String>();
|
||||
private List<String> includes = new ArrayList<String>( 3 );
|
||||
|
||||
private Map<String, String> extensionRenameMap = new HashMap<String, String>();
|
||||
|
||||
|
@ -159,6 +159,6 @@ public class AutoRenameConsumer
|
|||
public void processFile( String path, boolean executeOnEntireRepo )
|
||||
throws ConsumerException
|
||||
{
|
||||
processFile( path );
|
||||
processFile( path );
|
||||
}
|
||||
}
|
||||
|
|
|
@ -57,14 +57,14 @@ import java.util.List;
|
|||
*
|
||||
* @version $Id$
|
||||
*/
|
||||
@Service("knownRepositoryContentConsumer#metadata-updater")
|
||||
@Scope("prototype")
|
||||
@Service( "knownRepositoryContentConsumer#metadata-updater" )
|
||||
@Scope( "prototype" )
|
||||
public class MetadataUpdaterConsumer
|
||||
extends AbstractMonitoredConsumer
|
||||
implements KnownRepositoryContentConsumer, RegistryListener
|
||||
{
|
||||
private Logger log = LoggerFactory.getLogger( MetadataUpdaterConsumer.class );
|
||||
|
||||
|
||||
/**
|
||||
* default-value="metadata-updater"
|
||||
*/
|
||||
|
@ -109,7 +109,7 @@ public class MetadataUpdaterConsumer
|
|||
|
||||
private File repositoryDir;
|
||||
|
||||
private List<String> includes = new ArrayList<String>();
|
||||
private List<String> includes = new ArrayList<String>( 0 );
|
||||
|
||||
private long scanStartTimestamp = 0;
|
||||
|
||||
|
@ -222,18 +222,19 @@ public class MetadataUpdaterConsumer
|
|||
}
|
||||
catch ( LayoutException e )
|
||||
{
|
||||
triggerConsumerWarning( TYPE_METADATA_BAD_INTERNAL_REF, "Unable to convert path [" + path
|
||||
+ "] to an internal project reference: " + e.getMessage() );
|
||||
triggerConsumerWarning( TYPE_METADATA_BAD_INTERNAL_REF,
|
||||
"Unable to convert path [" + path + "] to an internal project reference: "
|
||||
+ e.getMessage() );
|
||||
}
|
||||
catch ( RepositoryMetadataException e )
|
||||
{
|
||||
triggerConsumerError( TYPE_METADATA_WRITE_FAILURE, "Unable to write project metadata for artifact [" + path
|
||||
+ "]: " + e.getMessage() );
|
||||
triggerConsumerError( TYPE_METADATA_WRITE_FAILURE,
|
||||
"Unable to write project metadata for artifact [" + path + "]: " + e.getMessage() );
|
||||
}
|
||||
catch ( IOException e )
|
||||
{
|
||||
triggerConsumerWarning( TYPE_METADATA_IO, "Project metadata not written due to IO warning: "
|
||||
+ e.getMessage() );
|
||||
triggerConsumerWarning( TYPE_METADATA_IO,
|
||||
"Project metadata not written due to IO warning: " + e.getMessage() );
|
||||
}
|
||||
catch ( ContentNotFoundException e )
|
||||
{
|
||||
|
@ -268,18 +269,19 @@ public class MetadataUpdaterConsumer
|
|||
}
|
||||
catch ( LayoutException e )
|
||||
{
|
||||
triggerConsumerWarning( TYPE_METADATA_BAD_INTERNAL_REF, "Unable to convert path [" + path
|
||||
+ "] to an internal version reference: " + e.getMessage() );
|
||||
triggerConsumerWarning( TYPE_METADATA_BAD_INTERNAL_REF,
|
||||
"Unable to convert path [" + path + "] to an internal version reference: "
|
||||
+ e.getMessage() );
|
||||
}
|
||||
catch ( RepositoryMetadataException e )
|
||||
{
|
||||
triggerConsumerError( TYPE_METADATA_WRITE_FAILURE, "Unable to write version metadata for artifact [" + path
|
||||
+ "]: " + e.getMessage() );
|
||||
triggerConsumerError( TYPE_METADATA_WRITE_FAILURE,
|
||||
"Unable to write version metadata for artifact [" + path + "]: " + e.getMessage() );
|
||||
}
|
||||
catch ( IOException e )
|
||||
{
|
||||
triggerConsumerWarning( TYPE_METADATA_IO, "Version metadata not written due to IO warning: "
|
||||
+ e.getMessage() );
|
||||
triggerConsumerWarning( TYPE_METADATA_IO,
|
||||
"Version metadata not written due to IO warning: " + e.getMessage() );
|
||||
}
|
||||
catch ( ContentNotFoundException e )
|
||||
{
|
||||
|
@ -309,9 +311,7 @@ public class MetadataUpdaterConsumer
|
|||
|
||||
private void initIncludes()
|
||||
{
|
||||
includes.clear();
|
||||
|
||||
includes.addAll( filetypes.getFileTypePatterns( FileTypes.ARTIFACTS ) );
|
||||
includes = new ArrayList<String>( filetypes.getFileTypePatterns( FileTypes.ARTIFACTS ) );
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
|
|
|
@ -88,7 +88,7 @@ public class ValidateChecksumConsumer
|
|||
|
||||
private File repositoryDir;
|
||||
|
||||
private List<String> includes = new ArrayList<String>();
|
||||
private List<String> includes;
|
||||
|
||||
public String getId()
|
||||
{
|
||||
|
@ -175,6 +175,7 @@ public class ValidateChecksumConsumer
|
|||
{
|
||||
checksum = plexusSisuBridge.lookup( ChecksumFile.class );
|
||||
List<Digester> allDigesters = new ArrayList<Digester>( digesterUtils.getAllDigesters() );
|
||||
includes = new ArrayList<String>( allDigesters.size() );
|
||||
for ( Digester digester : allDigesters )
|
||||
{
|
||||
includes.add( "**/*" + digester.getFilenameExtension() );
|
||||
|
|
|
@ -21,19 +21,19 @@ package org.apache.archiva.consumers.core.repository;
|
|||
|
||||
import org.apache.archiva.admin.model.beans.ManagedRepository;
|
||||
import org.apache.archiva.admin.model.managed.ManagedRepositoryAdmin;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.repository.events.RepositoryListener;
|
||||
import org.apache.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.archiva.configuration.ConfigurationNames;
|
||||
import org.apache.archiva.configuration.FileTypes;
|
||||
import org.apache.archiva.consumers.AbstractMonitoredConsumer;
|
||||
import org.apache.archiva.consumers.ConsumerException;
|
||||
import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.repository.ManagedRepositoryContent;
|
||||
import org.apache.archiva.repository.RepositoryContentFactory;
|
||||
import org.apache.archiva.repository.RepositoryException;
|
||||
import org.apache.archiva.repository.RepositoryNotFoundException;
|
||||
import org.apache.archiva.repository.events.RepositoryListener;
|
||||
import org.apache.archiva.repository.metadata.MetadataTools;
|
||||
import org.codehaus.plexus.registry.Registry;
|
||||
import org.codehaus.plexus.registry.RegistryListener;
|
||||
|
@ -237,16 +237,12 @@ public class RepositoryPurgeConsumer
|
|||
|
||||
private void initIncludes()
|
||||
{
|
||||
includes.clear();
|
||||
|
||||
includes.addAll( filetypes.getFileTypePatterns( FileTypes.ARTIFACTS ) );
|
||||
includes = new ArrayList<String>( filetypes.getFileTypePatterns( FileTypes.ARTIFACTS ) );
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
public void initialize()
|
||||
{
|
||||
//this.listeners =
|
||||
// new ArrayList<RepositoryListener>( applicationContext.getBeansOfType( RepositoryListener.class ).values() );
|
||||
configuration.addChangeListener( this );
|
||||
|
||||
initIncludes();
|
||||
|
|
|
@ -76,7 +76,7 @@ public class NexusIndexerConsumer
|
|||
|
||||
private NexusIndexer nexusIndexer;
|
||||
|
||||
private List<String> includes = new ArrayList<String>();
|
||||
private List<String> includes = new ArrayList<String>( 0 );
|
||||
|
||||
private ManagedRepository repository;
|
||||
|
||||
|
@ -238,11 +238,14 @@ public class NexusIndexerConsumer
|
|||
|
||||
private void initIncludes()
|
||||
{
|
||||
includes.clear();
|
||||
List<String> indexable = filetypes.getFileTypePatterns( FileTypes.INDEXABLE_CONTENT );
|
||||
List<String> artifacts = filetypes.getFileTypePatterns( FileTypes.ARTIFACTS );
|
||||
|
||||
includes.addAll( filetypes.getFileTypePatterns( FileTypes.INDEXABLE_CONTENT ) );
|
||||
includes = new ArrayList<String>( indexable.size() + artifacts.size() );
|
||||
|
||||
includes.addAll( filetypes.getFileTypePatterns( FileTypes.ARTIFACTS ) );
|
||||
includes.addAll( indexable );
|
||||
|
||||
includes.addAll( artifacts );
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
|
|
|
@ -20,6 +20,13 @@ package org.apache.archiva.consumers.metadata;
|
|||
*/
|
||||
|
||||
import org.apache.archiva.admin.model.beans.ManagedRepository;
|
||||
import org.apache.archiva.common.utils.VersionUtil;
|
||||
import org.apache.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.archiva.configuration.ConfigurationNames;
|
||||
import org.apache.archiva.configuration.FileTypes;
|
||||
import org.apache.archiva.consumers.AbstractMonitoredConsumer;
|
||||
import org.apache.archiva.consumers.ConsumerException;
|
||||
import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
|
||||
import org.apache.archiva.metadata.model.ArtifactMetadata;
|
||||
import org.apache.archiva.metadata.model.ProjectMetadata;
|
||||
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
|
||||
|
@ -30,13 +37,6 @@ import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
|||
import org.apache.archiva.metadata.repository.storage.RepositoryStorage;
|
||||
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataInvalidException;
|
||||
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataNotFoundException;
|
||||
import org.apache.archiva.common.utils.VersionUtil;
|
||||
import org.apache.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.archiva.configuration.ConfigurationNames;
|
||||
import org.apache.archiva.configuration.FileTypes;
|
||||
import org.apache.archiva.consumers.AbstractMonitoredConsumer;
|
||||
import org.apache.archiva.consumers.ConsumerException;
|
||||
import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
|
||||
import org.codehaus.plexus.registry.Registry;
|
||||
import org.codehaus.plexus.registry.RegistryListener;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -56,8 +56,8 @@ import java.util.List;
|
|||
*
|
||||
* @version $Id: ArtifactUpdateDatabaseConsumer.java 718864 2008-11-19 06:33:35Z brett $
|
||||
*/
|
||||
@Service("knownRepositoryContentConsumer#create-archiva-metadata")
|
||||
@Scope("prototype")
|
||||
@Service( "knownRepositoryContentConsumer#create-archiva-metadata" )
|
||||
@Scope( "prototype" )
|
||||
public class ArchivaMetadataCreationConsumer
|
||||
extends AbstractMonitoredConsumer
|
||||
implements KnownRepositoryContentConsumer, RegistryListener
|
||||
|
@ -84,11 +84,10 @@ public class ArchivaMetadataCreationConsumer
|
|||
|
||||
private Date whenGathered;
|
||||
|
||||
private List<String> includes = new ArrayList<String>();
|
||||
private List<String> includes = new ArrayList<String>( 0 );
|
||||
|
||||
/**
|
||||
* FIXME: can be of other types
|
||||
*
|
||||
*/
|
||||
@Inject
|
||||
private RepositorySessionFactory repositorySessionFactory;
|
||||
|
@ -96,10 +95,9 @@ public class ArchivaMetadataCreationConsumer
|
|||
/**
|
||||
* FIXME: this needs to be configurable based on storage type - and could also be instantiated per repo. Change to a
|
||||
* factory.
|
||||
*
|
||||
*/
|
||||
@Inject
|
||||
@Named(value = "repositoryStorage#maven2")
|
||||
@Named( value = "repositoryStorage#maven2" )
|
||||
private RepositoryStorage repositoryStorage;
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger( ArchivaMetadataCreationConsumer.class );
|
||||
|
@ -242,9 +240,7 @@ public class ArchivaMetadataCreationConsumer
|
|||
|
||||
private void initIncludes()
|
||||
{
|
||||
includes.clear();
|
||||
|
||||
includes.addAll( filetypes.getFileTypePatterns( FileTypes.ARTIFACTS ) );
|
||||
includes = new ArrayList<String>( filetypes.getFileTypePatterns( FileTypes.ARTIFACTS ) );
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
|
|
|
@ -22,12 +22,12 @@ package org.apache.archiva.converter.legacy;
|
|||
import org.apache.archiva.admin.model.beans.ManagedRepository;
|
||||
import org.apache.archiva.common.plexusbridge.PlexusSisuBridge;
|
||||
import org.apache.archiva.common.plexusbridge.PlexusSisuBridgeException;
|
||||
import org.apache.archiva.converter.RepositoryConversionException;
|
||||
import org.apache.archiva.repository.scanner.RepositoryScanner;
|
||||
import org.apache.archiva.repository.scanner.RepositoryScannerException;
|
||||
import org.apache.archiva.common.utils.PathUtil;
|
||||
import org.apache.archiva.consumers.InvalidRepositoryContentConsumer;
|
||||
import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
|
||||
import org.apache.archiva.converter.RepositoryConversionException;
|
||||
import org.apache.archiva.repository.scanner.RepositoryScanner;
|
||||
import org.apache.archiva.repository.scanner.RepositoryScannerException;
|
||||
import org.apache.maven.artifact.repository.ArtifactRepository;
|
||||
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
|
||||
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
|
||||
|
@ -101,12 +101,11 @@ public class DefaultLegacyRepositoryConverter
|
|||
legacyConverterConsumer.setExcludes( fileExclusionPatterns );
|
||||
legacyConverterConsumer.setDestinationRepository( repository );
|
||||
|
||||
List<KnownRepositoryContentConsumer> knownConsumers = new ArrayList<KnownRepositoryContentConsumer>();
|
||||
List<KnownRepositoryContentConsumer> knownConsumers = new ArrayList<KnownRepositoryContentConsumer>( 1 );
|
||||
knownConsumers.add( legacyConverterConsumer );
|
||||
|
||||
List<InvalidRepositoryContentConsumer> invalidConsumers = Collections.emptyList();
|
||||
List<String> ignoredContent = new ArrayList<String>();
|
||||
ignoredContent.addAll( Arrays.asList( RepositoryScanner.IGNORABLE_CONTENT ) );
|
||||
List<String> ignoredContent = new ArrayList<String>( Arrays.asList( RepositoryScanner.IGNORABLE_CONTENT ) );
|
||||
|
||||
repoScanner.scan( legacyRepository, knownConsumers, invalidConsumers, ignoredContent,
|
||||
RepositoryScanner.FRESH_SCAN );
|
||||
|
|
|
@ -81,7 +81,7 @@ public class LegacyConverterArtifactConsumer
|
|||
public LegacyConverterArtifactConsumer( PlexusSisuBridge plexusSisuBridge )
|
||||
throws PlexusSisuBridgeException
|
||||
{
|
||||
includes = new ArrayList<String>();
|
||||
includes = new ArrayList<String>( 3 );
|
||||
includes.add( "**/*.jar" );
|
||||
includes.add( "**/*.ear" );
|
||||
includes.add( "**/*.war" );
|
||||
|
|
|
@ -25,7 +25,7 @@ import java.util.List;
|
|||
import java.util.Properties;
|
||||
|
||||
/**
|
||||
* Utility methods for cloning various Archiva Model objects.
|
||||
* Utility methods for cloning various Archiva Model objects.
|
||||
*
|
||||
* @version $Id$
|
||||
*/
|
||||
|
@ -50,7 +50,7 @@ public class ArchivaModelCloner
|
|||
return cloned;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@SuppressWarnings( "unchecked" )
|
||||
public static Properties clone( Properties properties )
|
||||
{
|
||||
if ( properties == null )
|
||||
|
@ -109,7 +109,7 @@ public class ArchivaModelCloner
|
|||
return null;
|
||||
}
|
||||
|
||||
List<ArtifactReference> ret = new ArrayList<ArtifactReference>();
|
||||
List<ArtifactReference> ret = new ArrayList<ArtifactReference>( artifactReferenceList.size() );
|
||||
|
||||
for ( ArtifactReference ref : artifactReferenceList )
|
||||
{
|
||||
|
@ -126,7 +126,7 @@ public class ArchivaModelCloner
|
|||
return null;
|
||||
}
|
||||
|
||||
List<String> ret = new ArrayList<String>();
|
||||
List<String> ret = new ArrayList<String>( simple.size() );
|
||||
|
||||
for ( String txt : simple )
|
||||
{
|
||||
|
|
|
@ -19,19 +19,19 @@ package org.apache.archiva.policies;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.archiva.common.utils.VersionUtil;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Calendar;
|
||||
import java.util.List;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.archiva.common.utils.VersionUtil;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* AbstractUpdatePolicy
|
||||
* AbstractUpdatePolicy
|
||||
*
|
||||
* @version $Id$
|
||||
*/
|
||||
|
@ -39,12 +39,12 @@ public abstract class AbstractUpdatePolicy
|
|||
implements PreDownloadPolicy
|
||||
{
|
||||
private Logger log = LoggerFactory.getLogger( AbstractUpdatePolicy.class );
|
||||
|
||||
|
||||
/**
|
||||
* The ALWAYS policy setting means that the artifact is always uipdated from the remote repo.
|
||||
*/
|
||||
public static final String ALWAYS = "always";
|
||||
|
||||
|
||||
/**
|
||||
* The NEVER policy setting means that the artifact is never updated from the remote repo.
|
||||
*/
|
||||
|
@ -56,8 +56,8 @@ public abstract class AbstractUpdatePolicy
|
|||
* the following conditions are met...
|
||||
* </p>
|
||||
* <ul>
|
||||
* <li>The local artifact is not present.</li>
|
||||
* <li>The local artifact has a last modified timestamp older than (now - 1 day).</li>
|
||||
* <li>The local artifact is not present.</li>
|
||||
* <li>The local artifact has a last modified timestamp older than (now - 1 day).</li>
|
||||
* </ul>
|
||||
*/
|
||||
public static final String DAILY = "daily";
|
||||
|
@ -68,8 +68,8 @@ public abstract class AbstractUpdatePolicy
|
|||
* the following conditions are met...
|
||||
* </p>
|
||||
* <ul>
|
||||
* <li>The local artifact is not present.</li>
|
||||
* <li>The local artifact has a last modified timestamp older than (now - 1 hour).</li>
|
||||
* <li>The local artifact is not present.</li>
|
||||
* <li>The local artifact has a last modified timestamp older than (now - 1 hour).</li>
|
||||
* </ul>
|
||||
*/
|
||||
public static final String HOURLY = "hourly";
|
||||
|
@ -81,7 +81,7 @@ public abstract class AbstractUpdatePolicy
|
|||
*/
|
||||
public static final String ONCE = "once";
|
||||
|
||||
private List<String> options = new ArrayList<String>();
|
||||
private List<String> options = new ArrayList<String>( 5 );
|
||||
|
||||
public AbstractUpdatePolicy()
|
||||
{
|
||||
|
@ -93,9 +93,9 @@ public abstract class AbstractUpdatePolicy
|
|||
}
|
||||
|
||||
protected abstract boolean isSnapshotPolicy();
|
||||
|
||||
|
||||
protected abstract String getUpdateMode();
|
||||
|
||||
|
||||
public List<String> getOptions()
|
||||
{
|
||||
return options;
|
||||
|
@ -109,7 +109,7 @@ public abstract class AbstractUpdatePolicy
|
|||
// Only process artifact file types.
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
String version = request.getProperty( "version", "" );
|
||||
boolean isSnapshotVersion = false;
|
||||
|
||||
|
@ -121,8 +121,9 @@ public abstract class AbstractUpdatePolicy
|
|||
if ( !options.contains( policySetting ) )
|
||||
{
|
||||
// Not a valid code.
|
||||
throw new PolicyConfigurationException( "Unknown " + getUpdateMode() + " policy setting [" + policySetting
|
||||
+ "], valid settings are [" + StringUtils.join( options.iterator(), "," ) + "]" );
|
||||
throw new PolicyConfigurationException(
|
||||
"Unknown " + getUpdateMode() + " policy setting [" + policySetting + "], valid settings are ["
|
||||
+ StringUtils.join( options.iterator(), "," ) + "]" );
|
||||
}
|
||||
|
||||
if ( ALWAYS.equals( policySetting ) )
|
||||
|
@ -161,7 +162,8 @@ public abstract class AbstractUpdatePolicy
|
|||
if ( ONCE.equals( policySetting ) )
|
||||
{
|
||||
// File exists, but policy is once.
|
||||
throw new PolicyViolationException( "NO to update " + getUpdateMode() + ", policy is ONCE, and local file exist." );
|
||||
throw new PolicyViolationException(
|
||||
"NO to update " + getUpdateMode() + ", policy is ONCE, and local file exist." );
|
||||
}
|
||||
|
||||
if ( DAILY.equals( policySetting ) )
|
||||
|
@ -171,7 +173,7 @@ public abstract class AbstractUpdatePolicy
|
|||
Calendar fileCal = Calendar.getInstance();
|
||||
fileCal.setTimeInMillis( localFile.lastModified() );
|
||||
|
||||
if( cal.after( fileCal ) )
|
||||
if ( cal.after( fileCal ) )
|
||||
{
|
||||
// Its ok.
|
||||
return;
|
||||
|
@ -179,7 +181,7 @@ public abstract class AbstractUpdatePolicy
|
|||
else
|
||||
{
|
||||
throw new PolicyViolationException( "NO to update " + getUpdateMode()
|
||||
+ ", policy is DAILY, local file exist, and has been updated within the last day." );
|
||||
+ ", policy is DAILY, local file exist, and has been updated within the last day." );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -190,7 +192,7 @@ public abstract class AbstractUpdatePolicy
|
|||
Calendar fileCal = Calendar.getInstance();
|
||||
fileCal.setTimeInMillis( localFile.lastModified() );
|
||||
|
||||
if( cal.after( fileCal ) )
|
||||
if ( cal.after( fileCal ) )
|
||||
{
|
||||
// Its ok.
|
||||
return;
|
||||
|
@ -198,11 +200,11 @@ public abstract class AbstractUpdatePolicy
|
|||
else
|
||||
{
|
||||
throw new PolicyViolationException( "NO to update " + getUpdateMode()
|
||||
+ ", policy is HOURLY, local file exist, and has been updated within the last hour." );
|
||||
+ ", policy is HOURLY, local file exist, and has been updated within the last hour." );
|
||||
}
|
||||
}
|
||||
|
||||
throw new PolicyConfigurationException( "Unable to process " + getUpdateMode()
|
||||
+ " policy of [" + policySetting + "], please file a bug report." );
|
||||
throw new PolicyConfigurationException(
|
||||
"Unable to process " + getUpdateMode() + " policy of [" + policySetting + "], please file a bug report." );
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,30 +19,29 @@ package org.apache.archiva.policies;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.archiva.policies.urlcache.UrlFailureCache;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import javax.inject.Inject;
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Properties;
|
||||
|
||||
/**
|
||||
* {@link PreDownloadPolicy} to check if the requested url has failed before.
|
||||
*
|
||||
* @version $Id$
|
||||
*/
|
||||
@Service("preDownloadPolicy#cache-failures")
|
||||
@Service( "preDownloadPolicy#cache-failures" )
|
||||
public class CachedFailuresPolicy
|
||||
implements PreDownloadPolicy
|
||||
{
|
||||
private Logger log = LoggerFactory.getLogger( CachedFailuresPolicy.class );
|
||||
|
||||
|
||||
/**
|
||||
* The NO policy setting means that the the existence of old failures is <strong>not</strong> checked.
|
||||
* All resource requests are allowed thru to the remote repo.
|
||||
|
@ -61,7 +60,7 @@ public class CachedFailuresPolicy
|
|||
@Inject
|
||||
private UrlFailureCache urlFailureCache;
|
||||
|
||||
private List<String> options = new ArrayList<String>();
|
||||
private List<String> options = new ArrayList<String>( 2 );
|
||||
|
||||
public CachedFailuresPolicy()
|
||||
{
|
||||
|
@ -76,7 +75,8 @@ public class CachedFailuresPolicy
|
|||
{
|
||||
// Not a valid code.
|
||||
throw new PolicyConfigurationException( "Unknown cache-failues policy setting [" + policySetting +
|
||||
"], valid settings are [" + StringUtils.join( options.iterator(), "," ) + "]" );
|
||||
"], valid settings are [" + StringUtils.join(
|
||||
options.iterator(), "," ) + "]" );
|
||||
}
|
||||
|
||||
if ( NO.equals( policySetting ) )
|
||||
|
|
|
@ -19,11 +19,6 @@ package org.apache.archiva.policies;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.apache.archiva.checksum.ChecksumAlgorithm;
|
||||
import org.apache.archiva.checksum.ChecksummedFile;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
|
@ -31,25 +26,30 @@ import org.slf4j.Logger;
|
|||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Properties;
|
||||
|
||||
/**
|
||||
* ChecksumPolicy - a policy applied after the download to see if the file has been downloaded
|
||||
* successfully and completely (or not).
|
||||
*
|
||||
* @version $Id$
|
||||
*/
|
||||
@Service("postDownloadPolicy#checksum")
|
||||
@Service( "postDownloadPolicy#checksum" )
|
||||
public class ChecksumPolicy
|
||||
implements PostDownloadPolicy
|
||||
{
|
||||
private Logger log = LoggerFactory.getLogger( ChecksumPolicy.class );
|
||||
|
||||
|
||||
/**
|
||||
* The IGNORE policy indicates that if the checksum policy is ignored, and
|
||||
* the state of, contents of, or validity of the checksum files are not
|
||||
* checked.
|
||||
*/
|
||||
public static final String IGNORE = "ignore";
|
||||
|
||||
|
||||
/**
|
||||
* The FAIL policy indicates that if the checksum does not match the
|
||||
* downloaded file, then remove the downloaded artifact, and checksum
|
||||
|
@ -64,9 +64,9 @@ public class ChecksumPolicy
|
|||
*/
|
||||
public static final String FIX = "fix";
|
||||
|
||||
private ChecksumAlgorithm[] algorithms = new ChecksumAlgorithm[] { ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5 };
|
||||
private ChecksumAlgorithm[] algorithms = new ChecksumAlgorithm[]{ ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5 };
|
||||
|
||||
private List<String> options = new ArrayList<String>();
|
||||
private List<String> options = new ArrayList<String>( 3 );
|
||||
|
||||
public ChecksumPolicy()
|
||||
{
|
||||
|
@ -82,12 +82,13 @@ public class ChecksumPolicy
|
|||
{
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
if ( !options.contains( policySetting ) )
|
||||
{
|
||||
// Not a valid code.
|
||||
throw new PolicyConfigurationException( "Unknown checksum policy setting [" + policySetting
|
||||
+ "], valid settings are [" + StringUtils.join( options.iterator(), "," ) + "]" );
|
||||
throw new PolicyConfigurationException(
|
||||
"Unknown checksum policy setting [" + policySetting + "], valid settings are [" + StringUtils.join(
|
||||
options.iterator(), "," ) + "]" );
|
||||
}
|
||||
|
||||
if ( IGNORE.equals( policySetting ) )
|
||||
|
@ -100,8 +101,8 @@ public class ChecksumPolicy
|
|||
if ( !localFile.exists() )
|
||||
{
|
||||
// Local File does not exist.
|
||||
throw new PolicyViolationException( "Checksum policy failure, local file " + localFile.getAbsolutePath()
|
||||
+ " does not exist to check." );
|
||||
throw new PolicyViolationException(
|
||||
"Checksum policy failure, local file " + localFile.getAbsolutePath() + " does not exist to check." );
|
||||
}
|
||||
|
||||
if ( FAIL.equals( policySetting ) )
|
||||
|
@ -120,29 +121,31 @@ public class ChecksumPolicy
|
|||
file.delete();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
localFile.delete();
|
||||
throw new PolicyViolationException( "Checksums do not match, policy set to FAIL, "
|
||||
+ "deleting checksum files and local file " + localFile.getAbsolutePath() + "." );
|
||||
throw new PolicyViolationException(
|
||||
"Checksums do not match, policy set to FAIL, " + "deleting checksum files and local file "
|
||||
+ localFile.getAbsolutePath() + "." );
|
||||
}
|
||||
|
||||
if ( FIX.equals( policySetting ) )
|
||||
{
|
||||
ChecksummedFile checksum = new ChecksummedFile( localFile );
|
||||
if( checksum.fixChecksums( algorithms ) )
|
||||
if ( checksum.fixChecksums( algorithms ) )
|
||||
{
|
||||
log.debug( "Checksum policy set to FIX, checksum files have been updated." );
|
||||
return;
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new PolicyViolationException( "Checksum policy set to FIX, "
|
||||
+ "yet unable to update checksums for local file " + localFile.getAbsolutePath() + "." );
|
||||
throw new PolicyViolationException(
|
||||
"Checksum policy set to FIX, " + "yet unable to update checksums for local file "
|
||||
+ localFile.getAbsolutePath() + "." );
|
||||
}
|
||||
}
|
||||
|
||||
throw new PolicyConfigurationException( "Unable to process checksum policy of [" + policySetting
|
||||
+ "], please file a bug report." );
|
||||
throw new PolicyConfigurationException(
|
||||
"Unable to process checksum policy of [" + policySetting + "], please file a bug report." );
|
||||
}
|
||||
|
||||
public String getDefaultOption()
|
||||
|
|
|
@ -19,26 +19,26 @@ package org.apache.archiva.policies;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
/**
|
||||
* PropagateErrorsPolicy - a policy applied on error to determine how to treat the error.
|
||||
*/
|
||||
@Service("downloadErrorPolicy#propagate-errors")
|
||||
@Service( "downloadErrorPolicy#propagate-errors" )
|
||||
public class PropagateErrorsDownloadPolicy
|
||||
implements DownloadErrorPolicy
|
||||
{
|
||||
private Logger log = LoggerFactory.getLogger( PropagateErrorsDownloadPolicy.class );
|
||||
|
||||
|
||||
/**
|
||||
* Signifies any error should stop searching for other proxies.
|
||||
*/
|
||||
|
@ -54,7 +54,7 @@ public class PropagateErrorsDownloadPolicy
|
|||
*/
|
||||
public static final String IGNORE = "ignore";
|
||||
|
||||
private List<String> options = new ArrayList<String>();
|
||||
private List<String> options = new ArrayList<String>( 3 );
|
||||
|
||||
public PropagateErrorsDownloadPolicy()
|
||||
{
|
||||
|
@ -71,7 +71,8 @@ public class PropagateErrorsDownloadPolicy
|
|||
{
|
||||
// Not a valid code.
|
||||
throw new PolicyConfigurationException( "Unknown error policy setting [" + policySetting +
|
||||
"], valid settings are [" + StringUtils.join( options.iterator(), "," ) + "]" );
|
||||
"], valid settings are [" + StringUtils.join(
|
||||
options.iterator(), "," ) + "]" );
|
||||
}
|
||||
|
||||
if ( IGNORE.equals( policySetting ) )
|
||||
|
|
|
@ -19,19 +19,19 @@ package org.apache.archiva.policies;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
/**
|
||||
* PropagateErrorsPolicy - a policy applied on error to determine how to treat the error.
|
||||
*/
|
||||
@Service("downloadErrorPolicy#propagate-errors-on-update")
|
||||
@Service( "downloadErrorPolicy#propagate-errors-on-update" )
|
||||
public class PropagateErrorsOnUpdateDownloadPolicy
|
||||
implements DownloadErrorPolicy
|
||||
{
|
||||
|
@ -45,7 +45,7 @@ public class PropagateErrorsOnUpdateDownloadPolicy
|
|||
*/
|
||||
public static final String NOT_PRESENT = "artifact not already present";
|
||||
|
||||
private List<String> options = new ArrayList<String>();
|
||||
private List<String> options = new ArrayList<String>( 2 );
|
||||
|
||||
public PropagateErrorsOnUpdateDownloadPolicy()
|
||||
{
|
||||
|
@ -54,14 +54,15 @@ public class PropagateErrorsOnUpdateDownloadPolicy
|
|||
}
|
||||
|
||||
public boolean applyPolicy( String policySetting, Properties request, File localFile, Exception exception,
|
||||
Map<String,Exception> previousExceptions )
|
||||
Map<String, Exception> previousExceptions )
|
||||
throws PolicyConfigurationException
|
||||
{
|
||||
if ( !options.contains( policySetting ) )
|
||||
{
|
||||
// Not a valid code.
|
||||
throw new PolicyConfigurationException( "Unknown error policy setting [" + policySetting
|
||||
+ "], valid settings are [" + StringUtils.join( options.iterator(), "," ) + "]" );
|
||||
throw new PolicyConfigurationException(
|
||||
"Unknown error policy setting [" + policySetting + "], valid settings are [" + StringUtils.join(
|
||||
options.iterator(), "," ) + "]" );
|
||||
}
|
||||
|
||||
if ( ALWAYS.equals( policySetting ) )
|
||||
|
@ -76,8 +77,8 @@ public class PropagateErrorsOnUpdateDownloadPolicy
|
|||
return !localFile.exists();
|
||||
}
|
||||
|
||||
throw new PolicyConfigurationException( "Unable to process checksum policy of [" + policySetting
|
||||
+ "], please file a bug report." );
|
||||
throw new PolicyConfigurationException(
|
||||
"Unable to process checksum policy of [" + policySetting + "], please file a bug report." );
|
||||
}
|
||||
|
||||
public String getDefaultOption()
|
||||
|
|
|
@ -20,13 +20,6 @@ package org.apache.archiva.proxy;
|
|||
*/
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.archiva.proxy.common.WagonFactory;
|
||||
import org.apache.archiva.proxy.common.WagonFactoryException;
|
||||
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
|
||||
import org.apache.archiva.scheduler.repository.RepositoryTask;
|
||||
import org.apache.commons.collections.CollectionUtils;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.archiva.configuration.ConfigurationNames;
|
||||
import org.apache.archiva.configuration.NetworkProxyConfiguration;
|
||||
|
@ -42,6 +35,8 @@ import org.apache.archiva.policies.PostDownloadPolicy;
|
|||
import org.apache.archiva.policies.PreDownloadPolicy;
|
||||
import org.apache.archiva.policies.ProxyDownloadException;
|
||||
import org.apache.archiva.policies.urlcache.UrlFailureCache;
|
||||
import org.apache.archiva.proxy.common.WagonFactory;
|
||||
import org.apache.archiva.proxy.common.WagonFactoryException;
|
||||
import org.apache.archiva.repository.ManagedRepositoryContent;
|
||||
import org.apache.archiva.repository.RemoteRepositoryContent;
|
||||
import org.apache.archiva.repository.RepositoryContentFactory;
|
||||
|
@ -49,6 +44,11 @@ import org.apache.archiva.repository.RepositoryException;
|
|||
import org.apache.archiva.repository.RepositoryNotFoundException;
|
||||
import org.apache.archiva.repository.metadata.MetadataTools;
|
||||
import org.apache.archiva.repository.metadata.RepositoryMetadataException;
|
||||
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
|
||||
import org.apache.archiva.scheduler.repository.RepositoryTask;
|
||||
import org.apache.commons.collections.CollectionUtils;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.maven.wagon.ConnectionException;
|
||||
import org.apache.maven.wagon.ResourceDoesNotExistException;
|
||||
import org.apache.maven.wagon.Wagon;
|
||||
|
@ -195,7 +195,7 @@ public class DefaultRepositoryProxyConnectors
|
|||
connector.setDisabled( proxyConfig.isDisabled() );
|
||||
|
||||
// Copy any blacklist patterns.
|
||||
List<String> blacklist = new ArrayList<String>();
|
||||
List<String> blacklist = new ArrayList<String>( 0 );
|
||||
if ( CollectionUtils.isNotEmpty( proxyConfig.getBlackListPatterns() ) )
|
||||
{
|
||||
blacklist.addAll( proxyConfig.getBlackListPatterns() );
|
||||
|
@ -203,7 +203,7 @@ public class DefaultRepositoryProxyConnectors
|
|||
connector.setBlacklist( blacklist );
|
||||
|
||||
// Copy any whitelist patterns.
|
||||
List<String> whitelist = new ArrayList<String>();
|
||||
List<String> whitelist = new ArrayList<String>( 0 );
|
||||
if ( CollectionUtils.isNotEmpty( proxyConfig.getWhiteListPatterns() ) )
|
||||
{
|
||||
whitelist.addAll( proxyConfig.getWhiteListPatterns() );
|
||||
|
@ -215,7 +215,7 @@ public class DefaultRepositoryProxyConnectors
|
|||
if ( connectors == null )
|
||||
{
|
||||
// Create if we are the first.
|
||||
connectors = new ArrayList<ProxyConnector>();
|
||||
connectors = new ArrayList<ProxyConnector>( 1 );
|
||||
}
|
||||
|
||||
// Add the connector.
|
||||
|
@ -959,9 +959,9 @@ public class DefaultRepositoryProxyConnectors
|
|||
previousExceptions.remove( content.getId() );
|
||||
}
|
||||
|
||||
log.warn( "Transfer error from repository \"" + content.getRepository().getId() + "\" for artifact "
|
||||
+ Keys.toKey( artifact ) + ", continuing to next repository. Error message: "
|
||||
+ exception.getMessage() );
|
||||
log.warn(
|
||||
"Transfer error from repository \"" + content.getRepository().getId() + "\" for artifact " + Keys.toKey(
|
||||
artifact ) + ", continuing to next repository. Error message: " + exception.getMessage() );
|
||||
log.debug( "Full stack trace", exception );
|
||||
}
|
||||
|
||||
|
@ -1165,10 +1165,9 @@ public class DefaultRepositoryProxyConnectors
|
|||
|
||||
public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
|
||||
{
|
||||
if ( ConfigurationNames.isNetworkProxy( propertyName )
|
||||
|| ConfigurationNames.isManagedRepositories( propertyName )
|
||||
|| ConfigurationNames.isRemoteRepositories( propertyName ) || ConfigurationNames.isProxyConnector(
|
||||
propertyName ) )
|
||||
if ( ConfigurationNames.isNetworkProxy( propertyName ) || ConfigurationNames.isManagedRepositories(
|
||||
propertyName ) || ConfigurationNames.isRemoteRepositories( propertyName )
|
||||
|| ConfigurationNames.isProxyConnector( propertyName ) )
|
||||
{
|
||||
initConnectorsAndNetworkProxies();
|
||||
}
|
||||
|
|
|
@@ -126,7 +126,7 @@ public abstract class AbstractRepositoryConnector
    {
        if ( this.blackListPatterns == null )
        {
            this.blackListPatterns = new ArrayList<String>();
            this.blackListPatterns = new ArrayList<String>( 0 );
        }

        return this.blackListPatterns;

@@ -201,7 +201,7 @@ public abstract class AbstractRepositoryConnector
    {
        if ( this.whiteListPatterns == null )
        {
            this.whiteListPatterns = new ArrayList<String>();
            this.whiteListPatterns = new ArrayList<String>( 0 );
        }

        return this.whiteListPatterns;

@@ -66,7 +66,7 @@ public class FileType
    {
        if ( patterns == null )
        {
            this.patterns = new ArrayList<String>();
            this.patterns = new ArrayList<String>( 0 );
        }
        return patterns;
    }

@@ -77,11 +77,11 @@ public class RepositoryGroup
     *
     * @return List
     */
    public java.util.List<String> getRepositories()
    public List<String> getRepositories()
    {
        if ( this.repositories == null )
        {
            this.repositories = new ArrayList<String>();
            this.repositories = new ArrayList<String>( 0 );
        }

        return this.repositories;

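The AbstractRepositoryConnector, FileType and RepositoryGroup hunks all touch the same lazily initialised getter, where most instances never add an entry, so a zero-capacity list is the cheaper default. Roughly, as a generic sketch with illustrative names rather than the real Archiva fields:

import java.util.ArrayList;
import java.util.List;

public class LazyListHolder
{
    private List<String> patterns;

    // Create the list only on first access; capacity 0 keeps holders that
    // never receive a pattern cheap, while ArrayList still grows on demand.
    public List<String> getPatterns()
    {
        if ( this.patterns == null )
        {
            this.patterns = new ArrayList<String>( 0 );
        }
        return this.patterns;
    }

    public void addPattern( String pattern )
    {
        getPatterns().add( pattern );
    }
}
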
@@ -64,7 +64,8 @@ public class DefaultArchivaAdministration
    public List<LegacyArtifactPath> getLegacyArtifactPaths()
        throws RepositoryAdminException
    {
        List<LegacyArtifactPath> legacyArtifactPaths = new ArrayList<LegacyArtifactPath>();
        List<LegacyArtifactPath> legacyArtifactPaths = new ArrayList<LegacyArtifactPath>(
            getArchivaConfiguration().getConfiguration().getLegacyArtifactPaths().size() );
        for ( org.apache.archiva.configuration.LegacyArtifactPath legacyArtifactPath : getArchivaConfiguration().getConfiguration().getLegacyArtifactPaths() )
        {
            legacyArtifactPaths.add(

@@ -113,7 +114,8 @@ public class DefaultArchivaAdministration

        if ( fileType.getPatterns().contains( pattern ) )
        {
            throw new RepositoryAdminException( "File type [" + fileTypeId + "] already contains pattern [" + pattern + "]" );
            throw new RepositoryAdminException(
                "File type [" + fileTypeId + "] already contains pattern [" + pattern + "]" );
        }
        fileType.addPattern( pattern );

@@ -255,7 +257,7 @@ public class DefaultArchivaAdministration
        {
            return Collections.emptyList();
        }
        List<FileType> fileTypes = new ArrayList<FileType>();
        List<FileType> fileTypes = new ArrayList<FileType>( configFileTypes.size() );
        for ( org.apache.archiva.configuration.FileType fileType : configFileTypes )
        {
            fileTypes.add( new BeanReplicator().replicateBean( fileType, FileType.class ) );

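Here and in the admin and service classes below, the destination list for a copy-and-convert loop is pre-sized with the source collection's size, so the backing array never needs to grow. The same shape in isolation, with placeholder bean types rather than the real configuration and admin beans:

import java.util.ArrayList;
import java.util.List;

public class BeanCopySketch
{
    // Placeholder types standing in for a configuration bean and its admin-layer counterpart.
    static class ConfigBean { String id; }
    static class AdminBean { String id; }

    public static List<AdminBean> toAdminBeans( List<ConfigBean> configured )
    {
        // Exact-size allocation: exactly one element is added per source entry.
        List<AdminBean> result = new ArrayList<AdminBean>( configured.size() );
        for ( ConfigBean source : configured )
        {
            AdminBean target = new AdminBean();
            target.id = source.id;
            result.add( target );
        }
        return result;
    }
}
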
@@ -26,9 +26,9 @@ import org.apache.archiva.admin.model.group.RepositoryGroupAdmin;
import org.apache.archiva.admin.model.managed.ManagedRepositoryAdmin;
import org.apache.archiva.admin.repository.AbstractRepositoryAdmin;
import org.apache.archiva.audit.AuditEvent;
import org.apache.commons.lang.StringUtils;
import org.apache.archiva.configuration.Configuration;
import org.apache.archiva.configuration.RepositoryGroupConfiguration;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

@@ -61,7 +61,8 @@ public class DefaultRepositoryGroupAdmin
    public List<RepositoryGroup> getRepositoriesGroups()
        throws RepositoryAdminException
    {
        List<RepositoryGroup> repositoriesGroups = new ArrayList<RepositoryGroup>();
        List<RepositoryGroup> repositoriesGroups =
            new ArrayList<RepositoryGroup>( getArchivaConfiguration().getConfiguration().getRepositoryGroups().size() );

        for ( RepositoryGroupConfiguration repositoryGroupConfiguration : getArchivaConfiguration().getConfiguration().getRepositoryGroups() )
        {

@@ -25,13 +25,12 @@ import org.apache.archiva.admin.model.beans.NetworkProxy;
import org.apache.archiva.admin.model.networkproxy.NetworkProxyAdmin;
import org.apache.archiva.admin.repository.AbstractRepositoryAdmin;
import org.apache.archiva.audit.AuditEvent;
import org.apache.archiva.configuration.RemoteRepositoryConfiguration;
import org.apache.commons.lang.StringUtils;
import org.apache.archiva.configuration.Configuration;
import org.apache.archiva.configuration.NetworkProxyConfiguration;
import org.apache.archiva.configuration.RemoteRepositoryConfiguration;
import org.apache.commons.lang.StringUtils;
import org.springframework.stereotype.Service;

import java.rmi.Remote;
import java.util.ArrayList;
import java.util.List;

@@ -48,7 +47,8 @@ public class DefaultNetworkProxyAdmin
    public List<NetworkProxy> getNetworkProxies()
        throws RepositoryAdminException
    {
        List<NetworkProxy> networkProxies = new ArrayList<NetworkProxy>();
        List<NetworkProxy> networkProxies =
            new ArrayList<NetworkProxy>( getArchivaConfiguration().getConfiguration().getNetworkProxies().size() );
        for ( NetworkProxyConfiguration networkProxyConfiguration : getArchivaConfiguration().getConfiguration().getNetworkProxies() )
        {
            networkProxies.add( getNetworkProxy( networkProxyConfiguration ) );

@@ -126,9 +126,9 @@ public class DefaultNetworkProxyAdmin
        NetworkProxyConfiguration networkProxyConfiguration = getNetworkProxyConfiguration( networkProxy );
        configuration.removeNetworkProxy( networkProxyConfiguration );

        for ( RemoteRepositoryConfiguration rrc : configuration.getRemoteRepositories())
        for ( RemoteRepositoryConfiguration rrc : configuration.getRemoteRepositories() )
        {
            if (StringUtils.equals( rrc.getRemoteDownloadNetworkProxyId(), networkProxyId ))
            if ( StringUtils.equals( rrc.getRemoteDownloadNetworkProxyId(), networkProxyId ) )
            {
                rrc.setRemoteDownloadNetworkProxyId( null );
            }

@@ -28,11 +28,11 @@ import org.apache.archiva.admin.model.proxyconnector.ProxyConnectorOrderComparat
import org.apache.archiva.admin.model.remote.RemoteRepositoryAdmin;
import org.apache.archiva.admin.repository.AbstractRepositoryAdmin;
import org.apache.archiva.audit.AuditEvent;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.archiva.configuration.Configuration;
import org.apache.archiva.configuration.ProxyConnectorConfiguration;
import org.apache.archiva.configuration.functors.ProxyConnectorSelectionPredicate;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.springframework.stereotype.Service;

import javax.inject.Inject;

@@ -148,16 +148,17 @@ public class DefaultProxyConnectorAdmin

    protected List<String> unescapePatterns( List<String> patterns )
    {
        List<String> rawPatterns = new ArrayList<String>();
        if ( patterns != null )
        {
            List<String> rawPatterns = new ArrayList<String>( patterns.size() );
            for ( String pattern : patterns )
            {
                rawPatterns.add( StringUtils.replace( pattern, "\\\\", "\\" ) );
            }
            return rawPatterns;
        }

        return rawPatterns;
        return Collections.emptyList();
    }

    public Map<String, List<ProxyConnector>> getProxyConnectorAsMap()

@@ -174,7 +175,7 @@ public class DefaultProxyConnectorAdmin
        List<ProxyConnector> connectors = proxyConnectorMap.get( key );
        if ( connectors == null )
        {
            connectors = new ArrayList<ProxyConnector>();
            connectors = new ArrayList<ProxyConnector>( 1 );
            proxyConnectorMap.put( key, connectors );
        }

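The unescapePatterns hunk above combines the two ideas: the list is allocated only when there is input, sized from that input, and the no-input path returns the shared Collections.emptyList() instance instead of allocating anything. The same refactor on a stand-in helper (names are illustrative; plain String.replace is used here in place of commons-lang StringUtils.replace):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class UnescapeSketch
{
    public static List<String> unescape( List<String> patterns )
    {
        if ( patterns != null )
        {
            // Allocate only when there is something to convert, and size it exactly.
            List<String> raw = new ArrayList<String>( patterns.size() );
            for ( String pattern : patterns )
            {
                // Turn escaped backslashes back into single backslashes.
                raw.add( pattern.replace( "\\\\", "\\" ) );
            }
            return raw;
        }
        // Shared immutable empty list: nothing is allocated for the empty case.
        return Collections.emptyList();
    }
}
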
@@ -114,7 +114,8 @@ public class DefaultRemoteRepositoryAdmin
    public List<RemoteRepository> getRemoteRepositories()
        throws RepositoryAdminException
    {
        List<RemoteRepository> remoteRepositories = new ArrayList<RemoteRepository>();
        List<RemoteRepository> remoteRepositories = new ArrayList<RemoteRepository>(
            getArchivaConfiguration().getConfiguration().getRemoteRepositories().size() );
        for ( RemoteRepositoryConfiguration repositoryConfiguration : getArchivaConfiguration().getConfiguration().getRemoteRepositories() )
        {
            RemoteRepository remoteRepository =

@@ -47,7 +47,8 @@ public class DefaultRepositoryGroupService
    {
        try
        {
            List<RepositoryGroup> repositoriesGroups = new ArrayList<RepositoryGroup>();
            List<RepositoryGroup> repositoriesGroups =
                new ArrayList<RepositoryGroup>( repositoryGroupAdmin.getRepositoriesGroups().size() );
            for ( org.apache.archiva.admin.model.beans.RepositoryGroup repoGroup : repositoryGroupAdmin.getRepositoriesGroups() )
            {
                repositoriesGroups.add(