Generics and cleanup to reduce compiler warnings

This commit is contained in:
Martin Stockhammer 2018-04-02 21:35:47 +02:00
parent 0f400d66a3
commit a2febcecd9
13 changed files with 33 additions and 32 deletions

View File

@@ -57,6 +57,7 @@ public class ArchivaConfigurationMRM789Test
return Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), path );
}
@SuppressWarnings( "unchecked" )
protected <T> T lookup( Class<T> clazz, String hint )
{
return (T) applicationContext.getBean( "archivaConfiguration#" + hint, ArchivaConfiguration.class );

View File

@@ -63,6 +63,7 @@ public class ArchivaConfigurationTest
return Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), path );
}
@SuppressWarnings( "unchecked" )
protected <T> T lookup( Class<T> clazz, String hint )
{
return (T) applicationContext.getBean( "archivaConfiguration#" + hint, ArchivaConfiguration.class );

View File

@@ -85,7 +85,7 @@ public class DefaultWagonFactory
{
try
{
Class clazz = wagon.getClass();
Class<? extends Wagon> clazz = wagon.getClass();
Method getHttpHeaders = clazz.getMethod( "getHttpHeaders" );
Properties headers = (Properties) getHttpHeaders.invoke( wagon );

View File

@@ -48,7 +48,7 @@ public class StandardCapabilities implements RepositoryCapabilities
boolean indexable, boolean fileBased,
boolean canBlockRedeployments, boolean scannable, boolean allowsFailover )
{
this.supportedReleaseSchemes = new HashSet();
this.supportedReleaseSchemes = new HashSet<>();
for (ReleaseScheme scheme : supportedReleaseSchemes) {
this.supportedReleaseSchemes.add(scheme);
}

View File

@@ -56,7 +56,7 @@ public class AbstractFeature {
this.listener.clear();
}
protected void raiseEvent(RepositoryEvent event) {
protected <T> void raiseEvent(RepositoryEvent<T> event) {
for(RepositoryEventListener listr : listener) {
listr.raise(event);
}

View File

@@ -182,6 +182,7 @@ public abstract class AbstractRepository implements EditableRepository, Reposito
@Override
public abstract RepositoryCapabilities getCapabilities( );
@SuppressWarnings( "unchecked" )
@Override
public <T extends RepositoryFeature<T>> RepositoryFeature<T> getFeature( Class<T> clazz ) throws UnsupportedFeatureException
{
@@ -265,6 +266,7 @@ public abstract class AbstractRepository implements EditableRepository, Reposito
this.schedulingDefinition = cronExpression;
}
@SuppressWarnings( "unchecked" )
protected <T extends RepositoryFeature<T>> void addFeature(RepositoryFeature<T> feature) {
featureMap.put( (Class<? extends RepositoryFeature<?>>) feature.getClass(), feature);
}

View File

@@ -169,7 +169,7 @@ public class RepositoryRegistry implements ConfigurationListener, RepositoryEven
if ( managedRepoConfigs == null )
{
return Collections.EMPTY_MAP;
return Collections.emptyMap();
}
Map<String, ManagedRepository> managedRepos = new LinkedHashMap<>( managedRepoConfigs.size( ) );
@@ -332,9 +332,7 @@ public class RepositoryRegistry implements ConfigurationListener, RepositoryEven
if ( repo instanceof EditableRemoteRepository && repo.getContent() == null)
{
EditableRemoteRepository editableRepo = (EditableRemoteRepository) repo;
if (repo.getContent()==null) {
editableRepo.setContent( repositoryContentFactory.getRemoteRepositoryContent( repo ) );
}
editableRepo.setContent( repositoryContentFactory.getRemoteRepositoryContent( repo ) );
if (repo.supportsFeature(IndexCreationFeature.class) && repo.getIndexingContext()==null ) {
createIndexingContext(editableRepo);
}
@@ -631,7 +629,7 @@ public class RepositoryRegistry implements ConfigurationListener, RepositoryEven
}
RemoteRepository originRepo = remoteRepositories.put( id, remoteRepository );
RemoteRepositoryConfiguration oldCfg=null;
RemoteRepositoryConfiguration newCfg=null;
RemoteRepositoryConfiguration newCfg;
try
{
if (originRepo!=null) {

View File

@@ -144,6 +144,7 @@ public class RepositoryProviderMock implements RepositoryProvider
return remoteRepository;
}
@SuppressWarnings( "unchecked" )
@Override
public void updateRemoteInstance( EditableRemoteRepository remoteRepository, RemoteRepositoryConfiguration configuration ) throws RepositoryException
{

View File

@@ -23,6 +23,7 @@ import org.apache.archiva.common.utils.BaseFile;
import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.consumers.InvalidRepositoryContentConsumer;
import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.archiva.consumers.RepositoryContentConsumer;
import org.apache.archiva.consumers.functors.ConsumerWantsFilePredicate;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.scanner.functors.ConsumerProcessFileClosure;
@@ -118,7 +119,7 @@ public class RepositoryScannerInstance
stats = new RepositoryScanStatistics();
stats.setRepositoryId( repository.getId() );
Closure triggerBeginScan =
Closure<RepositoryContentConsumer> triggerBeginScan =
new TriggerBeginScanClosure( repository, new Date( System.currentTimeMillis() ), true );
IterableUtils.forEach( knownConsumerList, triggerBeginScan );
@@ -240,7 +241,7 @@ public class RepositoryScannerInstance
consumerProcessFile.setBasefile( basefile );
consumerWantsFile.setBasefile( basefile );
Closure processIfWanted = IfClosure.ifClosure( consumerWantsFile, consumerProcessFile );
Closure<RepositoryContentConsumer> processIfWanted = IfClosure.ifClosure( consumerWantsFile, consumerProcessFile );
IterableUtils.forEach( this.knownConsumers, processIfWanted );
if ( consumerWantsFile.getWantedFileCount() <= 0 )

View File

@@ -105,6 +105,7 @@ public class MavenContentProvider implements RepositoryContentProvider
return content;
}
@SuppressWarnings( "unchecked" )
@Override
public <T extends RepositoryContent, V extends Repository> T createContent( Class<T> clazz, V repository ) throws RepositoryException
{

View File

@@ -84,6 +84,7 @@ public class MavenRemoteRepository extends AbstractRemoteRepository
return CAPABILITIES;
}
@SuppressWarnings( "unchecked" )
@Override
public <T extends RepositoryFeature<T>> RepositoryFeature<T> getFeature( Class<T> clazz ) throws UnsupportedFeatureException
{

View File

@@ -74,8 +74,8 @@ public class CassandraUtils
return HFactory.createColumn( name, //
value, //
(Serializer<A>) SerializerTypeInferer.getSerializer( name ), //
(Serializer<B>) SerializerTypeInferer.getSerializer( value ) );
SerializerTypeInferer.getSerializer( name ), //
SerializerTypeInferer.getSerializer( value ) );
}
public static String getStringValue( ColumnSlice<String, String> columnSlice, ColumnNames columnName )
@@ -94,14 +94,14 @@ public class CassandraUtils
return hColumn == null ? null : hColumn.getValue();
}
public static Long getLongValue( ColumnSlice<String, ?> columnSlice, String columnName )
public static Long getLongValue( ColumnSlice<String, Long> columnSlice, String columnName )
{
if ( StringUtils.isEmpty( columnName ) )
{
return null;
}
HColumn<String, Long> hColumn = (HColumn<String, Long>) columnSlice.getColumnByName( columnName );
HColumn<String, Long> hColumn = columnSlice.getColumnByName( columnName );
return hColumn == null ? null : hColumn.getValue();
}

View File

@@ -149,7 +149,7 @@ public class FileMetadataRepository
Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
// remove properties that are not references or artifacts
for ( Object key : new ArrayList( properties.keySet() ) )
for ( Object key : new ArrayList<>( properties.keySet() ) )
{
String name = (String) key;
if ( !name.contains( ":" ) && !name.equals( "facetIds" ) )
@@ -227,7 +227,7 @@ public class FileMetadataRepository
i++;
}
Set<String> facetIds = new LinkedHashSet<String>( versionMetadata.getFacetIds() );
Set<String> facetIds = new LinkedHashSet<>( versionMetadata.getFacetIds( ) );
facetIds.addAll( Arrays.asList( properties.getProperty( "facetIds", "" ).split( "," ) ) );
properties.setProperty( "facetIds", join( facetIds ) );
@@ -258,7 +258,7 @@ public class FileMetadataRepository
List<Object> propsToRemove = new ArrayList<>();
for ( MetadataFacet facet : facetList )
{
for ( Object key : new ArrayList( properties.keySet() ) )
for ( Object key : new ArrayList<>( properties.keySet() ) )
{
String keyString = (String) key;
if ( keyString.startsWith( prefix + facet.getFacetId() + ":" ) )
@@ -384,7 +384,7 @@ public class FileMetadataRepository
{
metadataFacet = metadataFacetFactory.createMetadataFacet( repositoryId, name );
Map<String, String> map = new HashMap<>();
for ( Object key : new ArrayList( properties.keySet() ) )
for ( Object key : new ArrayList<>( properties.keySet() ) )
{
String property = (String) key;
map.put( property, properties.getProperty( property ) );
@@ -457,7 +457,7 @@ public class FileMetadataRepository
{
getArtifactsByDateRange( artifacts, repoId, ns, startTime, endTime );
}
Collections.sort( artifacts, new ArtifactComparator() );
artifacts.sort(new ArtifactComparator() );
return artifacts;
}
catch ( MetadataResolutionException e )
@@ -577,7 +577,7 @@ public class FileMetadataRepository
MetadataFacet facet = factory.createMetadataFacet();
String prefix = propertyPrefix + facet.getFacetId();
Map<String, String> map = new HashMap<>();
for ( Object key : new ArrayList( properties.keySet() ) )
for ( Object key : new ArrayList<>( properties.keySet() ) )
{
String property = (String) key;
if ( property.startsWith( prefix ) )
@@ -726,7 +726,7 @@ public class FileMetadataRepository
properties.remove( "artifact:facetIds:" + id );
String prefix = "artifact:facet:" + id + ":";
for ( Object key : new ArrayList( properties.keySet() ) )
for ( Object key : new ArrayList<>( properties.keySet() ) )
{
String property = (String) key;
if ( property.startsWith( prefix ) )
@@ -763,7 +763,7 @@ public class FileMetadataRepository
properties.remove( "artifact:facetIds:" + id );
String prefix = "artifact:facet:" + id + ":";
for ( Object key : new ArrayList( properties.keySet() ) )
for ( Object key : new ArrayList<>( properties.keySet() ) )
{
String property = (String) key;
if ( property.startsWith( prefix ) )
@@ -928,7 +928,7 @@ public class FileMetadataRepository
}
properties.setProperty( "artifact:version:" + id, artifact.getVersion() );
Set<String> facetIds = new LinkedHashSet<String>( artifact.getFacetIds() );
Set<String> facetIds = new LinkedHashSet<>( artifact.getFacetIds( ) );
String property = "artifact:facetIds:" + id;
facetIds.addAll( Arrays.asList( properties.getProperty( property, "" ).split( "," ) ) );
properties.setProperty( property, join( facetIds ) );
@@ -1102,7 +1102,7 @@ public class FileMetadataRepository
}
else
{
mailingList.setOtherArchives( Collections.<String>emptyList() );
mailingList.setOtherArchives( Collections.emptyList() );
}
mailingList.setPostAddress( properties.getProperty( "mailingList." + i + ".post" ) );
mailingList.setSubscribeAddress( properties.getProperty( "mailingList." + i + ".subscribe" ) );
@@ -1159,7 +1159,7 @@ public class FileMetadataRepository
{
MetadataFacet facet = factory.createMetadataFacet();
Map<String, String> map = new HashMap<>();
for ( Object key : new ArrayList( properties.keySet() ) )
for ( Object key : new ArrayList<>( properties.keySet() ) )
{
String property = (String) key;
if ( property.startsWith( facet.getFacetId() ) )
@@ -1195,7 +1195,7 @@ public class FileMetadataRepository
Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
Set<String> versions = new HashSet<String>();
Set<String> versions = new HashSet<>( );
for ( Map.Entry entry : properties.entrySet() )
{
String name = (String) entry.getKey();
@@ -1438,24 +1438,19 @@ public class FileMetadataRepository
{
for ( String version : getProjectVersions( repoId, ns, project ) )
{
for ( ArtifactMetadata artifact : getArtifacts( repoId, ns, project, version ) )
{
artifacts.add( artifact );
}
artifacts.addAll( getArtifacts( repoId, ns, project, version ) );
}
}
}
@Override
public List<ArtifactMetadata> searchArtifacts( String text, String repositoryId, boolean exact )
throws MetadataRepositoryException
{
throw new UnsupportedOperationException( "searchArtifacts not yet implemented in File backend" );
}
@Override
public List<ArtifactMetadata> searchArtifacts( String key, String text, String repositoryId, boolean exact )
throws MetadataRepositoryException
{
throw new UnsupportedOperationException( "searchArtifacts not yet implemented in File backend" );
}