Implementing new stream methods

This commit is contained in:
Martin Stockhammer 2019-08-20 00:08:08 +02:00
parent 6cf4073a9e
commit d4ce38836d
13 changed files with 383 additions and 175 deletions

View File

@ -23,7 +23,9 @@ import com.sun.syndication.feed.synd.SyndEntry;
import com.sun.syndication.feed.synd.SyndFeed;
import junit.framework.TestCase;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.MetadataFacet;
import org.apache.archiva.metadata.repository.AbstractMetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.rss.RssFeedGenerator;
@ -34,6 +36,7 @@ import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
@ -41,6 +44,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import java.util.stream.Stream;
@RunWith (ArchivaBlockJUnit4ClassRunner.class)
public class NewArtifactsRssFeedProcessorTest
@ -155,6 +159,8 @@ public class NewArtifactsRssFeedProcessorTest
private List<ArtifactMetadata> artifactsByDateRange;
@Override
public List<ArtifactMetadata> getArtifactsByDateRange( RepositorySession session, String repoId, Date from, Date to )
{
@ -164,7 +170,6 @@ public class NewArtifactsRssFeedProcessorTest
return artifactsByDateRange;
}
public void setFrom( Date from )
{
this.from = from;

View File

@ -227,6 +227,7 @@ public class ArtifactMetadata
{
return false;
}
// We allow some fields to differ — TODO(review): this comment appears truncated; confirm which differences are intentionally tolerated.
if ( fileLastModified != null
? !fileLastModified.equals( that.fileLastModified )
: that.fileLastModified != null )

View File

@ -21,18 +21,33 @@ package org.apache.archiva.metadata.repository;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.MetadataFacet;
import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.model.ProjectMetadata;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
import java.time.ZonedDateTime;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.stream.Stream;
/**
 * Convenience base class for {@link MetadataRepository} implementations.
 * Provides delegating default implementations for the convenience overloads and
 * throws {@link UnsupportedOperationException} for backend-specific operations,
 * so concrete repositories only override what they actually support.
 */
public abstract class AbstractMetadataRepository
implements MetadataRepository
{
// Resolves facet factories by id or class. May be null when the no-arg
// constructor is used — TODO(review): confirm all such callers set it before use.
protected MetadataService metadataService;
public AbstractMetadataRepository() {
}
public AbstractMetadataRepository( MetadataService metadataService )
{
this.metadataService = metadataService;
}
@Override
public void updateProject( RepositorySession session, String repositoryId, ProjectMetadata project )
throws MetadataRepositoryException
@ -77,13 +92,6 @@ public abstract class AbstractMetadataRepository
return false;
}
@Override
public MetadataFacet getMetadataFacet( RepositorySession session, String repositoryId, String facetId, String name )
throws MetadataRepositoryException
{
throw new UnsupportedOperationException();
}
@Override
public void addMetadataFacet( RepositorySession session, String repositoryId, MetadataFacet metadataFacet )
throws MetadataRepositoryException
@ -296,4 +304,59 @@ public abstract class AbstractMetadataRepository
throw new UnsupportedOperationException();
}
@Override
public <T extends MetadataFacet> Stream<T> getMetadataFacetStream( RepositorySession session, String repositoryId, Class<T> facetClazz ) throws MetadataRepositoryException
{
// Convenience overload: stream all facets without paging by delegating
// with offset 0 and an effectively unlimited entry count.
return getMetadataFacetStream( session, repositoryId, facetClazz, 0, Long.MAX_VALUE );
}
@Override
public Stream<ArtifactMetadata> getArtifactsByDateRangeStream( RepositorySession session, String repositoryId, ZonedDateTime startTime, ZonedDateTime endTime ) throws MetadataRepositoryException
{
// Convenience overload: stream all artifacts in the date range without
// paging by delegating with offset 0 and an unlimited entry count.
return getArtifactsByDateRangeStream( session, repositoryId, startTime, endTime, 0, Long.MAX_VALUE );
}
@Override
public MetadataFacet getMetadataFacet( RepositorySession session, String repositoryId, String facetId, String name )
throws MetadataRepositoryException
{
// Resolve the concrete facet class registered for this facet id and
// delegate to the typed lookup.
return getMetadataFacet( session, repositoryId, getFactoryClassForId( facetId ), name );
}
@Override
public <T extends MetadataFacet> Stream<T> getMetadataFacetStream( RepositorySession session, String repositoryId, Class<T> facetClazz, long offset, long maxEntries ) throws MetadataRepositoryException
{
// Paged facet streaming is backend-specific; repositories that support it
// must override this method.
throw new UnsupportedOperationException();
}
@Override
public <T extends MetadataFacet> T getMetadataFacet( RepositorySession session, String repositoryId, Class<T> clazz, String name ) throws MetadataRepositoryException
{
// Typed facet lookup is backend-specific; repositories that support it
// must override this method.
throw new UnsupportedOperationException();
}
@Override
public Stream<ArtifactMetadata> getArtifactsByDateRangeStream( RepositorySession session, String repositoryId, ZonedDateTime startTime, ZonedDateTime endTime, long offset, long maxEntries ) throws MetadataRepositoryException
{
// Paged artifact streaming is backend-specific; repositories that support
// it must override this method.
throw new UnsupportedOperationException();
}
// Look up the facet factory registered for the given facet class.
protected <T extends MetadataFacet> MetadataFacetFactory getFacetFactory(Class<T> facetClazz) {
return metadataService.getFactory( facetClazz );
}
// Look up the facet factory registered for the given facet id.
protected MetadataFacetFactory getFacetFactory(String facetId) {
return metadataService.getFactory( facetId );
}
// All facet ids for which a factory is registered.
protected Set<String> getSupportedFacets() {
return metadataService.getSupportedFacets( );
}
// Map a facet id to the concrete MetadataFacet class its factory produces.
protected Class<? extends MetadataFacet> getFactoryClassForId( String facetId ) {
return metadataService.getFactoryClassForId( facetId );
}
}

View File

@ -156,7 +156,7 @@ public abstract class AbstractMetadataRepositoryTest
@Override
public TestMetadataFacet createMetadataFacet( String repositoryId, String name )
{
return new TestMetadataFacet( TEST_METADATA_VALUE );
return new TestMetadataFacet( TEST_FACET_ID, TEST_METADATA_VALUE, name );
}
@Override
@ -895,6 +895,34 @@ public abstract class AbstractMetadataRepositoryTest
}
}
@Test
public void testGetMetadataFacetsStreamWithLimit( )
throws Exception
{
// Populate the repository with 500 facets named TEST_NAME/0 .. TEST_NAME/499.
try ( RepositorySession session = getSessionFactory( ).createSession( ) )
{
for (int i = 0; i<500; i++)
{
getRepository( ).addMetadataFacet( session, TEST_REPO_ID, new TestMetadataFacet( TEST_FACET_ID, TEST_VALUE, TEST_NAME+"/"+i ) );
}
}
try ( RepositorySession session = getSessionFactory( ).createSession( ) )
{
tryAssert( ( ) -> {
// Request only the first page: offset 0, limit 100.
Stream<TestMetadataFacet> str = getRepository( ).getMetadataFacetStream( session, TEST_REPO_ID, TestMetadataFacet.class, 0, 100 );
assertNotNull( str );
List<TestMetadataFacet> result = str.collect( Collectors.toList( ) );
assertEquals( 100, result.size( ) );
for (int i=0; i<10; i++) {
log.info( "Result {}", result.get( i ).getName( ) );
}
// The stream must be ordered by name and start at the first facet.
assertEquals( TEST_NAME+"/"+0, result.get( 0 ).getName( ) );
}, 2, 500 );
}
}
@Test
public void testGetMetadataFacetsWhenEmpty( )
throws Exception
@ -1320,8 +1348,11 @@ public abstract class AbstractMetadataRepositoryTest
session.save( );
// test it restricts to the appropriate repository
tryAssert( ( ) -> assertEquals( Collections.singletonList( artifact ), getRepository( ).getArtifacts( session, TEST_REPO_ID ) ) );
tryAssert( ( ) -> assertEquals( Collections.singletonList( secondArtifact ), getRepository( ).getArtifacts( session, OTHER_REPO_ID ) ) );
tryAssert( ( ) -> {
List<ArtifactMetadata> artifact1 = getRepository( ).getArtifacts( session, TEST_REPO_ID );
assertEquals( Collections.singletonList( artifact ), artifact1 );
assertEquals( Collections.singletonList( secondArtifact ), getRepository( ).getArtifacts( session, OTHER_REPO_ID ) );
});
}
}
@ -2254,18 +2285,36 @@ public abstract class AbstractMetadataRepositoryTest
private String value;
private String name = TEST_NAME;
// Facet with the default id (TEST_FACET_ID) and default name.
private TestMetadataFacet( String value )
{
this.value = value;
testFacetId = TEST_FACET_ID;
}
// Facet with an explicit facet id; the name stays at the TEST_NAME default.
private TestMetadataFacet( String facetId, String value )
{
this.value = value;
testFacetId = facetId;
}
// Facet with explicit facet id and name.
private TestMetadataFacet( String facetId, String value, String name)
{
this.value = value;
testFacetId = facetId;
this.name = name;
}
// Facet with explicit id, name, and additional properties.
private TestMetadataFacet( String facetId, String value, String name, Map<String, String> additionalProps )
{
this( facetId, value, name );
this.additionalProps = additionalProps;
}
private TestMetadataFacet( String facetId, String value, Map<String, String> additionalProps )
{
this( facetId, value );
@ -2281,7 +2330,7 @@ public abstract class AbstractMetadataRepositoryTest
@Override
public String getName( )
{
return TEST_NAME;
return name;
}
@Override
@ -2341,7 +2390,7 @@ public abstract class AbstractMetadataRepositoryTest
@Override
public String toString( )
{
return "TestMetadataFacet{" + "value='" + value + '\'' + '}';
return "TestMetadataFacet{ name='"+ name+ "' value='" + value + '\'' + '}';
}
@Override

View File

@ -52,9 +52,11 @@ import org.apache.archiva.metadata.model.ProjectMetadata;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.model.Scm;
import org.apache.archiva.metadata.repository.AbstractMetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.MetadataService;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.cassandra.model.ArtifactMetadataModel;
import org.apache.archiva.metadata.repository.cassandra.model.MetadataFacetModel;
@ -62,11 +64,13 @@ import org.apache.archiva.metadata.repository.cassandra.model.Namespace;
import org.apache.archiva.metadata.repository.cassandra.model.Project;
import org.apache.archiva.metadata.repository.cassandra.model.ProjectVersionMetadataModel;
import org.apache.archiva.metadata.repository.cassandra.model.Repository;
import org.apache.archiva.repository.RepositoryException;
import org.apache.commons.lang.StringUtils;
import org.modelmapper.ModelMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@ -78,7 +82,12 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Spliterator;
import java.util.UUID;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import static org.apache.archiva.metadata.repository.cassandra.CassandraUtils.*;
import static org.apache.archiva.metadata.repository.cassandra.model.ColumnNames.*;
@ -88,15 +97,13 @@ import static org.apache.archiva.metadata.repository.cassandra.model.ColumnNames
* @since 2.0.0
*/
public class CassandraMetadataRepository
implements MetadataRepository
extends AbstractMetadataRepository implements MetadataRepository
{
private Logger logger = LoggerFactory.getLogger( getClass() );
private ArchivaConfiguration configuration;
private final Map<String, MetadataFacetFactory> metadataFacetFactories;
private final CassandraArchivaManager cassandraArchivaManager;
private final ColumnFamilyTemplate<String, String> projectVersionMetadataTemplate;
@ -117,11 +124,11 @@ public class CassandraMetadataRepository
private final StringSerializer ss = StringSerializer.get();
public CassandraMetadataRepository( Map<String, MetadataFacetFactory> metadataFacetFactories,
public CassandraMetadataRepository( MetadataService metadataService,
ArchivaConfiguration configuration,
CassandraArchivaManager cassandraArchivaManager )
{
this.metadataFacetFactories = metadataFacetFactories;
super( metadataService );
this.configuration = configuration;
this.cassandraArchivaManager = cassandraArchivaManager;
this.keyspace = cassandraArchivaManager.getKeyspace();
@ -1026,7 +1033,7 @@ public class CassandraMetadataRepository
{
for ( Map.Entry<String, Map<String, String>> entry : metadataFacetsPerFacetIds.entrySet() )
{
MetadataFacetFactory metadataFacetFactory = metadataFacetFactories.get( entry.getKey() );
MetadataFacetFactory metadataFacetFactory = getFacetFactory( entry.getKey() );
if ( metadataFacetFactory != null )
{
MetadataFacet metadataFacet = metadataFacetFactory.createMetadataFacet();
@ -1445,7 +1452,7 @@ public class CassandraMetadataRepository
String cf = cassandraArchivaManager.getMetadataFacetFamilyName();
for ( final String facetId : metadataFacetFactories.keySet() )
for ( final String facetId : getSupportedFacets() )
{
MetadataFacet metadataFacet = facetedMetadata.getFacet( facetId );
if ( metadataFacet == null )
@ -1518,6 +1525,75 @@ public class CassandraMetadataRepository
return facets;
}
/**
 * Wraps a Hector query result in a sequential {@link Spliterator}, applying
 * the given converter to each row and skipping rows the converter rejects
 * (converter returns null).
 *
 * Bug fix: the previous implementation restarted the row iteration on every
 * tryAdvance() call, so it emitted the first row's element forever and the
 * resulting stream never terminated.
 */
private <T> Spliterator<T> createResultSpliterator( QueryResult<OrderedRows<String, String, String>> result, Function<Row<String, String, String>, T> converter) throws MetadataRepositoryException
{
final int size = result.get().getCount();
// Single iterator shared across tryAdvance() calls so each row is visited once.
final java.util.Iterator<Row<String, String, String>> rowIterator = result.get( ).iterator( );
return new Spliterator<T>( )
{
@Override
public boolean tryAdvance( Consumer<? super T> action )
{
// Advance to the next row whose conversion yields a non-null element.
while ( rowIterator.hasNext( ) )
{
T item = converter.apply( rowIterator.next( ) );
if ( item != null )
{
action.accept( item );
return true;
}
}
return false;
}
@Override
public Spliterator<T> trySplit( )
{
// Sequential-only: splitting is not supported.
return null;
}
@Override
public long estimateSize( )
{
// Upper bound only — the converter may drop rows.
return size;
}
@Override
public int characteristics( )
{
// Combine flags with bitwise OR ('+' only worked because the constants
// are distinct bits). SIZED is not reported because estimateSize() is
// an upper bound, not an exact count.
return ORDERED | NONNULL;
}
};
}
@Override
public <T extends MetadataFacet> Stream<T> getMetadataFacetStream( RepositorySession session, String repositoryId, Class<T> facetClazz, long offset, long maxEntries ) throws MetadataRepositoryException
{
final MetadataFacetFactory<T> metadataFacetFactory = getFacetFactory( facetClazz );
final String facetId = metadataFacetFactory.getFacetId( );
// Range-slice query over the facet column family, restricted to this
// repository and facet id.
QueryResult<OrderedRows<String, String, String>> result = HFactory //
.createRangeSlicesQuery( keyspace, ss, ss, ss ) //
.setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
.setColumnNames( NAME.toString() ) //
.addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
.addEqualsExpression( FACET_ID.toString(), facetId ) //
.execute();
// NOTE(review): only the NAME column is requested above, yet KEY and VALUE
// are read in the converter below — confirm the column slice contains them.
return StreamSupport.stream( createResultSpliterator( result, ( Row<String, String, String> row)-> {
ColumnSlice<String, String> columnSlice = row.getColumnSlice();
String name = getStringValue( columnSlice, NAME.toString( ) );
T metadataFacet = metadataFacetFactory.createMetadataFacet( repositoryId, name );
Map<String, String> map = new HashMap<>( );
map.put( getStringValue( columnSlice, KEY.toString() ), getStringValue( columnSlice, VALUE.toString() ) );
metadataFacet.fromProperties( map );
return metadataFacet;
}), false )
// Bug fix: honor the paging contract — the previous version accepted
// offset/maxEntries but never applied them.
.skip( offset )
.limit( maxEntries );
}
@Override
public boolean hasMetadataFacet( RepositorySession session, String repositoryId, String facetId )
throws MetadataRepositoryException
@ -1530,7 +1606,7 @@ public class CassandraMetadataRepository
throws MetadataRepositoryException
{
MetadataFacetFactory metadataFacetFactory = metadataFacetFactories.get( facetId );
MetadataFacetFactory metadataFacetFactory = getFacetFactory( facetId );
if ( metadataFacetFactory == null )
{
return null;
@ -1561,6 +1637,12 @@ public class CassandraMetadataRepository
return metadataFacet;
}
@Override
public <T extends MetadataFacet> T getMetadataFacet( RepositorySession session, String repositoryId, Class<T> clazz, String name ) throws MetadataRepositoryException
{
// Bug fix: this previously always returned null, breaking typed facet
// retrieval for the Cassandra backend. Delegate to the existing id-based
// lookup via the factory registered for the requested class.
final MetadataFacetFactory<T> factory = getFacetFactory( clazz );
if ( factory == null )
{
return null;
}
@SuppressWarnings( "unchecked" )
T facet = (T) getMetadataFacet( session, repositoryId, factory.getFacetId( ), name );
return facet;
}
@Override
public void addMetadataFacet( RepositorySession session, String repositoryId, MetadataFacet metadataFacet )
throws MetadataRepositoryException
@ -1703,6 +1785,12 @@ public class CassandraMetadataRepository
return artifactMetadatas;
}
@Override
public Stream<ArtifactMetadata> getArtifactsByDateRangeStream( RepositorySession session, String repositoryId, ZonedDateTime startTime, ZonedDateTime endTime, long offset, long maxEntries ) throws MetadataRepositoryException
{
// Not yet implemented for the Cassandra backend. Fail fast instead of
// returning null, which callers would only discover via a later NPE; this
// also matches the stub convention in AbstractMetadataRepository.
throw new UnsupportedOperationException( "getArtifactsByDateRangeStream is not implemented for the Cassandra metadata repository" );
}
protected ArtifactMetadata mapArtifactMetadataLongColumnSlice( ColumnSlice<String, Long> columnSlice )
{
@ -2175,7 +2263,7 @@ public class CassandraMetadataRepository
for ( Map.Entry<String, List<MetadataFacetModel>> entry : metadataFacetValuesPerFacetId.entrySet() )
{
MetadataFacetFactory metadataFacetFactory = metadataFacetFactories.get( entry.getKey() );
MetadataFacetFactory metadataFacetFactory = getFacetFactory( entry.getKey() );
if ( metadataFacetFactory != null )
{
List<MetadataFacetModel> facetModels = entry.getValue();

View File

@ -24,6 +24,7 @@ import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.repository.AbstractRepositorySessionFactory;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.MetadataResolver;
import org.apache.archiva.metadata.repository.MetadataService;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.commons.lang.StringUtils;
@ -44,8 +45,6 @@ public class CassandraRepositorySessionFactory extends AbstractRepositorySession
implements RepositorySessionFactory
{
private Map<String, MetadataFacetFactory> metadataFacetFactories;
@Inject
@Named(value = "archivaConfiguration#default")
private ArchivaConfiguration configuration;
@ -59,18 +58,11 @@ public class CassandraRepositorySessionFactory extends AbstractRepositorySession
@Inject
private CassandraArchivaManager cassandraArchivaManager;
@Inject
private MetadataService metadataService;
public void initialize()
{
Map<String, MetadataFacetFactory> tmpMetadataFacetFactories =
applicationContext.getBeansOfType( MetadataFacetFactory.class );
// olamy with spring the ID.toString() is now "metadataFacetFactory#hint"
// whereas was only hint with plexus so let remove metadataFacetFactory#
metadataFacetFactories = new HashMap<>( tmpMetadataFacetFactories.size() );
for ( Map.Entry<String, MetadataFacetFactory> entry : tmpMetadataFacetFactories.entrySet() )
{
metadataFacetFactories.put( StringUtils.substringAfterLast( entry.getKey(), "#" ), entry.getValue() );
}
}
@Override
@ -83,7 +75,7 @@ public class CassandraRepositorySessionFactory extends AbstractRepositorySession
public RepositorySession createSession() throws MetadataRepositoryException
{
CassandraMetadataRepository metadataRepository =
new CassandraMetadataRepository( metadataFacetFactories, configuration, cassandraArchivaManager );
new CassandraMetadataRepository( metadataService, configuration, cassandraArchivaManager );
return new RepositorySession( metadataRepository, metadataResolver );
}

View File

@ -22,6 +22,7 @@ package org.apache.archiva.metadata.repository.cassandra;
import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.repository.AbstractMetadataRepositoryTest;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataService;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.cassandra.model.ProjectVersionMetadataModel;
@ -36,6 +37,7 @@ import javax.inject.Named;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
@ -85,9 +87,11 @@ public class CassandraMetadataRepositoryTest
org.apache.archiva.common.utils.FileUtils.deleteDirectory( directory );
}
Map<String, MetadataFacetFactory> factories = createTestMetadataFacetFactories();
List<MetadataFacetFactory> factories = createTestMetadataFacetFactories();
MetadataService metadataService = new MetadataService( );
metadataService.setMetadataFacetFactories( factories );
this.cmr = new CassandraMetadataRepository( factories, null, cassandraArchivaManager );
this.cmr = new CassandraMetadataRepository( metadataService, null, cassandraArchivaManager );
sessionFactoryControl = EasyMock.createControl( );
sessionFactory = sessionFactoryControl.createMock( RepositorySessionFactory.class );

View File

@ -34,9 +34,11 @@ import org.apache.archiva.metadata.model.ProjectMetadata;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.model.Scm;
import org.apache.archiva.metadata.repository.AbstractMetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.MetadataService;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -50,6 +52,7 @@ import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@ -68,9 +71,8 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;
public class FileMetadataRepository
implements MetadataRepository
extends AbstractMetadataRepository implements MetadataRepository
{
private final Map<String, MetadataFacetFactory> metadataFacetFactories;
private final ArchivaConfiguration configuration;
@ -84,25 +86,33 @@ public class FileMetadataRepository
private static final String METADATA_KEY = "metadata";
public FileMetadataRepository( Map<String, MetadataFacetFactory> metadataFacetFactories,
private Map<String, Path> baseDirectory = new HashMap<>( );
public FileMetadataRepository( MetadataService metadataService,
ArchivaConfiguration configuration )
{
this.metadataFacetFactories = metadataFacetFactories;
super( metadataService );
this.configuration = configuration;
}
private Path getBaseDirectory(String repoId )
throws IOException
{
// TODO: should be configurable, like the index
ManagedRepositoryConfiguration managedRepositoryConfiguration =
configuration.getConfiguration().getManagedRepositoriesAsMap().get( repoId );
if ( managedRepositoryConfiguration == null )
if (!baseDirectory.containsKey( repoId ))
{
return Files.createTempDirectory( repoId );
Path baseDir;
ManagedRepositoryConfiguration managedRepositoryConfiguration =
configuration.getConfiguration( ).getManagedRepositoriesAsMap( ).get( repoId );
if ( managedRepositoryConfiguration == null )
{
baseDir = Files.createTempDirectory( repoId );
} else
{
baseDir = Paths.get( managedRepositoryConfiguration.getLocation( ) );
}
baseDirectory.put(repoId, baseDir.resolve( ".archiva" ));
}
String basedir = managedRepositoryConfiguration.getLocation();
return Paths.get( basedir, ".archiva" );
return baseDirectory.get(repoId);
}
private Path getDirectory( String repoId )
@ -339,6 +349,30 @@ public class FileMetadataRepository
}
}
@Override
public <T extends MetadataFacet> Stream<T> getMetadataFacetStream( RepositorySession session, String repositoryId, Class<T> facetClazz, long offset, long maxEntries ) throws MetadataRepositoryException
{
final MetadataFacetFactory<T> metadataFacetFactory = getFacetFactory( facetClazz );
if (metadataFacetFactory==null) {
// No factory registered for this facet type — nothing to stream.
// NOTE(review): returning null (not an empty stream) forces null checks on callers.
return null;
}
final String facetId = metadataFacetFactory.getFacetId( );
// Each facet instance lives in a <name>/metadata.properties file below the
// facet directory; the path relative to the facet root is the facet name.
final String searchFile = METADATA_KEY + ".properties";
try
{
Path directory = getMetadataDirectory( repositoryId, facetId );
// Sort by name for a stable order, apply paging, then load each facet lazily.
// NOTE(review): the Files.walk stream stays open until the RETURNED stream
// is closed — callers must close it (e.g. try-with-resources).
return Files.walk( directory, FileVisitOption.FOLLOW_LINKS ).filter( Files::isDirectory )
.filter( path -> Files.exists( path.resolve( searchFile ) ) )
.map(path -> directory.relativize(path).toString())
.sorted()
.skip( offset )
.limit(maxEntries)
.map(name -> getMetadataFacet( session, repositoryId, facetClazz, name ));
} catch (IOException e) {
throw new MetadataRepositoryException( e.getMessage( ), e );
}
}
@Override
public boolean hasMetadataFacet( RepositorySession session, String repositoryId, String facetId )
throws MetadataRepositoryException
@ -362,8 +396,14 @@ public class FileMetadataRepository
@Override
public MetadataFacet getMetadataFacet( RepositorySession session, String repositoryId, String facetId, String name )
public <T extends MetadataFacet> T getMetadataFacet( RepositorySession session, String repositoryId, Class<T> facetClazz, String name )
{
final MetadataFacetFactory<T> metadataFacetFactory = getFacetFactory( facetClazz );
if (metadataFacetFactory==null) {
return null;
}
final String facetId = metadataFacetFactory.getFacetId( );
Properties properties;
try
{
@ -379,8 +419,7 @@ public class FileMetadataRepository
log.error( "Could not read properties from {}, {}: {}", repositoryId, facetId, e.getMessage(), e );
return null;
}
MetadataFacet metadataFacet = null;
MetadataFacetFactory metadataFacetFactory = metadataFacetFactories.get( facetId );
T metadataFacet = null;
if ( metadataFacetFactory != null )
{
metadataFacet = metadataFacetFactory.createMetadataFacet( repositoryId, name );
@ -395,6 +434,8 @@ public class FileMetadataRepository
return metadataFacet;
}
@Override
public void addMetadataFacet( RepositorySession session, String repositoryId, MetadataFacet metadataFacet )
{
@ -467,6 +508,18 @@ public class FileMetadataRepository
}
}
@Override
public Stream<ArtifactMetadata> getArtifactsByDateRangeStream( RepositorySession session, String repositoryId, ZonedDateTime startTime, ZonedDateTime endTime ) throws MetadataRepositoryException
{
// Bug fix: delegate to the paged variant with "no paging" bounds, mirroring
// the default in AbstractMetadataRepository, instead of returning null and
// bypassing the paged implementation entirely.
return getArtifactsByDateRangeStream( session, repositoryId, startTime, endTime, 0, Long.MAX_VALUE );
}
@Override
public Stream<ArtifactMetadata> getArtifactsByDateRangeStream( RepositorySession session, String repositoryId, ZonedDateTime startTime, ZonedDateTime endTime, long offset, long maxEntries ) throws MetadataRepositoryException
{
// Not yet implemented for the file backend. Fail fast instead of returning
// null, matching the stub convention in AbstractMetadataRepository.
throw new UnsupportedOperationException( "getArtifactsByDateRangeStream is not implemented for the file metadata repository" );
}
private void getArtifactsByDateRange( RepositorySession session, List<ArtifactMetadata> artifacts, String repoId, String ns, Date startTime,
Date endTime )
throws MetadataRepositoryException
@ -568,7 +621,7 @@ public class FileMetadataRepository
String propertyPrefix = "artifact:facet:" + id + ":";
for ( String facetId : value.split( "," ) )
{
MetadataFacetFactory factory = metadataFacetFactories.get( facetId );
MetadataFacetFactory factory = getFacetFactory( facetId );
if ( factory == null )
{
log.error( "Attempted to load unknown artifact metadata facet: {}", facetId );
@ -1132,7 +1185,7 @@ public class FileMetadataRepository
{
for ( String facetId : facetIds.split( "," ) )
{
MetadataFacetFactory factory = metadataFacetFactories.get( facetId );
MetadataFacetFactory factory = getFacetFactory( facetId );
if ( factory == null )
{
log.error( "Attempted to load unknown project version metadata facet: {}", facetId );

View File

@ -25,6 +25,7 @@ import org.apache.archiva.metadata.repository.AbstractRepositorySessionFactory;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.MetadataResolver;
import org.apache.archiva.metadata.repository.MetadataService;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.commons.lang.StringUtils;
@ -43,8 +44,6 @@ import java.util.Map;
public class FileRepositorySessionFactory extends AbstractRepositorySessionFactory
implements RepositorySessionFactory
{
private Map<String, MetadataFacetFactory> metadataFacetFactories;
@Inject
@Named( value = "archivaConfiguration#default" )
private ArchivaConfiguration configuration;
@ -55,20 +54,13 @@ public class FileRepositorySessionFactory extends AbstractRepositorySessionFacto
@Inject
private ApplicationContext applicationContext;
@Inject
private MetadataService metadataService;
public void initialize()
{
Map<String, MetadataFacetFactory> tmpMetadataFacetFactories =
applicationContext.getBeansOfType( MetadataFacetFactory.class );
// olamy with spring the "id" is now "metadataFacetFactory#hint"
// whereas was only hint with plexus so let remove metadataFacetFactory#
metadataFacetFactories = new HashMap<>( tmpMetadataFacetFactories.size() );
for ( Map.Entry<String, MetadataFacetFactory> entry : tmpMetadataFacetFactories.entrySet() )
{
metadataFacetFactories.put( StringUtils.substringAfterLast( entry.getKey(), "#" ), entry.getValue() );
}
}
@Override
@ -79,7 +71,7 @@ public class FileRepositorySessionFactory extends AbstractRepositorySessionFacto
@Override
public RepositorySession createSession() throws MetadataRepositoryException
{
MetadataRepository metadataRepository = new FileMetadataRepository( metadataFacetFactories, configuration );
MetadataRepository metadataRepository = new FileMetadataRepository( metadataService, configuration );
return new RepositorySession( metadataRepository, metadataResolver );
}

View File

@ -25,6 +25,7 @@ import org.apache.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.repository.AbstractMetadataRepositoryTest;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataService;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.junit.Before;
import org.junit.Ignore;
@ -32,6 +33,7 @@ import org.junit.Ignore;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import static org.mockito.Mockito.mock;
@ -69,9 +71,11 @@ public class FileMetadataRepositoryTest
org.apache.archiva.common.utils.FileUtils.deleteDirectory( directory );
}
ArchivaConfiguration config = createTestConfiguration( directory );
Map<String, MetadataFacetFactory> factories = createTestMetadataFacetFactories();
List<MetadataFacetFactory> factories = createTestMetadataFacetFactories();
MetadataService metadataService = new MetadataService( );
metadataService.setMetadataFacetFactories( factories );
this.repository = new FileMetadataRepository( factories, config );
this.repository = new FileMetadataRepository( metadataService, config );
}
@Override

View File

@ -35,10 +35,12 @@ import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.model.Scm;
import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
import org.apache.archiva.metadata.repository.AbstractMetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.MetadataService;
import org.apache.archiva.metadata.repository.MetadataSessionException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.stats.model.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.model.RepositoryStatisticsProvider;
@ -98,7 +100,7 @@ import static org.apache.archiva.metadata.repository.jcr.JcrConstants.*;
* TODO revise reference storage
*/
public class JcrMetadataRepository
implements MetadataRepository, RepositoryStatisticsProvider
extends AbstractMetadataRepository implements MetadataRepository, RepositoryStatisticsProvider
{
@ -120,8 +122,6 @@ public class JcrMetadataRepository
private static final String QUERY_ARTIFACT_2 = "')";
private MetadataService metadataService;
private Logger log = LoggerFactory.getLogger( JcrMetadataRepository.class );
private Repository repository;
@ -129,7 +129,7 @@ public class JcrMetadataRepository
public JcrMetadataRepository( MetadataService metadataService, Repository repository )
throws RepositoryException
{
this.metadataService = metadataService;
super( metadataService );
this.repository = repository;
}
@ -542,21 +542,22 @@ public class JcrMetadataRepository
}
@Override
public <T extends MetadataFacet> Stream<T> getMetadataFacetStream( RepositorySession session, String repositoryId, Class<T> facetClazz ) throws MetadataRepositoryException
public <T extends MetadataFacet> Stream<T> getMetadataFacetStream( RepositorySession session, String repositoryId, Class<T> facetClazz,
long offset, long maxEntries) throws MetadataRepositoryException
{
final Session jcrSession = getSession( session );
final MetadataFacetFactory<T> factory = metadataService.getFactory( facetClazz );
final String facetId = factory.getFacetId( );
final String facetPath = getFacetPath( repositoryId, facetId );
String q = "SELECT * FROM ["+FACET_NODE_TYPE+"] AS facet WHERE ISDESCENDANTNODE(facet, [/"+facetPath+"])";
final String facetPath = '/'+getFacetPath( repositoryId, facetId );
String q = "SELECT * FROM ["+FACET_NODE_TYPE+"] AS facet WHERE ISDESCENDANTNODE(facet, ["+facetPath+"]) ORDER BY [facet].[archiva:name]";
Map<String, String> params = new HashMap<>( );
QueryResult result = runNativeJcrQuery( jcrSession, q, params );
QueryResult result = runNativeJcrQuery( jcrSession, q, params, offset, maxEntries );
return StreamSupport.stream( createResultSpliterator( result, (Row row)-> {
try
{
Node node = row.getNode( "facet" );
String path = StringUtils.removeStart( node.getPath(), facetPath);
return createFacet( factory, node, repositoryId, path );
String facetName = node.getProperty( "archiva:name" ).getString();
return createFacet( factory, node, repositoryId, facetName );
}
catch ( RepositoryException e )
{
@ -566,12 +567,6 @@ public class JcrMetadataRepository
}
@Override
public <T extends MetadataFacet> Stream<T> getMetadataFacetStream( RepositorySession session, String repositoryId, Class<T> facetClazz, long offset, long maxEntries ) throws MetadataRepositoryException
{
return null;
}
private void recurse( List<String> facets, String prefix, Node node )
throws RepositoryException
{
@ -599,14 +594,14 @@ public class JcrMetadataRepository
return null;
}
final Session jcrSession = getSession( session );
final MetadataFacetFactory<T> factory = metadataService.getFactory( clazz );
final MetadataFacetFactory<T> factory = getFacetFactory( clazz );
final String facetId = factory.getFacetId( );
try
{
Node root = jcrSession.getRootNode();
Node node = root.getNode( getFacetPath( repositoryId, facetId, name ) );
if ( metadataService.getSupportedFacets().size()==0)
if ( getSupportedFacets().size()==0)
{
return null;
}
@ -645,13 +640,6 @@ public class JcrMetadataRepository
return null;
}
@Override
public MetadataFacet getMetadataFacet( RepositorySession session, String repositoryId, String facetId, String name )
throws MetadataRepositoryException
{
return getMetadataFacet( session, repositoryId, metadataService.getFactoryClassForId( facetId ), name );
}
@Override
public void addMetadataFacet( RepositorySession session, String repositoryId, MetadataFacet metadataFacet )
throws MetadataRepositoryException
@ -665,19 +653,21 @@ public class JcrMetadataRepository
String id = metadataFacet.getFacetId();
Node facetNode = JcrUtils.getOrAddNode( facets, id );
Node node = getOrAddNodeByPath( facetNode, metadataFacet.getName() );
if (!node.isNodeType( FACET_NODE_TYPE ))
Node facetInstance = getOrAddNodeByPath( facetNode, metadataFacet.getName() );
if (!facetInstance.isNodeType( FACET_NODE_TYPE ))
{
node.addMixin( FACET_NODE_TYPE );
node.setProperty( "facetId", id );
facetInstance.addMixin( FACET_NODE_TYPE );
facetInstance.setProperty( "archiva:facetId", id );
facetInstance.setProperty( "archiva:name", metadataFacet.getName( ) );
}
for ( Map.Entry<String, String> entry : metadataFacet.toProperties().entrySet() )
{
node.setProperty( entry.getKey(), entry.getValue() );
facetInstance.setProperty( entry.getKey(), entry.getValue() );
}
session.save();
}
catch ( RepositoryException e )
catch ( RepositoryException | MetadataSessionException e )
{
throw new MetadataRepositoryException( e.getMessage(), e );
}
@ -695,7 +685,7 @@ public class JcrMetadataRepository
if ( root.hasNode( path ) )
{
Node node = root.getNode( path );
if ( node.isNodeType( org.apache.archiva.metadata.repository.jcr.JcrConstants.NAMESPACE_NODE_TYPE ) )
if ( node.isNodeType( NAMESPACE_NODE_TYPE ) )
{
node.remove();
}
@ -801,12 +791,6 @@ public class JcrMetadataRepository
return artifacts;
}
@Override
public Stream<ArtifactMetadata> getArtifactsByDateRangeStream( RepositorySession session, String repositoryId, ZonedDateTime startTime, ZonedDateTime endTime ) throws MetadataRepositoryException
{
return null;
}
@Override
public Stream<ArtifactMetadata> getArtifactsByDateRangeStream( RepositorySession session, String repositoryId, ZonedDateTime startTime, ZonedDateTime endTime, long offset, long maxEntries ) throws MetadataRepositoryException
{
@ -887,7 +871,12 @@ public class JcrMetadataRepository
return artifacts;
}
public QueryResult runNativeJcrQuery( final Session jcrSession, final String q, final Map<String, String> bindingParam)
public QueryResult runNativeJcrQuery( final Session jcrSession, final String q, final Map<String, String> bindingParam) throws MetadataRepositoryException
{
return runNativeJcrQuery( jcrSession, q, bindingParam, 0, Long.MAX_VALUE );
}
public QueryResult runNativeJcrQuery( final Session jcrSession, final String q, final Map<String, String> bindingParam, long offset, long maxEntries)
throws MetadataRepositoryException
{
Map<String, String> bindings;
@ -899,8 +888,10 @@ public class JcrMetadataRepository
try
{
log.debug( "Query: {}", q );
log.debug( "Query: offset={}, limit={}, query={}", offset, maxEntries, q );
Query query = jcrSession.getWorkspace().getQueryManager().createQuery( q, Query.JCR_SQL2 );
query.setLimit( maxEntries );
query.setOffset( offset );
ValueFactory valueFactory = jcrSession.getValueFactory();
for ( Entry<String, String> entry : bindings.entrySet() )
{
@ -1350,7 +1341,7 @@ public class JcrMetadataRepository
try
{
return getNodeNames( getSession(session), path, org.apache.archiva.metadata.repository.jcr.JcrConstants.NAMESPACE_NODE_TYPE );
return getNodeNames( getSession(session), path, NAMESPACE_NODE_TYPE );
}
catch ( MetadataRepositoryException e )
{
@ -1823,7 +1814,7 @@ public class JcrMetadataRepository
throws RepositoryException
{
Node repo = getOrAddRepositoryContentNode( jcrSession, repositoryId );
return getOrAddNodeByPath( repo, namespace.replace( '.', '/' ), org.apache.archiva.metadata.repository.jcr.JcrConstants.NAMESPACE_NODE_TYPE );
return getOrAddNodeByPath( repo, namespace.replace( '.', '/' ), NAMESPACE_NODE_TYPE );
}
private Node getOrAddProjectNode( Session jcrSession, String repositoryId, String namespace, String projectId )

View File

@ -77,6 +77,7 @@ import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.jcr.Repository;
import java.io.Closeable;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@ -467,7 +468,7 @@ public class OakRepositoryFactory
FACET_NODE_TYPE //
), Type.STRINGS );
IndexDefinitionBuilder idxBuilder = new IndexDefinitionBuilder( lucene );
idxBuilder.async( "async", "nrt" ).includedPaths( "/repositories" ).evaluatePathRestrictions();
idxBuilder.async( "async", "nrt", "sync" ).includedPaths( "/repositories" ).evaluatePathRestrictions();
initBaseRule(idxBuilder.indexRule( REPOSITORY_NODE_TYPE ));
initBaseRule(idxBuilder.indexRule( NAMESPACE_NODE_TYPE ))
@ -485,8 +486,12 @@ public class OakRepositoryFactory
.property( "whenGathered" ).type("Date").propertyIndex().analyzed().ordered()
.property("size").type("Long").propertyIndex().analyzed().ordered()
.property("version").propertyIndex().analyzed().ordered();
initRegexAll( idxBuilder.indexRule( FACET_NODE_TYPE ) )
.property("facetId").propertyIndex().analyzed().ordered();
.property("archiva:facetId").propertyIndex().analyzed().ordered()
.property("archiva:name").propertyIndex().analyzed().ordered();
idxBuilder.indexRule( MIXIN_META_SCM )
.property( "scm.connection" ).propertyIndex()
.property( "scm.developerConnection" ).propertyIndex()
@ -525,9 +530,6 @@ public class OakRepositoryFactory
log.info( "Index: {} repo-lucene: {}", lucene, lucene.getChildNode( "repo-lucene" ) );
log.info( "repo-lucene Properties: {}", lucene.getChildNode( "repo-lucene" ).getProperties( ) );
} else {
NodeBuilder lucene = oakIdx.child( "repo-lucene" );
lucene.setProperty( "reindex", true );
log.info( "No Index update" );
}
// IndexUtils.createIndexDefinition( )
@ -557,73 +559,36 @@ public class OakRepositoryFactory
Repository r = jcr.createRepository();
stopWatch.stop();
log.info( "time to create jcr repository: {} ms", stopWatch.getTime() );
// try
// {
// Thread.currentThread().sleep( 1000 );
// }
// catch ( InterruptedException e )
// {
// log.error( e.getMessage(), e );
// }
return r;
}
private void closeSilently( Closeable service) {
if (service!=null) {
try
{
service.close();
}
catch ( Throwable e )
{
//
}
}
}
public void close()
{
log.info( "Closing JCR RepositoryFactory" );
if ( fileStore != null )
{
fileStore.close();
}
if (backgroundObserver != null){
backgroundObserver.close();
}
if (externalIndexObserver != null){
externalIndexObserver.close();
}
if (indexProvider != null) {
indexProvider.close();
indexProvider = null;
}
if (documentQueue != null){
try
{
documentQueue.close();
}
catch ( IOException e )
{
e.printStackTrace( );
}
}
if (nrtIndexFactory != null){
try
{
nrtIndexFactory.close();
}
catch ( IOException e )
{
e.printStackTrace( );
}
}
//Close the copier first i.e. before executorService
if (indexCopier != null){
try
{
indexCopier.close();
}
catch ( IOException e )
{
e.printStackTrace( );
}
}
closeSilently( fileStore );
closeSilently( backgroundObserver );
closeSilently( externalIndexObserver );
closeSilently( indexProvider );
indexProvider = null;
closeSilently( documentQueue );
closeSilently( nrtIndexFactory );
closeSilently( indexCopier );
if (executorService != null){
executorService.shutdown();

View File

@ -106,4 +106,5 @@
+ * (archiva:facet) multiple
[archiva:facet] > archiva:base mixin
- facetId
- archiva:facetId
- archiva:name