mirror of https://github.com/apache/archiva.git

[MRM-1161], [MRM-1179], [MRM-1143] and [MRM-1160]

o upgraded nexus indexer to 2.0.0
o queue all indexing tasks to avoid interrupting a current indexing task being executed
o cleaned up nexus indexer and index cleanup consumers

git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@777816 13f79535-47bb-0310-9956-ffa450edef68

This commit is contained in:
parent 8b08426a74
commit 02a22f45a5
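Before the per-file hunks, a minimal sketch of the queueing pattern this commit introduces, assuming the TaskCreator and ArchivaTaskScheduler APIs shown in the hunks below; the surrounding class and method names here are hypothetical, for illustration only:

    import java.io.File;

    import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
    import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
    import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
    import org.codehaus.plexus.taskqueue.TaskQueueException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class QueueingCallSite
    {
        private static final Logger log = LoggerFactory.getLogger( QueueingCallSite.class );

        private final ArchivaTaskScheduler scheduler;

        public QueueingCallSite( ArchivaTaskScheduler scheduler )
        {
            this.scheduler = scheduler;
        }

        // Instead of executing consumers inline (which could interrupt an indexing
        // task already running), build a RepositoryTask for the file and queue it.
        public void onResourceStored( String repositoryId, File localFile )
        {
            RepositoryTask task = TaskCreator.createRepositoryTask( repositoryId, localFile.getName(), localFile );
            try
            {
                scheduler.queueRepositoryTask( task );
            }
            catch ( TaskQueueException e )
            {
                log.error( "Unable to queue repository task for '" + localFile.getName() + "'.", e );
            }
        }
    }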
@@ -19,10 +19,6 @@ package org.apache.archiva.consumers.lucene;
* under the License.
*/

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;

@@ -35,12 +31,12 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sonatype.nexus.index.ArtifactContext;
import org.sonatype.nexus.index.ArtifactContextProducer;
import org.sonatype.nexus.index.ArtifactInfo;
import org.sonatype.nexus.index.DefaultArtifactContextProducer;
import org.sonatype.nexus.index.NexusIndexer;
import org.sonatype.nexus.index.context.DefaultIndexingContext;
import org.sonatype.nexus.index.context.IndexingContext;
import org.sonatype.nexus.index.context.UnsupportedExistingLuceneIndexException;
import org.sonatype.nexus.index.creator.AbstractIndexCreator;
import org.sonatype.nexus.index.IndexerEngine;

import java.io.File;
import java.io.IOException;

@@ -59,37 +55,41 @@ public class LuceneCleanupRemoveIndexedConsumer

private RepositoryContentFactory repoFactory;

private NexusIndexer indexer;

private ArtifactContextProducer artifactContextProducer;

private IndexingContext context;

public LuceneCleanupRemoveIndexedConsumer( RepositoryContentFactory repoFactory, NexusIndexer indexer )

private IndexerEngine indexerEngine;

//TODO - deng - use indexerEngine to remove documents instead of directly using the IndexingContext!

public LuceneCleanupRemoveIndexedConsumer( RepositoryContentFactory repoFactory, IndexerEngine indexerEngine )
{
this.repoFactory = repoFactory;
this.indexer = indexer;
this.indexerEngine = indexerEngine;
this.artifactContextProducer = new DefaultArtifactContextProducer();
}

public void beginScan()
{

}

public void completeScan()
{
try
/*synchronized( indexerEngine )
{
context.getIndexWriter().close();
try
{
//context.getIndexWriter().close();

//indexerEngine.endIndexing( context );
indexer.removeIndexingContext( context, false );
}
catch ( IOException e )
{
log.error( e.getMessage() );
}
//indexerEngine.endIndexing( context );
//indexer.removeIndexingContext( context, false );
}
catch ( IOException e )
{
log.error( e.getMessage() );
}
} */
}

public List<String> getIncludedTypes()

@@ -101,11 +101,21 @@ public class LuceneCleanupRemoveIndexedConsumer
public void processArchivaArtifact( ArchivaArtifact artifact )
throws ConsumerException
{
try
{
ManagedRepositoryContent repoContent =
repoFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );

//synchronized( context )
//{
// TODO - deng - block this if there is the nexus indexer consumer is executing?
ManagedRepositoryContent repoContent = null;

try
{
repoContent =
repoFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
}
catch ( RepositoryException e )
{
throw new ConsumerException( "Can't run index cleanup consumer: " + e.getMessage() );
}

ManagedRepositoryConfiguration repository = repoContent.getRepository();
String indexDir = repository.getIndexDir();
File managedRepository = new File( repository.getLocation() );

@@ -118,70 +128,81 @@ public class LuceneCleanupRemoveIndexedConsumer
else
{
indexDirectory = new File( managedRepository, ".indexer" );
}

try
{
context =
new DefaultIndexingContext( repository.getId(), repository.getId(), managedRepository,
indexDirectory, null, null, NexusIndexer.FULL_INDEX, false );
//context =
// indexer.addIndexingContext( repository.getId(), repository.getId(), managedRepository,
// indexDirectory, null, null, NexusIndexer.FULL_INDEX );
context.setSearchable( repository.isScanned() );
}
catch ( UnsupportedExistingLuceneIndexException e )
{
log.warn( "Unsupported index format.", e );
return;
}
catch ( IOException e )
{
log.warn( "Unable to open index at " + indexDirectory.getAbsoluteFile(), e );
return;
}

synchronized ( indexer )
try
{
try
File artifactFile = new File( repoContent.getRepoRoot(), repoContent.toPath( artifact ) );

if ( !artifactFile.exists() )
{
context =
indexer.addIndexingContext( repository.getId(), repository.getId(), managedRepository,
indexDirectory, null, null, NexusIndexer.FULL_INDEX );
context.setSearchable( repository.isScanned() );
ArtifactContext artifactContext =
artifactContextProducer.getArtifactContext( context, artifactFile );

File artifactFile = new File( repoContent.getRepoRoot(), repoContent.toPath( artifact ) );

if ( !artifactFile.exists() )
if ( artifactContext != null )
{
ArtifactContext artifactContext =
artifactContextProducer.getArtifactContext( context, artifactFile );
//indexerEngine.remove( context, artifactContext );

if ( artifactContext != null )
{
//indexerEngine.remove( context, artifactContext );

// hack for deleting documents - indexer engine's remove(...) isn't working for me
removeDocuments( artifactContext );
}
indexerEngine.remove( context, artifactContext );

context.close( false );
// hack for deleting documents - indexer engine's remove(...) isn't working for me
//removeDocuments( artifactContext );
}
}
catch ( UnsupportedExistingLuceneIndexException e )
{
log.error( "Unsupported index format.", e );
}
catch ( IOException e )
{
log.error( "Unable to open index at " + indexDirectory.getAbsoluteFile(), e );
}
}
}
catch ( RepositoryException e )
{
throw new ConsumerException( "Can't run index cleanup consumer: " + e.getMessage() );
}
}
catch ( IOException e )
{
log.error( "Unable to open index at " + indexDirectory.getAbsoluteFile(), e );
}
// }
}

private void removeDocuments( ArtifactContext ac )
/* private void removeDocuments( ArtifactContext ac )
throws IOException
{
IndexWriter w = context.getIndexWriter();

ArtifactInfo ai = ac.getArtifactInfo();
String uinfo = AbstractIndexCreator.getGAV( ai.groupId, ai.artifactId, ai.version, ai.classifier, ai.packaging );

Document doc = new Document();
doc.add( new Field( ArtifactInfo.DELETED, uinfo, Field.Store.YES, Field.Index.NO ) );
doc.add( new Field( ArtifactInfo.LAST_MODIFIED, Long.toString( System.currentTimeMillis() ), Field.Store.YES,
Field.Index.NO ) );

w.addDocument( doc );

w.deleteDocuments( new Term( ArtifactInfo.UINFO, uinfo ) );

w.commit();

context.updateTimestamp();
}
synchronized( indexerEngine )
{
IndexWriter w = context.getIndexWriter();

ArtifactInfo ai = ac.getArtifactInfo();
String uinfo = AbstractIndexCreator.getGAV( ai.groupId, ai.artifactId, ai.version, ai.classifier, ai.packaging );

Document doc = new Document();
doc.add( new Field( ArtifactInfo.DELETED, uinfo, Field.Store.YES, Field.Index.NO ) );
doc.add( new Field( ArtifactInfo.LAST_MODIFIED, Long.toString( System.currentTimeMillis() ), Field.Store.YES,
Field.Index.NO ) );

w.addDocument( doc );

w.deleteDocuments( new Term( ArtifactInfo.UINFO, uinfo ) );

w.commit();

context.updateTimestamp();
}
}*/

public String getDescription()
{
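Note on the cleanup consumer above: the removeDocuments(...) "hack" deletes an indexed artifact by writing a Lucene tombstone document and then removing the live document by its UINFO term. Condensed from the hunk above into one sequence (a sketch only; w, ac and context are as in removeDocuments, and the field constants come from nexus-indexer 2.0):

    IndexWriter w = context.getIndexWriter();
    ArtifactInfo ai = ac.getArtifactInfo();
    String uinfo = AbstractIndexCreator.getGAV( ai.groupId, ai.artifactId, ai.version, ai.classifier, ai.packaging );

    // tombstone: records the deletion so incremental consumers of the index can see it
    Document doc = new Document();
    doc.add( new Field( ArtifactInfo.DELETED, uinfo, Field.Store.YES, Field.Index.NO ) );
    doc.add( new Field( ArtifactInfo.LAST_MODIFIED, Long.toString( System.currentTimeMillis() ), Field.Store.YES, Field.Index.NO ) );
    w.addDocument( doc );

    // drop the live document for the artifact and make the change durable
    w.deleteDocuments( new Term( ArtifactInfo.UINFO, uinfo ) );
    w.commit();
    context.updateTimestamp();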
@@ -24,13 +24,9 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.zip.ZipException;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;

@@ -40,14 +36,14 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sonatype.nexus.index.ArtifactContext;
import org.sonatype.nexus.index.ArtifactContextProducer;
import org.sonatype.nexus.index.ArtifactInfo;
import org.sonatype.nexus.index.DefaultArtifactContextProducer;
import org.sonatype.nexus.index.NexusIndexer;
import org.sonatype.nexus.index.context.DefaultIndexingContext;
import org.sonatype.nexus.index.context.IndexingContext;
import org.sonatype.nexus.index.context.UnsupportedExistingLuceneIndexException;
import org.sonatype.nexus.index.creator.AbstractIndexCreator;
import org.sonatype.nexus.index.creator.IndexerEngine;
import org.sonatype.nexus.index.IndexerEngine;
import org.sonatype.nexus.index.packer.IndexPacker;
import org.sonatype.nexus.index.packer.IndexPackingRequest;

/**
* Consumer for indexing the repository to provide search and IDE integration features.

@@ -58,8 +54,6 @@ public class NexusIndexerConsumer
{
private static final Logger log = LoggerFactory.getLogger( NexusIndexerConsumer.class );

private NexusIndexer indexer;

private ArtifactContextProducer artifactContextProducer;

private IndexPacker indexPacker;

@@ -72,16 +66,23 @@ public class NexusIndexerConsumer

private IndexerEngine indexerEngine;

private Set<String> uinfos;

public NexusIndexerConsumer( NexusIndexer indexer, IndexPacker indexPacker, IndexerEngine indexerEngine )
//private IndexingContextMap indexingContextMap;

public NexusIndexerConsumer( IndexPacker indexPacker, IndexerEngine indexerEngine )
{
this.indexer = indexer;
this.indexPacker = indexPacker;
this.indexerEngine = indexerEngine;
this.indexerEngine = indexerEngine;
this.artifactContextProducer = new DefaultArtifactContextProducer();
}

/* public NexusIndexerConsumer( IndexPacker indexPacker, IndexerEngine indexerEngine, IndexingContextMap indexingContextMap )
{
this.indexPacker = indexPacker;
this.indexerEngine = indexerEngine;
this.indexingContextMap = indexingContextMap;
this.artifactContextProducer = new DefaultArtifactContextProducer();
}*/

public String getDescription()
{
return "Indexes the repository to provide search and IDE integration features";

@@ -99,53 +100,39 @@ public class NexusIndexerConsumer

public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered )
throws ConsumerException
{
managedRepository = new File( repository.getLocation() );
String indexDir = repository.getIndexDir();

File indexDirectory = null;
if( indexDir != null && !"".equals( indexDir ) )
{
indexDirectory = new File( repository.getIndexDir() );
}
else
{
indexDirectory = new File( managedRepository, ".indexer" );
}

repositoryContent = new ManagedDefaultRepositoryContent();
repositoryContent.setRepository( repository );
uinfos = new HashSet<String>();

synchronized ( indexer )
{
try
{
//synchronized( context )
//{
log.debug( "Begin indexing of repository '" + repository.getId() + "'..");

managedRepository = new File( repository.getLocation() );
String indexDir = repository.getIndexDir();

File indexDirectory = null;
if( indexDir != null && !"".equals( indexDir ) )
{
indexDirectory = new File( repository.getIndexDir() );
}
else
{
indexDirectory = new File( managedRepository, ".indexer" );
}

repositoryContent = new ManagedDefaultRepositoryContent();
repositoryContent.setRepository( repository );

try
{
context =
indexer.addIndexingContext( repository.getId(), repository.getId(), managedRepository,
indexDirectory, null, null, NexusIndexer.FULL_INDEX );
new DefaultIndexingContext( repository.getId(), repository.getId(), managedRepository,
indexDirectory, null, null, NexusIndexer.FULL_INDEX, false );

//context = indexingContextMap.addIndexingContext( repository.getId(), repository.getId(), managedRepository,
// indexDirectory, null, null, NexusIndexer.FULL_INDEX, false );

context.setSearchable( repository.isScanned() );

// read index to get all the artifacts already indexed
IndexReader r = context.getIndexReader();
for ( int i = 0; i < r.numDocs(); i++ )
{
if ( !r.isDeleted( i ) )
{
Document d = r.document( i );
String uinfo = d.get( ArtifactInfo.UINFO );

// should we add a check here if the contents of the document still exist in the file system
// for cases when there is already an existing index & the contents of that index doesn't exist
// in the file system & in the database?
if ( uinfo != null )
{
uinfos.add( uinfo );
}
}
}

indexerEngine.beginIndexing( context );
//indexerEngine.beginIndexing( context );
}
catch ( UnsupportedExistingLuceneIndexException e )
{

@@ -155,68 +142,65 @@ public class NexusIndexerConsumer
{
throw new ConsumerException( "Could not create index at " + indexDirectory.getAbsoluteFile(), e );
}
}
//}
}

public void processFile( String path )
throws ConsumerException
{
if ( context == null )
synchronized ( indexerEngine )
{
// didn't start correctly, so skip
return;
}

File artifactFile = new File( managedRepository, path );
ArtifactContext artifactContext = artifactContextProducer.getArtifactContext( context, artifactFile );

if ( artifactContext != null )
{
try
{
ArtifactInfo ai = artifactContext.getArtifactInfo();
String uinfo = AbstractIndexCreator.getGAV(
ai.groupId, ai.artifactId, ai.version, ai.classifier, ai.packaging );

// already indexed so update!
if ( uinfos.contains( uinfo ) )
{
indexerEngine.update( context, artifactContext );
if ( context == null )
{
// didn't start correctly, so skip
return;
}

File artifactFile = new File( managedRepository, path );
ArtifactContext artifactContext = artifactContextProducer.getArtifactContext( context, artifactFile );

if ( artifactContext != null )
{
try
{
indexerEngine.index( context, artifactContext );
}
else
catch ( ZipException e )
{
indexerEngine.index( context, artifactContext );
}
}
catch ( ZipException e )
{
// invalid JAR file
log.info( e.getMessage() );
}
catch ( IOException e )
{
throw new ConsumerException( e.getMessage(), e );
// invalid JAR file
log.info( e.getMessage() );
}
catch ( IOException e )
{
throw new ConsumerException( e.getMessage(), e );
}
}
}
}

public void completeScan()
{
synchronized( indexer )
{
//synchronized( context )
//{
log.debug( "End indexing of repository '" + context.getRepositoryId() + "'..");

final File indexLocation = new File( managedRepository, ".index" );
try
{
indexerEngine.endIndexing( context );
indexPacker.packIndex( context, indexLocation );
indexer.removeIndexingContext( context, false );
uinfos = null;
//indexerEngine.endIndexing( context );

IndexPackingRequest request = new IndexPackingRequest( context, indexLocation );
indexPacker.packIndex( request );

//indexingContextMap.removeIndexingContext( context.getId() );

context.close( false );
}
catch ( IOException e )
{
log.error( "Could not pack index" + indexLocation.getAbsolutePath(), e );
}
}
//}
}

public List<String> getExcludes()
@@ -3,14 +3,15 @@
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">

<bean id="indexerConsumer" class="org.apache.archiva.consumers.lucene.NexusIndexerConsumer">
<constructor-arg ref="nexusIndexer#archiva"/>
<bean id="indexerConsumer" class="org.apache.archiva.consumers.lucene.NexusIndexerConsumer">
<constructor-arg ref="indexPacker"/>
<constructor-arg ref="indexerEngine"/>
<!-- <constructor-arg ref="indexingContextMap"/> -->
</bean>

<bean id="indexCleanupConsumer" class="org.apache.archiva.consumers.lucene.LuceneCleanupRemoveIndexedConsumer">
<constructor-arg ref="repositoryContentFactory"/>
<constructor-arg ref="nexusIndexer#archiva"/>
<constructor-arg ref="indexerEngine"/>
<!-- <constructor-arg ref="nexusIndexer#archiva"/> -->
</bean>
</beans>
@@ -32,9 +32,7 @@ import org.easymock.MockControl;
import org.easymock.classextension.MockClassControl;
import org.sonatype.nexus.index.ArtifactContext;
import org.sonatype.nexus.index.ArtifactContextProducer;
import org.sonatype.nexus.index.ArtifactInfo;
import org.sonatype.nexus.index.NexusIndexer;
import org.sonatype.nexus.index.context.DefaultIndexingContext;
import org.sonatype.nexus.index.IndexerEngine;
import org.sonatype.nexus.index.context.IndexingContext;

public class LuceneCleanupRemoveIndexedConsumerTest

@@ -42,15 +40,15 @@ public class LuceneCleanupRemoveIndexedConsumerTest
{
private LuceneCleanupRemoveIndexedConsumer consumer;

private MockControl indexerControl;

private NexusIndexer indexer;

private RepositoryContentFactory repoFactory;

private MockControl repoFactoryControl;

private ManagedRepositoryConfiguration repositoryConfig;

private MockControl indexerEngineControl;

private IndexerEngine indexerEngine;

private MockControl contextProducerControl;

@@ -65,14 +63,14 @@ public class LuceneCleanupRemoveIndexedConsumerTest
{
super.setUp();

indexerControl = MockControl.createControl( NexusIndexer.class );
indexerControl.setDefaultMatcher( MockControl.ALWAYS_MATCHER );
indexer = (NexusIndexer) indexerControl.getMock();

indexerEngineControl = MockControl.createControl( IndexerEngine.class );
indexerEngineControl.setDefaultMatcher( MockControl.ALWAYS_MATCHER );
indexerEngine = (IndexerEngine) indexerEngineControl.getMock();

repoFactoryControl = MockClassControl.createControl( RepositoryContentFactory.class );
repoFactory = (RepositoryContentFactory) repoFactoryControl.getMock();

consumer = new LuceneCleanupRemoveIndexedConsumer( repoFactory, indexer );
consumer = new LuceneCleanupRemoveIndexedConsumer( repoFactory, indexerEngine );

repositoryConfig = new ManagedRepositoryConfiguration();
repositoryConfig.setId( "test-repo" );

@@ -85,6 +83,7 @@ public class LuceneCleanupRemoveIndexedConsumerTest
repositoryConfig.setIndexDir( getBasedir() + "/target/test-classes/test-repo/.cleanup-index" );

contextProducerControl = MockControl.createControl( ArtifactContextProducer.class );
contextProducerControl.setDefaultMatcher( MockControl.ALWAYS_MATCHER );
artifactContextProducer = (ArtifactContextProducer) contextProducerControl.getMock();

consumer.setArtifactContextProducer( artifactContextProducer );

@@ -109,37 +108,27 @@ public class LuceneCleanupRemoveIndexedConsumerTest
ManagedRepositoryContent repoContent = new ManagedDefaultRepositoryContent();
repoContent.setRepository( repositoryConfig );

IndexingContext context =
new DefaultIndexingContext( repositoryConfig.getId(), repositoryConfig.getId(),
new File( repositoryConfig.getLocation() ),
new File( repositoryConfig.getIndexDir() ), null, null,
NexusIndexer.FULL_INDEX, false );

IndexingContext context = null;

File artifactFile =
new File( repositoryConfig.getLocation(),
"org/apache/archiva/archiva-lucene-consumers/1.2/archiva-lucene-consumers-1.2.jar" );
ArtifactInfo ai = new ArtifactInfo( "test-repo", "org.apache.archiva", "archiva-lucene-consumers", "1.2", null );

repoFactoryControl.expectAndReturn( repoFactory.getManagedRepositoryContent( repositoryConfig.getId() ),
repoContent );
indexerControl.expectAndReturn( indexer.addIndexingContext( repositoryConfig.getId(), repositoryConfig.getId(),
new File( repositoryConfig.getLocation() ),
new File( repositoryConfig.getIndexDir() ), null,
null, NexusIndexer.FULL_INDEX ), context );
contextProducerControl.expectAndReturn( artifactContextProducer.getArtifactContext( context, artifactFile ), ac );
acControl.expectAndReturn( ac.getArtifactInfo(), ai );

repoFactoryControl.replay();
indexerControl.replay();
contextProducerControl.expectAndReturn( artifactContextProducer.getArtifactContext( context, artifactFile ), ac );
indexerEngine.remove( context, ac );
indexerEngineControl.setDefaultVoidCallable();

repoFactoryControl.replay();
contextProducerControl.replay();
acControl.replay();

indexerEngineControl.replay();

consumer.processArchivaArtifact( artifact );

repoFactoryControl.verify();
indexerControl.verify();
repoFactoryControl.verify();
contextProducerControl.verify();
acControl.verify();
indexerEngineControl.verify();
}

public void testProcessArtifactArtifactExists()

@@ -150,25 +139,13 @@ public class LuceneCleanupRemoveIndexedConsumerTest
ManagedRepositoryContent repoContent = new ManagedDefaultRepositoryContent();
repoContent.setRepository( repositoryConfig );

IndexingContext context =
new DefaultIndexingContext( repositoryConfig.getId(), repositoryConfig.getId(),
new File( repositoryConfig.getLocation() ),
new File( repositoryConfig.getIndexDir() ), null, null,
NexusIndexer.FULL_INDEX, false );

repoFactoryControl.expectAndReturn( repoFactory.getManagedRepositoryContent( repositoryConfig.getId() ),
repoContent );
indexerControl.expectAndReturn( indexer.addIndexingContext( repositoryConfig.getId(), repositoryConfig.getId(),
new File( repositoryConfig.getLocation() ),
new File( repositoryConfig.getIndexDir() ), null,
null, NexusIndexer.FULL_INDEX ), context );

repoFactoryControl.replay();
indexerControl.replay();

consumer.processArchivaArtifact( artifact );

repoFactoryControl.verify();
indexerControl.verify();
}
}
@@ -37,7 +37,7 @@ import org.sonatype.nexus.index.FlatSearchRequest;
import org.sonatype.nexus.index.FlatSearchResponse;
import org.sonatype.nexus.index.NexusIndexer;
import org.sonatype.nexus.index.context.IndexingContext;
import org.sonatype.nexus.index.creator.IndexerEngine;
import org.sonatype.nexus.index.IndexerEngine;
import org.sonatype.nexus.index.packer.IndexPacker;

public class NexusIndexerConsumerTest

@@ -65,7 +65,9 @@ public class NexusIndexerConsumerTest

indexerEngine = ( IndexerEngine ) lookup( IndexerEngine.class );

nexusIndexerConsumer = new NexusIndexerConsumer( nexusIndexer, indexPacker, indexerEngine );
//nexusIndexerConsumer = new NexusIndexerConsumer( nexusIndexer, indexPacker, indexerEngine );

nexusIndexerConsumer = new NexusIndexerConsumer( indexPacker, indexerEngine );

repositoryConfig = new ManagedRepositoryConfiguration();
repositoryConfig.setId( "test-repo" );

@@ -152,8 +154,8 @@ public class NexusIndexerConsumerTest
assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );

// should return only 1 hit - artifact should have just been updated and not added as a separate doc
assertEquals( 1, topDocs.totalHits );
// should return 2 hits - this will be filtered out by the NexusRespositorySearch when it returns the results!
assertEquals( 2, topDocs.totalHits );
}

public void testIndexerIndexArtifactThenPom()

@@ -184,5 +186,5 @@ public class NexusIndexerConsumerTest

// should return only 1 hit
assertEquals( 1, topDocs.totalHits );
}
}
}
@@ -37,7 +37,6 @@ import org.sonatype.nexus.index.ArtifactInfo;
import org.sonatype.nexus.index.FlatSearchRequest;
import org.sonatype.nexus.index.FlatSearchResponse;
import org.sonatype.nexus.index.NexusIndexer;
import org.sonatype.nexus.index.context.IndexContextInInconsistentStateException;
import org.sonatype.nexus.index.context.IndexingContext;
import org.sonatype.nexus.index.context.UnsupportedExistingLuceneIndexException;

@@ -155,10 +154,6 @@ public class NexusRepositorySearch

return convertToSearchResults( response, limits );
}
catch ( IndexContextInInconsistentStateException e )
{
throw new RepositorySearchException( e );
}
catch ( IOException e )
{
throw new RepositorySearchException( e );
@@ -4,7 +4,9 @@
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">

<bean id="nexusSearch" class="org.apache.archiva.indexer.search.NexusRepositorySearch">
<constructor-arg ref="nexusIndexer#archiva"/>
<constructor-arg ref="nexusIndexer"/>
<constructor-arg ref="archivaConfiguration"/>
</bean>

<!-- <bean id="indexingContextMap" class="org.apache.archiva.indexer.IndexingContextMap"/> -->
</beans>
@@ -33,9 +33,10 @@ import org.easymock.MockControl;
import org.sonatype.nexus.index.ArtifactContext;
import org.sonatype.nexus.index.ArtifactContextProducer;
import org.sonatype.nexus.index.NexusIndexer;
import org.sonatype.nexus.index.context.DefaultIndexingContext;
import org.sonatype.nexus.index.context.IndexingContext;
import org.sonatype.nexus.index.context.UnsupportedExistingLuceneIndexException;
import org.sonatype.nexus.index.creator.IndexerEngine;
import org.sonatype.nexus.index.IndexerEngine;

public class NexusRepositorySearchTest
extends PlexusInSpringTestCase

@@ -119,13 +120,15 @@ public class NexusRepositorySearchTest
private void createIndex( String repository, List<File> filesToBeIndexed )
throws IOException, UnsupportedExistingLuceneIndexException
{
context =
indexer.addIndexingContext( repository, repository, new File( getBasedir(), "/target/test-classes/" +
repository ), new File( getBasedir(), "/target/test-classes/" + repository + "/.indexer" ), null, null,
NexusIndexer.FULL_INDEX );
context = new DefaultIndexingContext( repository, repository, new File( getBasedir(), "/target/test-classes/" +
repository ), new File( getBasedir(), "/target/test-classes/" + repository + "/.indexer" ), null, null,
NexusIndexer.FULL_INDEX, false );
//indexer.addIndexingContext( repository, repository, new File( getBasedir(), "/target/test-classes/" +
// repository ), new File( getBasedir(), "/target/test-classes/" + repository + "/.indexer" ), null, null,
// NexusIndexer.FULL_INDEX );
context.setSearchable( true );

indexerEngine.beginIndexing( context );
//indexerEngine.beginIndexing( context );

for ( File artifactFile : filesToBeIndexed )
{

@@ -133,8 +136,9 @@ public class NexusRepositorySearchTest
indexerEngine.index( context, ac );
}

indexerEngine.endIndexing( context );
indexer.removeIndexingContext( context, false );
context.close( false );
//indexerEngine.endIndexing( context );
//indexer.removeIndexingContext( context, false );

assertTrue( new File( getBasedir(), "/target/test-classes/" + repository + "/.indexer" ).exists() );
}
@@ -41,6 +41,10 @@
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-layer</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-scheduled</artifactId>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>

@@ -89,6 +93,11 @@
<artifactId>slf4j-simple</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

<build>
@@ -55,7 +55,9 @@ import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.RepositoryNotFoundException;
import org.apache.maven.archiva.repository.metadata.MetadataTools;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataException;
import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
import org.apache.maven.wagon.ConnectionException;
import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.apache.maven.wagon.Wagon;

@@ -68,6 +70,7 @@ import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import org.codehaus.plexus.taskqueue.TaskQueueException;
import org.codehaus.plexus.util.SelectorUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -127,12 +130,12 @@ public class DefaultRepositoryProxyConnectors
/**
* @plexus.requirement
*/
private RepositoryContentConsumers consumers;

private WagonFactory wagonFactory;

/**
* @plexus.requirement
*/
private WagonFactory wagonFactory;
private ArchivaTaskScheduler scheduler;

public File fetchFromProxies( ManagedRepositoryContent repository, ArtifactReference artifact )
throws ProxyDownloadException

@@ -470,7 +473,7 @@ public class DefaultRepositoryProxyConnectors
return resource;
}

log.warn( emsg );
log.debug( emsg );
return null;
}

@@ -573,10 +576,26 @@ public class DefaultRepositoryProxyConnectors
if ( executeConsumers )
{
// Just-in-time update of the index and database by executing the consumers for this artifact
consumers.executeConsumers( connector.getSourceRepository().getRepository(), resource );
//consumers.executeConsumers( connector.getSourceRepository().getRepository(), resource );
queueRepositoryTask( connector.getSourceRepository().getRepository().getId(), resource );
}

return resource;
}

private void queueRepositoryTask( String repositoryId, File localFile )
{
RepositoryTask task = TaskCreator.createRepositoryTask( repositoryId, localFile.getName(), localFile );

try
{
scheduler.queueRepositoryTask( task );
}
catch ( TaskQueueException e )
{
log.error( "Unable to queue repository task to execute consumers on resource file ['" +
localFile.getName() + "']." );
}
}

/**
@@ -21,7 +21,12 @@ package org.apache.maven.archiva.proxy;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Map;
import java.util.Properties;

import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

@@ -40,7 +45,10 @@ import org.apache.maven.archiva.policies.PropagateErrorsOnUpdateDownloadPolicy;
import org.apache.maven.archiva.policies.ReleasesPolicy;
import org.apache.maven.archiva.policies.SnapshotsPolicy;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
import org.codehaus.plexus.jdo.JdoFactory;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.jpox.SchemaTool;
import org.mortbay.jetty.Handler;
import org.mortbay.jetty.Request;
import org.mortbay.jetty.Server;

@@ -75,7 +83,7 @@ public class HttpProxyTransferTest
throws Exception
{
super.setUp();

// Setup source repository (using default layout)
String repoPath = "target/test-repository/managed/" + getName();
@@ -0,0 +1,75 @@
package org.apache.maven.archiva.proxy.stubs;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import java.io.Serializable;
import java.util.List;

import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.ProjectModelDAO;
import org.apache.maven.archiva.database.RepositoryContentStatisticsDAO;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.database.SimpleConstraint;

/**
* Using a stub for faster tests! Not really used for the unit tests, just for dependency injection.
*/
public class ArchivaDAOStub
implements ArchivaDAO
{

public ArtifactDAO getArtifactDAO()
{
// TODO Auto-generated method stub
return null;
}

public ProjectModelDAO getProjectModelDAO()
{
// TODO Auto-generated method stub
return null;
}

public RepositoryContentStatisticsDAO getRepositoryContentStatisticsDAO()
{
// TODO Auto-generated method stub
return null;
}

public RepositoryProblemDAO getRepositoryProblemDAO()
{
// TODO Auto-generated method stub
return null;
}

public List<?> query( SimpleConstraint constraint )
{
// TODO Auto-generated method stub
return null;
}

public Object save( Serializable obj )
{
// TODO Auto-generated method stub
return null;
}

}
@@ -73,14 +73,14 @@
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers</role>
<field-name>consumers</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.urlcache.UrlFailureCache</role>
<field-name>urlFailureCache</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
<field-name>scheduler</field-name>
</requirement>
</requirements>
</component>

@@ -104,5 +104,11 @@
<time-to-live-seconds>1800</time-to-live-seconds>
</configuration>
</component>

<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
</component>
</components>
</component-set>
@@ -71,14 +71,14 @@
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers</role>
<field-name>consumers</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.urlcache.UrlFailureCache</role>
<field-name>urlFailureCache</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
<field-name>scheduler</field-name>
</requirement>
</requirements>
</component>

@@ -102,5 +102,11 @@
<time-to-live-seconds>1800</time-to-live-seconds>
</configuration>
</component>

<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
</component>
</components>
</component-set>
@@ -81,8 +81,8 @@
<field-name>urlFailureCache</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers</role>
<field-name>consumers</field-name>
<role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
<field-name>scheduler</field-name>
</requirement>
</requirements>
</component>

@@ -106,6 +106,12 @@
<!-- 30 minutes = 1800 seconds -->
<time-to-live-seconds>1800</time-to-live-seconds>
</configuration>
</component>
</component>

<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
</component>
</components>
</component-set>
@@ -63,10 +63,6 @@
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers</role>
<field-name>consumers</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.urlcache.UrlFailureCache</role>
<field-name>urlFailureCache</field-name>

@@ -75,6 +71,10 @@
<role>org.apache.maven.archiva.proxy.WagonFactory</role>
<role-hint>default</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
<field-name>scheduler</field-name>
</requirement>
</requirements>
</component>

@@ -98,5 +98,11 @@
<time-to-live-seconds>1800</time-to-live-seconds>
</configuration>
</component>

<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
</component>
</components>
</component-set>
@@ -71,14 +71,14 @@
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers</role>
<field-name>consumers</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.urlcache.UrlFailureCache</role>
<field-name>urlFailureCache</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
<field-name>scheduler</field-name>
</requirement>
</requirements>
</component>

@@ -102,5 +102,11 @@
<time-to-live-seconds>1800</time-to-live-seconds>
</configuration>
</component>

<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
</component>
</components>
</component-set>
@@ -71,14 +71,14 @@
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers</role>
<field-name>consumers</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.urlcache.UrlFailureCache</role>
<field-name>urlFailureCache</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
<field-name>scheduler</field-name>
</requirement>
</requirements>
</component>

@@ -102,5 +102,11 @@
<time-to-live-seconds>1800</time-to-live-seconds>
</configuration>
</component>

<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
</component>
</components>
</component-set>
@@ -90,14 +90,14 @@
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers</role>
<field-name>consumers</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.urlcache.UrlFailureCache</role>
<field-name>urlFailureCache</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
<field-name>scheduler</field-name>
</requirement>
</requirements>
</component>

@@ -121,5 +121,11 @@
<time-to-live-seconds>1800</time-to-live-seconds>
</configuration>
</component>

<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
</component>
</components>
</component-set>
@@ -71,14 +71,14 @@
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers</role>
<field-name>consumers</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.urlcache.UrlFailureCache</role>
<field-name>urlFailureCache</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
<field-name>scheduler</field-name>
</requirement>
</requirements>
</component>

@@ -102,5 +102,11 @@
<time-to-live-seconds>1800</time-to-live-seconds>
</configuration>
</component>

<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
</component>
</components>
</component-set>
@@ -71,14 +71,14 @@
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers</role>
<field-name>consumers</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.urlcache.UrlFailureCache</role>
<field-name>urlFailureCache</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
<field-name>scheduler</field-name>
</requirement>
</requirements>
</component>

@@ -102,5 +102,11 @@
<time-to-live-seconds>1800</time-to-live-seconds>
</configuration>
</component>

<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.proxy.stubs.ArchivaDAOStub</implementation>
</component>
</components>
</component-set>
@@ -33,6 +33,7 @@ import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskNameSelectionPredicate;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskSelectionPredicate;
import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.StoppingException;

@@ -169,25 +170,14 @@ public class DefaultArchivaTaskScheduler
// MRM-848: Pre-configured repository initially appear to be empty
private synchronized void queueInitialRepoScan( ManagedRepositoryConfiguration repoConfig )
{
String repoId = repoConfig.getId();

RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repoId );
task.setName( REPOSITORY_JOB + ":" + repoId + ":initial-scan" );
task.setQueuePolicy( ArchivaTask.QUEUE_POLICY_WAIT );

boolean scheduleTask = false;
String repoId = repoConfig.getId();
RepositoryTask task = TaskCreator.createRepositoryTask( repoId, "initial-scan" );

if ( queuedRepos.contains( repoId ) )
{
log.error( "Repository [" + repoId + "] is currently being processed or is already queued." );
}
else
{
scheduleTask = true;
}

if ( scheduleTask )
{
try
{

@@ -408,6 +398,24 @@ public class DefaultArchivaTaskScheduler
{
synchronized( repositoryScanningQueue )
{
if( task.getResourceFile() != null )
{
try
{
if( isProcessingRepositoryTaskWithName( task.getName() ) )
{
log.debug( "Repository task '" + task.getName() + "' is already queued. Skipping task.." );
return;
}
}
catch ( ArchivaException e )
{
log.warn( "Error occurred while checking if repository task '" + task.getName() +
"' is already queued." );
}
}

// add check if the task is already queued if it is a file scan
repositoryScanningQueue.put( task );
}
}
@@ -21,6 +21,7 @@ package org.apache.maven.archiva.scheduled;

import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
import org.codehaus.plexus.scheduler.AbstractJob;
import org.codehaus.plexus.taskqueue.TaskQueue;
import org.codehaus.plexus.taskqueue.TaskQueueException;

@@ -58,9 +59,8 @@ public class RepositoryTaskJob
TaskQueue taskQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
String queuePolicy = dataMap.get( TASK_QUEUE_POLICY ).toString();

RepositoryTask task = new RepositoryTask();
RepositoryTask task = TaskCreator.createRepositoryTask( (String) dataMap.get( TASK_REPOSITORY ), "" );
task.setName( context.getJobDetail().getName() );
task.setRepositoryId( (String) dataMap.get( TASK_REPOSITORY ) );

try
{
@@ -107,17 +107,18 @@ public class ArchivaRepositoryScanningTaskExecutor
throw new TaskExecutionException("Unable to execute RepositoryTask with blank repository Id.");
}

log.info( "Executing task from queue with job name: " + repoTask.getName() );

ManagedRepositoryConfiguration arepo = archivaConfiguration.getConfiguration().findManagedRepositoryById( repoTask.getRepositoryId() );

// execute consumers on resource file if set
if( repoTask.getResourceFile() != null )
{
log.debug( "Executing task from queue with job name: " + repoTask.getName() );
consumers.executeConsumers( arepo, repoTask.getResourceFile() );
}
else
{
log.info( "Executing task from queue with job name: " + repoTask.getName() );

// otherwise, execute consumers on whole repository
try
{
@@ -0,0 +1,56 @@
package org.apache.maven.archiva.scheduled.tasks;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import java.io.File;

import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.scheduled.DefaultArchivaTaskScheduler;

/**
*
*/
public class TaskCreator
{
public static RepositoryTask createRepositoryTask( String repositoryId, String taskNameSuffix )
{
String suffix = "";
if( !StringUtils.isEmpty( taskNameSuffix ) )
{
suffix = ":" + taskNameSuffix;
}

RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repositoryId );
task.setName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repositoryId + suffix );
task.setQueuePolicy( ArchivaTask.QUEUE_POLICY_WAIT );

return task;
}

public static RepositoryTask createRepositoryTask( String repositoryId, String taskNameSuffix, File resourceFile )
{
RepositoryTask task = createRepositoryTask( repositoryId, taskNameSuffix );
task.setResourceFile( resourceFile );

return task;
}

}
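For reference, the call sites elsewhere in this commit use the new TaskCreator helper as below (a sketch; repoId, artifactFile and scheduler are placeholders for the values each caller already has):

    // whole-repository scan: the task name becomes REPOSITORY_JOB + ":" + repoId + ":initial-scan"
    // and the queue policy is ArchivaTask.QUEUE_POLICY_WAIT
    RepositoryTask scanTask = TaskCreator.createRepositoryTask( repoId, "initial-scan" );

    // single-file task: setting the resource file makes the task executor run
    // consumers on just that file instead of scanning the whole repository
    RepositoryTask fileTask = TaskCreator.createRepositoryTask( repoId, artifactFile.getName(), artifactFile );
    scheduler.queueRepositoryTask( fileTask );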
@@ -56,6 +56,9 @@ import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter;
 import org.apache.maven.archiva.repository.project.ProjectModelException;
 import org.apache.maven.archiva.repository.project.ProjectModelWriter;
 import org.apache.maven.archiva.repository.project.writers.ProjectModel400Writer;
+import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
+import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
 import org.apache.maven.archiva.security.AccessDeniedException;
 import org.apache.maven.archiva.security.ArchivaSecurityException;
 import org.apache.maven.archiva.security.PrincipalNotFoundException;
@@ -65,6 +68,7 @@ import com.opensymphony.xwork2.Preparable;
 import com.opensymphony.xwork2.Validateable;
 import org.apache.commons.io.FilenameUtils;
 import org.apache.commons.lang.StringUtils;
+import org.codehaus.plexus.taskqueue.TaskQueueException;
 
 /**
  * Upload an artifact using Jakarta file upload in webwork. If set by the user a pom will also be generated. Metadata
@@ -146,6 +150,11 @@ public class UploadAction
      */
     private RepositoryContentFactory repositoryFactory;
 
+    /**
+     * @plexus.requirement
+     */
+    private ArchivaTaskScheduler scheduler;
+
     private ChecksumAlgorithm[] algorithms = new ChecksumAlgorithm[] { ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5 };
 
     private ProjectModelWriter pomWriter = new ProjectModel400Writer();
@@ -345,7 +354,8 @@ public class UploadAction
         try
         {
             copyFile( artifactFile, targetPath, filename );
-            consumers.executeConsumers( repoConfig, repository.toFile( artifactReference ) );
+            queueRepositoryTask( repository.getId(), repository.toFile( artifactReference ) );
+            //consumers.executeConsumers( repoConfig, repository.toFile( artifactReference ) );
         }
         catch ( IOException ie )
         {
@@ -365,7 +375,8 @@ public class UploadAction
         try
         {
             File generatedPomFile = createPom( targetPath, pomFilename );
-            consumers.executeConsumers( repoConfig, generatedPomFile );
+            queueRepositoryTask( repoConfig.getId(), generatedPomFile );
+            //consumers.executeConsumers( repoConfig, generatedPomFile );
         }
         catch ( IOException ie )
         {
@@ -384,7 +395,8 @@ public class UploadAction
         try
         {
             copyFile( pomFile, targetPath, pomFilename );
-            consumers.executeConsumers( repoConfig, new File( targetPath, pomFilename ) );
+            queueRepositoryTask( repoConfig.getId(), new File( targetPath, pomFilename ) );
+            //consumers.executeConsumers( repoConfig, new File( targetPath, pomFilename ) );
         }
         catch ( IOException ie )
         {
@@ -585,4 +597,19 @@ public class UploadAction
         }
         return Collections.emptyList();
     }
+
+    private void queueRepositoryTask( String repositoryId, File localFile )
+    {
+        RepositoryTask task = TaskCreator.createRepositoryTask( repositoryId, localFile.getName(), localFile );
+
+        try
+        {
+            scheduler.queueRepositoryTask( task );
+        }
+        catch ( TaskQueueException e )
+        {
+            log.error( "Unable to queue repository task to execute consumers on resource file ['" +
+                localFile.getName() + "']." );
+        }
+    }
 }
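With the hunks above, all three upload paths in UploadAction (deployed artifact, generated POM, supplied POM) funnel through the same queueRepositoryTask helper, so consumers no longer run inline with the HTTP request. A self-contained sketch of that shared pattern; the class name, constructor wiring, and log message are invented for illustration, while the TaskCreator and scheduler calls mirror the patch.

import java.io.File;

import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
import org.codehaus.plexus.taskqueue.TaskQueueException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class QueueInsteadOfExecuteExample
{
    private static final Logger log = LoggerFactory.getLogger( QueueInsteadOfExecuteExample.class );

    private final ArchivaTaskScheduler scheduler;

    public QueueInsteadOfExecuteExample( ArchivaTaskScheduler scheduler )
    {
        this.scheduler = scheduler;
    }

    public void onResourceWritten( String repositoryId, File localFile )
    {
        // One task per written file; the per-file name suffix lets duplicates be
        // detected by name without interrupting a scan already in progress.
        RepositoryTask task = TaskCreator.createRepositoryTask( repositoryId, localFile.getName(), localFile );

        try
        {
            scheduler.queueRepositoryTask( task );
        }
        catch ( TaskQueueException e )
        {
            // As in the patch: log and continue, so the upload itself still succeeds.
            log.error( "Unable to queue repository task for '" + localFile.getName() + "'." );
        }
    }
}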
@@ -26,6 +26,7 @@ import org.apache.maven.archiva.scheduled.DefaultArchivaTaskScheduler;
 import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
 import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
 import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
 import org.apache.maven.archiva.security.ArchivaRoleConstants;
 import org.apache.maven.archiva.web.action.PlexusActionSupport;
 import org.codehaus.plexus.redback.rbac.Resource;
@@ -58,11 +59,8 @@ public class SchedulerAction
             return SUCCESS;
         }
 
-        RepositoryTask task = new RepositoryTask();
-        task.setRepositoryId( repoid );
-        task.setName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repoid );
-        task.setQueuePolicy( ArchivaTask.QUEUE_POLICY_WAIT );
+        RepositoryTask task = TaskCreator.createRepositoryTask( repoid, "" );
 
         boolean scheduleTask = false;
 
         try
@@ -35,6 +35,11 @@
     <appender-ref ref="redbackAuditLog" />
   </logger>
 
+  <logger name="org.apache.archiva.consumers">
+    <level value="info"/>
+    <appender-ref ref="rolling" />
+  </logger>
+
   <logger name="org.apache.archiva">
     <level value="info"/>
     <appender-ref ref="rolling" />
@@ -53,20 +53,15 @@ import org.apache.jackrabbit.webdav.property.DavPropertyNameSet;
 import org.apache.jackrabbit.webdav.property.DavPropertySet;
 import org.apache.jackrabbit.webdav.property.DefaultDavProperty;
 import org.apache.jackrabbit.webdav.property.ResourceType;
-import org.apache.maven.archiva.common.ArchivaException;
 import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
 import org.apache.maven.archiva.repository.audit.AuditEvent;
 import org.apache.maven.archiva.repository.audit.AuditListener;
-import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
 import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
-import org.apache.maven.archiva.scheduled.DefaultArchivaTaskScheduler;
-import org.apache.maven.archiva.scheduled.executors.ArchivaRepositoryScanningTaskExecutor;
-import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
 import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
 import org.apache.maven.archiva.webdav.util.IndexWriter;
 import org.apache.maven.archiva.webdav.util.MimeTypes;
 import org.codehaus.plexus.taskqueue.TaskQueueException;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
 import org.joda.time.DateTime;
 import org.joda.time.format.DateTimeFormatter;
 import org.joda.time.format.ISODateTimeFormat;
@@ -98,8 +93,6 @@ public class ArchivaDavResource
 
     private final ManagedRepositoryConfiguration repository;
 
-    private final RepositoryContentConsumers consumers;
-
     private final MimeTypes mimeTypes;
 
     private List<AuditListener> auditListeners;
@@ -110,14 +103,12 @@ public class ArchivaDavResource
 
     private ArchivaTaskScheduler scheduler;
 
-    private ArchivaRepositoryScanningTaskExecutor taskExecutor;
-
     private Logger log = LoggerFactory.getLogger( ArchivaDavResource.class );
 
     public ArchivaDavResource( String localResource, String logicalResource, ManagedRepositoryConfiguration repository,
                                DavSession session, ArchivaDavResourceLocator locator, DavResourceFactory factory,
                                MimeTypes mimeTypes, List<AuditListener> auditListeners,
-                               RepositoryContentConsumers consumers, ArchivaTaskScheduler scheduler, TaskExecutor taskExecutor )
+                               ArchivaTaskScheduler scheduler )
     {
         this.localResource = new File( localResource );
         this.logicalResource = logicalResource;
@@ -129,20 +120,18 @@ public class ArchivaDavResource
         this.repository = repository;
 
         // TODO: these should be pushed into the repository layer, along with the physical file operations in this class
-        this.mimeTypes = mimeTypes;
-        this.consumers = consumers;
+        this.mimeTypes = mimeTypes;
         this.auditListeners = auditListeners;
         this.scheduler = scheduler;
-        this.taskExecutor = ( ArchivaRepositoryScanningTaskExecutor ) taskExecutor;
     }
 
     public ArchivaDavResource( String localResource, String logicalResource, ManagedRepositoryConfiguration repository,
                                String remoteAddr, String principal, DavSession session, ArchivaDavResourceLocator locator,
                                DavResourceFactory factory, MimeTypes mimeTypes, List<AuditListener> auditListeners,
-                               RepositoryContentConsumers consumers, ArchivaTaskScheduler scheduler, TaskExecutor taskExecutor )
+                               ArchivaTaskScheduler scheduler )
     {
         this( localResource, logicalResource, repository, session, locator, factory, mimeTypes, auditListeners,
-              consumers, scheduler, taskExecutor );
+              scheduler );
 
         this.remoteAddr = remoteAddr;
         this.principal = principal;
@@ -322,7 +311,7 @@ public class ArchivaDavResource
                                               inputContext.getContentLength() + " but was " + localFile.length() );
         }
 
-        executeConsumers( localFile );
+        queueRepositoryTask( localFile );
 
         triggerAuditEvent( resource, exists ? AuditEvent.MODIFY_FILE : AuditEvent.CREATE_FILE );
     }
@@ -642,50 +631,18 @@ public class ArchivaDavResource
         }
     }
 
-    private void executeConsumers( File localFile )
+    private void queueRepositoryTask( File localFile )
    {
+        RepositoryTask task = TaskCreator.createRepositoryTask( repository.getId(), localFile.getName(), localFile );
+
         try
         {
-            RepositoryTask currentTaskInExecution = ( RepositoryTask ) taskExecutor.getCurrentTaskInExecution();
-            if( currentTaskInExecution != null || scheduler.isProcessingAnyRepositoryTask() )
-            {
-                // check if the repository is already queued to be scanned
-                if( scheduler.isProcessingRepositoryTaskWithName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repository.getId() )
-                    || scheduler.isProcessingRepositoryTaskWithName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repository.getId() + ":" + localFile.getName() ) )
-                {
-                    // no need to execute the consumers since repo is already queued
-                    return;
-                }
-                else
-                {
-                    // schedule the task
-                    RepositoryTask task = new RepositoryTask();
-                    task.setRepositoryId( repository.getId() );
-                    task.setName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repository.getId() + ":" + localFile.getName() );
-                    task.setQueuePolicy( ArchivaTask.QUEUE_POLICY_WAIT );
-                    task.setResourceFile( localFile );
-
-                    try
-                    {
-                        scheduler.queueRepositoryTask( task );
-                    }
-                    catch ( TaskQueueException e )
-                    {
-                        log.error( "Unable to queue repository task to execute consumers on resource file ['" +
-                            localFile.getName() + "']." );
-                    }
-                }
-            }
-            else
-            {
-                // Just-in-time update of the index and database by executing the consumers for this artifact
-                consumers.executeConsumers( repository, localFile );
-            }
+            scheduler.queueRepositoryTask( task );
         }
-        catch ( ArchivaException e )
+        catch ( TaskQueueException e )
        {
             log.error( "Unable to queue repository task to execute consumers on resource file ['" +
-                       localFile.getName() + "']." );
+                localFile.getName() + "']." );
        }
    }
 }
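The deleted block above decided inline, by task name, whether a scan was already queued or running before either queueing a new task or executing consumers just-in-time; after this commit the resource always queues and leaves duplicate handling to the scheduler via QUEUE_POLICY_WAIT. For reference, the retired name-based check, reassembled from the deleted lines: the class and method names here are illustrative, and the throws clause mirrors the ArchivaException catch that was removed.

import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
import org.apache.maven.archiva.scheduled.DefaultArchivaTaskScheduler;

public class DuplicateTaskCheckExample
{
    /**
     * True if a scan of the whole repository, or of this specific file, is already
     * queued or running under the "<REPOSITORY_JOB>:<repoId>[:<fileName>]" naming convention.
     */
    public static boolean alreadyQueued( ArchivaTaskScheduler scheduler, String repoId, String fileName )
        throws ArchivaException
    {
        String repoScan = DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repoId;
        String fileScan = repoScan + ":" + fileName;

        return scheduler.isProcessingRepositoryTaskWithName( repoScan )
            || scheduler.isProcessingRepositoryTaskWithName( fileScan );
    }
}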
@@ -83,6 +83,7 @@ import org.codehaus.plexus.redback.system.SecuritySession;
 import org.codehaus.plexus.redback.users.User;
 import org.codehaus.plexus.redback.users.UserManager;
 import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
+import org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor;
 import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
 import org.codehaus.redback.integration.filter.authentication.HttpAuthenticator;
 import org.slf4j.Logger;
@@ -179,6 +180,11 @@ public class ArchivaDavResourceFactory
      * @plexus.requirement
      */
     private ArchivaTaskScheduler scheduler;
 
+    /**
+     * @plexus.requirement role-hint="repository-scanning"
+     */
+    private TaskQueueExecutor repoScanningTaskQueueExecutor;
+
     public DavResource createResource( final DavResourceLocator locator, final DavServletRequest request,
                                        final DavServletResponse response )
@@ -271,7 +277,7 @@ public class ArchivaDavResourceFactory
                     new ArchivaDavResource( metadataChecksum.getAbsolutePath(), logicalResource.getPath(),
                                             null, request.getRemoteAddr(), activePrincipal,
                                             request.getDavSession(), archivaLocator, this, mimeTypes,
-                                            auditListeners, consumers, scheduler, taskExecutor );
+                                            auditListeners, scheduler );
             }
         }
         else
@@ -306,7 +312,7 @@ public class ArchivaDavResourceFactory
                     new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
                                             null, request.getRemoteAddr(), activePrincipal,
                                             request.getDavSession(), archivaLocator, this, mimeTypes,
-                                            auditListeners, consumers, scheduler, taskExecutor );
+                                            auditListeners, scheduler );
             }
             catch ( RepositoryMetadataException r )
             {
@@ -411,7 +417,7 @@ public class ArchivaDavResourceFactory
                 new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
                                         managedRepository.getRepository(), request.getRemoteAddr(), activePrincipal,
                                         request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners,
-                                        consumers, scheduler, taskExecutor );
+                                        scheduler );
 
             if ( WebdavMethodUtil.isReadMethod( request.getMethod() ) )
             {
@@ -442,7 +448,7 @@ public class ArchivaDavResourceFactory
                     new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
                                             managedRepository.getRepository(), request.getRemoteAddr(),
                                             activePrincipal, request.getDavSession(), archivaLocator, this,
-                                            mimeTypes, auditListeners, consumers, scheduler, taskExecutor );
+                                            mimeTypes, auditListeners, scheduler );
             }
             catch ( LayoutException e )
             {
@@ -517,7 +523,7 @@ public class ArchivaDavResourceFactory
         File resourceFile = new File( managedRepository.getRepoRoot(), logicalResource );
         DavResource resource =
             new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource, managedRepository.getRepository(),
-                                    davSession, archivaLocator, this, mimeTypes, auditListeners, consumers, scheduler, taskExecutor );
+                                    davSession, archivaLocator, this, mimeTypes, auditListeners, scheduler );
 
         resource.addLockManager( lockManager );
         return resource;
@@ -42,6 +42,7 @@ import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
 import org.apache.maven.archiva.webdav.util.MimeTypes;
 import org.codehaus.plexus.spring.PlexusInSpringTestCase;
 import org.codehaus.plexus.spring.PlexusToSpringUtils;
+import org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor;
 
 public class DavResourceTest
     extends PlexusInSpringTestCase
@@ -67,6 +68,8 @@ public class DavResourceTest
     private RepositoryContentConsumers consumers;
 
     private ManagedRepositoryConfiguration repository = new ManagedRepositoryConfiguration();
 
+    private TaskQueueExecutor repoScanningTaskQueueExecutor;
+
     @Override
     protected void setUp()
@@ -100,8 +103,7 @@ public class DavResourceTest
     private DavResource getDavResource( String logicalPath, File file )
     {
         return new ArchivaDavResource( file.getAbsolutePath(), logicalPath, repository, session, resourceLocator,
-                                       resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(), consumers,
-                                       null, null );
+                                       resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(), null );
     }
 
     public void testDeleteNonExistantResourceShould404()
@@ -308,7 +310,7 @@ public class DavResourceTest
         {
             return new ArchivaDavResource( baseDir.getAbsolutePath(), "/", repository, session, resourceLocator,
                                            resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(),
-                                           consumers, null, null );
+                                           null );
         }
     }
 }
@@ -55,6 +55,7 @@ import org.apache.maven.archiva.scheduled.DefaultArchivaTaskScheduler;
 import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
 import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
 import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
 import org.codehaus.plexus.registry.RegistryException;
 
 /**
@@ -312,10 +313,7 @@ public class AdministrationServiceImpl
             }
         }
 
-        RepositoryTask task = new RepositoryTask();
-        task.setRepositoryId( repoId );
-        task.setName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repoId );
-        task.setQueuePolicy( ArchivaTask.QUEUE_POLICY_WAIT );
+        RepositoryTask task = TaskCreator.createRepositoryTask( repoId, "" );
 
         taskScheduler.queueRepositoryTask( task );