[MRM-336]: Repository Admin - "Index Repository" appears to not do anything.

git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@538404 13f79535-47bb-0310-9956-ffa450edef68
Joakim Erdfelt 2007-05-16 01:59:20 +00:00
parent 6dd144c706
commit 966e374f6c
8 changed files with 147 additions and 64 deletions

View File

@@ -0,0 +1,52 @@
package org.apache.maven.archiva.configuration.functors;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.collections.Closure;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import java.util.HashMap;
import java.util.Map;
/**
* RepositoryConfigurationToMapClosure
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public class RepositoryConfigurationToMapClosure
implements Closure
{
private Map map = new HashMap();
public void execute( Object input )
{
if ( input instanceof RepositoryConfiguration )
{
RepositoryConfiguration repo = (RepositoryConfiguration) input;
map.put( repo.getId(), repo );
}
}
public Map getMap()
{
return map;
}
}
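For orientation, a hedged usage sketch of this new closure (not part of the diff itself; it mirrors what RepositoryServlet does further down in this commit, and the local variable names are illustrative). It assumes commons-collections 3.x plus the LocalRepositoryPredicate and ArchivaConfiguration component referenced later in the commit:

    // Hedged usage sketch; "configuration" is assumed to be an injected ArchivaConfiguration.
    RepositoryConfigurationToMapClosure toMap = new RepositoryConfigurationToMapClosure();
    Closure localOnly = IfClosure.getInstance( LocalRepositoryPredicate.getInstance(), toMap );
    CollectionUtils.forAllDo( configuration.getConfiguration().getRepositories(), localOnly );
    Map reposById = toMap.getMap(); // RepositoryConfiguration instances keyed by getId()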

View File

@@ -2191,7 +2191,16 @@
buf.append( "\n Duration : " );
buf.append( org.apache.maven.archiva.common.utils.DateUtil.getDuration( this.getDuration() ) );
buf.append( "\n When Gathered : " ).append( df.format( this.getWhenGathered() ) );
buf.append( "\n When Gathered : " );
if ( this.getWhenGathered() == null )
{
buf.append( "<null>" );
}
else
{
buf.append( df.format( this.getWhenGathered() ) );
}
buf.append( "\n Total File Count : " ).append( this.getTotalFileCount() );
long averageMsPerFile = ( this.getDuration() / this.getTotalFileCount() );
buf.append( "\n Avg Time Per File : " );

View File

@@ -33,6 +33,7 @@ import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.StoppingException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import org.codehaus.plexus.scheduler.CronExpressionValidator;
import org.codehaus.plexus.scheduler.Scheduler;
import org.codehaus.plexus.taskqueue.TaskQueue;
import org.codehaus.plexus.taskqueue.TaskQueueException;
@@ -89,6 +90,8 @@ public class DefaultArchivaTaskScheduler
public static final String REPOSITORY_JOB_TRIGGER = "repository-job-trigger";
public static final String CRON_HOURLY = "0 0 * * * ?";
public void start()
throws StartingException
{
@@ -100,7 +103,10 @@ public class DefaultArchivaTaskScheduler
{
RepositoryConfiguration repoConfig = (RepositoryConfiguration) i.next();
scheduleRepositoryJobs( repoConfig );
if ( repoConfig.isManaged() && repoConfig.isIndexed() )
{
scheduleRepositoryJobs( repoConfig );
}
}
scheduleDatabaseJobs();
@@ -123,6 +129,15 @@ public class DefaultArchivaTaskScheduler
// get the cron string for these database scanning jobs
String cronString = repoConfig.getRefreshCronExpression();
CronExpressionValidator cronValidator = new CronExpressionValidator();
if ( !cronValidator.validate( cronString ) )
{
getLogger().warn(
"Cron expression [" + cronString + "] for repository [" + repoConfig.getId()
+ "] is invalid. Defaulting to hourly." );
cronString = CRON_HOURLY;
}
// setup the unprocessed artifact job
JobDetail repositoryJob = new JobDetail( REPOSITORY_JOB + ":" + repoConfig.getId(), REPOSITORY_SCAN_GROUP,
RepositoryTaskJob.class );
@@ -345,13 +360,13 @@ public class DefaultArchivaTaskScheduler
return !queue.isEmpty();
}
public void queueRepositoryTask( RepositoryTask task )
throws TaskQueueException
{
repositoryScanningQueue.put( task );
}
public void queueDatabaseTask( DatabaseTask task )
throws TaskQueueException
{

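Taken together, the scheduler changes amount to the guard sketched below: only managed, indexed repositories get a scan job, and a refresh cron expression that fails CronExpressionValidator falls back to the hourly default. This is a hedged consolidation of the hunks above, not a verbatim excerpt; the JobDetail/CronTrigger wiring is elided.

    // Hedged consolidation of the scheduling guard introduced above.
    for ( Iterator i = configuration.getConfiguration().getRepositories().iterator(); i.hasNext(); )
    {
        RepositoryConfiguration repoConfig = (RepositoryConfiguration) i.next();
        if ( !( repoConfig.isManaged() && repoConfig.isIndexed() ) )
        {
            continue; // only managed, indexed repositories are scheduled
        }
        String cronString = repoConfig.getRefreshCronExpression();
        if ( !new CronExpressionValidator().validate( cronString ) )
        {
            getLogger().warn( "Cron expression [" + cronString + "] for repository ["
                + repoConfig.getId() + "] is invalid. Defaulting to hourly." );
            cronString = CRON_HOURLY; // "0 0 * * * ?"
        }
        // ... build the JobDetail / CronTrigger and register them with the scheduler
    }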
View File

@@ -39,7 +39,7 @@ public class RepositoryTaskJob
static final String TASK_QUEUE = "TASK_QUEUE";
static final String TASK_QUEUE_POLICY = "TASK_QUEUE_POLICY";
static final String TASK_REPOSITORY = "TASK_REPOSITORY";
/**
@@ -58,8 +58,9 @@ public class RepositoryTaskJob
TaskQueue taskQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
String queuePolicy = dataMap.get( TASK_QUEUE_POLICY ).toString();
ArchivaTask task = new RepositoryTask();
RepositoryTask task = new RepositoryTask();
task.setName( context.getJobDetail().getName() );
task.setRepositoryId( (String) dataMap.get( TASK_REPOSITORY ) );
try
{

View File

@@ -20,6 +20,7 @@ package org.apache.maven.archiva.scheduled.executors;
*/
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.RepositoryDAO;
@@ -79,8 +80,14 @@ public class ArchivaRepositoryScanningTaskExecutor
throws TaskExecutionException
{
RepositoryTask repoTask = (RepositoryTask) task;
getLogger().info( "Executing task from queue with job name: " + repoTask.getName() );
if ( StringUtils.isBlank( repoTask.getRepositoryId() ) )
{
throw new TaskExecutionException("Unable to execute RepositoryTask with blank repository Id.");
}
getLogger().info( "Executing task from queue with job name: " + repoTask.getName() );
try
{
ArchivaRepository arepo = repositoryDAO.getRepository( repoTask.getRepositoryId() );
@@ -97,7 +104,7 @@ public class ArchivaRepositoryScanningTaskExecutor
RepositoryContentStatistics stats = repoScanner.scan( arepo, sinceWhen );
dao.save( stats );
stats = (RepositoryContentStatistics) dao.save( stats );
getLogger().info( "Finished repository task: " + stats.toDump( arepo ) );
}

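The executor now fails fast on an unset repository id before touching the database. A hedged sketch of the resulting flow follows; the surrounding try/catch, field wiring, and the derivation of sinceWhen sit outside the hunks above and are assumed.

    // Hedged sketch of the post-change executeTask flow.
    RepositoryTask repoTask = (RepositoryTask) task;
    if ( StringUtils.isBlank( repoTask.getRepositoryId() ) )
    {
        throw new TaskExecutionException( "Unable to execute RepositoryTask with blank repository Id." );
    }
    getLogger().info( "Executing task from queue with job name: " + repoTask.getName() );
    ArchivaRepository arepo = repositoryDAO.getRepository( repoTask.getRepositoryId() );
    RepositoryContentStatistics stats = repoScanner.scan( arepo, sinceWhen );
    stats = (RepositoryContentStatistics) dao.save( stats ); // keep the persisted instance for toDump()
    getLogger().info( "Finished repository task: " + stats.toDump( arepo ) );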
View File

@@ -59,6 +59,10 @@
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-security</artifactId>
</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-core-consumers</artifactId>
</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-database-consumers</artifactId>

View File

@@ -19,12 +19,15 @@ package org.apache.maven.archiva.web.repository;
* under the License.
*/
import org.apache.commons.collections.Closure;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.functors.IfClosure;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.configuration.functors.LocalRepositoryPredicate;
import org.apache.maven.archiva.configuration.functors.RepositoryConfigurationToMapClosure;
import org.apache.maven.archiva.model.RepositoryURL;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
import org.codehaus.plexus.redback.authentication.AuthenticationException;
import org.codehaus.plexus.redback.authentication.AuthenticationResult;
@@ -45,8 +48,10 @@ import org.codehaus.plexus.webdav.util.WebdavMethodUtil;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
@@ -78,16 +83,13 @@ public class RepositoryServlet
*/
private AuditLog audit;
/**
* @plexus.requirement role-hint="jdo"
*/
private ArchivaDAO dao;
/**
* @plexus.requirement
*/
private ArchivaConfiguration configuration;
private Map repositoryMap = new HashMap();
public void initComponents()
throws ServletException
{
@@ -97,7 +99,8 @@ public class RepositoryServlet
httpAuth = (HttpAuthenticator) lookup( HttpAuthenticator.ROLE, "basic" );
audit = (AuditLog) lookup( AuditLog.ROLE );
dao = (ArchivaDAO) lookup( ArchivaDAO.ROLE, "jdo" );
updateRepositoryMap();
configuration = (ArchivaConfiguration) lookup( ArchivaConfiguration.class.getName() );
configuration.addChangeListener( this );
}
@@ -105,70 +108,60 @@ public class RepositoryServlet
public void initServers( ServletConfig servletConfig )
throws DavServerException
{
try
List repositories = configuration.getConfiguration().getRepositories();
Iterator itrepos = repositories.iterator();
while ( itrepos.hasNext() )
{
List repositories = dao.getRepositoryDAO().getRepositories();
Iterator itrepos = repositories.iterator();
while ( itrepos.hasNext() )
RepositoryConfiguration repo = (RepositoryConfiguration) itrepos.next();
if ( !repo.isManaged() )
{
ArchivaRepository repo = (ArchivaRepository) itrepos.next();
if ( !repo.isManaged() )
// Skip non-managed.
continue;
}
RepositoryURL url = new RepositoryURL( repo.getUrl() );
File repoDir = new File( url.getPath() );
if ( !repoDir.exists() )
{
if ( !repoDir.mkdirs() )
{
// Skip non-managed.
// Skip invalid directories.
log( "Unable to create missing directory for " + url.getPath() );
continue;
}
File repoDir = new File( repo.getUrl().getPath() );
if ( !repoDir.exists() )
{
if ( !repoDir.mkdirs() )
{
// Skip invalid directories.
log( "Unable to create missing directory for " + repo.getUrl().getPath() );
continue;
}
}
DavServerComponent server = createServer( repo.getId(), repoDir, servletConfig );
server.addListener( audit );
}
}
catch ( ArchivaDatabaseException e )
{
throw new DavServerException( "Unable to initialized dav servers: " + e.getMessage(), e );
DavServerComponent server = createServer( repo.getId(), repoDir, servletConfig );
server.addListener( audit );
}
}
public ArchivaRepository getRepository( DavServerRequest request )
public RepositoryConfiguration getRepository( DavServerRequest request )
{
String id = request.getPrefix();
try
{
return dao.getRepositoryDAO().getRepository( id );
}
catch ( ObjectNotFoundException e )
{
log( "Unable to find repository for id [" + id + "]" );
return null;
}
catch ( ArchivaDatabaseException e )
{
log( "Unable to find repository for id [" + id + "]: " + e.getMessage(), e );
return null;
}
return (RepositoryConfiguration) repositoryMap.get( request.getPrefix() );
}
public String getRepositoryName( DavServerRequest request )
{
ArchivaRepository repoConfig = getRepository( request );
RepositoryConfiguration repoConfig = getRepository( request );
if ( repoConfig == null )
{
return "Unknown";
}
return repoConfig.getModel().getName();
return repoConfig.getName();
}
private void updateRepositoryMap()
{
RepositoryConfigurationToMapClosure repoMapClosure = new RepositoryConfigurationToMapClosure();
Closure localRepoMap = IfClosure.getInstance( LocalRepositoryPredicate.getInstance(), repoMapClosure );
CollectionUtils.forAllDo( configuration.getConfiguration().getRepositories(), localRepoMap );
this.repositoryMap.clear();
this.repositoryMap.putAll( repoMapClosure.getMap() );
}
public boolean isAuthenticated( DavServerRequest davRequest, HttpServletResponse response )
@@ -260,6 +253,8 @@ public class RepositoryServlet
{
if ( ConfigurationNames.isRepositories( propertyName ) )
{
updateRepositoryMap();
getDavManager().removeAllServers();
try

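With the DAO lookups gone, repository resolution in the servlet reduces to an in-memory map lookup that updateRepositoryMap() refreshes at init time and again whenever the repository configuration changes. A hedged condensation of the resulting methods, based only on the hunks above:

    // Hedged condensation of the servlet's new repository resolution.
    public RepositoryConfiguration getRepository( DavServerRequest request )
    {
        // repositoryMap is rebuilt by updateRepositoryMap() on init and on
        // configuration change (ConfigurationNames.isRepositories).
        return (RepositoryConfiguration) repositoryMap.get( request.getPrefix() );
    }

    public String getRepositoryName( DavServerRequest request )
    {
        RepositoryConfiguration repoConfig = getRepository( request );
        return ( repoConfig == null ) ? "Unknown" : repoConfig.getName();
    }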
View File

@@ -161,7 +161,7 @@
<table>
<tr>
<th>Last Indexed</th>
<td>${repository.stats.whenStarted}</td>
<td>${repository.stats.whenGathered}</td>
</tr>
<tr>
<th>Duration</th>