o synchronize repository scanning to prevent index writer/reader from being closed when repo consumers are executed from proxy requests and a repository scanning is in progress
o updated test cases to accommodate changes


git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@757116 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Maria Odea B. Ching 2009-03-22 02:24:21 +00:00
parent 737bf95b4c
commit 30ecf02f41
16 changed files with 570 additions and 216 deletions

View File

@ -32,8 +32,6 @@ import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.repository.audit.AuditEvent;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Base class for all repository purge tasks.

View File

@ -201,6 +201,8 @@ public class NexusIndexerConsumer
}
public void completeScan()
{
synchronized( indexer )
{
final File indexLocation = new File( managedRepository, ".index" );
try
@ -215,6 +217,7 @@ public class NexusIndexerConsumer
log.error( "Could not pack index" + indexLocation.getAbsolutePath(), e );
}
}
}
public List<String> getExcludes()
{

View File

@ -46,7 +46,7 @@ public class TriggerScanCompletedClosure
{
RepositoryContentConsumer consumer = (RepositoryContentConsumer) input;
consumer.completeScan();
log.info( "Consumer [" + consumer.getId() + "] completed for repository [" + repository.getId() + "]" );
log.debug( "Consumer [" + consumer.getId() + "] completed for repository [" + repository.getId() + "]" );
}
}
}

View File

@ -222,7 +222,10 @@ public class RepositoryContentConsumers
}
/**
* A convienence method to execute all of the active selected consumers for a particular arbitrary file.
* A convenience method to execute all of the active selected consumers for a particular arbitrary file.
* NOTE: Make sure that there is no repository scanning task executing before invoking this so as to prevent
* the index writer/reader of the current index-content consumer executing from getting closed. For an example,
* see ArchivaDavResource#executeConsumers( File ).
*
* @param repository the repository configuration to use.
* @param localFile the local file to execute the consumers against.

View File

@ -4,8 +4,6 @@
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">
<bean id="repositoryContentConsumers" class="org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers" scope="prototype">
<constructor-arg>
<ref bean="archivaConfiguration"/>
</constructor-arg>
<constructor-arg ref="archivaConfiguration"/>
</bean>
</beans>

View File

@ -45,6 +45,9 @@ public interface ArchivaTaskScheduler
public boolean isProcessingRepositoryTask( String repositoryId )
throws ArchivaException;
public boolean isProcessingRepositoryTaskWithName( String taskName )
throws ArchivaException;
public void queueDatabaseTask( DatabaseTask task )
throws TaskQueueException;

View File

@ -31,6 +31,7 @@ import org.apache.maven.archiva.repository.scanner.RepositoryScanStatistics;
import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskNameSelectionPredicate;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskSelectionPredicate;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
@ -324,6 +325,8 @@ public class DefaultArchivaTaskScheduler
@SuppressWarnings("unchecked")
public boolean isProcessingAnyRepositoryTask()
throws ArchivaException
{
synchronized( repositoryScanningQueue )
{
List<? extends Task> queue = null;
@ -338,10 +341,13 @@ public class DefaultArchivaTaskScheduler
return !queue.isEmpty();
}
}
@SuppressWarnings("unchecked")
public boolean isProcessingRepositoryTask( String repositoryId )
throws ArchivaException
{
synchronized( repositoryScanningQueue )
{
List<? extends Task> queue = null;
@ -356,6 +362,28 @@ public class DefaultArchivaTaskScheduler
return CollectionUtils.exists( queue, new RepositoryTaskSelectionPredicate( repositoryId ) );
}
}
@SuppressWarnings("unchecked")
public boolean isProcessingRepositoryTaskWithName( String taskName )
    throws ArchivaException
{
    // Snapshot the scanning queue under its monitor so concurrent enqueue /
    // dequeue operations cannot interleave with this inspection.
    synchronized( repositoryScanningQueue )
    {
        List<? extends Task> snapshot;
        try
        {
            snapshot = repositoryScanningQueue.getQueueSnapshot();
        }
        catch ( TaskQueueException e )
        {
            throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
        }
        // True when any queued task carries the requested name.
        return CollectionUtils.exists( snapshot, new RepositoryTaskNameSelectionPredicate( taskName ) );
    }
}
@SuppressWarnings("unchecked")
public boolean isProcessingDatabaseTask()
@ -377,9 +405,12 @@ public class DefaultArchivaTaskScheduler
/**
 * Adds the given repository task to the scanning queue.
 *
 * @param task the repository task to enqueue
 * @throws TaskQueueException if the underlying queue rejects the task
 */
public void queueRepositoryTask( RepositoryTask task )
throws TaskQueueException
{
// Guarded by the queue monitor so an enqueue cannot interleave with the
// snapshot-based isProcessing* checks that synchronize on the same queue.
synchronized( repositoryScanningQueue )
{
repositoryScanningQueue.put( task );
}
}
public void queueDatabaseTask( DatabaseTask task )
throws TaskQueueException

View File

@ -34,6 +34,7 @@ import org.apache.maven.archiva.database.constraints.UniqueGroupIdConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.RepositoryContentStatistics;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
import org.apache.maven.archiva.repository.scanner.RepositoryScanStatistics;
import org.apache.maven.archiva.repository.scanner.RepositoryScanner;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
@ -80,6 +81,13 @@ public class ArchivaRepositoryScanningTaskExecutor
*/
private RepositoryScanner repoScanner;
/**
* @plexus.requirement
*/
private RepositoryContentConsumers consumers;
private Task task;
public void initialize()
throws InitializationException
{
@ -90,6 +98,8 @@ public class ArchivaRepositoryScanningTaskExecutor
public void executeTask( Task task )
throws TaskExecutionException
{
this.task = task;
RepositoryTask repoTask = (RepositoryTask) task;
if ( StringUtils.isBlank( repoTask.getRepositoryId() ) )
@ -99,9 +109,18 @@ public class ArchivaRepositoryScanningTaskExecutor
log.info( "Executing task from queue with job name: " + repoTask.getName() );
ManagedRepositoryConfiguration arepo = archivaConfiguration.getConfiguration().findManagedRepositoryById( repoTask.getRepositoryId() );
// execute consumers on resource file if set
if( repoTask.getResourceFile() != null )
{
consumers.executeConsumers( arepo, repoTask.getResourceFile() );
}
else
{
// otherwise, execute consumers on whole repository
try
{
ManagedRepositoryConfiguration arepo = archivaConfiguration.getConfiguration().findManagedRepositoryById( repoTask.getRepositoryId() );
if ( arepo == null )
{
throw new TaskExecutionException( "Unable to execute RepositoryTask with invalid repository id: " + repoTask.getRepositoryId() );
@ -124,12 +143,15 @@ public class ArchivaRepositoryScanningTaskExecutor
RepositoryContentStatistics dbstats = constructRepositoryStatistics( arepo, sinceWhen, results, stats );
dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( dbstats );
this.task = null;
}
catch ( RepositoryException e )
{
throw new TaskExecutionException( "Repository error when executing repository job.", e );
}
}
}
@SuppressWarnings("unchecked")
private RepositoryContentStatistics constructRepositoryStatistics( ManagedRepositoryConfiguration arepo,
@ -177,4 +199,9 @@ public class ArchivaRepositoryScanningTaskExecutor
return dbstats;
}
/**
 * Returns the task currently being executed by this executor, or
 * {@code null} when no task is in progress (the field is cleared at the
 * end of a successful execution).
 *
 * @return the in-flight task, or {@code null} if idle
 */
public Task getCurrentTaskInExecution()
{
return task;
}
}

View File

@ -1,5 +1,7 @@
package org.apache.maven.archiva.scheduled.tasks;
import java.io.File;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@ -36,6 +38,8 @@ public class RepositoryTask
long maxExecutionTime;
File resourceFile;
public String getRepositoryId()
{
return repositoryId;
@ -75,4 +79,14 @@ public class RepositoryTask
{
this.queuePolicy = queuePolicy;
}
/**
 * Returns the single resource file this task should run consumers on, or
 * {@code null} when the task targets the whole repository.
 */
public File getResourceFile()
{
return resourceFile;
}

/**
 * Sets the single resource file to run consumers on; leave unset
 * ({@code null}) to scan the whole repository instead.
 */
public void setResourceFile( File resourceFile )
{
this.resourceFile = resourceFile;
}
}

View File

@ -0,0 +1,47 @@
package org.apache.maven.archiva.scheduled.tasks;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.commons.collections.Predicate;
import org.apache.commons.lang.StringUtils;

/**
 * A {@link Predicate} that matches {@link RepositoryTask}s whose name equals
 * a given task name. Used to check whether a task with a specific name is
 * already present in the repository scanning queue.
 */
public class RepositoryTaskNameSelectionPredicate
    implements Predicate
{
    // Name to match against queued tasks; may be null (StringUtils.equals is null-safe).
    private final String taskName;

    public RepositoryTaskNameSelectionPredicate( String taskName )
    {
        this.taskName = taskName;
    }

    /**
     * @param object the candidate object from the queue snapshot
     * @return true when {@code object} is a {@link RepositoryTask} whose name
     *         equals the configured task name; false otherwise
     */
    public boolean evaluate( Object object )
    {
        // Only RepositoryTask instances can match; anything else is rejected.
        if ( object instanceof RepositoryTask )
        {
            RepositoryTask task = (RepositoryTask) object;
            return StringUtils.equals( taskName, task.getName() );
        }
        return false;
    }
}

View File

@ -38,6 +38,10 @@
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-layer</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-scheduled</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-proxy</artifactId>
@ -113,6 +117,11 @@
<artifactId>slf4j-simple</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.maven.wagon</groupId>
<artifactId>wagon-http-lightweight</artifactId>

View File

@ -53,15 +53,25 @@ import org.apache.jackrabbit.webdav.property.DavPropertyNameSet;
import org.apache.jackrabbit.webdav.property.DavPropertySet;
import org.apache.jackrabbit.webdav.property.DefaultDavProperty;
import org.apache.jackrabbit.webdav.property.ResourceType;
import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.repository.audit.AuditEvent;
import org.apache.maven.archiva.repository.audit.AuditListener;
import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
import org.apache.maven.archiva.scheduled.DefaultArchivaTaskScheduler;
import org.apache.maven.archiva.scheduled.executors.ArchivaRepositoryScanningTaskExecutor;
import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.apache.maven.archiva.webdav.util.IndexWriter;
import org.apache.maven.archiva.webdav.util.MimeTypes;
import org.codehaus.plexus.taskqueue.TaskQueueException;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*/
@ -98,10 +108,16 @@ public class ArchivaDavResource
public static final String COMPLIANCE_CLASS = "1, 2";
private ArchivaTaskScheduler scheduler;
private ArchivaRepositoryScanningTaskExecutor taskExecutor;
private Logger log = LoggerFactory.getLogger( ArchivaDavResource.class );
public ArchivaDavResource( String localResource, String logicalResource, ManagedRepositoryConfiguration repository,
DavSession session, ArchivaDavResourceLocator locator, DavResourceFactory factory,
MimeTypes mimeTypes, List<AuditListener> auditListeners,
RepositoryContentConsumers consumers )
RepositoryContentConsumers consumers, ArchivaTaskScheduler scheduler, TaskExecutor taskExecutor )
{
this.localResource = new File( localResource );
this.logicalResource = logicalResource;
@ -116,15 +132,17 @@ public class ArchivaDavResource
this.mimeTypes = mimeTypes;
this.consumers = consumers;
this.auditListeners = auditListeners;
this.scheduler = scheduler;
this.taskExecutor = ( ArchivaRepositoryScanningTaskExecutor ) taskExecutor;
}
public ArchivaDavResource( String localResource, String logicalResource, ManagedRepositoryConfiguration repository,
String remoteAddr, String principal, DavSession session, ArchivaDavResourceLocator locator,
DavResourceFactory factory, MimeTypes mimeTypes, List<AuditListener> auditListeners,
RepositoryContentConsumers consumers )
RepositoryContentConsumers consumers, ArchivaTaskScheduler scheduler, TaskExecutor taskExecutor )
{
this( localResource, logicalResource, repository, session, locator, factory, mimeTypes, auditListeners,
consumers );
consumers, scheduler, taskExecutor );
this.remoteAddr = remoteAddr;
this.principal = principal;
@ -303,8 +321,7 @@ public class ArchivaDavResource
inputContext.getContentLength() + " but was " + localFile.length() );
}
// Just-in-time update of the index and database by executing the consumers for this artifact
consumers.executeConsumers( repository, localFile );
executeConsumers( localFile );
triggerAuditEvent( resource, exists ? AuditEvent.MODIFY_FILE : AuditEvent.CREATE_FILE );
}
@ -623,4 +640,51 @@ public class ArchivaDavResource
listener.auditEvent( event );
}
}
/**
 * Executes the registered consumers on the given resource file, or — when a
 * repository scanning task is currently executing or queued — defers the work
 * by queueing a single-resource repository task instead, so the scan's index
 * writer/reader is not closed mid-scan.
 *
 * @param localFile the resource file to run the consumers against
 */
private void executeConsumers( File localFile )
{
    try
    {
        RepositoryTask currentTaskInExecution = ( RepositoryTask ) taskExecutor.getCurrentTaskInExecution();
        if( currentTaskInExecution != null || scheduler.isProcessingAnyRepositoryTask() )
        {
            // check if the repository is already queued to be scanned
            if( scheduler.isProcessingRepositoryTaskWithName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repository.getId() )
                || scheduler.isProcessingRepositoryTaskWithName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repository.getId() + ":" + localFile.getName() ) )
            {
                // no need to execute the consumers since repo is already queued
                return;
            }
            else
            {
                // schedule a single-resource task so the consumers run after the scan completes
                RepositoryTask task = new RepositoryTask();
                task.setRepositoryId( repository.getId() );
                task.setName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repository.getId() + ":" + localFile.getName() );
                task.setQueuePolicy( ArchivaTask.QUEUE_POLICY_WAIT );
                task.setResourceFile( localFile );
                try
                {
                    scheduler.queueRepositoryTask( task );
                }
                catch ( TaskQueueException e )
                {
                    // pass the exception so the stack trace is not lost
                    log.error( "Unable to queue repository task to execute consumers on resource file ['" +
                        localFile.getName() + "'].", e );
                }
            }
        }
        else
        {
            // Just-in-time update of the index and database by executing the consumers for this artifact
            consumers.executeConsumers( repository, localFile );
        }
    }
    catch ( ArchivaException e )
    {
        // pass the exception so the stack trace is not lost
        log.error( "Unable to queue repository task to execute consumers on resource file ['" +
            localFile.getName() + "'].", e );
    }
}
}

View File

@ -61,6 +61,7 @@ import org.apache.maven.archiva.repository.metadata.RepositoryMetadataMerge;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataReader;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter;
import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
import org.apache.maven.archiva.security.ServletAuthenticator;
import org.apache.maven.archiva.webdav.util.MimeTypes;
import org.apache.maven.archiva.webdav.util.RepositoryPathUtil;
@ -81,6 +82,7 @@ import org.codehaus.plexus.redback.policy.MustChangePasswordException;
import org.codehaus.plexus.redback.system.SecuritySession;
import org.codehaus.plexus.redback.users.User;
import org.codehaus.plexus.redback.users.UserManager;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import org.codehaus.redback.integration.filter.authentication.HttpAuthenticator;
import org.slf4j.Logger;
@ -168,6 +170,16 @@ public class ArchivaDavResourceFactory
*/
private Digester digestMd5;
/**
* @plexus.requirement role-hint="repository-scanning"
*/
private TaskExecutor taskExecutor;
/**
* @plexus.requirement
*/
private ArchivaTaskScheduler scheduler;
public DavResource createResource( final DavResourceLocator locator, final DavServletRequest request,
final DavServletResponse response )
throws DavException
@ -259,7 +271,7 @@ public class ArchivaDavResourceFactory
new ArchivaDavResource( metadataChecksum.getAbsolutePath(), logicalResource.getPath(),
null, request.getRemoteAddr(), activePrincipal,
request.getDavSession(), archivaLocator, this, mimeTypes,
auditListeners, consumers );
auditListeners, consumers, scheduler, taskExecutor );
}
}
else
@ -294,7 +306,7 @@ public class ArchivaDavResourceFactory
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
null, request.getRemoteAddr(), activePrincipal,
request.getDavSession(), archivaLocator, this, mimeTypes,
auditListeners, consumers );
auditListeners, consumers, scheduler, taskExecutor );
}
catch ( RepositoryMetadataException r )
{
@ -399,7 +411,7 @@ public class ArchivaDavResourceFactory
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
managedRepository.getRepository(), request.getRemoteAddr(), activePrincipal,
request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners,
consumers );
consumers, scheduler, taskExecutor );
if ( WebdavMethodUtil.isReadMethod( request.getMethod() ) )
{
@ -430,7 +442,7 @@ public class ArchivaDavResourceFactory
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
managedRepository.getRepository(), request.getRemoteAddr(),
activePrincipal, request.getDavSession(), archivaLocator, this,
mimeTypes, auditListeners, consumers );
mimeTypes, auditListeners, consumers, scheduler, taskExecutor );
}
catch ( LayoutException e )
{
@ -505,7 +517,7 @@ public class ArchivaDavResourceFactory
File resourceFile = new File( managedRepository.getRepoRoot(), logicalResource );
DavResource resource =
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource, managedRepository.getRepository(),
davSession, archivaLocator, this, mimeTypes, auditListeners, consumers );
davSession, archivaLocator, this, mimeTypes, auditListeners, consumers, scheduler, taskExecutor );
resource.addLockManager( lockManager );
return resource;
@ -999,4 +1011,14 @@ public class ArchivaDavResourceFactory
{
this.httpAuth = httpAuth;
}
public void setTaskExecutor( TaskExecutor taskExecutor )
{
this.taskExecutor = taskExecutor;
}
public void setScheduler( ArchivaTaskScheduler scheduler )
{
this.scheduler = scheduler;
}
}

View File

@ -43,7 +43,8 @@ import org.apache.maven.archiva.webdav.util.MimeTypes;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.codehaus.plexus.spring.PlexusToSpringUtils;
public class DavResourceTest extends PlexusInSpringTestCase
public class DavResourceTest
extends PlexusInSpringTestCase
{
private DavSession session;
@ -79,7 +80,8 @@ public class DavResourceTest extends PlexusInSpringTestCase
myResource = new File( baseDir, "myresource.jar" );
assertTrue( "Could not create " + myResource.getAbsolutePath(), myResource.createNewFile() );
resourceFactory = new RootContextDavResourceFactory();
resourceLocator = (ArchivaDavResourceLocator)new ArchivaDavLocatorFactory().createResourceLocator("/", REPOPATH);
resourceLocator =
(ArchivaDavResourceLocator) new ArchivaDavLocatorFactory().createResourceLocator( "/", REPOPATH );
resource = getDavResource( resourceLocator.getHref( false ), myResource );
lockManager = new SimpleLockManager();
resource.addLockManager( lockManager );
@ -98,7 +100,8 @@ public class DavResourceTest extends PlexusInSpringTestCase
private DavResource getDavResource( String logicalPath, File file )
{
return new ArchivaDavResource( file.getAbsolutePath(), logicalPath, repository, session, resourceLocator,
resourceFactory, mimeTypes, Collections.<AuditListener>emptyList(), consumers );
resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(), consumers,
null, null );
}
public void testDeleteNonExistantResourceShould404()
@ -192,7 +195,6 @@ public class DavResourceTest extends PlexusInSpringTestCase
assertNull( resource.getLock( Type.WRITE, Scope.SHARED ) );
}
public void testRefreshLockThrowsExceptionIfNoLockIsPresent()
throws Exception
{
@ -291,15 +293,22 @@ public class DavResourceTest extends PlexusInSpringTestCase
assertEquals( 0, resource.getLocks().length );
}
private class RootContextDavResourceFactory implements DavResourceFactory
private class RootContextDavResourceFactory
implements DavResourceFactory
{
public DavResource createResource( DavResourceLocator locator, DavServletRequest request,
DavServletResponse response )
throws DavException
{
public DavResource createResource(DavResourceLocator locator, DavServletRequest request, DavServletResponse response) throws DavException {
throw new UnsupportedOperationException( "Not supported yet." );
}
public DavResource createResource(DavResourceLocator locator, DavSession session) throws DavException {
public DavResource createResource( DavResourceLocator locator, DavSession session )
throws DavException
{
return new ArchivaDavResource( baseDir.getAbsolutePath(), "/", repository, session, resourceLocator,
resourceFactory, mimeTypes, Collections.<AuditListener>emptyList(), consumers );
resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(),
consumers, null, null );
}
}
}

View File

@ -148,7 +148,70 @@
<role-hint>md5</role-hint>
<field-name>digestMd5</field-name>
</requirement>
<requirement>
<role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
<role-hint>repository-scanning</role-hint>
<field-name>taskExecutor</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
<field-name>scheduler</field-name>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.jdo.JdoFactory</role>
<role-hint>archiva</role-hint>
<implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
<configuration>
<!-- Database Configuration -->
<driverName>org.hsqldb.jdbcDriver</driverName>
<url>jdbc:hsqldb:mem:TESTDB</url>
<userName>sa</userName>
<password></password>
<!-- JPOX and JDO configuration -->
<persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
<otherProperties>
<property>
<name>javax.jdo.PersistenceManagerFactoryClass</name>
<value>org.jpox.PersistenceManagerFactoryImpl</value>
</property>
<property>
<name>org.jpox.autoCreateSchema</name>
<value>true</value>
</property>
<property>
<name>org.jpox.validateTables</name>
<value>false</value>
</property>
<property>
<name>org.jpox.validateConstraints</name>
<value>false</value>
</property>
<property>
<name>org.jpox.validateColumns</name>
<value>false</value>
</property>
<property>
<name>org.jpox.autoStartMechanism</name>
<value>None</value>
</property>
<property>
<name>org.jpox.transactionIsolation</name>
<value>READ_UNCOMMITTED</value>
</property>
<property>
<name>org.jpox.poid.transactionIsolation</name>
<value>READ_UNCOMMITTED</value>
</property>
<property>
<name>org.jpox.rdbms.dateTimezone</name>
<value>JDK_DEFAULT_TIMEZONE</value>
</property>
</otherProperties>
</configuration>
</component>
</components>
</plexus>

View File

@ -160,7 +160,70 @@
<role-hint>md5</role-hint>
<field-name>digestMd5</field-name>
</requirement>
<requirement>
<role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
<role-hint>repository-scanning</role-hint>
<field-name>taskExecutor</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
<field-name>scheduler</field-name>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.jdo.JdoFactory</role>
<role-hint>archiva</role-hint>
<implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
<configuration>
<!-- Database Configuration -->
<driverName>org.hsqldb.jdbcDriver</driverName>
<url>jdbc:hsqldb:mem:TESTDB</url>
<userName>sa</userName>
<password></password>
<!-- JPOX and JDO configuration -->
<persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
<otherProperties>
<property>
<name>javax.jdo.PersistenceManagerFactoryClass</name>
<value>org.jpox.PersistenceManagerFactoryImpl</value>
</property>
<property>
<name>org.jpox.autoCreateSchema</name>
<value>true</value>
</property>
<property>
<name>org.jpox.validateTables</name>
<value>false</value>
</property>
<property>
<name>org.jpox.validateConstraints</name>
<value>false</value>
</property>
<property>
<name>org.jpox.validateColumns</name>
<value>false</value>
</property>
<property>
<name>org.jpox.autoStartMechanism</name>
<value>None</value>
</property>
<property>
<name>org.jpox.transactionIsolation</name>
<value>READ_UNCOMMITTED</value>
</property>
<property>
<name>org.jpox.poid.transactionIsolation</name>
<value>READ_UNCOMMITTED</value>
</property>
<property>
<name>org.jpox.rdbms.dateTimezone</name>
<value>JDK_DEFAULT_TIMEZONE</value>
</property>
</otherProperties>
</configuration>
</component>
</components>
</plexus>