Mirror of https://github.com/apache/archiva.git (synced 2025-02-22 18:31:43 +00:00)
trying more memory to see what happens on Jenkins
Signed-off-by: olivier lamy <olamy@apache.org>
This commit is contained in: parent 623eb062da, commit d93ae430d5
@@ -439,7 +439,7 @@
             <include>**/*Tests.java</include>
             <include>**/*Test.java</include>
           </includes>
-          <argLine>-Xmx512m -Xms512m -server -XX:MaxPermSize=256m @{jacocoproperty}</argLine>
+          <argLine>-Xms1024m -Xmx2048m -server -XX:MaxPermSize=256m @{jacocoproperty}</argLine>
           <systemPropertyVariables>
             <appserver.base>${project.build.directory}/appserver-base</appserver.base>
             <plexus.home>${project.build.directory}/appserver-base</plexus.home>
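The new argLine replaces the old fixed 512 MB heap with a 1 GB initial / 2 GB maximum heap for the forked test JVM. As a minimal sketch (not part of the commit; the class name is illustrative), a check like this can be run inside a fork to confirm the settings actually reached the JVM:

    // Hypothetical check, not part of the commit: report the heap limits the forked JVM received.
    public class HeapSettingsCheck
    {
        public static void main( String[] args )
        {
            long maxMb = Runtime.getRuntime().maxMemory() / ( 1024 * 1024 );         // roughly the -Xmx limit
            long committedMb = Runtime.getRuntime().totalMemory() / ( 1024 * 1024 );  // currently committed heap, starts near -Xms
            System.out.println( "max heap: " + maxMb + " MB, committed: " + committedMb + " MB" );
        }
    }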
@@ -33,6 +33,7 @@
 import org.springframework.stereotype.Service;

 import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
 import javax.inject.Inject;
 import javax.jcr.Repository;
 import javax.jcr.RepositoryException;

@@ -44,7 +45,7 @@
 /**
  *
  */
-@Service("repositorySessionFactory#jcr")
+@Service( "repositorySessionFactory#jcr" )
 public class JcrRepositorySessionFactory
     implements RepositorySessionFactory
 {

@@ -64,6 +65,8 @@ public class JcrRepositorySessionFactory
     @Inject
     private RepositorySessionFactoryBean repositorySessionFactoryBean;

+    private RepositoryFactory repositoryFactory;
+
     @Override
     public RepositorySession createSession()
     {

@@ -86,8 +89,10 @@ public RepositorySession createSession()
     }

     // Lazy evaluation to avoid problems with circular dependencies during initialization
-    private MetadataResolver getMetadataResolver() {
-        if (this.metadataResolver==null) {
+    private MetadataResolver getMetadataResolver()
+    {
+        if ( this.metadataResolver == null )
+        {
             this.metadataResolver = applicationContext.getBean( MetadataResolver.class );
         }
         return this.metadataResolver;

@@ -124,11 +129,12 @@ public void initialize()
         JcrMetadataRepository metadataRepository = null;
         try
         {
-            RepositoryFactory factory = new RepositoryFactory();
+
+            repositoryFactory = new RepositoryFactory();
             // FIXME this need to be configurable
-            Path directoryPath = Paths.get(System.getProperty( "appserver.base" ), "data/jcr");
-            factory.setRepositoryPath( directoryPath );
-            repository = factory.createRepository();
+            Path directoryPath = Paths.get( System.getProperty( "appserver.base" ), "data/jcr" );
+            repositoryFactory.setRepositoryPath( directoryPath );
+            repository = repositoryFactory.createRepository();
             metadataRepository = new JcrMetadataRepository( metadataFacetFactories, repository );
             JcrMetadataRepository.initialize( metadataRepository.getJcrSession() );
         }

@@ -147,4 +153,10 @@ public void initialize()
         stopWatch.stop();
         logger.info( "time to initialize JcrRepositorySessionFactory: {}", stopWatch.getTime() );
     }
+
+    @PreDestroy
+    public void close()
+    {
+        repositoryFactory.close();
+    }
 }
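The close() method added above plugs into Spring's standard bean lifecycle: a @PreDestroy method is invoked when the application context shuts down, just as a @PostConstruct method is invoked once dependency injection completes, which is what lets the factory release the underlying JCR repository. A minimal sketch of that contract (the bean and class names are illustrative, not from the commit):

    import javax.annotation.PostConstruct;
    import javax.annotation.PreDestroy;

    import org.springframework.stereotype.Service;

    // Illustrative bean showing the lifecycle hooks JcrRepositorySessionFactory relies on.
    @Service( "example#lifecycle" )
    public class ExampleLifecycleBean
    {
        @PostConstruct
        public void initialize()
        {
            // runs once, after all dependencies have been injected
        }

        @PreDestroy
        public void close()
        {
            // runs once, when the Spring context is closed; release external resources here
        }
    }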
@@ -20,6 +20,7 @@
  */

 import com.google.common.collect.ImmutableSet;
+import org.apache.commons.lang.time.StopWatch;
 import org.apache.jackrabbit.JcrConstants;
 import org.apache.jackrabbit.oak.Oak;
 import org.apache.jackrabbit.oak.api.Type;
@@ -57,12 +58,10 @@
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.ThreadFactory;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.Executors;

+import static org.apache.archiva.metadata.repository.jcr.RepositoryFactory.StoreType.IN_MEMORY_TYPE;
+import static org.apache.archiva.metadata.repository.jcr.RepositoryFactory.StoreType.SEGMENT_FILE_TYPE;
 import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.INCLUDE_PROPERTY_TYPES;

 /**
@@ -74,109 +73,127 @@
 public class RepositoryFactory
 {

-    Logger log = LoggerFactory.getLogger( RepositoryFactory.class );
+    private Logger log = LoggerFactory.getLogger( RepositoryFactory.class );

-    public static final String SEGMENT_FILE_TYPE = "oak-segment-tar";
-    public static final String IN_MEMORY_TYPE = "oak-memory";
+    private FileStore fileStore;

-    String storeType = SEGMENT_FILE_TYPE;
+    public enum StoreType
+    {
+        SEGMENT_FILE_TYPE,
+        IN_MEMORY_TYPE;
+    }
+
+
+    private StoreType storeType = SEGMENT_FILE_TYPE;

     Path repositoryPath = Paths.get( "repository" );

-    public Repository createRepository( ) throws IOException, InvalidFileStoreVersionException
+    public Repository createRepository()
+        throws IOException, InvalidFileStoreVersionException
     {
         NodeStore nodeStore;
-        if ( SEGMENT_FILE_TYPE.equals( storeType ) )
+        if ( SEGMENT_FILE_TYPE == storeType )
         {
-            FileStore fs = FileStoreBuilder.fileStoreBuilder( repositoryPath.toFile( ) ).build( );
-            nodeStore = SegmentNodeStoreBuilders.builder( fs ).build( );
-        } else if (IN_MEMORY_TYPE.equals(storeType)) {
+            fileStore = FileStoreBuilder.fileStoreBuilder( repositoryPath.toFile() ).build();
+            nodeStore = SegmentNodeStoreBuilders.builder( fileStore ).build();
+        }
+        else if ( IN_MEMORY_TYPE == storeType )
+        {
             nodeStore = null;
-        } else {
-            throw new IllegalArgumentException( "Store type "+storeType+" not recognized" );
         }
+        else
+        {
+            throw new IllegalArgumentException( "Store type " + storeType + " not recognized" );
+        }

-        Oak oak = nodeStore==null ? new Oak() : new Oak(nodeStore);
-        oak.with( new RepositoryInitializer( )
+        Oak oak = nodeStore == null ? new Oak() : new Oak( nodeStore );
+        oak.with( new RepositoryInitializer()
         {
             @Override
             public void initialize( @Nonnull NodeBuilder root )
             {
-                log.info("Creating index ");
+                log.info( "Creating index " );

-                NodeBuilder lucene = IndexUtils.getOrCreateOakIndex( root ).child("lucene");
-                lucene.setProperty( JcrConstants.JCR_PRIMARYTYPE, "oak:QueryIndexDefinition", Type.NAME);
+                NodeBuilder lucene = IndexUtils.getOrCreateOakIndex( root ).child( "lucene" );
+                lucene.setProperty( JcrConstants.JCR_PRIMARYTYPE, "oak:QueryIndexDefinition", Type.NAME );

-                lucene.setProperty("compatVersion", 2);
-                lucene.setProperty("type", "lucene");
+                lucene.setProperty( "compatVersion", 2 );
+                lucene.setProperty( "type", "lucene" );
                 // lucene.setProperty("async", "async");
-                lucene.setProperty(INCLUDE_PROPERTY_TYPES,
-                    ImmutableSet.of("String"), Type.STRINGS);
+                lucene.setProperty( INCLUDE_PROPERTY_TYPES, ImmutableSet.of( "String" ), Type.STRINGS );
                 // lucene.setProperty("refresh",true);
-                lucene.setProperty("async",ImmutableSet.of("async", "sync"), Type.STRINGS);
-                NodeBuilder rules = lucene.child("indexRules").
-                    setProperty(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME);
-                rules.setProperty(":childOrder",ImmutableSet.of("archiva:projectVersion","archiva:artifact",
-                    "archiva:facet","archiva:namespace", "archiva:project"), Type.STRINGS);
-                NodeBuilder allProps = rules.child("archiva:projectVersion")
-                    .child("properties").setProperty( JcrConstants.JCR_PRIMARYTYPE,
-                        "nt:unstructured", Type.NAME)
-                    .setProperty( ":childOrder", ImmutableSet.of("allProps"), Type.STRINGS )
-                    .setProperty("indexNodeName",true)
-                    .child("allProps").setProperty(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME);
-                allProps.setProperty("name", ".*");
-                allProps.setProperty("isRegexp", true);
-                allProps.setProperty("nodeScopeIndex", true);
-                allProps.setProperty("index",true);
-                allProps.setProperty("analyzed",true);
+                lucene.setProperty( "async", ImmutableSet.of( "async", "sync" ), Type.STRINGS );
+                NodeBuilder rules = lucene.child( "indexRules" ).
+                    setProperty( JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME );
+                rules.setProperty( ":childOrder", ImmutableSet.of( "archiva:projectVersion", //
+                                                                   "archiva:artifact", //
+                                                                   "archiva:facet", //
+                                                                   "archiva:namespace", //
+                                                                   "archiva:project" ), //
+                                   Type.STRINGS );
+                NodeBuilder allProps = rules.child( "archiva:projectVersion" ) //
+                    .child( "properties" ) //
+                    .setProperty( JcrConstants.JCR_PRIMARYTYPE, "nt:unstructured", Type.NAME ) //
+                    .setProperty( ":childOrder", ImmutableSet.of( "allProps" ), Type.STRINGS ) //
+                    .setProperty( "indexNodeName", true ) //
+                    .child( "allProps" ) //
+                    .setProperty( JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME );
+                allProps.setProperty( "name", ".*" );
+                allProps.setProperty( "isRegexp", true );
+                allProps.setProperty( "nodeScopeIndex", true );
+                allProps.setProperty( "index", true );
+                allProps.setProperty( "analyzed", true );
                 // allProps.setProperty("propertyIndex",true);
-                allProps = rules.child("archiva:artifact")
-                    .child("properties").setProperty( JcrConstants.JCR_PRIMARYTYPE,
-                        "nt:unstructured", Type.NAME)
-                    .setProperty( ":childOrder", ImmutableSet.of("allProps"), Type.STRINGS )
-                    .setProperty("indexNodeName",true)
-                    .child("allProps").setProperty(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME);
-                allProps.setProperty("name", ".*");
-                allProps.setProperty("isRegexp", true);
-                allProps.setProperty("nodeScopeIndex", true);
-                allProps.setProperty("index",true);
-                allProps.setProperty("analyzed",true);
-                allProps = rules.child("archiva:facet")
-                    .child("properties").setProperty( JcrConstants.JCR_PRIMARYTYPE,
-                        "nt:unstructured", Type.NAME)
-                    .setProperty( ":childOrder", ImmutableSet.of("allProps"), Type.STRINGS )
-                    .setProperty("indexNodeName",true)
-                    .child("allProps").setProperty(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME);
-                allProps.setProperty("name", ".*");
-                allProps.setProperty("isRegexp", true);
-                allProps.setProperty("nodeScopeIndex", true);
-                allProps.setProperty("index",true);
-                allProps.setProperty("analyzed",true);
-                allProps = rules.child("archiva:namespace")
-                    .child("properties").setProperty( JcrConstants.JCR_PRIMARYTYPE,
-                        "nt:unstructured", Type.NAME)
-                    .setProperty( ":childOrder", ImmutableSet.of("allProps"), Type.STRINGS )
-                    .setProperty("indexNodeName",true)
-                    .child("allProps").setProperty(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME);
-                allProps.setProperty("name", ".*");
-                allProps.setProperty("isRegexp", true);
-                allProps.setProperty("nodeScopeIndex", true);
-                allProps.setProperty("index",true);
-                allProps.setProperty("analyzed",true);
-                allProps = rules.child("archiva:project")
-                    .child("properties").setProperty( JcrConstants.JCR_PRIMARYTYPE,
-                        "nt:unstructured", Type.NAME)
-                    .setProperty( ":childOrder", ImmutableSet.of("allProps"), Type.STRINGS )
-                    .setProperty("indexNodeName",true)
-                    .child("allProps").setProperty(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME);
-                allProps.setProperty("name", ".*");
-                allProps.setProperty("isRegexp", true);
-                allProps.setProperty("nodeScopeIndex", true);
-                allProps.setProperty("index",true);
-                allProps.setProperty("analyzed",true);
+                allProps = rules.child( "archiva:artifact" ) //
+                    .child( "properties" ) //
+                    .setProperty( JcrConstants.JCR_PRIMARYTYPE, "nt:unstructured", Type.NAME ) //
+                    .setProperty( ":childOrder", ImmutableSet.of( "allProps" ), Type.STRINGS ) //
+                    .setProperty( "indexNodeName", true ).child( "allProps" ) //
+                    .setProperty( JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME );
+                allProps.setProperty( "name", ".*" );
+                allProps.setProperty( "isRegexp", true );
+                allProps.setProperty( "nodeScopeIndex", true );
+                allProps.setProperty( "index", true );
+                allProps.setProperty( "analyzed", true );
+                allProps = rules.child( "archiva:facet" ) //
+                    .child( "properties" ) //
+                    .setProperty( JcrConstants.JCR_PRIMARYTYPE, "nt:unstructured", Type.NAME ) //
+                    .setProperty( ":childOrder", ImmutableSet.of( "allProps" ), Type.STRINGS ) //
+                    .setProperty( "indexNodeName", true ) //
+                    .child( "allProps" ) //
+                    .setProperty( JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME );
+                allProps.setProperty( "name", ".*" );
+                allProps.setProperty( "isRegexp", true );
+                allProps.setProperty( "nodeScopeIndex", true );
+                allProps.setProperty( "index", true );
+                allProps.setProperty( "analyzed", true );
+                allProps = rules.child( "archiva:namespace" ) //
+                    .child( "properties" ) //
+                    .setProperty( JcrConstants.JCR_PRIMARYTYPE, "nt:unstructured", Type.NAME ) //
+                    .setProperty( ":childOrder", ImmutableSet.of( "allProps" ), Type.STRINGS ) //
+                    .setProperty( "indexNodeName", true ) //
+                    .child( "allProps" ) //
+                    .setProperty( JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME );
+                allProps.setProperty( "name", ".*" );
+                allProps.setProperty( "isRegexp", true );
+                allProps.setProperty( "nodeScopeIndex", true );
+                allProps.setProperty( "index", true );
+                allProps.setProperty( "analyzed", true );
+                allProps = rules.child( "archiva:project" ) //
+                    .child( "properties" ) //
+                    .setProperty( JcrConstants.JCR_PRIMARYTYPE, "nt:unstructured", Type.NAME ) //
+                    .setProperty( ":childOrder", ImmutableSet.of( "allProps" ), Type.STRINGS ) //
+                    .setProperty( "indexNodeName", true ) //
+                    .child( "allProps" ) //
+                    .setProperty( JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME );
+                allProps.setProperty( "name", ".*" );
+                allProps.setProperty( "isRegexp", true );
+                allProps.setProperty( "nodeScopeIndex", true );
+                allProps.setProperty( "index", true );
+                allProps.setProperty( "analyzed", true );

-                log.info("Index: "+lucene+" myIndex "+lucene.getChildNode( "myIndex" ));
-                log.info("myIndex "+lucene.getChildNode( "myIndex" ).getProperties());
+                log.info( "Index: {} myIndex {}", lucene, lucene.getChildNode( "myIndex" ) );
+                log.info( "myIndex {}", lucene.getChildNode( "myIndex" ).getProperties() );
                 // IndexUtils.createIndexDefinition( )

             }
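In the hunk above the String store-type constants are replaced by the StoreType enum, and the segment FileStore is kept in a field so it can be closed later. A short usage sketch based only on methods visible in this diff (setStoreType, setRepositoryPath, createRepository, close); the path, class name and import locations are assumptions, not taken from the commit:

    import java.io.IOException;

    import javax.jcr.Repository;

    import org.apache.archiva.metadata.repository.jcr.RepositoryFactory;
    import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;

    public class RepositoryFactoryUsage
    {
        // Illustrative only: open a segment-tar backed repository below target/jcr-test.
        public Repository open()
            throws IOException, InvalidFileStoreVersionException
        {
            RepositoryFactory factory = new RepositoryFactory();
            factory.setStoreType( RepositoryFactory.StoreType.SEGMENT_FILE_TYPE ); // or IN_MEMORY_TYPE
            factory.setRepositoryPath( "target/jcr-test" );
            Repository repository = factory.createRepository();
            // ... use the repository ...
            // factory.close() releases the underlying FileStore once the repository is no longer needed
            return repository;
        }
    }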
@@ -184,58 +201,73 @@ public void initialize( @Nonnull NodeBuilder root )

         ExecutorService executorService = createExecutor();
         StatisticsProvider statsProvider = StatisticsProvider.NOOP;
-        int queueSize = Integer.getInteger("queueSize", 10000);
+        int queueSize = Integer.getInteger( "queueSize", 10000 );
         File indexDir = Files.createTempDirectory( "archiva_index" ).toFile();
-        log.info("Queue Index "+indexDir.toString());
+        log.info( "Queue Index {}", indexDir.toString() );
         IndexCopier indexCopier = new IndexCopier( executorService, indexDir, true );
-        NRTIndexFactory nrtIndexFactory = new NRTIndexFactory( indexCopier, statsProvider);
-        MountInfoProvider mountInfoProvider = Mounts.defaultMountInfoProvider( );
-        IndexTracker tracker = new IndexTracker(new DefaultIndexReaderFactory( mountInfoProvider, indexCopier ), nrtIndexFactory);
-        DocumentQueue queue = new DocumentQueue(queueSize, tracker, executorService, statsProvider);
-        LocalIndexObserver localIndexObserver = new LocalIndexObserver( queue, statsProvider);
-        LuceneIndexProvider provider = new LuceneIndexProvider(tracker);
+        NRTIndexFactory nrtIndexFactory = new NRTIndexFactory( indexCopier, statsProvider );
+        MountInfoProvider mountInfoProvider = Mounts.defaultMountInfoProvider();
+        IndexTracker tracker =
+            new IndexTracker( new DefaultIndexReaderFactory( mountInfoProvider, indexCopier ), nrtIndexFactory );
+        DocumentQueue queue = new DocumentQueue( queueSize, tracker, executorService, statsProvider );
+        LocalIndexObserver localIndexObserver = new LocalIndexObserver( queue, statsProvider );
+        LuceneIndexProvider provider = new LuceneIndexProvider( tracker );

         // ExternalObserverBuilder builder = new ExternalObserverBuilder(queue, tracker, statsProvider,
         //    executorService, queueSize);
         // Observer observer = builder.build();
         // builder.getBackgroundObserver();

-        LuceneIndexEditorProvider editorProvider = new LuceneIndexEditorProvider(null,
-            tracker, new ExtractedTextCache(0, 0), null, mountInfoProvider);
-        editorProvider.setIndexingQueue(queue);
+        LuceneIndexEditorProvider editorProvider = //
+            new LuceneIndexEditorProvider( null, tracker, //
+                                           new ExtractedTextCache( 0, 0 ), //
+                                           null, mountInfoProvider );
+        editorProvider.setIndexingQueue( queue );


-        log.info("Oak: "+oak+" with nodeStore "+nodeStore);
-        Jcr jcr = new Jcr(oak).with( editorProvider ).with( (Observer) provider )
-            .with(localIndexObserver)
+        log.info( "Oak: {} with nodeStore {}", oak, nodeStore );
+        Jcr jcr = new Jcr( oak ).with( editorProvider ) //
+            .with( (Observer) provider ) //
+            .with( localIndexObserver )
             // .with(observer)
-            .with( ( QueryIndexProvider) provider )
-            .withAsyncIndexing("async",5 );
+            .with( (QueryIndexProvider) provider ); //
+        //.withAsyncIndexing( "async", 5 );
+        StopWatch stopWatch = new StopWatch();
+        stopWatch.start();
         Repository r = jcr.createRepository();
-        try
-        {
-            Thread.currentThread().sleep(1000);
-        }
-        catch ( InterruptedException e )
-        {
-            e.printStackTrace( );
-        }
+        stopWatch.stop();
+        log.info( "time to create jcr repository: {} ms", stopWatch.getTime() );
+        // try
+        // {
+        //     Thread.currentThread().sleep( 1000 );
+        // }
+        // catch ( InterruptedException e )
+        // {
+        //     log.error( e.getMessage(), e );
+        // }
         return r;


     }

-    public String getStoreType( )
+    public void close()
+    {
+        if ( fileStore != null )
+        {
+            fileStore.close();
+        }
+    }
+
+    public StoreType getStoreType()
     {
         return storeType;
     }

-    public void setStoreType( String storeType )
+    public void setStoreType( StoreType storeType )
     {
         this.storeType = storeType;
     }

-    public Path getRepositoryPath( )
+    public Path getRepositoryPath()
     {
         return repositoryPath;
     }
@@ -256,37 +288,50 @@ public void setRepositoryPath( String repositoryPath )
             }
             catch ( IOException e )
             {
-                e.printStackTrace( );
+                log.error( e.getMessage(), e );
                 throw new IllegalArgumentException( "cannot create directory:" + repositoryPath, e );
             }
         }
     }

-    private ExecutorService createExecutor() {
-        ThreadPoolExecutor executor = new ThreadPoolExecutor(0, 5, 60L, TimeUnit.SECONDS,
-            new LinkedBlockingQueue<Runnable>(), new ThreadFactory() {
-            private final AtomicInteger counter = new AtomicInteger();
-            private final Thread.UncaughtExceptionHandler handler = new Thread.UncaughtExceptionHandler() {
-                @Override
-                public void uncaughtException(Thread t, Throwable e) {
-                    log.warn("Error occurred in asynchronous processing ", e);
-                }
-            };
-            @Override
-            public Thread newThread(@Nonnull Runnable r) {
-                Thread thread = new Thread(r, createName());
-                thread.setDaemon(true);
-                thread.setPriority(Thread.MIN_PRIORITY);
-                thread.setUncaughtExceptionHandler(handler);
-                return thread;
-            }
-
-            private String createName() {
-                return "oak-lucene-" + counter.getAndIncrement();
-            }
-        });
-        executor.setKeepAliveTime(1, TimeUnit.MINUTES);
-        executor.allowCoreThreadTimeOut(true);
-        return executor;
+    private ExecutorService createExecutor()
+    {
+        return Executors.newCachedThreadPool();
+        //
+        // ThreadPoolExecutor executor =
+        //     new ThreadPoolExecutor( 0, 5, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(),
+        //         new ThreadFactory()
+        //         {
+        //             private final AtomicInteger counter = new AtomicInteger();
+        //
+        //             private final Thread.UncaughtExceptionHandler handler =
+        //                 new Thread.UncaughtExceptionHandler()
+        //                 {
+        //                     @Override
+        //                     public void uncaughtException( Thread t, Throwable e )
+        //                     {
+        //                         log.warn( "Error occurred in asynchronous processing ", e );
+        //                     }
+        //                 };
+        //
+        //             @Override
+        //             public Thread newThread( @Nonnull Runnable r )
+        //             {
+        //                 Thread thread = new Thread( r, createName() );
+        //                 thread.setDaemon( true );
+        //                 thread.setPriority( Thread.MIN_PRIORITY );
+        //                 thread.setUncaughtExceptionHandler( handler );
+        //                 return thread;
+        //             }
+        //
+        //             private String createName()
+        //             {
+        //                 return "oak-lucene-" + counter.getAndIncrement();
+        //             }
+        //         } );
+        // executor.setKeepAliveTime( 1, TimeUnit.MINUTES );
+        // executor.allowCoreThreadTimeOut( true );
+        // return executor;
     }

 }
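createExecutor() now simply returns Executors.newCachedThreadPool(), and the hand-rolled ThreadPoolExecutor with its daemon, low-priority, exception-logging ThreadFactory survives only as a comment. One behavioural difference worth keeping in mind: the default factory behind newCachedThreadPool() creates non-daemon threads, so the pool can keep the JVM alive until it is shut down. An illustrative sketch (not part of the commit) of how the old daemon and naming behaviour could be kept while still using the JDK factory method:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.ThreadFactory;
    import java.util.concurrent.atomic.AtomicInteger;

    // Illustrative alternative: a cached pool that keeps the daemon/naming behaviour of the old ThreadFactory.
    public final class DaemonExecutors
    {
        private static final AtomicInteger COUNTER = new AtomicInteger();

        public static ExecutorService newDaemonCachedThreadPool()
        {
            ThreadFactory factory = runnable -> {
                Thread thread = new Thread( runnable, "oak-lucene-" + COUNTER.getAndIncrement() );
                thread.setDaemon( true );
                thread.setPriority( Thread.MIN_PRIORITY );
                return thread;
            };
            return Executors.newCachedThreadPool( factory );
        }
    }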
pom.xml
@@ -71,7 +71,7 @@
     <slf4j.version>1.7.25</slf4j.version>
     <log4j.version>2.8.2</log4j.version>

-    <spring.version>4.3.9.RELEASE</spring.version>
+    <spring.version>4.3.10.RELEASE</spring.version>

     <javax.jcr.version>2.0</javax.jcr.version>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>