[MRM-454]

- synchronized the index operations in LuceneRepositoryContentIndex (used 'repository' as the lock) to avoid index locking conflicts
- added a method for deleting artifacts from the index during repository purge
- updated the repository purge tests
(an illustrative sketch of the resulting purge and index-update flow follows the commit metadata below)


git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@588598 13f79535-47bb-0310-9956-ffa450edef68
Maria Odea B. Ching 2007-10-26 10:36:48 +00:00
parent c5f4e5079b
commit 2395fea5f0
12 changed files with 483 additions and 161 deletions
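For orientation, here is a condensed, illustrative sketch (not the committed source) of the flow this change introduces: for every purged artifact the purge code builds one Lucene record per index type and asks the "filecontent", "hashcodes" and "bytecode" indices to delete the matching documents. The Archiva types and the deleteRecords( Collection ) call are the ones that appear in the diff below; the sketch's own class and method names (IndexAwarePurgeSketch, removeFromIndices) are hypothetical.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;

/**
 * Illustrative sketch only: mirrors the flow added to AbstractRepositoryPurge by this commit.
 */
public class IndexAwarePurgeSketch
{
    private final Map<String, RepositoryContentIndex> indices;

    public IndexAwarePurgeSketch( Map<String, RepositoryContentIndex> indices )
    {
        this.indices = indices;
    }

    public void removeFromIndices( ArchivaArtifact artifact, String repositoryPath )
        throws RepositoryIndexException
    {
        // One record per index type for the purged artifact.
        List<LuceneRepositoryContentRecord> fileContentRecords = new ArrayList<LuceneRepositoryContentRecord>();
        List<LuceneRepositoryContentRecord> hashcodeRecords = new ArrayList<LuceneRepositoryContentRecord>();
        List<LuceneRepositoryContentRecord> bytecodeRecords = new ArrayList<LuceneRepositoryContentRecord>();

        FileContentRecord fileContentRecord = new FileContentRecord();
        fileContentRecord.setFilename( repositoryPath );
        fileContentRecords.add( fileContentRecord );

        HashcodesRecord hashcodesRecord = new HashcodesRecord();
        hashcodesRecord.setArtifact( artifact );
        hashcodeRecords.add( hashcodesRecord );

        BytecodeRecord bytecodeRecord = new BytecodeRecord();
        bytecodeRecord.setArtifact( artifact );
        bytecodeRecords.add( bytecodeRecord );

        // Each index removes the documents whose primary keys match the records.
        indices.get( "filecontent" ).deleteRecords( fileContentRecords );
        indices.get( "hashcodes" ).deleteRecords( hashcodeRecords );
        indices.get( "bytecode" ).deleteRecords( bytecodeRecords );
    }
}

The companion change in LuceneRepositoryContentIndex (the last file in this diff) wraps each such index operation in a synchronized ( repository ) block, so consumers working on the same repository serialize their index writes instead of colliding on Lucene's write lock.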


@@ -1,27 +1,32 @@
package org.apache.maven.archiva.consumers.core.repository;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
@@ -29,6 +34,9 @@ import org.apache.maven.archiva.repository.layout.LayoutException;
import java.io.File;
import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
@@ -43,16 +51,20 @@ public abstract class AbstractRepositoryPurge
protected ArtifactDAO artifactDao;
public AbstractRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao )
private Map<String, RepositoryContentIndex> indices;
public AbstractRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
Map<String, RepositoryContentIndex> indices )
{
this.repository = repository;
this.artifactDao = artifactDao;
this.indices = indices;
}
/**
* Get all files from the directory that matches the specified filename.
*
* @param dir the directory to be scanned
*
* @param dir the directory to be scanned
* @param filename the filename to be matched
* @return
*/
@@ -78,16 +90,36 @@ public abstract class AbstractRepositoryPurge
/**
* Purge the repo. Update db and index of removed artifacts.
*
*
* @param references the artifact references to be purged
* @throws RepositoryIndexException
*/
protected void purge( Set<ArtifactReference> references )
{
List<LuceneRepositoryContentRecord> fileContentRecords = new ArrayList<LuceneRepositoryContentRecord>();
List<LuceneRepositoryContentRecord> hashcodeRecords = new ArrayList<LuceneRepositoryContentRecord>();
List<LuceneRepositoryContentRecord> bytecodeRecords = new ArrayList<LuceneRepositoryContentRecord>();
for ( ArtifactReference reference : references )
{
File artifactFile = repository.toFile( reference );
ArchivaArtifact artifact =
new ArchivaArtifact( reference.getGroupId(), reference.getArtifactId(), reference.getVersion(),
reference.getClassifier(), reference.getType() );
FileContentRecord fileContentRecord = new FileContentRecord();
fileContentRecord.setFilename( repository.toPath( artifact ) );
fileContentRecords.add( fileContentRecord );
HashcodesRecord hashcodesRecord = new HashcodesRecord();
hashcodesRecord.setArtifact( artifact );
hashcodeRecords.add( hashcodesRecord );
BytecodeRecord bytecodeRecord = new BytecodeRecord();
bytecodeRecord.setArtifact( artifact );
bytecodeRecords.add( bytecodeRecord );
artifactFile.delete();
purgeSupportFiles( artifactFile );
@@ -107,13 +139,21 @@ public abstract class AbstractRepositoryPurge
// Ignore
}
}
try
{
updateIndices( fileContentRecords, hashcodeRecords, bytecodeRecords );
}
catch ( RepositoryIndexException e )
{
// Ignore
}
}
/**
* <p>
* This finds the support files for the artifactFile and deletes them.
* </p>
*
* <p>
* Support Files are things like ".sha1", ".md5", ".asc", etc.
* </p>
@@ -147,12 +187,27 @@ public abstract class AbstractRepositoryPurge
throws ArchivaDatabaseException, LayoutException
{
ArtifactReference artifact = repository.toArtifactReference( path );
ArchivaArtifact queriedArtifact = artifactDao.getArtifact( artifact.getGroupId(), artifact.getArtifactId(),
artifact.getVersion(), artifact.getClassifier(),
artifact.getType() );
ArchivaArtifact queriedArtifact =
artifactDao.getArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
artifact.getClassifier(), artifact.getType() );
artifactDao.deleteArtifact( queriedArtifact );
// TODO [MRM-37]: re-run the database consumers to clean up
}
private void updateIndices( List<LuceneRepositoryContentRecord> fileContentRecords,
List<LuceneRepositoryContentRecord> hashcodeRecords,
List<LuceneRepositoryContentRecord> bytecodeRecords )
throws RepositoryIndexException
{
RepositoryContentIndex index = indices.get( "filecontent" );
index.deleteRecords( fileContentRecords );
index = indices.get( "hashcodes" );
index.deleteRecords( hashcodeRecords );
index = indices.get( "bytecode" );
index.deleteRecords( bytecodeRecords );
}
}
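The purgeSupportFiles( artifactFile ) call in the hunk above removes the artifact's companion files, which the javadoc in this file describes as things like ".sha1", ".md5" and ".asc". The body of that method lies outside the hunks shown here, so the following is only a minimal, hypothetical sketch of that behaviour (SupportFilePurgeSketch and its method name are illustrative, not the committed code), using the java.io.FilenameFilter import this file already declares.

import java.io.File;
import java.io.FilenameFilter;

class SupportFilePurgeSketch
{
    /** Delete siblings that extend the artifact's filename, e.g. foo-1.0.jar.sha1 next to foo-1.0.jar. */
    static void deleteSupportFiles( File artifactFile )
    {
        final String prefix = artifactFile.getName();
        File dir = artifactFile.getParentFile();
        if ( dir == null )
        {
            return;
        }

        File[] supportFiles = dir.listFiles( new FilenameFilter()
        {
            public boolean accept( File parent, String name )
            {
                // foo-1.0.jar.sha1, foo-1.0.jar.md5, foo-1.0.jar.asc, ... but not the artifact itself
                return name.startsWith( prefix ) && !name.equals( prefix );
            }
        } );

        if ( supportFiles != null )
        {
            for ( File supportFile : supportFiles )
            {
                supportFile.delete();
            }
        }
    }
}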


@@ -22,6 +22,7 @@ package org.apache.maven.archiva.consumers.core.repository;
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.ProjectReference;
import org.apache.maven.archiva.model.VersionedReference;
@@ -36,6 +37,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
* <p>
@@ -68,9 +70,9 @@ public class CleanupReleasedSnapshotsRepositoryPurge
private MetadataTools metadataTools;
public CleanupReleasedSnapshotsRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
MetadataTools metadataTools )
MetadataTools metadataTools, Map<String, RepositoryContentIndex> indices )
{
super( repository, artifactDao );
super( repository, artifactDao, indices );
this.metadataTools = metadataTools;
}


@@ -22,6 +22,7 @@ package org.apache.maven.archiva.consumers.core.repository;
import org.apache.commons.lang.time.DateUtils;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.repository.ContentNotFoundException;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
@@ -32,6 +33,7 @@ import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
@@ -48,9 +50,9 @@ public class DaysOldRepositoryPurge
private int daysOlder;
public DaysOldRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
int daysOlder )
int daysOlder, Map<String, RepositoryContentIndex> indices )
{
super( repository, artifactDao );
super( repository, artifactDao, indices );
this.daysOlder = daysOlder;
timestampParser = new SimpleDateFormat( "yyyyMMdd.HHmmss" );
timestampParser.setTimeZone( DateUtils.UTC_TIME_ZONE );


@@ -26,6 +26,8 @@ import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryException;
@@ -37,7 +39,9 @@ import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Consumer for removing old snapshots in the repository based on the criteria
@@ -98,6 +102,11 @@ public class RepositoryPurgeConsumer
private RepositoryPurge cleanUp;
private boolean deleteReleasedSnapshots;
/**
* @plexus.requirement role-hint="lucene"
*/
private RepositoryContentIndexFactory indexFactory;
public String getId()
{
@@ -129,22 +138,27 @@ public class RepositoryPurgeConsumer
{
try
{
Map<String, RepositoryContentIndex> indices = new HashMap<String, RepositoryContentIndex>();
indices.put( "bytecode", indexFactory.createBytecodeIndex( repository ) );
indices.put( "hashcodes", indexFactory.createHashcodeIndex( repository ) );
indices.put( "filecontent", indexFactory.createFileContentIndex( repository ) );
ManagedRepositoryContent repositoryContent = repositoryFactory.getManagedRepositoryContent( repository
.getId() );
if ( repository.getDaysOlder() != 0 )
{
repoPurge = new DaysOldRepositoryPurge( repositoryContent, dao.getArtifactDAO(), repository
.getDaysOlder() );
.getDaysOlder(), indices );
}
else
{
repoPurge = new RetentionCountRepositoryPurge( repositoryContent, dao.getArtifactDAO(), repository
.getRetentionCount() );
.getRetentionCount(), indices );
}
cleanUp = new CleanupReleasedSnapshotsRepositoryPurge( repositoryContent, dao.getArtifactDAO(),
metadataTools );
metadataTools, indices );
deleteReleasedSnapshots = repository.isDeleteReleasedSnapshots();
}


@@ -22,6 +22,7 @@ package org.apache.maven.archiva.consumers.core.repository;
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.VersionedReference;
import org.apache.maven.archiva.repository.ContentNotFoundException;
@@ -32,6 +33,7 @@ import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
@@ -45,9 +47,9 @@ public class RetentionCountRepositoryPurge
private int retentionCount;
public RetentionCountRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
int retentionCount )
int retentionCount, Map<String, RepositoryContentIndex> indices )
{
super( repository, artifactDao );
super( repository, artifactDao, indices );
this.retentionCount = retentionCount;
}


@@ -20,28 +20,37 @@ package org.apache.maven.archiva.consumers.core.repository;
*/
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.repository.metadata.MetadataTools;
import org.custommonkey.xmlunit.XMLAssert;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
*/
public class CleanupReleasedSnapshotsRepositoryPurgeTest
extends AbstractRepositoryPurgeTest
{
{
protected void setUp()
throws Exception
{
super.setUp();
Map<String, RepositoryContentIndex> map = new HashMap<String, RepositoryContentIndex>();
map.put( "filecontent", new LuceneRepositoryContentIndexStub() );
map.put( "hashcodes", new LuceneRepositoryContentIndexStub() );
map.put( "bytecode", new LuceneRepositoryContentIndexStub() );
MetadataTools metadataTools = (MetadataTools) lookup( MetadataTools.class );
repoPurge = new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), dao, metadataTools );
repoPurge = new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), dao, metadataTools, map );
}
public void testReleasedSnapshots()
@@ -50,6 +59,7 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
populateReleasedSnapshotsTest();
String repoRoot = prepareTestRepo();
repoPurge.process( PATH_TO_RELEASED_SNAPSHOT );


@@ -21,7 +21,12 @@ package org.apache.maven.archiva.consumers.core.repository;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
@@ -35,8 +40,13 @@ public class DaysOldRepositoryPurgeTest
{
super.setUp();
Map<String, RepositoryContentIndex> map = new HashMap<String, RepositoryContentIndex>();
map.put( "filecontent", new LuceneRepositoryContentIndexStub() );
map.put( "hashcodes", new LuceneRepositoryContentIndexStub() );
map.put( "bytecode", new LuceneRepositoryContentIndexStub() );
repoPurge =
new DaysOldRepositoryPurge( getRepository(), dao, getRepoConfiguration().getDaysOlder() );
new DaysOldRepositoryPurge( getRepository(), dao, getRepoConfiguration().getDaysOlder(), map );
}
private void setLastModified( String dirPath )


@@ -20,7 +20,12 @@ package org.apache.maven.archiva.consumers.core.repository;
*/
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
/**
* Test RetentionCountRepositoryPurge
@@ -36,8 +41,13 @@ public class RetentionCountRepositoryPurgeTest
{
super.setUp();
Map<String, RepositoryContentIndex> map = new HashMap<String, RepositoryContentIndex>();
map.put( "filecontent", new LuceneRepositoryContentIndexStub() );
map.put( "hashcodes", new LuceneRepositoryContentIndexStub() );
map.put( "bytecode", new LuceneRepositoryContentIndexStub() );
repoPurge = new RetentionCountRepositoryPurge( getRepository(), dao,
getRepoConfiguration().getRetentionCount() );
getRepoConfiguration().getRetentionCount(), map );
}
/**


@@ -0,0 +1,54 @@
package org.apache.maven.archiva.consumers.core.repository.stubs;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
/**
* LuceneRepositoryContentIndexFactoryStub
*
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @version
*/
public class LuceneRepositoryContentIndexFactoryStub
implements RepositoryContentIndexFactory
{
public RepositoryContentIndex createBytecodeIndex( ManagedRepositoryConfiguration repository )
{
// TODO Auto-generated method stub
return new LuceneRepositoryContentIndexStub();
}
public RepositoryContentIndex createFileContentIndex( ManagedRepositoryConfiguration repository )
{
// TODO Auto-generated method stub
return new LuceneRepositoryContentIndexStub();
}
public RepositoryContentIndex createHashcodeIndex( ManagedRepositoryConfiguration repository )
{
// TODO Auto-generated method stub
return new LuceneRepositoryContentIndexStub();
}
}


@@ -0,0 +1,130 @@
package org.apache.maven.archiva.consumers.core.repository.stubs;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.util.Collection;
import junit.framework.Assert;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Searchable;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @version
*/
public class LuceneRepositoryContentIndexStub
implements RepositoryContentIndex
{
public void deleteRecords( Collection records )
throws RepositoryIndexException
{
// TODO Auto-generated method stub
Assert.assertEquals( 2, records.size() );
}
public boolean exists()
throws RepositoryIndexException
{
// TODO Auto-generated method stub
return false;
}
public Collection getAllRecordKeys()
throws RepositoryIndexException
{
// TODO Auto-generated method stub
return null;
}
public Analyzer getAnalyzer()
{
// TODO Auto-generated method stub
return null;
}
public LuceneEntryConverter getEntryConverter()
{
// TODO Auto-generated method stub
return null;
}
public String getId()
{
// TODO Auto-generated method stub
return null;
}
public File getIndexDirectory()
{
// TODO Auto-generated method stub
return null;
}
public QueryParser getQueryParser()
{
// TODO Auto-generated method stub
return null;
}
public ManagedRepositoryConfiguration getRepository()
{
// TODO Auto-generated method stub
return null;
}
public Searchable getSearchable()
throws RepositoryIndexSearchException
{
// TODO Auto-generated method stub
return null;
}
public void indexRecords( Collection records )
throws RepositoryIndexException
{
// TODO Auto-generated method stub
}
public void modifyRecord( LuceneRepositoryContentRecord record )
throws RepositoryIndexException
{
// TODO Auto-generated method stub
}
public void modifyRecords( Collection records )
throws RepositoryIndexException
{
// TODO Auto-generated method stub
}
}


@@ -45,6 +45,11 @@
<requirement>
<role>org.apache.maven.archiva.configuration.FileTypes</role>
</requirement>
<requirement>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
<field-name>indexFactory</field-name>
</requirement>
</requirements>
<configuration>
<id>repository-purge</id>
@@ -122,6 +127,11 @@
<requirement>
<role>org.apache.maven.archiva.configuration.FileTypes</role>
</requirement>
<requirement>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
<field-name>indexFactory</field-name>
</requirement>
</requirements>
<configuration>
<id>repository-purge</id>
@@ -266,6 +276,13 @@
</otherProperties>
</configuration>
</component>
<!-- LuceneRepositoryIndexFactory -->
<component>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
<implementation>org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexFactoryStub</implementation>
</component>
</components>
</component-set>


@@ -84,143 +84,156 @@ public class LuceneRepositoryContentIndex
public void modifyRecords( Collection records )
throws RepositoryIndexException
{
IndexModifier indexModifier = null;
try
synchronized( repository )
{
indexModifier = new IndexModifier( indexLocation, indexHandlers.getAnalyzer(), !exists() );
indexModifier.setMaxFieldLength( MAX_FIELD_LENGTH );
for ( Iterator i = records.iterator(); i.hasNext(); )
IndexModifier indexModifier = null;
try
{
LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) i.next();
if ( record != null )
indexModifier = new IndexModifier( indexLocation, indexHandlers.getAnalyzer(), !exists() );
indexModifier.setMaxFieldLength( MAX_FIELD_LENGTH );
for ( Iterator i = records.iterator(); i.hasNext(); )
{
Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
indexModifier.deleteDocuments( term );
Document document = indexHandlers.getConverter().convert( record );
indexModifier.addDocument( document );
LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) i.next();
if ( record != null )
{
Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
indexModifier.deleteDocuments( term );
Document document = indexHandlers.getConverter().convert( record );
indexModifier.addDocument( document );
}
}
indexModifier.optimize();
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
}
finally
{
closeQuietly( indexModifier );
}
indexModifier.optimize();
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
}
finally
{
closeQuietly( indexModifier );
}
}
public void modifyRecord( LuceneRepositoryContentRecord record )
throws RepositoryIndexException
{
IndexModifier indexModifier = null;
try
synchronized( repository )
{
indexModifier = new IndexModifier( indexLocation, indexHandlers.getAnalyzer(), !exists() );
indexModifier.setMaxFieldLength( MAX_FIELD_LENGTH );
if ( record != null )
IndexModifier indexModifier = null;
try
{
Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
indexModifier.deleteDocuments( term );
Document document = indexHandlers.getConverter().convert( record );
indexModifier.addDocument( document );
indexModifier = new IndexModifier( indexLocation, indexHandlers.getAnalyzer(), !exists() );
indexModifier.setMaxFieldLength( MAX_FIELD_LENGTH );
if ( record != null )
{
Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
indexModifier.deleteDocuments( term );
Document document = indexHandlers.getConverter().convert( record );
indexModifier.addDocument( document );
}
indexModifier.optimize();
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
}
finally
{
closeQuietly( indexModifier );
}
indexModifier.optimize();
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
}
finally
{
closeQuietly( indexModifier );
}
}
private void addRecords( Collection records )
throws RepositoryIndexException
{
IndexWriter indexWriter;
try
synchronized( repository )
{
indexWriter = new IndexWriter( indexLocation, indexHandlers.getAnalyzer(), !exists() );
indexWriter.setMaxFieldLength( MAX_FIELD_LENGTH );
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Unable to open index", e );
}
try
{
for ( Iterator i = records.iterator(); i.hasNext(); )
IndexWriter indexWriter;
try
{
LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) i.next();
if ( record != null )
{
Document document = indexHandlers.getConverter().convert( record );
indexWriter.addDocument( document );
}
indexWriter = new IndexWriter( indexLocation, indexHandlers.getAnalyzer(), !exists() );
indexWriter.setMaxFieldLength( MAX_FIELD_LENGTH );
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Unable to open index", e );
}
try
{
for ( Iterator i = records.iterator(); i.hasNext(); )
{
LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) i.next();
if ( record != null )
{
Document document = indexHandlers.getConverter().convert( record );
indexWriter.addDocument( document );
}
}
indexWriter.optimize();
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Failed to add an index document", e );
}
finally
{
closeQuietly( indexWriter );
}
indexWriter.optimize();
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Failed to add an index document", e );
}
finally
{
closeQuietly( indexWriter );
}
}
public void deleteRecords( Collection records )
throws RepositoryIndexException
{
if ( exists() )
synchronized( repository )
{
IndexReader indexReader = null;
try
if ( exists() )
{
indexReader = IndexReader.open( indexLocation );
for ( Iterator i = records.iterator(); i.hasNext(); )
IndexReader indexReader = null;
try
{
LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) i.next();
if ( record != null )
indexReader = IndexReader.open( indexLocation );
for ( Iterator i = records.iterator(); i.hasNext(); )
{
Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
indexReader.deleteDocuments( term );
LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) i.next();
if ( record != null )
{
Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
indexReader.deleteDocuments( term );
}
}
}
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
}
finally
{
closeQuietly( indexReader );
catch ( IOException e )
{
throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
}
finally
{
closeQuietly( indexReader );
}
}
}
}
public Collection getAllRecordKeys()
throws RepositoryIndexException
{
@@ -230,38 +243,41 @@ public class LuceneRepositoryContentIndex
private List getAllFieldValues( String fieldName )
throws RepositoryIndexException
{
List keys = new ArrayList();
if ( exists() )
synchronized( repository )
{
IndexReader indexReader = null;
TermEnum terms = null;
try
List keys = new ArrayList();
if ( exists() )
{
indexReader = IndexReader.open( indexLocation );
terms = indexReader.terms( new Term( fieldName, "" ) );
while ( fieldName.equals( terms.term().field() ) )
IndexReader indexReader = null;
TermEnum terms = null;
try
{
keys.add( terms.term().text() );
if ( !terms.next() )
indexReader = IndexReader.open( indexLocation );
terms = indexReader.terms( new Term( fieldName, "" ) );
while ( fieldName.equals( terms.term().field() ) )
{
break;
keys.add( terms.term().text() );
if ( !terms.next() )
{
break;
}
}
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
}
finally
{
closeQuietly( indexReader );
closeQuietly( terms );
}
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
}
finally
{
closeQuietly( indexReader );
closeQuietly( terms );
}
return keys;
}
return keys;
}
public Searchable getSearchable()