mirror of https://github.com/apache/archiva.git

* Adding CrossRepositorySearch component.

git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/branches/archiva-jpox-database-refactor@531746 13f79535-47bb-0310-9956-ffa450edef68

parent: d9e4b87d60
commit: c5839cb7f1
@@ -31,7 +31,7 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.maven.archiva</groupId>
-      <artifactId>archiva-configuration</artifactId>
+      <artifactId>archiva-repository-layer</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.maven.archiva</groupId>
@@ -62,6 +62,12 @@
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
     </dependency>
     <dependency>
       <groupId>easymock</groupId>
       <artifactId>easymock</artifactId>
       <version>1.2_Java1.3</version>
       <scope>test</scope>
     </dependency>
   </dependencies>
   <build>
     <plugins>
@@ -19,6 +19,7 @@ package org.apache.maven.archiva.indexer;
 * under the License.
 */

import org.apache.lucene.queryParser.QueryParser;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.indexer.query.Query;

@@ -113,4 +114,18 @@ public interface RepositoryContentIndex
     * @return the index directory.
     */
    File getIndexDirectory();

    /**
     * Get the {@link QueryParser} appropriate for searches within this index.
     *
     * @return the query parser;
     */
    QueryParser getQueryParser();

    /**
     * Get the id of index.
     *
     * @return the id of index.
     */
    String getId();
}
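The two methods added to RepositoryContentIndex above are what the new cross-repository search builds on: a caller asks the index for its own QueryParser (wired to the right fields and analyzer), turns a raw term into a LuceneQuery, and hands it to the existing search method. A minimal sketch of that flow, assuming an already-created index instance (variable names are illustrative; the calls mirror DefaultCrossRepositorySearch further down in this commit):

    RepositoryContentIndex index = ...; // e.g. obtained from a RepositoryContentIndexFactory
    QueryParser parser = index.getQueryParser();
    LuceneQuery query = new LuceneQuery( parser.parse( "commons-lang" ) ); // parse() may throw ParseException
    List hits = index.search( query );                                     // may throw RepositoryIndexSearchException
    String indexId = index.getId();                                        // "bytecode", "filecontent" or "hashcodes"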
@@ -20,6 +20,8 @@ package org.apache.maven.archiva.indexer.bytecode;
 */

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneIndexHandlers;

@@ -29,16 +31,33 @@ import org.apache.maven.archiva.indexer.lucene.LuceneIndexHandlers;
 * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
 * @version $Id$
 */
-public class BytecodeHandlers implements LuceneIndexHandlers
+public class BytecodeHandlers
+    implements LuceneIndexHandlers
{
    private BytecodeAnalyzer analyzer;

    private BytecodeEntryConverter converter;

    private QueryParser queryParser;

    public BytecodeHandlers()
    {
        converter = new BytecodeEntryConverter();
        analyzer = new BytecodeAnalyzer();
        queryParser = new MultiFieldQueryParser( new String[] {
            BytecodeKeys.GROUPID,
            BytecodeKeys.ARTIFACTID,
            BytecodeKeys.VERSION,
            BytecodeKeys.CLASSIFIER,
            BytecodeKeys.TYPE,
            BytecodeKeys.CLASSES,
            BytecodeKeys.FILES,
            BytecodeKeys.METHODS }, analyzer );
    }

    public String getId()
    {
        return BytecodeKeys.ID;
    }

    public Analyzer getAnalyzer()

@@ -50,4 +69,9 @@ public class BytecodeHandlers implements LuceneIndexHandlers
    {
        return converter;
    }

    public QueryParser getQueryParser()
    {
        return queryParser;
    }
}
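A note on the constructor above: by using Lucene's MultiFieldQueryParser, a single user-supplied term is expanded across every listed bytecode field, so one query can hit on group id, artifact id, class names, file entries or method signatures at once. A small sketch of how a term would be parsed through these handlers (the helper method is hypothetical, not part of the commit):

    // Parses a search term across all bytecode fields using the handlers' own parser.
    public org.apache.lucene.search.Query parseBytecodeTerm( String term )
        throws org.apache.lucene.queryParser.ParseException
    {
        BytecodeHandlers handlers = new BytecodeHandlers();
        // The MultiFieldQueryParser ORs the analyzed term across GROUPID, ARTIFACTID,
        // VERSION, CLASSIFIER, TYPE, CLASSES, FILES and METHODS.
        return handlers.getQueryParser().parse( term );
    }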
@@ -27,8 +27,11 @@ import org.apache.maven.archiva.indexer.ArtifactKeys;
 * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
 * @version $Id$
 */
-public class BytecodeKeys extends ArtifactKeys
+public class BytecodeKeys
+    extends ArtifactKeys
{
    public static final String ID = "bytecode";

    public static final String CLASSES = "classes";

    public static final String METHODS = "methods";
@@ -20,6 +20,8 @@ package org.apache.maven.archiva.indexer.filecontent;
 */

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneIndexHandlers;

@@ -29,10 +31,28 @@ import org.apache.maven.archiva.indexer.lucene.LuceneIndexHandlers;
 * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
 * @version $Id$
 */
-public class FileContentHandlers implements LuceneIndexHandlers
+public class FileContentHandlers
+    implements LuceneIndexHandlers
{
    private FileContentAnalyzer analyzer;

    private FileContentConverter converter;

    private QueryParser queryParser;

    public FileContentHandlers()
    {
        analyzer = new FileContentAnalyzer();
        converter = new FileContentConverter();
        queryParser = new MultiFieldQueryParser( new String[] { FileContentKeys.FILENAME, FileContentKeys.CONTENT },
                                                 analyzer );
    }

    public String getId()
    {
        return FileContentKeys.ID;
    }

    public Analyzer getAnalyzer()
    {
        return analyzer;

@@ -40,6 +60,11 @@ public class FileContentHandlers implements LuceneIndexHandlers

    public LuceneEntryConverter getConverter()
    {
-        return null;
+        return converter;
    }

    public QueryParser getQueryParser()
    {
        return queryParser;
    }
}
@@ -27,6 +27,8 @@ package org.apache.maven.archiva.indexer.filecontent;
 */
public class FileContentKeys
{
    public static final String ID = "filecontent";

    public static final String FILENAME = "filename";

    public static final String CONTENT = "content";
@@ -20,6 +20,8 @@ package org.apache.maven.archiva.indexer.hashcodes;
 */

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneIndexHandlers;

@@ -29,16 +31,32 @@ import org.apache.maven.archiva.indexer.lucene.LuceneIndexHandlers;
 * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
 * @version $Id$
 */
-public class HashcodesHandlers implements LuceneIndexHandlers
+public class HashcodesHandlers
+    implements LuceneIndexHandlers
{
    private HashcodesAnalyzer analyzer;

    private HashcodesEntryConverter converter;

    private QueryParser queryParser;

    public HashcodesHandlers()
    {
        converter = new HashcodesEntryConverter();
        analyzer = new HashcodesAnalyzer();
        queryParser = new MultiFieldQueryParser( new String[] {
            HashcodesKeys.GROUPID,
            HashcodesKeys.ARTIFACTID,
            HashcodesKeys.VERSION,
            HashcodesKeys.CLASSIFIER,
            HashcodesKeys.TYPE,
            HashcodesKeys.MD5,
            HashcodesKeys.SHA1 }, analyzer );
    }

    public String getId()
    {
        return HashcodesKeys.ID;
    }

    public Analyzer getAnalyzer()

@@ -50,4 +68,9 @@ public class HashcodesHandlers implements LuceneIndexHandlers
    {
        return converter;
    }

    public QueryParser getQueryParser()
    {
        return queryParser;
    }
}
@@ -29,6 +29,8 @@ import org.apache.maven.archiva.indexer.ArtifactKeys;
 */
public class HashcodesKeys extends ArtifactKeys
{
    public static final String ID = "hashcodes";

    public static final String MD5 = "md5";

    public static final String SHA1 = "sha1";
@@ -20,6 +20,7 @@ package org.apache.maven.archiva.indexer.lucene;
 */

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.QueryParser;

/**
 * The important bits and pieces for handling a specific lucene index

@@ -42,4 +43,18 @@ public interface LuceneIndexHandlers
     * @return the analzer to use.
     */
    public Analyzer getAnalyzer();

    /**
     * Get the {@link QueryParser} appropriate for searches within this index.
     *
     * @return the query parser.
     */
    public QueryParser getQueryParser();

    /**
     * Get the id of the index handler.
     *
     * @return the id of the index handler.
     */
    public String getId();
}
@ -25,6 +25,7 @@ import org.apache.lucene.index.IndexReader;
|
|||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.index.TermEnum;
|
||||
import org.apache.lucene.queryParser.QueryParser;
|
||||
import org.apache.lucene.search.Hits;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.MatchAllDocsQuery;
|
||||
|
@ -46,7 +47,8 @@ import java.util.List;
|
|||
*
|
||||
* @author <a href="mailto:brett@apache.org">Brett Porter</a>
|
||||
*/
|
||||
public class LuceneRepositoryContentIndex implements RepositoryContentIndex
|
||||
public class LuceneRepositoryContentIndex
|
||||
implements RepositoryContentIndex
|
||||
{
|
||||
/**
|
||||
* The max field length for a field in a document.
|
||||
|
@ -69,14 +71,16 @@ public class LuceneRepositoryContentIndex implements RepositoryContentIndex
|
|||
this.indexHandlers = handlers;
|
||||
}
|
||||
|
||||
public void indexRecords( Collection records ) throws RepositoryIndexException
|
||||
public void indexRecords( Collection records )
|
||||
throws RepositoryIndexException
|
||||
{
|
||||
deleteRecords( records );
|
||||
|
||||
addRecords( records );
|
||||
}
|
||||
|
||||
public void modifyRecords( Collection records ) throws RepositoryIndexException
|
||||
public void modifyRecords( Collection records )
|
||||
throws RepositoryIndexException
|
||||
{
|
||||
IndexModifier indexModifier = null;
|
||||
try
|
||||
|
@ -111,7 +115,8 @@ public class LuceneRepositoryContentIndex implements RepositoryContentIndex
|
|||
}
|
||||
}
|
||||
|
||||
public void modifyRecord( LuceneRepositoryContentRecord record ) throws RepositoryIndexException
|
||||
public void modifyRecord( LuceneRepositoryContentRecord record )
|
||||
throws RepositoryIndexException
|
||||
{
|
||||
IndexModifier indexModifier = null;
|
||||
try
|
||||
|
@ -141,7 +146,8 @@ public class LuceneRepositoryContentIndex implements RepositoryContentIndex
|
|||
}
|
||||
}
|
||||
|
||||
private void addRecords( Collection records ) throws RepositoryIndexException
|
||||
private void addRecords( Collection records )
|
||||
throws RepositoryIndexException
|
||||
{
|
||||
IndexWriter indexWriter;
|
||||
try
|
||||
|
@ -180,7 +186,8 @@ public class LuceneRepositoryContentIndex implements RepositoryContentIndex
|
|||
}
|
||||
}
|
||||
|
||||
public void deleteRecords( Collection records ) throws RepositoryIndexException
|
||||
public void deleteRecords( Collection records )
|
||||
throws RepositoryIndexException
|
||||
{
|
||||
if ( exists() )
|
||||
{
|
||||
|
@ -212,17 +219,20 @@ public class LuceneRepositoryContentIndex implements RepositoryContentIndex
|
|||
}
|
||||
}
|
||||
|
||||
public Collection getAllRecords() throws RepositoryIndexSearchException
|
||||
public Collection getAllRecords()
|
||||
throws RepositoryIndexSearchException
|
||||
{
|
||||
return search( new LuceneQuery( new MatchAllDocsQuery() ) );
|
||||
}
|
||||
|
||||
public Collection getAllRecordKeys() throws RepositoryIndexException
|
||||
public Collection getAllRecordKeys()
|
||||
throws RepositoryIndexException
|
||||
{
|
||||
return getAllFieldValues( LuceneDocumentMaker.PRIMARY_KEY );
|
||||
}
|
||||
|
||||
private List getAllFieldValues( String fieldName ) throws RepositoryIndexException
|
||||
private List getAllFieldValues( String fieldName )
|
||||
throws RepositoryIndexException
|
||||
{
|
||||
List keys = new ArrayList();
|
||||
|
||||
|
@ -316,7 +326,8 @@ public class LuceneRepositoryContentIndex implements RepositoryContentIndex
|
|||
// return new ArrayList( results );
|
||||
// }
|
||||
|
||||
public boolean exists() throws RepositoryIndexException
|
||||
public boolean exists()
|
||||
throws RepositoryIndexException
|
||||
{
|
||||
if ( IndexReader.indexExists( indexLocation ) )
|
||||
{
|
||||
|
@ -343,7 +354,8 @@ public class LuceneRepositoryContentIndex implements RepositoryContentIndex
|
|||
}
|
||||
}
|
||||
|
||||
public List search( Query query ) throws RepositoryIndexSearchException
|
||||
public List search( Query query )
|
||||
throws RepositoryIndexSearchException
|
||||
{
|
||||
LuceneQuery lQuery = (LuceneQuery) query;
|
||||
|
||||
|
@ -386,6 +398,11 @@ public class LuceneRepositoryContentIndex implements RepositoryContentIndex
|
|||
return records;
|
||||
}
|
||||
|
||||
public QueryParser getQueryParser()
|
||||
{
|
||||
return this.indexHandlers.getQueryParser();
|
||||
}
|
||||
|
||||
private static void closeQuietly( IndexSearcher searcher )
|
||||
{
|
||||
try
|
||||
|
@ -401,7 +418,8 @@ public class LuceneRepositoryContentIndex implements RepositoryContentIndex
|
|||
}
|
||||
}
|
||||
|
||||
private static void closeQuietly( TermEnum terms ) throws RepositoryIndexException
|
||||
private static void closeQuietly( TermEnum terms )
|
||||
throws RepositoryIndexException
|
||||
{
|
||||
if ( terms != null )
|
||||
{
|
||||
|
@ -416,7 +434,8 @@ public class LuceneRepositoryContentIndex implements RepositoryContentIndex
|
|||
}
|
||||
}
|
||||
|
||||
private static void closeQuietly( IndexWriter indexWriter ) throws RepositoryIndexException
|
||||
private static void closeQuietly( IndexWriter indexWriter )
|
||||
throws RepositoryIndexException
|
||||
{
|
||||
try
|
||||
{
|
||||
|
@ -466,4 +485,9 @@ public class LuceneRepositoryContentIndex implements RepositoryContentIndex
|
|||
{
|
||||
return this.indexLocation;
|
||||
}
|
||||
|
||||
public String getId()
|
||||
{
|
||||
return this.indexHandlers.getId();
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,27 @@
package org.apache.maven.archiva.indexer.search;

/**
 * Search across repositories for specified term.
 *
 * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
 * @version $Id$
 * @todo add security to not perform search in repositories you don't have access to.
 */
public interface CrossRepositorySearch
{
    /**
     * Search for the specific term across all repositories.
     *
     * @param term the term to search for.
     * @return the results.
     */
    public SearchResults searchForTerm( String term );

    /**
     * Search for the specific MD5 string across all repositories.
     *
     * @param md5 the md5 string to search for.
     * @return the results.
     */
    public SearchResults searchForMd5( String md5 );
}
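For orientation, this is roughly how a consumer is expected to use the new interface once Plexus injects the "default" implementation declared later in this commit (the field and method names below are illustrative; the calls themselves come from this commit's SearchAction and tests):

    /**
     * @plexus.requirement role-hint="default"
     */
    private CrossRepositorySearch crossRepoSearch;

    public void printSummary( String term )
    {
        SearchResults results = crossRepoSearch.searchForTerm( term );

        if ( results.isEmpty() )
        {
            System.out.println( "No hits for [" + term + "]" );
            return;
        }

        // Hits come back grouped by the index that produced them.
        System.out.println( "bytecode hits: " + results.getBytecodeHits().size() );
        System.out.println( "content hits:  " + results.getContentHits().size() );
        System.out.println( "hashcode hits: " + results.getHashcodeHits().size() );
    }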
@@ -0,0 +1,270 @@
package org.apache.maven.archiva.indexer.search;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.bytecode.BytecodeKeys;
import org.apache.maven.archiva.indexer.filecontent.FileContentKeys;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesKeys;
import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.repository.ArchivaConfigurationAdaptor;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

/**
 * DefaultCrossRepositorySearch
 *
 * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
 * @version $Id$
 * @plexus.component role="org.apache.maven.archiva.indexer.search.CrossRepositorySearch" role-hint="default"
 */
public class DefaultCrossRepositorySearch
    extends AbstractLogEnabled
    implements CrossRepositorySearch, RegistryListener, Initializable
{
    private static final int UNKNOWN = 0;

    private static final int FILE_CONTENT = 1;

    private static final int BYTECODE = 2;

    private static final int HASHCODE = 3;

    /**
     * @plexus.requirement role-hint="lucene"
     */
    private RepositoryContentIndexFactory indexFactory;

    /**
     * @plexus.requirement
     */
    private ArchivaConfiguration configuration;

    private Map repositoryMap = new HashMap();

    public SearchResults searchForMd5( String md5 )
    {
        // TODO Auto-generated method stub
        return null;
    }

    public SearchResults searchForTerm( String term )
    {
        List indexes = new ArrayList();

        indexes.addAll( getBytecodeIndexes() );
        indexes.addAll( getFileContentIndexes() );
        indexes.addAll( getHashcodeIndexes() );

        SearchResults results = new SearchResults();

        results.getRepositories().addAll( this.repositoryMap.values() );

        Iterator it = indexes.iterator();
        while ( it.hasNext() )
        {
            RepositoryContentIndex index = (RepositoryContentIndex) it.next();

            try
            {
                QueryParser parser = index.getQueryParser();
                LuceneQuery query = new LuceneQuery( parser.parse( term ) );
                List hits = index.search( query );

                switch ( getIndexId( index ) )
                {
                    case BYTECODE:
                        results.getBytecodeHits().addAll( hits );
                        break;
                    case FILE_CONTENT:
                        results.getContentHits().addAll( hits );
                        break;
                    case HASHCODE:
                        results.getHashcodeHits().addAll( hits );
                        break;
                }
            }
            catch ( ParseException e )
            {
                getLogger().warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e );
            }
            catch ( RepositoryIndexSearchException e )
            {
                getLogger().warn( "Unable to search index [" + index + "] for term [" + term + "]: " + e.getMessage(),
                                  e );
            }
        }

        return results;
    }

    private int getIndexId( RepositoryContentIndex index )
    {
        if ( FileContentKeys.ID.equals( index.getId() ) )
        {
            return FILE_CONTENT;
        }

        if ( BytecodeKeys.ID.equals( index.getId() ) )
        {
            return BYTECODE;
        }

        if ( HashcodesKeys.ID.equals( index.getId() ) )
        {
            return HASHCODE;
        }

        return UNKNOWN;
    }

    public List getBytecodeIndexes()
    {
        List ret = new ArrayList();

        synchronized ( this.repositoryMap )
        {
            Iterator it = this.repositoryMap.values().iterator();
            while ( it.hasNext() )
            {
                ArchivaRepository repo = (ArchivaRepository) it.next();

                if ( !isSearchAllowed( repo ) )
                {
                    continue;
                }

                ret.add( indexFactory.createBytecodeIndex( repo ) );
            }
        }

        return ret;
    }

    public List getFileContentIndexes()
    {
        List ret = new ArrayList();

        synchronized ( this.repositoryMap )
        {
            Iterator it = this.repositoryMap.values().iterator();
            while ( it.hasNext() )
            {
                ArchivaRepository repo = (ArchivaRepository) it.next();

                if ( !isSearchAllowed( repo ) )
                {
                    continue;
                }

                ret.add( indexFactory.createFileContentIndex( repo ) );
            }
        }

        return ret;
    }

    public List getHashcodeIndexes()
    {
        List ret = new ArrayList();

        synchronized ( this.repositoryMap )
        {
            Iterator it = this.repositoryMap.values().iterator();
            while ( it.hasNext() )
            {
                ArchivaRepository repo = (ArchivaRepository) it.next();

                if ( !isSearchAllowed( repo ) )
                {
                    continue;
                }

                ret.add( indexFactory.createHashcodeIndex( repo ) );
            }
        }

        return ret;
    }

    public boolean isSearchAllowed( ArchivaRepository repo )
    {
        // TODO: test if user has permissions to search in this repo.

        return true;
    }

    public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
    {
        if ( ConfigurationNames.isRepositories( propertyName ) )
        {
            initRepositoryMap();
        }
    }

    public void beforeConfigurationChange( Registry registry, String propertyName, Object propertyValue )
    {
        /* Nothing to do here */
    }

    private void initRepositoryMap()
    {
        synchronized ( this.repositoryMap )
        {
            this.repositoryMap.clear();

            Iterator it = configuration.getConfiguration().createRepositoryMap().entrySet().iterator();
            while ( it.hasNext() )
            {
                Map.Entry entry = (Entry) it.next();
                String key = (String) entry.getKey();
                RepositoryConfiguration repoConfig = (RepositoryConfiguration) entry.getValue();
                ArchivaRepository repository = ArchivaConfigurationAdaptor.toArchivaRepository( repoConfig );
                this.repositoryMap.put( key, repository );
            }
        }
    }

    public void initialize()
        throws InitializationException
    {
        initRepositoryMap();
        configuration.addChangeListener( this );
    }
}
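Worth noting in the implementation above: the repository map is built once in initialize() and then kept current by registering the component as a RegistryListener, so repositories added or removed through the configuration are picked up without re-creating the component. In the tests, the MockConfiguration included in this commit drives the same path; a sketch of that interaction inside a PlexusTestCase (the property name and value passed to triggerChange are illustrative only):

    MockConfiguration config = (MockConfiguration) lookup( ArchivaConfiguration.class.getName(), "mock" );

    // Mutate the configuration, then notify listeners the way the real registry would.
    config.getConfiguration().addRepository( newRepoConfig ); // newRepoConfig: a RepositoryConfiguration prepared elsewhere
    config.triggerChange( "repositories", "changed" );        // illustrative property name/value

    // DefaultCrossRepositorySearch.afterConfigurationChange(...) rebuilds its repository map
    // whenever ConfigurationNames.isRepositories( propertyName ) reports a repository change.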
@@ -0,0 +1,90 @@
package org.apache.maven.archiva.indexer.search;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.util.ArrayList;
import java.util.List;

/**
 * SearchResults
 *
 * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
 * @version $Id$
 */
public class SearchResults
{
    private List repositories = new ArrayList();

    private List contentHits = new ArrayList();

    private List bytecodeHits = new ArrayList();

    private List hashcodeHits = new ArrayList();

    public SearchResults()
    {
        /* do nothing */
    }

    public boolean isEmpty()
    {
        return ( bytecodeHits.isEmpty() && hashcodeHits.isEmpty() && contentHits.isEmpty() );
    }

    public List getBytecodeHits()
    {
        return bytecodeHits;
    }

    public List getContentHits()
    {
        return contentHits;
    }

    public List getHashcodeHits()
    {
        return hashcodeHits;
    }

    public List getRepositories()
    {
        return repositories;
    }

    public void setBytecodeHits( List bytecodeHits )
    {
        this.bytecodeHits = bytecodeHits;
    }

    public void setContentHits( List contentHits )
    {
        this.contentHits = contentHits;
    }

    public void setHashcodeHits( List hashcodeHits )
    {
        this.hashcodeHits = hashcodeHits;
    }

    public void setRepositories( List repositories )
    {
        this.repositories = repositories;
    }
}
@ -46,7 +46,8 @@ import java.util.Map;
|
|||
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
|
||||
* @version $Id$
|
||||
*/
|
||||
public abstract class AbstractIndexerTestCase extends PlexusTestCase
|
||||
public abstract class AbstractIndexerTestCase
|
||||
extends PlexusTestCase
|
||||
{
|
||||
protected RepositoryContentIndex index;
|
||||
|
||||
|
@ -72,14 +73,32 @@ public abstract class AbstractIndexerTestCase extends PlexusTestCase
|
|||
|
||||
public abstract LuceneIndexHandlers getIndexHandler();
|
||||
|
||||
protected void setUp() throws Exception
|
||||
protected void setUp()
|
||||
throws Exception
|
||||
{
|
||||
super.setUp();
|
||||
|
||||
RepositoryContentIndexFactory indexFactory =
|
||||
(RepositoryContentIndexFactory) lookup( RepositoryContentIndexFactory.class.getName(), "lucene" );
|
||||
RepositoryContentIndexFactory indexFactory = (RepositoryContentIndexFactory) lookup(
|
||||
RepositoryContentIndexFactory.class
|
||||
.getName(), "lucene" );
|
||||
|
||||
ArchivaRepository repository = createTestIndex( getIndexName() );
|
||||
|
||||
index = createIndex( indexFactory, repository );
|
||||
|
||||
indexHandlers = getIndexHandler();
|
||||
}
|
||||
|
||||
private ArchivaRepository createTestIndex( String indexName )
|
||||
throws Exception, IOException
|
||||
{
|
||||
File repoDir = new File( getBasedir(), "src/test/managed-repository" );
|
||||
File testIndexesDir = new File( getBasedir(), "target/test-indexes" );
|
||||
|
||||
if ( !testIndexesDir.exists() )
|
||||
{
|
||||
testIndexesDir.mkdirs();
|
||||
}
|
||||
|
||||
assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
|
||||
|
||||
|
@ -87,7 +106,7 @@ public abstract class AbstractIndexerTestCase extends PlexusTestCase
|
|||
|
||||
ArchivaRepository repository = new ArchivaRepository( "testDefaultRepo", "Test Default Repository", repoUri );
|
||||
|
||||
File indexLocation = new File( "target/index-" + getIndexName() + "-" + getName() + "/" );
|
||||
File indexLocation = new File( testIndexesDir, "/index-" + indexName + "-" + getName() + "/" );
|
||||
|
||||
MockConfiguration config = (MockConfiguration) lookup( ArchivaConfiguration.class.getName(), "mock" );
|
||||
|
||||
|
@ -103,10 +122,7 @@ public abstract class AbstractIndexerTestCase extends PlexusTestCase
|
|||
}
|
||||
|
||||
config.getConfiguration().addRepository( repoConfig );
|
||||
|
||||
index = createIndex( indexFactory, repository );
|
||||
|
||||
indexHandlers = getIndexHandler();
|
||||
return repository;
|
||||
}
|
||||
|
||||
protected Map getArchivaArtifactDumpMap()
|
||||
|
@ -189,12 +205,14 @@ public abstract class AbstractIndexerTestCase extends PlexusTestCase
|
|||
return artifact;
|
||||
}
|
||||
|
||||
protected void createEmptyIndex() throws IOException
|
||||
protected void createEmptyIndex()
|
||||
throws IOException
|
||||
{
|
||||
createIndex( Collections.EMPTY_LIST );
|
||||
}
|
||||
|
||||
protected void createIndex( List documents ) throws IOException
|
||||
protected void createIndex( List documents )
|
||||
throws IOException
|
||||
{
|
||||
IndexWriter writer = new IndexWriter( index.getIndexDirectory(), indexHandlers.getAnalyzer(), true );
|
||||
for ( Iterator i = documents.iterator(); i.hasNext(); )
|
||||
|
|
|
@ -21,8 +21,14 @@ package org.apache.maven.archiva.indexer;
|
|||
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.maven.archiva.configuration.Configuration;
|
||||
import org.codehaus.plexus.registry.Registry;
|
||||
import org.codehaus.plexus.registry.RegistryException;
|
||||
import org.codehaus.plexus.registry.RegistryListener;
|
||||
import org.easymock.MockControl;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* MockConfiguration
|
||||
|
@ -37,9 +43,21 @@ public class MockConfiguration implements ArchivaConfiguration
|
|||
{
|
||||
private Configuration configuration = new Configuration();
|
||||
|
||||
private List listeners = new ArrayList();
|
||||
|
||||
private MockControl registryControl;
|
||||
|
||||
private Registry registryMock;
|
||||
|
||||
public MockConfiguration()
|
||||
{
|
||||
registryControl = MockControl.createNiceControl( Registry.class );
|
||||
registryMock = (Registry) registryControl.getMock();
|
||||
}
|
||||
|
||||
public void addChangeListener( RegistryListener listener )
|
||||
{
|
||||
/* do nothing */
|
||||
listeners.add( listener );
|
||||
}
|
||||
|
||||
public Configuration getConfiguration()
|
||||
|
@ -47,8 +65,26 @@ public class MockConfiguration implements ArchivaConfiguration
|
|||
return configuration;
|
||||
}
|
||||
|
||||
public void save( Configuration configuration ) throws RegistryException
|
||||
public void save( Configuration configuration )
|
||||
throws RegistryException
|
||||
{
|
||||
/* do nothing */
|
||||
}
|
||||
|
||||
public void triggerChange( String name, String value )
|
||||
{
|
||||
Iterator it = listeners.iterator();
|
||||
while ( it.hasNext() )
|
||||
{
|
||||
RegistryListener listener = (RegistryListener) it.next();
|
||||
try
|
||||
{
|
||||
listener.afterConfigurationChange( registryMock, name, value );
|
||||
}
|
||||
catch ( Exception e )
|
||||
{
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,141 @@
|
|||
package org.apache.maven.archiva.indexer.search;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
|
||||
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecordLoader;
|
||||
import org.apache.maven.archiva.model.ArchivaArtifact;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
|
||||
import junit.framework.AssertionFailedError;
|
||||
|
||||
/**
|
||||
* BytecodeIndexPopulator
|
||||
*
|
||||
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
|
||||
* @version $Id$
|
||||
*/
|
||||
public class BytecodeIndexPopulator
|
||||
implements IndexPopulator
|
||||
{
|
||||
|
||||
public Map getObjectMap()
|
||||
{
|
||||
|
||||
Map dumps = new HashMap();
|
||||
|
||||
// archiva-common-1.0.jar.txt
|
||||
dumps.put( "archiva-common", createArchivaArtifact( "org.apache.maven.archiva", "archiva-common", "1.0", "",
|
||||
"jar" ) );
|
||||
|
||||
// continuum-webapp-1.0.3-SNAPSHOT.war.txt
|
||||
dumps.put( "continuum-webapp", createArchivaArtifact( "org.apache.maven.continuum", "continuum-webapp",
|
||||
"1.0.3-SNAPSHOT", "", "war" ) );
|
||||
|
||||
// daytrader-ear-1.1.ear.txt
|
||||
dumps.put( "daytrader-ear", createArchivaArtifact( "org.apache.geronimo", "daytrader-ear", "1.1", "", "ear" ) );
|
||||
|
||||
// maven-archetype-simple-1.0-alpha-4.jar.txt
|
||||
dumps.put( "maven-archetype-simple", createArchivaArtifact( "org.apache.maven", "maven-archetype-simple",
|
||||
"1.0-alpha-4", "", "maven-archetype" ) );
|
||||
|
||||
// maven-help-plugin-2.0.2-20070119.121239-2.jar.txt
|
||||
dumps.put( "maven-help-plugin", createArchivaArtifact( "org.apache.maven.plugins", "maven-help-plugin",
|
||||
"2.0.2-20070119.121239-2", "", "maven-plugin" ) );
|
||||
|
||||
// redback-authorization-open-1.0-alpha-1-SNAPSHOT.jar.txt
|
||||
dumps.put( "redback-authorization-open", createArchivaArtifact( "org.codehaus.plexus.redback",
|
||||
"redback-authorization-open",
|
||||
"1.0-alpha-1-SNAPSHOT", "", "jar" ) );
|
||||
|
||||
// testng-5.1-jdk15.jar.txt
|
||||
dumps.put( "testng", createArchivaArtifact( "org.testng", "testng", "5.1", "jdk15", "jar" ) );
|
||||
|
||||
// wagon-provider-api-1.0-beta-3-20070209.213958-2.jar.txt
|
||||
dumps.put( "wagon-provider-api", createArchivaArtifact( "org.apache.maven.wagon", "wagon-provider-api",
|
||||
"1.0-beta-3-20070209.213958-2", "", "jar" ) );
|
||||
|
||||
return dumps;
|
||||
|
||||
}
|
||||
|
||||
private ArchivaArtifact createArchivaArtifact( String groupId, String artifactId, String version,
|
||||
String classifier, String type )
|
||||
{
|
||||
ArchivaArtifact artifact = new ArchivaArtifact( groupId, artifactId, version, classifier, type );
|
||||
return artifact;
|
||||
}
|
||||
|
||||
public Map populate( File basedir )
|
||||
{
|
||||
Map records = new HashMap();
|
||||
|
||||
Map artifactDumps = getObjectMap();
|
||||
for ( Iterator iter = artifactDumps.entrySet().iterator(); iter.hasNext(); )
|
||||
{
|
||||
Map.Entry entry = (Map.Entry) iter.next();
|
||||
ArchivaArtifact artifact = (ArchivaArtifact) entry.getValue();
|
||||
File dumpFile = getDumpFile( basedir, artifact );
|
||||
BytecodeRecord record = BytecodeRecordLoader.loadRecord( dumpFile, artifact );
|
||||
records.put( entry.getKey(), record );
|
||||
}
|
||||
|
||||
return records;
|
||||
}
|
||||
|
||||
protected File getDumpFile( File basedir, ArchivaArtifact artifact )
|
||||
{
|
||||
File dumpDir = new File( basedir, "src/test/artifact-dumps" );
|
||||
StringBuffer filename = new StringBuffer();
|
||||
|
||||
filename.append( artifact.getArtifactId() ).append( "-" ).append( artifact.getVersion() );
|
||||
|
||||
if ( artifact.hasClassifier() )
|
||||
{
|
||||
filename.append( "-" ).append( artifact.getClassifier() );
|
||||
}
|
||||
|
||||
filename.append( "." );
|
||||
|
||||
// TODO: use the ArtifactExtensionMapping object!
|
||||
if ( "maven-plugin".equals( artifact.getType() ) || "maven-archetype".equals( artifact.getType() ) )
|
||||
{
|
||||
filename.append( "jar" );
|
||||
}
|
||||
else
|
||||
{
|
||||
filename.append( artifact.getType() );
|
||||
}
|
||||
filename.append( ".txt" );
|
||||
|
||||
File dumpFile = new File( dumpDir, filename.toString() );
|
||||
|
||||
if ( !dumpFile.exists() )
|
||||
{
|
||||
throw new AssertionFailedError( "Dump file " + dumpFile.getAbsolutePath() + " does not exist (should it?)." );
|
||||
}
|
||||
|
||||
return dumpFile;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,131 @@
|
|||
package org.apache.maven.archiva.indexer.search;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
|
||||
import org.apache.maven.archiva.indexer.MockConfiguration;
|
||||
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
|
||||
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
|
||||
import org.apache.maven.archiva.model.ArchivaRepository;
|
||||
import org.codehaus.plexus.PlexusTestCase;
|
||||
import org.codehaus.plexus.util.FileUtils;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* DefaultCrossRepositorySearchTest
|
||||
*
|
||||
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
|
||||
* @version $Id$
|
||||
*/
|
||||
public class DefaultCrossRepositorySearchTest
|
||||
extends PlexusTestCase
|
||||
{
|
||||
|
||||
protected void setUp()
|
||||
throws Exception
|
||||
{
|
||||
super.setUp();
|
||||
|
||||
RepositoryContentIndexFactory indexFactory = (RepositoryContentIndexFactory) lookup(
|
||||
RepositoryContentIndexFactory.class
|
||||
.getName(), "lucene" );
|
||||
|
||||
File repoDir = new File( getBasedir(), "src/test/managed-repository" );
|
||||
|
||||
assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
|
||||
|
||||
String repoUri = "file://" + StringUtils.replace( repoDir.getAbsolutePath(), "\\", "/" );
|
||||
|
||||
ArchivaRepository repository = new ArchivaRepository( "testDefaultRepo", "Test Default Repository", repoUri );
|
||||
|
||||
File indexLocation = new File( "target/index-crossrepo-" + getName() + "/" );
|
||||
|
||||
MockConfiguration config = (MockConfiguration) lookup( ArchivaConfiguration.class.getName(), "mock" );
|
||||
|
||||
RepositoryConfiguration repoConfig = new RepositoryConfiguration();
|
||||
repoConfig.setId( repository.getId() );
|
||||
repoConfig.setName( repository.getModel().getName() );
|
||||
repoConfig.setUrl( repository.getModel().getUrl() );
|
||||
repoConfig.setIndexDir( indexLocation.getAbsolutePath() );
|
||||
|
||||
if ( indexLocation.exists() )
|
||||
{
|
||||
FileUtils.deleteDirectory( indexLocation );
|
||||
}
|
||||
|
||||
config.getConfiguration().addRepository( repoConfig );
|
||||
|
||||
// Create the (empty) indexes.
|
||||
RepositoryContentIndex indexHashcode = indexFactory.createHashcodeIndex( repository );
|
||||
RepositoryContentIndex indexBytecode = indexFactory.createBytecodeIndex( repository );
|
||||
RepositoryContentIndex indexContents = indexFactory.createFileContentIndex( repository );
|
||||
|
||||
// Now populate them.
|
||||
Map hashcodesMap = ( new HashcodesIndexPopulator() ).populate( new File( getBasedir() ) );
|
||||
indexHashcode.indexRecords( hashcodesMap.values() );
|
||||
Map bytecodeMap = ( new BytecodeIndexPopulator() ).populate( new File( getBasedir() ) );
|
||||
indexBytecode.indexRecords( bytecodeMap.values() );
|
||||
Map contentMap = ( new FileContentIndexPopulator() ).populate( new File( getBasedir() ) );
|
||||
indexContents.indexRecords( contentMap.values() );
|
||||
}
|
||||
|
||||
private CrossRepositorySearch lookupCrossRepositorySearch()
|
||||
throws Exception
|
||||
{
|
||||
CrossRepositorySearch search = (CrossRepositorySearch) lookup( CrossRepositorySearch.class.getName(), "default" );
|
||||
assertNotNull( "CrossRepositorySearch:default should not be null.", search );
|
||||
return search;
|
||||
}
|
||||
|
||||
public void testSearchTerm()
|
||||
throws Exception
|
||||
{
|
||||
CrossRepositorySearch search = lookupCrossRepositorySearch();
|
||||
|
||||
SearchResults results = search.searchForTerm( "org" );
|
||||
assertHitCounts( 1, 8, 8, 1, results );
|
||||
|
||||
results = search.searchForTerm( "junit" );
|
||||
assertHitCounts( 1, 1, 0, 1, results );
|
||||
|
||||
results = search.searchForTerm( "monosodium" );
|
||||
assertHitCounts( 1, 0, 0, 0, results );
|
||||
}
|
||||
|
||||
private void assertHitCounts( int repoCount, int bytecodeCount, int hashcodeCount, int contentCount,
|
||||
SearchResults results )
|
||||
{
|
||||
assertNotNull( "Search Results should not be null.", results );
|
||||
assertEquals( "Repository Hits", repoCount, results.getRepositories().size() );
|
||||
|
||||
if ( ( bytecodeCount != results.getBytecodeHits().size() )
|
||||
|| ( hashcodeCount != results.getHashcodeHits().size() )
|
||||
|| ( contentCount != results.getContentHits().size() ) )
|
||||
{
|
||||
fail( "Failed to get expected results hit count. Expected: (bytecode,hashcode,content) <" + bytecodeCount
|
||||
+ "," + hashcodeCount + "," + contentCount + ">, but got <" + results.getBytecodeHits().size() + ","
|
||||
+ results.getHashcodeHits().size() + "," + results.getContentHits().size() + "> instead." );
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,82 @@
|
|||
package org.apache.maven.archiva.indexer.search;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import junit.framework.AssertionFailedError;
|
||||
|
||||
/**
|
||||
* FileContentIndexPopulator
|
||||
*
|
||||
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
|
||||
* @version $Id$
|
||||
*/
|
||||
public class FileContentIndexPopulator
|
||||
implements IndexPopulator
|
||||
{
|
||||
public Map getObjectMap()
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
public Map populate( File basedir )
|
||||
{
|
||||
Map map = new HashMap();
|
||||
|
||||
File repoDir = new File( basedir, "src/test/managed-repository" );
|
||||
|
||||
map.put( "parent-pom-1",
|
||||
createFileContentRecord( repoDir, "org/apache/maven/archiva/record/parent-pom/1/parent-pom-1.pom" ) );
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
private FileContentRecord createFileContentRecord( File repoDir, String path )
|
||||
{
|
||||
File pathToFile = new File( repoDir, path );
|
||||
|
||||
if ( !pathToFile.exists() )
|
||||
{
|
||||
throw new AssertionFailedError( "Can't find test file: " + pathToFile.getAbsolutePath() );
|
||||
}
|
||||
|
||||
FileContentRecord record = new FileContentRecord();
|
||||
record.setFile( pathToFile );
|
||||
|
||||
try
|
||||
{
|
||||
record.setContents( FileUtils.readFileToString( pathToFile, null ) );
|
||||
}
|
||||
catch ( IOException e )
|
||||
{
|
||||
e.printStackTrace();
|
||||
throw new AssertionFailedError( "Can't load test file contents: " + pathToFile.getAbsolutePath() );
|
||||
}
|
||||
|
||||
return record;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,114 @@
|
|||
package org.apache.maven.archiva.indexer.search;
|
||||
|
||||
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
|
||||
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecordLoader;
|
||||
import org.apache.maven.archiva.model.ArchivaArtifact;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
|
||||
import junit.framework.AssertionFailedError;
|
||||
|
||||
public class HashcodesIndexPopulator
|
||||
implements IndexPopulator
|
||||
{
|
||||
|
||||
public Map getObjectMap()
|
||||
{
|
||||
Map dumps = new HashMap();
|
||||
|
||||
// archiva-common-1.0.jar.txt
|
||||
dumps.put( "archiva-common", createArchivaArtifact( "org.apache.maven.archiva", "archiva-common", "1.0", "",
|
||||
"jar" ) );
|
||||
|
||||
// continuum-webapp-1.0.3-SNAPSHOT.war.txt
|
||||
dumps.put( "continuum-webapp", createArchivaArtifact( "org.apache.maven.continuum", "continuum-webapp",
|
||||
"1.0.3-SNAPSHOT", "", "war" ) );
|
||||
|
||||
// daytrader-ear-1.1.ear.txt
|
||||
dumps.put( "daytrader-ear", createArchivaArtifact( "org.apache.geronimo", "daytrader-ear", "1.1", "", "ear" ) );
|
||||
|
||||
// maven-archetype-simple-1.0-alpha-4.jar.txt
|
||||
dumps.put( "maven-archetype-simple", createArchivaArtifact( "org.apache.maven", "maven-archetype-simple",
|
||||
"1.0-alpha-4", "", "maven-archetype" ) );
|
||||
|
||||
// maven-help-plugin-2.0.2-20070119.121239-2.jar.txt
|
||||
dumps.put( "maven-help-plugin", createArchivaArtifact( "org.apache.maven.plugins", "maven-help-plugin",
|
||||
"2.0.2-20070119.121239-2", "", "maven-plugin" ) );
|
||||
|
||||
// redback-authorization-open-1.0-alpha-1-SNAPSHOT.jar.txt
|
||||
dumps.put( "redback-authorization-open", createArchivaArtifact( "org.codehaus.plexus.redback",
|
||||
"redback-authorization-open",
|
||||
"1.0-alpha-1-SNAPSHOT", "", "jar" ) );
|
||||
|
||||
// testng-5.1-jdk15.jar.txt
|
||||
dumps.put( "testng", createArchivaArtifact( "org.testng", "testng", "5.1", "jdk15", "jar" ) );
|
||||
|
||||
// wagon-provider-api-1.0-beta-3-20070209.213958-2.jar.txt
|
||||
dumps.put( "wagon-provider-api", createArchivaArtifact( "org.apache.maven.wagon", "wagon-provider-api",
|
||||
"1.0-beta-3-20070209.213958-2", "", "jar" ) );
|
||||
|
||||
return dumps;
|
||||
}
|
||||
|
||||
public Map populate( File basedir )
|
||||
{
|
||||
Map records = new HashMap();
|
||||
|
||||
Map artifactDumps = getObjectMap();
|
||||
for ( Iterator iter = artifactDumps.entrySet().iterator(); iter.hasNext(); )
|
||||
{
|
||||
Map.Entry entry = (Map.Entry) iter.next();
|
||||
ArchivaArtifact artifact = (ArchivaArtifact) entry.getValue();
|
||||
File dumpFile = getDumpFile( basedir, artifact );
|
||||
HashcodesRecord record = HashcodesRecordLoader.loadRecord( dumpFile, artifact );
|
||||
records.put( entry.getKey(), record );
|
||||
}
|
||||
|
||||
return records;
|
||||
}
|
||||
|
||||
protected File getDumpFile( File basedir, ArchivaArtifact artifact )
|
||||
{
|
||||
File dumpDir = new File( basedir, "src/test/artifact-dumps" );
|
||||
StringBuffer filename = new StringBuffer();
|
||||
|
||||
filename.append( artifact.getArtifactId() ).append( "-" ).append( artifact.getVersion() );
|
||||
|
||||
if ( artifact.hasClassifier() )
|
||||
{
|
||||
filename.append( "-" ).append( artifact.getClassifier() );
|
||||
}
|
||||
|
||||
filename.append( "." );
|
||||
|
||||
// TODO: use the ArtifactExtensionMapping object!
|
||||
if ( "maven-plugin".equals( artifact.getType() ) || "maven-archetype".equals( artifact.getType() ) )
|
||||
{
|
||||
filename.append( "jar" );
|
||||
}
|
||||
else
|
||||
{
|
||||
filename.append( artifact.getType() );
|
||||
}
|
||||
filename.append( ".txt" );
|
||||
|
||||
File dumpFile = new File( dumpDir, filename.toString() );
|
||||
|
||||
if ( !dumpFile.exists() )
|
||||
{
|
||||
throw new AssertionFailedError( "Dump file " + dumpFile.getAbsolutePath() + " does not exist (should it?)." );
|
||||
}
|
||||
|
||||
return dumpFile;
|
||||
}
|
||||
|
||||
private ArchivaArtifact createArchivaArtifact( String groupId, String artifactId, String version,
|
||||
String classifier, String type )
|
||||
{
|
||||
ArchivaArtifact artifact = new ArchivaArtifact( groupId, artifactId, version, classifier, type );
|
||||
return artifact;
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,36 @@
package org.apache.maven.archiva.indexer.search;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.io.File;
import java.util.Map;

/**
 * IndexPopulator
 *
 * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
 * @version $Id$
 */
public interface IndexPopulator
{
    public Map getObjectMap();

    public Map populate( File basedir );
}
@@ -0,0 +1,40 @@
<component-set>
  <components>
    <component>
      <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
      <role-hint>mock</role-hint>
      <implementation>org.apache.maven.archiva.indexer.MockConfiguration</implementation>
    </component>
    <component>
      <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
      <role-hint>lucene</role-hint>
      <implementation>org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndexFactory</implementation>
      <description>Factory for Lucene repository content index instances.</description>
      <requirements>
        <requirement>
          <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
          <role-hint>mock</role-hint>
          <field-name>configuration</field-name>
        </requirement>
      </requirements>
    </component>
    <component>
      <role>org.apache.maven.archiva.indexer.search.CrossRepositorySearch</role>
      <role-hint>default</role-hint>
      <implementation>org.apache.maven.archiva.indexer.search.DefaultCrossRepositorySearch</implementation>
      <description>DefaultCrossRepositorySearch</description>
      <requirements>
        <requirement>
          <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
          <role-hint>lucene</role-hint>
          <field-name>indexFactory</field-name>
        </requirement>
        <requirement>
          <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
          <role-hint>mock</role-hint>
          <field-name>configuration</field-name>
        </requirement>
      </requirements>
    </component>
  </components>
</component-set>
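This test descriptor wires the new CrossRepositorySearch component against the mock configuration and the Lucene index factory. Inside a PlexusTestCase the component is then looked up by role and role-hint, exactly as DefaultCrossRepositorySearchTest does earlier in this commit; a minimal sketch:

    CrossRepositorySearch search =
        (CrossRepositorySearch) lookup( CrossRepositorySearch.class.getName(), "default" );

    SearchResults results = search.searchForTerm( "org" );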
@ -19,26 +19,14 @@ package org.apache.maven.archiva.web.action;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.queryParser.MultiFieldQueryParser;
|
||||
import org.apache.lucene.queryParser.ParseException;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.maven.archiva.configuration.Configuration;
|
||||
import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
|
||||
import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
|
||||
import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
|
||||
import org.apache.maven.archiva.indexer.RepositoryIndexException;
|
||||
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
|
||||
import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
|
||||
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryArtifactIndex;
|
||||
import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
|
||||
import org.apache.maven.archiva.web.util.VersionMerger;
|
||||
import org.apache.maven.archiva.indexer.search.CrossRepositorySearch;
|
||||
import org.apache.maven.archiva.indexer.search.SearchResults;
|
||||
import org.codehaus.plexus.xwork.action.PlexusActionSupport;
|
||||
|
||||
import java.io.File;
|
||||
import java.net.MalformedURLException;
|
||||
import java.util.Collection;
|
||||
|
||||
/**
|
||||
* Search all indexed fields by the given criteria.
|
||||
|
@ -59,60 +47,46 @@ public class SearchAction
|
|||
private String md5;
|
||||
|
||||
/**
|
||||
* Search results.
|
||||
* The Search Results.
|
||||
*/
|
||||
private Collection searchResults;
|
||||
private SearchResults results;
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
* @plexus.requirement role-hint="default"
|
||||
*/
|
||||
private RepositoryArtifactIndexFactory factory;
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private ConfiguredRepositoryFactory repositoryFactory;
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private ArchivaConfiguration archivaConfiguration;
|
||||
private CrossRepositorySearch crossRepoSearch;
|
||||
|
||||
private static final String RESULTS = "results";
|
||||
|
||||
private static final String ARTIFACT = "artifact";
|
||||
|
||||
private String infoMessage;
|
||||
|
||||
public String quickSearch()
|
||||
throws MalformedURLException, RepositoryIndexException, RepositoryIndexSearchException, ParseException
|
||||
{
|
||||
// TODO: give action message if indexing is in progress
|
||||
/* TODO: give action message if indexing is in progress.
|
||||
* This should be based off a count of 'unprocessed' artifacts.
|
||||
* This (yet to be written) routine could tell the user that X artifacts are not yet
|
||||
* present in the full text search.
|
||||
*/
|
||||
|
||||
assert q != null && q.length() != 0;
|
||||
|
||||
RepositoryArtifactIndex index = getIndex();
|
||||
results = crossRepoSearch.searchForTerm( q );
|
||||
|
||||
if ( !index.exists() )
|
||||
{
|
||||
addActionError( "The repository is not yet indexed. Please wait, and then try again." );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
MultiFieldQueryParser parser = new MultiFieldQueryParser( new String[]{StandardIndexRecordFields.GROUPID,
|
||||
StandardIndexRecordFields.ARTIFACTID, StandardIndexRecordFields.BASE_VERSION,
|
||||
StandardIndexRecordFields.CLASSIFIER, StandardIndexRecordFields.CLASSES, StandardIndexRecordFields.FILES,
|
||||
StandardIndexRecordFields.TYPE, StandardIndexRecordFields.PROJECT_NAME,
|
||||
StandardIndexRecordFields.PROJECT_DESCRIPTION}, LuceneRepositoryArtifactIndex.getAnalyzer() );
|
||||
searchResults = index.search( new LuceneQuery( parser.parse( q ) ) );
|
||||
|
||||
if ( searchResults.isEmpty() )
|
||||
if ( results.isEmpty() )
|
||||
{
|
||||
addActionError( "No results found" );
|
||||
return INPUT;
|
||||
}
|
||||
|
||||
searchResults = VersionMerger.merge( searchResults );
|
||||
// TODO: filter / combine the artifacts by version? (is that even possible with non-artifact hits?)
|
||||
|
||||
/* I don't think that we should, as I expect us to utilize the 'score' system in lucene in
|
||||
* the future to return relevant links better.
|
||||
* I expect the lucene scoring system to take multiple hits on different areas of a single document
|
||||
* to result in a higher score.
|
||||
* - Joakim
|
||||
*/
|
||||
|
||||
return SUCCESS;
|
||||
}
|
||||
|
@ -124,23 +98,15 @@ public class SearchAction
|
|||
|
||||
assert md5 != null && md5.length() != 0;
|
||||
|
||||
RepositoryArtifactIndex index = getIndex();
|
||||
results = crossRepoSearch.searchForMd5( q );
|
||||
|
||||
if ( !index.exists() )
|
||||
{
|
||||
addActionError( "The repository is not yet indexed. Please wait, and then try again." );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
searchResults = index.search(
|
||||
new LuceneQuery( new TermQuery( new Term( StandardIndexRecordFields.MD5, md5.toLowerCase() ) ) ) );
|
||||
|
||||
if ( searchResults.isEmpty() )
|
||||
if ( results.isEmpty() )
|
||||
{
|
||||
addActionError( "No results found" );
|
||||
return INPUT;
|
||||
}
|
||||
if ( searchResults.size() == 1 )
|
||||
|
||||
if ( results.getHashcodeHits().size() == 1 )
|
||||
{
|
||||
return ARTIFACT;
|
||||
}
|
||||
|
@ -150,15 +116,6 @@ public class SearchAction
|
|||
}
|
||||
}
|
||||
|
||||
private RepositoryArtifactIndex getIndex()
|
||||
throws RepositoryIndexException
|
||||
{
|
||||
Configuration configuration = archivaConfiguration.getConfiguration();
|
||||
File indexPath = new File( configuration.getIndexPath() );
|
||||
|
||||
return factory.createStandardIndex( indexPath );
|
||||
}
|
||||
|
||||
public String doInput()
|
||||
{
|
||||
return INPUT;
|
||||
|
@ -183,19 +140,4 @@ public class SearchAction
|
|||
{
|
||||
this.md5 = md5;
|
||||
}
|
||||
|
||||
public Collection getSearchResults()
|
||||
{
|
||||
return searchResults;
|
||||
}
|
||||
|
||||
public String getInfoMessage()
|
||||
{
|
||||
return infoMessage;
|
||||
}
|
||||
|
||||
public void setInfoMessage( String infoMessage )
|
||||
{
|
||||
this.infoMessage = infoMessage;
|
||||
}
|
||||
}
|
||||
|
|