MRM-1037 - Search Usability

* timestamp versions are merged to -SNAPSHOT versions
* duplicate artifacts are now merged by use of boolean filters
* we now search the correct fields
* content search has been removed (more accurate results)
* added more tokenizers for groupId, artifactId, version, etc
* Artifact IDs are weighted to improve quicksearch results



git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@726928 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
James William Dumay 2008-12-16 02:13:25 +00:00
parent 679221b1ea
commit 20080174a4
25 changed files with 316 additions and 175 deletions

View File

@ -159,10 +159,8 @@ public void processFile( String path )
FileContentRecord record = new FileContentRecord(); FileContentRecord record = new FileContentRecord();
try try
{ {
File file = new File( repositoryDir, path );
record.setRepositoryId( this.repository.getId() ); record.setRepositoryId( this.repository.getId() );
record.setFilename( path ); record.setFilename( path );
record.setContents( FileUtils.readFileToString( file, null ) );
// Test for possible artifact reference syntax. // Test for possible artifact reference syntax.
try try
@ -179,10 +177,6 @@ public void processFile( String path )
index.modifyRecord( record ); index.modifyRecord( record );
} }
catch ( IOException e )
{
triggerConsumerError( READ_CONTENT, "Unable to read file contents: " + e.getMessage() );
}
catch ( RepositoryIndexException e ) catch ( RepositoryIndexException e )
{ {
triggerConsumerError( INDEX_ERROR, "Unable to index file contents: " + e.getMessage() ); triggerConsumerError( INDEX_ERROR, "Unable to index file contents: " + e.getMessage() );

View File

@ -40,6 +40,10 @@
<groupId>org.apache.lucene</groupId> <groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId> <artifactId>lucene-core</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queries</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.codehaus.plexus</groupId> <groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-spring</artifactId> <artifactId>plexus-spring</artifactId>

View File

@ -23,8 +23,11 @@
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.maven.archiva.indexer.lucene.analyzers.FilenamesTokenizer; import org.apache.maven.archiva.indexer.lucene.analyzers.FilenamesTokenizer;
import org.apache.maven.archiva.indexer.lucene.analyzers.ArtifactIdTokenizer;
import org.apache.maven.archiva.indexer.lucene.analyzers.GroupIdTokenizer;
import java.io.Reader; import java.io.Reader;
import org.apache.maven.archiva.indexer.lucene.analyzers.VersionTokenizer;
/** /**
* FileContentAnalyzer * FileContentAnalyzer
@ -42,6 +45,21 @@ public TokenStream tokenStream( String field, Reader reader )
return new FilenamesTokenizer( reader ); return new FilenamesTokenizer( reader );
} }
if ( FileContentKeys.ARTIFACTID.equals( field ))
{
return new ArtifactIdTokenizer(reader);
}
if ( FileContentKeys.GROUPID.equals( field ) )
{
return new GroupIdTokenizer(reader);
}
if ( FileContentKeys.VERSION.equals( field ))
{
return new VersionTokenizer(reader);
}
return STANDARD.tokenStream( field, reader ); return STANDARD.tokenStream( field, reader );
} }
} }

View File

@ -37,7 +37,6 @@
public class FileContentConverter public class FileContentConverter
implements LuceneEntryConverter implements LuceneEntryConverter
{ {
public Document convert( LuceneRepositoryContentRecord record ) public Document convert( LuceneRepositoryContentRecord record )
{ {
if ( !( record instanceof FileContentRecord ) ) if ( !( record instanceof FileContentRecord ) )
@ -55,16 +54,15 @@ public Document convert( LuceneRepositoryContentRecord record )
// Artifact Reference // Artifact Reference
doc.addFieldTokenized( ArtifactKeys.GROUPID, filecontent.getArtifact().getGroupId() ); doc.addFieldTokenized( ArtifactKeys.GROUPID, filecontent.getArtifact().getGroupId() );
doc.addFieldExact( ArtifactKeys.GROUPID_EXACT, filecontent.getArtifact().getGroupId() ); doc.addFieldExact( ArtifactKeys.GROUPID_EXACT, filecontent.getArtifact().getGroupId() );
doc.addFieldTokenized( ArtifactKeys.ARTIFACTID, filecontent.getArtifact().getArtifactId() ); doc.addFieldTokenized( ArtifactKeys.ARTIFACTID, filecontent.getArtifact().getArtifactId()); //, 2.0f);
doc.addFieldExact( ArtifactKeys.ARTIFACTID_EXACT, filecontent.getArtifact().getArtifactId() ); doc.addFieldExact( ArtifactKeys.ARTIFACTID_EXACT, filecontent.getArtifact().getArtifactId(), 2.0f);
doc.addFieldTokenized( ArtifactKeys.VERSION, filecontent.getArtifact().getVersion() ); doc.addFieldTokenized( ArtifactKeys.VERSION, filecontent.getArtifact().getVersion() );
doc.addFieldExact( ArtifactKeys.VERSION_EXACT, filecontent.getArtifact().getVersion() ); doc.addFieldExact( ArtifactKeys.VERSION_EXACT, filecontent.getArtifact().getVersion() );
doc.addFieldTokenized( ArtifactKeys.TYPE, filecontent.getArtifact().getType() ); doc.addFieldTokenized( ArtifactKeys.TYPE, filecontent.getArtifact().getType() );
doc.addFieldUntokenized( ArtifactKeys.CLASSIFIER, filecontent.getArtifact().getClassifier() ); doc.addFieldUntokenized( ArtifactKeys.CLASSIFIER, filecontent.getArtifact().getClassifier() );
} }
doc.addFieldTokenized( FileContentKeys.FILENAME, filecontent.getFilename() ); doc.addFieldTokenized( FileContentKeys.FILENAME, filecontent.getFilename() );
doc.addFieldTokenized( FileContentKeys.CONTENT, filecontent.getContents() );
return doc.getDocument(); return doc.getDocument();
} }
@ -91,7 +89,6 @@ public LuceneRepositoryContentRecord convert( Document document )
// Filecontent Specifics // Filecontent Specifics
record.setFilename( document.get( FileContentKeys.FILENAME ) ); record.setFilename( document.get( FileContentKeys.FILENAME ) );
record.setContents( document.get( FileContentKeys.CONTENT ) );
return record; return record;
} }

View File

@ -43,8 +43,17 @@ public FileContentHandlers()
{ {
analyzer = new FileContentAnalyzer(); analyzer = new FileContentAnalyzer();
converter = new FileContentConverter(); converter = new FileContentConverter();
queryParser = new MultiFieldQueryParser( new String[] { FileContentKeys.FILENAME, FileContentKeys.CONTENT }, queryParser = new MultiFieldQueryParser( new String[] {
analyzer ); FileContentKeys.FILENAME,
FileContentKeys.ARTIFACTID,
FileContentKeys.GROUPID,
FileContentKeys.ARTIFACTID_EXACT,
FileContentKeys.GROUPID_EXACT,
FileContentKeys.VERSION,
FileContentKeys.VERSION_EXACT},
analyzer );
//We prefer the narrowing approach to search results.
queryParser.setDefaultOperator(MultiFieldQueryParser.Operator.AND);
} }
public String getId() public String getId()

View File

@ -32,6 +32,4 @@ public class FileContentKeys
public static final String ID = "filecontent"; public static final String ID = "filecontent";
public static final String FILENAME = "filename"; public static final String FILENAME = "filename";
public static final String CONTENT = "content";
} }

View File

@ -39,8 +39,6 @@ public class FileContentRecord
*/ */
private ArchivaArtifact artifact; private ArchivaArtifact artifact;
private String contents;
public String getRepositoryId() public String getRepositoryId()
{ {
return repositoryId; return repositoryId;
@ -51,16 +49,6 @@ public void setRepositoryId( String repositoryId )
this.repositoryId = repositoryId; this.repositoryId = repositoryId;
} }
public String getContents()
{
return contents;
}
public void setContents( String contents )
{
this.contents = contents;
}
public String getPrimaryKey() public String getPrimaryKey()
{ {
return repositoryId + ":" + filename; return repositoryId + ":" + filename;

View File

@ -81,6 +81,18 @@ public LuceneDocumentMaker addFieldTokenized( String key, String value )
return this; return this;
} }
/**
 * Adds a stored, tokenized field with the supplied scoring boost.
 * Silently does nothing when the value is null.
 *
 * @param key the Lucene field name
 * @param value the field value (may be null)
 * @param boost the boost factor applied to this field for scoring
 * @return this maker, to allow call chaining
 */
public LuceneDocumentMaker addFieldTokenized( String key, String value, float boost )
{
    if ( value == null )
    {
        return this;
    }
    final Field boosted = new Field( key, value, Field.Store.YES, Field.Index.TOKENIZED );
    boosted.setBoost( boost );
    document.add( boosted );
    return this;
}
public LuceneDocumentMaker addFieldTokenized( String key, List list ) public LuceneDocumentMaker addFieldTokenized( String key, List list )
{ {
if ( ( list != null ) && ( !list.isEmpty() ) ) if ( ( list != null ) && ( !list.isEmpty() ) )
@ -101,6 +113,18 @@ public LuceneDocumentMaker addFieldUntokenized( String name, String value )
return this; return this;
} }
/**
 * Adds a stored but untokenized field with the supplied scoring boost.
 * Silently does nothing when the value is null.
 *
 * @param name the Lucene field name
 * @param value the field value, indexed as a single term (may be null)
 * @param boost the boost factor applied to this field for scoring
 * @return this maker, to allow call chaining
 */
public LuceneDocumentMaker addFieldUntokenized( String name, String value, float boost )
{
    if ( value == null )
    {
        return this;
    }
    final Field boosted = new Field( name, value, Field.Store.YES, Field.Index.UN_TOKENIZED );
    boosted.setBoost( boost );
    document.add( boosted );
    return this;
}
public LuceneDocumentMaker addFieldExact( String name, String value ) public LuceneDocumentMaker addFieldExact( String name, String value )
{ {
if ( value != null ) if ( value != null )
@ -111,6 +135,18 @@ public LuceneDocumentMaker addFieldExact( String name, String value )
return this; return this;
} }
/**
 * Adds an exact-match field (unstored, untokenized) with the supplied
 * scoring boost. Silently does nothing when the value is null.
 *
 * @param name the Lucene field name
 * @param value the field value, indexed as a single term (may be null)
 * @param boost the boost factor applied to this field for scoring
 * @return this maker, to allow call chaining
 */
public LuceneDocumentMaker addFieldExact( String name, String value, float boost )
{
    if ( value == null )
    {
        return this;
    }
    final Field boosted = new Field( name, value, Field.Store.NO, Field.Index.UN_TOKENIZED );
    boosted.setBoost( boost );
    document.add( boosted );
    return this;
}
public Document getDocument() public Document getDocument()
{ {
return this.document; return this.document;

View File

@ -0,0 +1,45 @@
package org.apache.maven.archiva.indexer.lucene.analyzers;
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
import java.io.Reader;
import org.apache.lucene.analysis.CharTokenizer;
/**
 * Lucene tokenizer for artifact id fields: the '-' character is the
 * sole token separator, so an input such as "atlassian-plugins-core"
 * produces the tokens "atlassian", "plugins" and "core".
 */
public class ArtifactIdTokenizer
    extends CharTokenizer
{
    private static final char SEPARATOR = '-';

    public ArtifactIdTokenizer( Reader reader )
    {
        super( reader );
    }

    /**
     * Every character belongs to a token except the '-' separator.
     *
     * @param c the character under inspection
     * @return true when the character is part of a token
     */
    @Override
    protected boolean isTokenChar( char c )
    {
        return c != SEPARATOR;
    }
}

View File

@ -28,8 +28,11 @@
import org.apache.lucene.queryParser.ParseException; import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser; import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanFilter;
import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DuplicateFilter;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilterClause;
import org.apache.lucene.search.Hits; import org.apache.lucene.search.Hits;
import org.apache.lucene.search.MultiSearcher; import org.apache.lucene.search.MultiSearcher;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
@ -46,6 +49,7 @@
import org.apache.maven.archiva.indexer.bytecode.BytecodeHandlers; import org.apache.maven.archiva.indexer.bytecode.BytecodeHandlers;
import org.apache.maven.archiva.indexer.bytecode.BytecodeKeys; import org.apache.maven.archiva.indexer.bytecode.BytecodeKeys;
import org.apache.maven.archiva.indexer.filecontent.FileContentHandlers; import org.apache.maven.archiva.indexer.filecontent.FileContentHandlers;
import org.apache.maven.archiva.indexer.filecontent.FileContentKeys;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesHandlers; import org.apache.maven.archiva.indexer.hashcodes.HashcodesHandlers;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesKeys; import org.apache.maven.archiva.indexer.hashcodes.HashcodesKeys;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter; import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
@ -208,10 +212,17 @@ public SearchResults searchForTerm( String principal, List<String> selectedRepos
QueryParser parser = new FileContentHandlers().getQueryParser(); QueryParser parser = new FileContentHandlers().getQueryParser();
LuceneQuery query = null; LuceneQuery query = null;
SearchResults results = null; SearchResults results = null;
BooleanFilter duplicateFilter = new BooleanFilter();
DuplicateFilter artifactIdDuplicateFilter = new DuplicateFilter(FileContentKeys.ARTIFACTID_EXACT);
duplicateFilter.add(new FilterClause(artifactIdDuplicateFilter, BooleanClause.Occur.SHOULD));
DuplicateFilter groupIdDuplicateFilter = new DuplicateFilter(FileContentKeys.GROUPID_EXACT);
duplicateFilter.add(new FilterClause(groupIdDuplicateFilter, BooleanClause.Occur.SHOULD));
if ( previousSearchTerms == null || previousSearchTerms.isEmpty() ) if ( previousSearchTerms == null || previousSearchTerms.isEmpty() )
{ {
query = new LuceneQuery( parser.parse( term ) ); query = new LuceneQuery( parser.parse( term ) );
results = searchAll( query, limits, indexes, null ); results = searchAll( query, limits, indexes, duplicateFilter );
} }
else else
{ {
@ -224,7 +235,8 @@ public SearchResults searchForTerm( String principal, List<String> selectedRepos
query = new LuceneQuery( booleanQuery ); query = new LuceneQuery( booleanQuery );
Filter filter = new QueryWrapperFilter( parser.parse( term ) ); Filter filter = new QueryWrapperFilter( parser.parse( term ) );
results = searchAll( query, limits, indexes, filter ); duplicateFilter.add(new FilterClause(filter, BooleanClause.Occur.SHOULD));
results = searchAll( query, limits, indexes, duplicateFilter );
} }
results.getRepositories().addAll( this.localIndexedRepositories ); results.getRepositories().addAll( this.localIndexedRepositories );
@ -268,7 +280,7 @@ private SearchResults searchAll( LuceneQuery luceneQuery, SearchResultLimits lim
{ {
// Create a multi-searcher for looking up the information. // Create a multi-searcher for looking up the information.
searcher = new MultiSearcher( searchables ); searcher = new MultiSearcher( searchables );
// Perform the search. // Perform the search.
Hits hits = null; Hits hits = null;
if ( filter != null ) if ( filter != null )

View File

@ -48,10 +48,9 @@ public class SearchResultHit
private String repositoryId = ""; private String repositoryId = "";
// Advanced hit, if artifact, all versions of artifact private List<String> versions = new ArrayList();
private List artifacts = new ArrayList();
private List versions = new ArrayList(); private ArchivaArtifact artifact;
public String getContext() public String getContext()
{ {
@ -88,11 +87,10 @@ public void setArtifactId( String artifactId )
this.artifactId = artifactId; this.artifactId = artifactId;
} }
public void addArtifact( ArchivaArtifact artifact ) public void setArtifact( ArchivaArtifact artifact )
{ {
this.artifacts.add( artifact ); this.artifact = artifact;
final String ver = artifact.getVersion();
String ver = artifact.getVersion();
if ( !this.versions.contains( ver ) ) if ( !this.versions.contains( ver ) )
{ {
@ -115,9 +113,9 @@ public void addArtifact( ArchivaArtifact artifact )
} }
} }
public List getArtifacts() public ArchivaArtifact getArtifact()
{ {
return artifacts; return artifact;
} }
public String getGroupId() public String getGroupId()
@ -135,11 +133,21 @@ public String getVersion()
return version; return version;
} }
public List getVersions() public void setVersion(String version)
{
this.version = version;
}
public List<String> getVersions()
{ {
return versions; return versions;
} }
public void setVersions(List<String> versions)
{
this.versions = versions;
}
public String getRepositoryId() public String getRepositoryId()
{ {
return repositoryId; return repositoryId;

View File

@ -40,7 +40,7 @@ public class SearchResults
{ {
private List repositories = new ArrayList(); private List repositories = new ArrayList();
private Map hits = new HashMap(); private Map<String, SearchResultHit> hits = new HashMap();
private int totalHits; private int totalHits;
@ -82,7 +82,7 @@ private void addBytecodeHit( BytecodeRecord bytecode )
} }
hit.setRepositoryId( bytecode.getRepositoryId() ); hit.setRepositoryId( bytecode.getRepositoryId() );
hit.addArtifact( bytecode.getArtifact() ); hit.setArtifact( bytecode.getArtifact() );
hit.setContext( null ); // TODO: provide context on why this is a valuable hit. hit.setContext( null ); // TODO: provide context on why this is a valuable hit.
this.hits.put( key, hit ); this.hits.put( key, hit );
@ -111,18 +111,16 @@ private void addHashcodeHit( HashcodesRecord hashcodes )
hit = new SearchResultHit(); hit = new SearchResultHit();
} }
hit.addArtifact( hashcodes.getArtifact() ); hit.setArtifact( hashcodes.getArtifact() );
hit.setContext( null ); // TODO: provide context on why this is a valuable hit. hit.setContext( null ); // TODO: provide context on why this is a valuable hit.
this.hits.put( key, hit ); hits.put( key, hit );
} }
public void addFileContentHit( FileContentRecord filecontent ) public void addFileContentHit( FileContentRecord filecontent )
{ {
String key = filecontent.getPrimaryKey(); final String key = filecontent.getPrimaryKey();
SearchResultHit hit = hits.get( key );
SearchResultHit hit = (SearchResultHit) this.hits.get( key );
if ( hit == null ) if ( hit == null )
{ {
// Only need to worry about this hit if it is truely new. // Only need to worry about this hit if it is truely new.
@ -135,7 +133,7 @@ public void addFileContentHit( FileContentRecord filecontent )
// Test for possible artifact reference ... // Test for possible artifact reference ...
if( filecontent.getArtifact() != null ) if( filecontent.getArtifact() != null )
{ {
hit.addArtifact( filecontent.getArtifact() ); hit.setArtifact( filecontent.getArtifact() );
} }
this.hits.put( key, hit ); this.hits.put( key, hit );
@ -147,7 +145,7 @@ public void addFileContentHit( FileContentRecord filecontent )
* *
* @return the list of {@link SearchResultHit} objects. * @return the list of {@link SearchResultHit} objects.
*/ */
public List getHits() public List<SearchResultHit> getHits()
{ {
return new ArrayList( hits.values() ); return new ArrayList( hits.values() );
} }

View File

@ -126,119 +126,77 @@ private CrossRepositorySearch lookupCrossRepositorySearch()
return search; return search;
} }
public void testSearchTerm_Org() public void testSearchArtifactIdHasMoreWieghtThanGroupId() throws Exception
throws Exception {
{
CrossRepositorySearch search = lookupCrossRepositorySearch(); CrossRepositorySearch search = lookupCrossRepositorySearch();
String expectedRepos[] = new String[] { String expectedRepos[] = new String[] {
TEST_DEFAULT_REPO_ID TEST_DEFAULT_REPO_ID
}; };
String expectedResults[] = new String[] {
"org","org2","org3","org4","org5","org6","org7"
};
assertSearchResults( expectedRepos, expectedResults, search, "org", null, false );
}
public void testSearchTerm_Junit() List<SearchResultHit> expectedHits = new ArrayList<SearchResultHit>();
throws Exception SearchResultHit hit = new SearchResultHit();
{ hit.setGroupId("ant");
CrossRepositorySearch search = lookupCrossRepositorySearch(); hit.setArtifactId("ant");
hit.setVersion("1.5");
String expectedRepos[] = new String[] { expectedHits.add(hit);
TEST_DEFAULT_REPO_ID
}; hit = new SearchResultHit();
hit.setGroupId("ant");
String expectedResults[] = new String[] { hit.setArtifactId("ant-optional");
"junit","junit2","junit3" hit.setVersion("1.5.1");
}; expectedHits.add(hit);
assertSearchResults( expectedRepos, expectedResults, search, "junit", null, false ); hit = new SearchResultHit();
hit.setGroupId("ant");
hit.setArtifactId("ant-junit");
hit.setVersion("1.6.5");
expectedHits.add(hit);
assertSearchResults( expectedRepos, expectedHits, search, "ant", null, false );
} }
public void testSearchInvalidTerm() public void testSearchInvalidTerm()
throws Exception throws Exception
{ {
CrossRepositorySearch search = lookupCrossRepositorySearch(); CrossRepositorySearch search = lookupCrossRepositorySearch();
String expectedRepos[] = new String[] { String expectedRepos[] = new String[] {
TEST_DEFAULT_REPO_ID TEST_DEFAULT_REPO_ID
}; };
String expectedResults[] = new String[] {
// Nothing.
};
assertSearchResults( expectedRepos, expectedResults, search, "monosodium", null, false );
}
public void testSearchWithinSearchResults()
throws Exception
{
CrossRepositorySearch search = lookupCrossRepositorySearch();
String expectedRepos[] = new String[] { assertSearchResults( expectedRepos, new ArrayList<SearchResultHit>(), search, "monosodium", null, false );
TEST_DEFAULT_REPO_ID
};
String expectedResults[] = new String[] {
"org","org2","org3","org4","org5","org6","org7"
};
// first search
assertSearchResults( expectedRepos, expectedResults, search, "org", null, false );
List<String> previousSearchTerms = new ArrayList<String>();
previousSearchTerms.add( "org" );
String secondSearchExpectedResults[] = new String[] {
"org.apache.maven.archiva.record", "org.apache.maven.archiva.record2",
"org.apache.maven.archiva.record3", "org.apache.maven.archiva.record4",
"org.apache.maven.archiva.record5", "org.apache.maven.archiva.record6",
"org.apache.maven.archiva.record7"
};
//second search
assertSearchResults( expectedRepos, secondSearchExpectedResults, search, "org.apache.maven.archiva.record",
previousSearchTerms, false );
previousSearchTerms.add( "org.apache.maven.archiva.record" );
String thirdSearchExpectedResults[] = new String[] {
"junit", "junit2", "junit3"
};
//third search
assertSearchResults( expectedRepos, thirdSearchExpectedResults, search, "junit", previousSearchTerms, false );
} }
public void testSearchForClassesAndPackages() public void testSearchForClassesAndPackages()
throws Exception throws Exception
{ {
CrossRepositorySearch search = lookupCrossRepositorySearch(); CrossRepositorySearch search = lookupCrossRepositorySearch();
String expectedRepos[] = new String[] { String expectedRepos[] = new String[] {
TEST_DEFAULT_REPO_ID TEST_DEFAULT_REPO_ID
}; };
String expectedResults[] = new String[] { SearchResultHit archivaCommon = new SearchResultHit();
"archiva-common-1.0.jar" archivaCommon.setArtifactId("archiva-common");
}; archivaCommon.setGroupId("org.apache.maven.archiva");
archivaCommon.setVersion("1.0");
// class with packagename search // class with packagename search
assertSearchResults( expectedRepos, expectedResults, search, assertSearchResults( expectedRepos, Arrays.asList(archivaCommon), search,
"org.apache.maven.archiva.common.utils.BaseFile", null, true ); "org.apache.maven.archiva.common.utils.BaseFile", null, true );
// class name search // class name search
assertSearchResults( expectedRepos, expectedResults, search, assertSearchResults( expectedRepos, Arrays.asList(archivaCommon), search,
"BaseFile", null, true ); "BaseFile", null, true );
String expectedMethodSearchResults[] = new String[] { SearchResultHit hit = new SearchResultHit();
"continuum-webapp-1.0.3-SNAPSHOT.war" hit.setGroupId("org.apache.maven.continuum");
}; hit.setArtifactId("continuum-webapp");
hit.setVersion("1.0.3-SNAPSHOT");
// method search // method search
assertSearchResults( expectedRepos, expectedMethodSearchResults, search, assertSearchResults( expectedRepos, Arrays.asList(hit), search,
"org.apache.maven.continuum.web.action.BuildDefinitionAction.isBuildFresh", null, true ); "org.apache.maven.continuum.web.action.BuildDefinitionAction.isBuildFresh", null, true );
} }
public void testExecuteFilteredSearch() public void testExecuteFilteredSearch()
@ -312,7 +270,7 @@ private void assertFilteredSearchResults ( String expectedRepos[], String expect
assertEquals( "Search Result Hits", expectedResults.length, results.getHits().size() ); assertEquals( "Search Result Hits", expectedResults.length, results.getHits().size() );
} }
private void assertSearchResults( String expectedRepos[], String expectedResults[], CrossRepositorySearch search, private void assertSearchResults( String expectedRepos[], List<SearchResultHit> expectedResults, CrossRepositorySearch search,
String term, List<String> previousSearchTerms, boolean bytecode ) String term, List<String> previousSearchTerms, boolean bytecode )
throws Exception throws Exception
{ {
@ -325,8 +283,8 @@ private void assertSearchResults( String expectedRepos[], String expectedResults
SearchResults results = null; SearchResults results = null;
if( previousSearchTerms == null ) if( previousSearchTerms == null )
{ {
if( bytecode ) if( bytecode )
{ {
results = search.searchForBytecode( "guest", selectedRepos, term, limits ); results = search.searchForBytecode( "guest", selectedRepos, term, limits );
} }
@ -346,9 +304,16 @@ private void assertSearchResults( String expectedRepos[], String expectedResults
// TODO: test the repository ids returned. // TODO: test the repository ids returned.
assertEquals( "Search Result Hits", expectedResults.length, results.getHits().size() ); assertEquals( "Search Result Hits", expectedResults.size(), results.getHits().size() );
// TODO: test the order of hits.
// TODO: test the value of the hits. for (int i = 0; i < expectedResults.size(); i++)
{
final SearchResultHit expectedResult = expectedResults.get(i);
final SearchResultHit hit = results.getHits().get(i);
assertEquals("artifactid", expectedResult.getArtifactId(), hit.getArtifactId());
assertEquals("groupid", expectedResult.getGroupId(), hit.getGroupId());
assertEquals("version", expectedResult.getVersion(), hit.getVersion());
}
} }
protected ManagedRepositoryConfiguration createRepository( String id, String name, File location ) protected ManagedRepositoryConfiguration createRepository( String id, String name, File location )

View File

@ -19,16 +19,18 @@
* under the License. * under the License.
*/ */
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord; import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact; import org.apache.maven.archiva.model.ArchivaArtifact;
import java.io.File; import java.io.File;
import java.io.IOException;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import junit.framework.AssertionFailedError; import junit.framework.AssertionFailedError;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.repository.content.DefaultPathParser;
import org.apache.maven.archiva.repository.content.PathParser;
import org.apache.maven.archiva.repository.layout.LayoutException;
/** /**
* FileContentIndexPopulator * FileContentIndexPopulator
@ -62,6 +64,11 @@ public Map<String, FileContentRecord> populate( File basedir )
map.put( "test-pom-1.0", createFileContentRecord( repoDir, prefix + "test-pom/1.0/test-pom-1.0.pom" ) ); map.put( "test-pom-1.0", createFileContentRecord( repoDir, prefix + "test-pom/1.0/test-pom-1.0.pom" ) );
map.put( "test-skin-1.0", createFileContentRecord( repoDir, prefix + "test-skin/1.0/test-skin-1.0.pom" ) ); map.put( "test-skin-1.0", createFileContentRecord( repoDir, prefix + "test-skin/1.0/test-skin-1.0.pom" ) );
map.put("ant-1.5.pom", createFileContentRecord(repoDir, "ant/ant/1.5/ant-1.5.pom"));
map.put("ant-1.5.1.pom", createFileContentRecord(repoDir, "ant/ant/1.5.1/ant-1.5.1.pom"));
map.put("ant-junit-1.6.5.pom", createFileContentRecord(repoDir, "ant/ant-junit/1.6.5/ant-junit-1.6.5.pom"));
map.put("ant-optional-1.5.1.pom", createFileContentRecord(repoDir, "ant/ant-optional/1.5.1/ant-optional-1.5.1.pom"));
return map; return map;
} }
@ -78,14 +85,16 @@ private FileContentRecord createFileContentRecord( File repoDir, String path )
record.setRepositoryId( "test-repo" ); record.setRepositoryId( "test-repo" );
record.setFilename( path ); record.setFilename( path );
PathParser pathParser = new DefaultPathParser();
try try
{ {
record.setContents( FileUtils.readFileToString( pathToFile, null ) ); ArtifactReference reference = pathParser.toArtifactReference(path);
ArchivaArtifact artifact = new ArchivaArtifact( reference );
record.setArtifact(artifact);
} }
catch ( IOException e ) catch (LayoutException e)
{ {
e.printStackTrace(); throw new RuntimeException(e);
throw new AssertionFailedError( "Can't load test file contents: " + pathToFile.getAbsolutePath() );
} }
return record; return record;

View File

@ -0,0 +1,6 @@
<project>
<modelVersion>4.0.0</modelVersion>
<groupId>ant</groupId>
<artifactId>ant-junit</artifactId>
<version>1.6.5</version>
</project>

View File

@ -0,0 +1,6 @@
<project>
<modelVersion>4.0.0</modelVersion>
<groupId>ant</groupId>
<artifactId>ant-optional</artifactId>
<version>1.5.1</version>
</project>

View File

@ -0,0 +1,6 @@
<project>
<modelVersion>4.0.0</modelVersion>
<groupId>ant</groupId>
<artifactId>ant</artifactId>
<version>1.5.1</version>
</project>

View File

@ -0,0 +1,6 @@
<project>
<modelVersion>4.0.0</modelVersion>
<groupId>ant</groupId>
<artifactId>ant</artifactId>
<version>1.5</version>
</project>

View File

@ -131,6 +131,7 @@ public boolean hasClassifier()
return StringUtils.isNotEmpty( model.getClassifier() ); return StringUtils.isNotEmpty( model.getClassifier() );
} }
@Override
public int hashCode() public int hashCode()
{ {
final int PRIME = 31; final int PRIME = 31;
@ -146,6 +147,7 @@ public int hashCode()
return result; return result;
} }
@Override
public boolean equals( Object obj ) public boolean equals( Object obj )
{ {
if ( this == obj ) if ( this == obj )
@ -180,6 +182,7 @@ public boolean equals( Object obj )
return true; return true;
} }
@Override
public String toString() public String toString()
{ {
StringBuffer sb = new StringBuffer(); StringBuffer sb = new StringBuffer();

View File

@ -46,6 +46,9 @@
import com.opensymphony.xwork2.ActionContext; import com.opensymphony.xwork2.ActionContext;
import com.opensymphony.xwork2.Preparable; import com.opensymphony.xwork2.Preparable;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.database.constraints.UniqueVersionConstraint;
import org.apache.maven.archiva.indexer.search.SearchResultHit;
/** /**
* Search all indexed fields by the given criteria. * Search all indexed fields by the given criteria.
@ -127,8 +130,6 @@ public class SearchAction
private boolean fromResultsPage; private boolean fromResultsPage;
private int num;
public boolean isFromResultsPage() public boolean isFromResultsPage()
{ {
return fromResultsPage; return fromResultsPage;
@ -231,7 +232,8 @@ public String quickSearch()
return GlobalResults.ACCESS_TO_NO_REPOS; return GlobalResults.ACCESS_TO_NO_REPOS;
} }
if( SearchUtil.isBytecodeSearch( q ) ) final boolean isbytecodeSearch = SearchUtil.isBytecodeSearch( q );
if( isbytecodeSearch )
{ {
results = crossRepoSearch.searchForBytecode( getPrincipal(), selectedRepos, SearchUtil.removeBytecodeKeyword( q ), limits ); results = crossRepoSearch.searchForBytecode( getPrincipal(), selectedRepos, SearchUtil.removeBytecodeKeyword( q ), limits );
} }
@ -274,9 +276,41 @@ public String quickSearch()
buildCompleteQueryString( q ); buildCompleteQueryString( q );
} }
if (!isbytecodeSearch)
{
//Lets get the versions for the artifact we just found and display them
//Yes, this is in the lucene index but its more challenging to get them out when we are searching by project
for (SearchResultHit resultHit : results.getHits())
{
final List<String> versions = dao.query(new UniqueVersionConstraint(getObservableRepos(), resultHit.getGroupId(), resultHit.getArtifactId()));
if (versions != null && !versions.isEmpty())
{
resultHit.setVersion(null);
resultHit.setVersions(filterTimestampedSnapshots(versions));
}
}
}
return SUCCESS; return SUCCESS;
} }
/**
* Remove timestamped snapshots from versions
*/
private static List<String> filterTimestampedSnapshots(List<String> versions)
{
final List<String> filtered = new ArrayList<String>();
for (final String version : versions)
{
final String baseVersion = VersionUtil.getBaseVersion(version);
if (!filtered.contains(baseVersion))
{
filtered.add(baseVersion);
}
}
return filtered;
}
public String findArtifact() public String findArtifact()
throws Exception throws Exception
{ {
@ -329,7 +363,6 @@ private List<String> getObservableRepos()
catch ( AccessDeniedException e ) catch ( AccessDeniedException e )
{ {
getLogger().warn( e.getMessage(), e ); getLogger().warn( e.getMessage(), e );
// TODO: pass this onto the screen.
} }
catch ( ArchivaSecurityException e ) catch ( ArchivaSecurityException e )
{ {

View File

@ -108,10 +108,10 @@ public String artifact()
this.repositoryId = this.repositoryId =
repoBrowsing.getRepositoryId( getPrincipal(), getObservableRepos(), groupId, artifactId, version ); repoBrowsing.getRepositoryId( getPrincipal(), getObservableRepos(), groupId, artifactId, version );
} }
catch ( ObjectNotFoundException oe ) catch ( ObjectNotFoundException e )
{ {
addActionError( "Unable to find project model for [" + groupId + ":" + artifactId + ":" + version + "]." ); getLogger().debug(e.getMessage(), e);
addActionError( e.getMessage() );
return ERROR; return ERROR;
} }
@ -208,6 +208,7 @@ private List<String> getObservableRepos()
return Collections.emptyList(); return Collections.emptyList();
} }
@Override
public void validate() public void validate()
{ {
if ( StringUtils.isBlank( groupId ) ) if ( StringUtils.isBlank( groupId ) )

View File

@ -20,7 +20,7 @@
<%@ taglib uri="/struts-tags" prefix="s" %> <%@ taglib uri="/struts-tags" prefix="s" %>
<%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c" %> <%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c" %>
<%@ taglib prefix="fn" uri="http://java.sun.com/jsp/jstl/functions" %> <%@ taglib prefix="fn" uri="http://java.sun.com/jsp/jstl/functions" %>
<%@ taglib prefix="my" tagdir="/WEB-INF/tags" %> <%@ taglib prefix="archiva" tagdir="/WEB-INF/tags" %>
<html> <html>
<head> <head>
@ -135,10 +135,10 @@
<s:param name="className" value="%{#attr.className}"/> <s:param name="className" value="%{#attr.className}"/>
<s:param name="repositoryId" value="%{#attr.repositoryId}"/> <s:param name="repositoryId" value="%{#attr.repositoryId}"/>
<s:param name="filterSearch" value="%{#attr.filterSearch}"/> <s:param name="filterSearch" value="%{#attr.filterSearch}"/>
<s:param name="fromResultsPage" value="true"/> <s:param name="fromResultsPage" value="true"/>
<s:param name="currentPage" value="%{#attr.currentPage - 1}"/> <s:param name="currentPage" value="%{#attr.currentPage - 1}"/>
<s:param name="searchResultsOnly" value="%{#attr.searchResultsOnly}"/> <s:param name="searchResultsOnly" value="%{#attr.searchResultsOnly}"/>
<s:param name="completeQueryString" value="%{#attr.completeQueryString}"/> <s:param name="completeQueryString" value="%{#attr.completeQueryString}"/>
</s:url> </s:url>
</c:set> </c:set>
<c:set var="nextPageUrl"> <c:set var="nextPageUrl">
@ -151,10 +151,10 @@
<s:param name="className" value="%{#attr.className}"/> <s:param name="className" value="%{#attr.className}"/>
<s:param name="repositoryId" value="%{#attr.repositoryId}"/> <s:param name="repositoryId" value="%{#attr.repositoryId}"/>
<s:param name="filterSearch" value="%{#attr.filterSearch}"/> <s:param name="filterSearch" value="%{#attr.filterSearch}"/>
<s:param name="fromResultsPage" value="true"/> <s:param name="fromResultsPage" value="true"/>
<s:param name="currentPage" value="%{#attr.currentPage + 1}"/> <s:param name="currentPage" value="%{#attr.currentPage + 1}"/>
<s:param name="searchResultsOnly" value="%{#attr.searchResultsOnly}"/> <s:param name="searchResultsOnly" value="%{#attr.searchResultsOnly}"/>
<s:param name="completeQueryString" value="%{#attr.completeQueryString}"/> <s:param name="completeQueryString" value="%{#attr.completeQueryString}"/>
</s:url> </s:url>
</c:set> </c:set>
</c:if> </c:if>
@ -259,12 +259,12 @@
<c:choose> <c:choose>
<c:when test="${not empty (record.groupId)}"> <c:when test="${not empty (record.groupId)}">
<h3 class="artifact-title"> <h3 class="artifact-title">
<my:showArtifactTitle groupId="${record.groupId}" artifactId="${record.artifactId}"
version="${record.version}"/> <archiva:showArtifactTitle groupId="${record.groupId}" artifactId="${record.artifactId}"/>
</h3> </h3>
<p> <p>
<my:showArtifactLink groupId="${record.groupId}" artifactId="${record.artifactId}" <archiva:showArtifactLink groupId="${record.groupId}" artifactId="${record.artifactId}"
version="${record.version}" versions="${record.versions}" repositoryId="${record.repositoryId}"/> versions="${record.versions}" repositoryId="${record.repositoryId}"/>
</p> </p>
</c:when> </c:when>
<c:otherwise> <c:otherwise>
@ -292,12 +292,11 @@
<c:choose> <c:choose>
<c:when test="${not empty (artifactModel.groupId)}"> <c:when test="${not empty (artifactModel.groupId)}">
<h3 class="artifact-title"> <h3 class="artifact-title">
<my:showArtifactTitle groupId="${artifactModel.groupId}" artifactId="${artifactModel.artifactId}" <archiva:showArtifactTitle groupId="${artifactModel.groupId}" artifactId="${artifactModel.artifactId}"/>
version="${artifactModel.version}"/>
</h3> </h3>
<p> <p>
<my:showArtifactLink groupId="${artifactModel.groupId}" artifactId="${artifactModel.artifactId}" <archiva:showArtifactLink groupId="${artifactModel.groupId}" artifactId="${artifactModel.artifactId}"
version="${artifactModel.version}" versions="${artifactModel.versions}"/> versions="${artifactModel.versions}"/>
</p> </p>
</c:when> </c:when>
<c:otherwise> <c:otherwise>

View File

@ -30,10 +30,7 @@
<%@ attribute name="repositoryId" %> <%@ attribute name="repositoryId" %>
<span class="artifact-link"> <span class="artifact-link">
<a href="${pageContext.request.contextPath}/repository/${repositoryId}">${repositoryId}</a> <archiva:groupIdLink var="${groupId}" includeTop="false" />
<strong> : </strong>
<archiva:groupIdLink var="${groupId}" includeTop="false" />
<c:if test="${!empty (artifactId)}"> <c:if test="${!empty (artifactId)}">
<c:set var="url"> <c:set var="url">
<s:url action="browseArtifact" namespace="/"> <s:url action="browseArtifact" namespace="/">

View File

@ -152,7 +152,6 @@ public void testQuickSearchArtifactBytecodeSearch()
FileContentRecord record = new FileContentRecord(); FileContentRecord record = new FileContentRecord();
record.setRepositoryId( "repo1.mirror" ); record.setRepositoryId( "repo1.mirror" );
record.setArtifact( artifact ); record.setArtifact( artifact );
record.setContents( "org.apache.archiva:archiva-test:1.0:jar org.apache.archiva.test.MyClassName" );
record.setFilename( "archiva-test-1.0.jar" ); record.setFilename( "archiva-test-1.0.jar" );
results.addHit( record ); results.addHit( record );
@ -198,7 +197,6 @@ public void testQuickSearchArtifactRegularSearch()
FileContentRecord record = new FileContentRecord(); FileContentRecord record = new FileContentRecord();
record.setRepositoryId( "repo1.mirror" ); record.setRepositoryId( "repo1.mirror" );
record.setArtifact( artifact ); record.setArtifact( artifact );
record.setContents( "org.apache.archiva:archiva-test:1.0:jar" );
record.setFilename( "archiva-test-1.0.jar" ); record.setFilename( "archiva-test-1.0.jar" );
results.addHit( record ); results.addHit( record );

View File

@ -518,6 +518,11 @@
<artifactId>lucene-core</artifactId> <artifactId>lucene-core</artifactId>
<version>2.4.0</version> <version>2.4.0</version>
</dependency> </dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queries</artifactId>
<version>2.4.0</version>
</dependency>
<dependency> <dependency>
<groupId>javax.mail</groupId> <groupId>javax.mail</groupId>
<artifactId>mail</artifactId> <artifactId>mail</artifactId>