[MRM-516] Search returns results from all repositories, regardless of security.

Added principal and selectedRepos parameters to the CrossRepositorySearch interface and its default implementation: searchForTerm(), searchForBytecode() and searchForChecksum() now only consult the selected repositories.



git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/branches/archiva-backend-security@591500 13f79535-47bb-0310-9956-ffa450edef68
Joakim Erdfelt 2007-11-02 22:35:07 +00:00
parent 597723968f
commit c6bf224e8b
11 changed files with 251 additions and 224 deletions

View File

@@ -1,5 +1,7 @@
package org.apache.maven.archiva.indexer.search;
import java.util.List;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -35,7 +37,7 @@ public interface CrossRepositorySearch
* @param limits the limits to apply to the search results.
* @return the results.
*/
public SearchResults searchForTerm( String term, SearchResultLimits limits );
public SearchResults searchForTerm( String principal, List<String> selectedRepos, String term, SearchResultLimits limits );
/**
* Search for the specific bytecode across all repositories.
@@ -44,7 +46,7 @@ public interface CrossRepositorySearch
* @param limits the limits to apply to the search results.
* @return the results.
*/
public SearchResults searchForBytecode( String term, SearchResultLimits limits );
public SearchResults searchForBytecode( String principal, List<String> selectedRepos, String term, SearchResultLimits limits );
/**
* Search for the specific checksum string across all repositories.
@@ -53,5 +55,5 @@ public interface CrossRepositorySearch
* @param limits the limits to apply to the search results.
* @return the results.
*/
public SearchResults searchForChecksum( String checksum, SearchResultLimits limits );
public SearchResults searchForChecksum( String principal, List<String> selectedRepos, String checksum, SearchResultLimits limits );
}
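For illustration only, a minimal caller-side sketch of the revised interface. The repository ids below are made-up values, and the "guest" principal simply mirrors the value used by the test changes later in this commit; neither is defined by the interface itself.

import java.util.Arrays;
import java.util.List;

import org.apache.maven.archiva.indexer.search.CrossRepositorySearch;
import org.apache.maven.archiva.indexer.search.SearchResultLimits;
import org.apache.maven.archiva.indexer.search.SearchResults;

public class CrossRepositorySearchUsageSketch
{
    public SearchResults findJunit( CrossRepositorySearch search )
    {
        // Only repositories listed here (and locally indexed) are consulted, so
        // results can no longer leak from repositories the user may not observe.
        List<String> selectedRepos = Arrays.asList( "internal", "snapshots" );

        SearchResultLimits limits = new SearchResultLimits( 0 );
        limits.setPageSize( 20 );

        // "guest" as the principal mirrors the test in this commit.
        return search.searchForTerm( "guest", selectedRepos, "junit", limits );
    }
}

Note that passing an empty selectedRepos list matches no repository ids in the default implementation, so such a search simply returns empty results.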

View File

@@ -33,6 +33,9 @@ import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.bytecode.BytecodeHandlers;
import org.apache.maven.archiva.indexer.filecontent.FileContentHandlers;
import org.apache.maven.archiva.indexer.functors.UserAllowedToSearchRepositoryPredicate;
@@ -63,40 +66,20 @@ public class DefaultCrossRepositorySearch
implements CrossRepositorySearch, RegistryListener, Initializable
{
/**
* @plexus.requirement role-hint="bytecode"
* @plexus.requirement role-hint="lucene"
*/
private Transformer bytecodeIndexTransformer;
/**
* @plexus.requirement role-hint="filecontent"
*/
private Transformer filecontentIndexTransformer;
/**
* @plexus.requirement role-hint="hashcodes"
*/
private Transformer hashcodesIndexTransformer;
/**
* @plexus.requirement role-hint="searchable"
*/
private Transformer searchableTransformer;
/**
* @plexus.requirement role-hint="index-exists"
*/
private Predicate indexExistsPredicate;
private RepositoryContentIndexFactory indexFactory;
/**
* @plexus.requirement
*/
private ArchivaConfiguration configuration;
private List localIndexedRepositories = new ArrayList();
private List<ManagedRepositoryConfiguration> localIndexedRepositories = new ArrayList<ManagedRepositoryConfiguration>();
public SearchResults searchForChecksum( String checksum, SearchResultLimits limits )
public SearchResults searchForChecksum( String principal, List<String> selectedRepos, String checksum, SearchResultLimits limits )
{
List indexes = getHashcodeIndexes();
List<RepositoryContentIndex> indexes = getHashcodeIndexes( principal, selectedRepos );
try
{
@@ -117,9 +100,9 @@ public class DefaultCrossRepositorySearch
return new SearchResults();
}
public SearchResults searchForBytecode( String term, SearchResultLimits limits )
public SearchResults searchForBytecode( String principal, List<String> selectedRepos, String term, SearchResultLimits limits )
{
List indexes = getHashcodeIndexes();
List<RepositoryContentIndex> indexes = getHashcodeIndexes( principal, selectedRepos );
try
{
@@ -139,9 +122,9 @@ public class DefaultCrossRepositorySearch
return new SearchResults();
}
public SearchResults searchForTerm( String term, SearchResultLimits limits )
public SearchResults searchForTerm( String principal, List<String> selectedRepos, String term, SearchResultLimits limits )
{
List indexes = getFileContentIndexes();
List<RepositoryContentIndex> indexes = getFileContentIndexes( principal, selectedRepos );
try
{
@@ -161,7 +144,7 @@ public class DefaultCrossRepositorySearch
return new SearchResults();
}
private SearchResults searchAll( LuceneQuery luceneQuery, SearchResultLimits limits, List indexes )
private SearchResults searchAll( LuceneQuery luceneQuery, SearchResultLimits limits, List<RepositoryContentIndex> indexes )
{
org.apache.lucene.search.Query specificQuery = luceneQuery.getLuceneQuery();
@@ -175,12 +158,11 @@ public class DefaultCrossRepositorySearch
// Setup the converter
LuceneEntryConverter converter = null;
RepositoryContentIndex index = (RepositoryContentIndex) indexes.get( 0 );
RepositoryContentIndex index = indexes.get( 0 );
converter = index.getEntryConverter();
// Process indexes into an array of Searchables.
List searchableList = new ArrayList( indexes );
CollectionUtils.transform( searchableList, searchableTransformer );
List<Searchable> searchableList = toSearchables( indexes );
Searchable searchables[] = new Searchable[searchableList.size()];
searchableList.toArray( searchables );
@@ -258,52 +240,102 @@ public class DefaultCrossRepositorySearch
return results;
}
private Predicate getAllowedToSearchReposPredicate()
private List<Searchable> toSearchables( List<RepositoryContentIndex> indexes )
{
return new UserAllowedToSearchRepositoryPredicate();
List<Searchable> searchableList = new ArrayList<Searchable>();
for ( RepositoryContentIndex contentIndex : indexes )
{
try
{
searchableList.add( contentIndex.getSearchable() );
}
catch ( RepositoryIndexSearchException e )
{
getLogger().warn( "Unable to get searchable for index [" + contentIndex.getId() + "] :"
+ e.getMessage(), e );
}
}
return searchableList;
}
public List getBytecodeIndexes()
public List<RepositoryContentIndex> getBytecodeIndexes( String principal, List<String> selectedRepos )
{
List ret = new ArrayList();
List<RepositoryContentIndex> ret = new ArrayList<RepositoryContentIndex>();
synchronized ( this.localIndexedRepositories )
for ( ManagedRepositoryConfiguration repoConfig : localIndexedRepositories )
{
ret.addAll( CollectionUtils.select( this.localIndexedRepositories, getAllowedToSearchReposPredicate() ) );
CollectionUtils.transform( ret, bytecodeIndexTransformer );
CollectionUtils.filter( ret, indexExistsPredicate );
// Only used selected repo
if ( selectedRepos.contains( repoConfig.getId() ) )
{
RepositoryContentIndex index = indexFactory.createBytecodeIndex( repoConfig );
// If they exist.
if ( indexExists( index ) )
{
ret.add( index );
}
}
}
return ret;
}
public List getFileContentIndexes()
public List<RepositoryContentIndex> getFileContentIndexes( String principal, List<String> selectedRepos )
{
List ret = new ArrayList();
List<RepositoryContentIndex> ret = new ArrayList<RepositoryContentIndex>();
synchronized ( this.localIndexedRepositories )
for ( ManagedRepositoryConfiguration repoConfig : localIndexedRepositories )
{
ret.addAll( CollectionUtils.select( this.localIndexedRepositories, getAllowedToSearchReposPredicate() ) );
CollectionUtils.transform( ret, filecontentIndexTransformer );
CollectionUtils.filter( ret, indexExistsPredicate );
// Only used selected repo
if ( selectedRepos.contains( repoConfig.getId() ) )
{
RepositoryContentIndex index = indexFactory.createFileContentIndex( repoConfig );
// If they exist.
if ( indexExists( index ) )
{
ret.add( index );
}
}
}
return ret;
}
public List getHashcodeIndexes()
public List<RepositoryContentIndex> getHashcodeIndexes( String principal, List<String> selectedRepos )
{
List ret = new ArrayList();
List<RepositoryContentIndex> ret = new ArrayList<RepositoryContentIndex>();
synchronized ( this.localIndexedRepositories )
for ( ManagedRepositoryConfiguration repoConfig : localIndexedRepositories )
{
ret.addAll( CollectionUtils.select( this.localIndexedRepositories, getAllowedToSearchReposPredicate() ) );
CollectionUtils.transform( ret, hashcodesIndexTransformer );
CollectionUtils.filter( ret, indexExistsPredicate );
// Only used selected repo
if ( selectedRepos.contains( repoConfig.getId() ) )
{
RepositoryContentIndex index = indexFactory.createHashcodeIndex( repoConfig );
// If they exist.
if ( indexExists( index ) )
{
ret.add( index );
}
}
}
return ret;
}
private boolean indexExists( RepositoryContentIndex index )
{
try
{
return index.exists();
}
catch ( RepositoryIndexException e )
{
getLogger().info(
"Repository Content Index [" + index.getId() + "] for repository ["
+ index.getRepository().getId() + "] does not exist yet in ["
+ index.getIndexDirectory().getAbsolutePath() + "]." );
return false;
}
}
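A side note on searchAll(): the Searchable[] assembled there from toSearchables() is typically combined with Lucene's MultiSearcher in the Lucene generation Archiva used at this time. The body of searchAll() is not shown in this hunk, so the following is only a hedged sketch of the idea, not the committed code.

import java.io.IOException;

import org.apache.lucene.search.Hits;
import org.apache.lucene.search.MultiSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Searchable;

public class MultiIndexSearchSketch
{
    public int countHits( Searchable[] searchables, Query query )
        throws IOException
    {
        // One MultiSearcher spans every per-repository index that survived the
        // selectedRepos / indexExists filtering above.
        MultiSearcher searcher = new MultiSearcher( searchables );
        try
        {
            Hits hits = searcher.search( query );
            // In searchAll() each hit is converted back into a repository record
            // via the index's LuceneEntryConverter and collected into SearchResults.
            return hits.length();
        }
        finally
        {
            searcher.close();
        }
    }
}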
public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
{

View File

@@ -1,46 +0,0 @@
package org.apache.maven.archiva.indexer;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import junit.framework.Test;
import junit.framework.TestSuite;
/**
* AllTests - conveinence test suite for IDE users.
*
* @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class AllTests
{
public static Test suite()
{
TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.indexer" );
//$JUnit-BEGIN$
suite.addTest( org.apache.maven.archiva.indexer.bytecode.AllTests.suite() );
suite.addTest( org.apache.maven.archiva.indexer.hashcodes.AllTests.suite() );
suite.addTest( org.apache.maven.archiva.indexer.query.AllTests.suite() );
suite.addTest( org.apache.maven.archiva.indexer.search.AllTests.suite() );
//$JUnit-END$
return suite;
}
}

View File

@@ -1,41 +0,0 @@
package org.apache.maven.archiva.indexer.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import junit.framework.Test;
import junit.framework.TestSuite;
/**
* AllTests - conveinence test suite for IDE users.
*
* @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class AllTests
{
public static Test suite()
{
TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.indexer.search" );
//$JUnit-BEGIN$
suite.addTestSuite( DefaultCrossRepositorySearchTest.class );
//$JUnit-END$
return suite;
}
}

View File

@@ -19,15 +19,16 @@ package org.apache.maven.archiva.indexer.search;
* under the License.
*/
import junit.framework.AssertionFailedError;
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecordLoader;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.io.File;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import junit.framework.AssertionFailedError;
/**
* BytecodeIndexPopulator
@@ -39,10 +40,10 @@ public class BytecodeIndexPopulator
implements IndexPopulator
{
public Map getObjectMap()
public Map<String,ArchivaArtifact> getObjectMap()
{
Map dumps = new HashMap();
Map<String,ArchivaArtifact> dumps = new HashMap<String,ArchivaArtifact>();
// archiva-common-1.0.jar.txt
dumps.put( "archiva-common",
@@ -86,15 +87,13 @@ public class BytecodeIndexPopulator
return artifact;
}
public Map populate( File basedir )
public Map<String, BytecodeRecord> populate( File basedir )
{
Map records = new HashMap();
Map<String, BytecodeRecord> records = new HashMap<String, BytecodeRecord>();
Map artifactDumps = getObjectMap();
for ( Iterator iter = artifactDumps.entrySet().iterator(); iter.hasNext(); )
for ( Entry<String, ArchivaArtifact> entry : getObjectMap().entrySet() )
{
Map.Entry entry = (Map.Entry) iter.next();
ArchivaArtifact artifact = (ArchivaArtifact) entry.getValue();
ArchivaArtifact artifact = entry.getValue();
File dumpFile = getDumpFile( basedir, artifact );
BytecodeRecord record = BytecodeRecordLoader.loadRecord( dumpFile, artifact );
record.setRepositoryId( "test-repo" );

View File

@@ -28,10 +28,16 @@ import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.MockConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.util.FileUtils;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
@@ -47,6 +53,7 @@ public class DefaultCrossRepositorySearchTest
private static final String TEST_DEFAULT_REPO_ID = "testDefaultRepo";
@Override
protected void setUp()
throws Exception
{
@@ -86,17 +93,17 @@ public class DefaultCrossRepositorySearchTest
RepositoryContentIndex indexContents = indexFactory.createFileContentIndex( repository );
// Now populate them.
Map hashcodesMap = ( new HashcodesIndexPopulator() ).populate( new File( getBasedir() ) );
Map<String, HashcodesRecord> hashcodesMap = new HashcodesIndexPopulator().populate( new File( getBasedir() ) );
indexHashcode.indexRecords( hashcodesMap.values() );
assertEquals( "Hashcode Key Count", hashcodesMap.size(), indexHashcode.getAllRecordKeys().size() );
assertRecordCount( indexHashcode, hashcodesMap.size() );
Map bytecodeMap = ( new BytecodeIndexPopulator() ).populate( new File( getBasedir() ) );
Map<String, BytecodeRecord> bytecodeMap = new BytecodeIndexPopulator().populate( new File( getBasedir() ) );
indexBytecode.indexRecords( bytecodeMap.values() );
assertEquals( "Bytecode Key Count", bytecodeMap.size(), indexBytecode.getAllRecordKeys().size() );
assertRecordCount( indexBytecode, bytecodeMap.size() );
Map contentMap = ( new FileContentIndexPopulator() ).populate( new File( getBasedir() ) );
Map<String, FileContentRecord> contentMap = new FileContentIndexPopulator().populate( new File( getBasedir() ) );
indexContents.indexRecords( contentMap.values() );
assertEquals( "File Content Key Count", contentMap.size(), indexContents.getAllRecordKeys().size() );
assertRecordCount( indexContents, contentMap.size() );
@@ -125,23 +132,31 @@ public class DefaultCrossRepositorySearchTest
{
CrossRepositorySearch search = lookupCrossRepositorySearch();
SearchResultLimits limits = new SearchResultLimits( 0 );
limits.setPageSize( 20 );
SearchResults results = search.searchForTerm( "org", limits );
assertResults( 1, 7, results );
String expectedRepos[] = new String[] {
TEST_DEFAULT_REPO_ID
};
String expectedResults[] = new String[] {
"org","org2","org3","org4","org5","org6","org7"
};
assertSearchResults( expectedRepos, expectedResults, search, "org" );
}
public void testSearchTerm_Junit()
throws Exception
{
CrossRepositorySearch search = lookupCrossRepositorySearch();
SearchResultLimits limits = new SearchResultLimits( 0 );
limits.setPageSize( 20 );
SearchResults results = search.searchForTerm( "junit", limits );
assertResults( 1, 3, results );
String expectedRepos[] = new String[] {
TEST_DEFAULT_REPO_ID
};
String expectedResults[] = new String[] {
"junit","junit2","junit3"
};
assertSearchResults( expectedRepos, expectedResults, search, "junit" );
}
public void testSearchInvalidTerm()
@@ -149,21 +164,37 @@ public class DefaultCrossRepositorySearchTest
{
CrossRepositorySearch search = lookupCrossRepositorySearch();
SearchResultLimits limits = new SearchResultLimits( 0 );
limits.setPageSize( 20 );
SearchResults results = search.searchForTerm( "monosodium", limits );
assertResults( 1, 0, results );
}
private void assertResults( int repoCount, int hitCount, SearchResults results )
{
assertNotNull( "Search Results should not be null.", results );
assertEquals( "Repository Hits", repoCount, results.getRepositories().size() );
assertEquals( "Search Result Hits", hitCount, results.getHits().size() );
String expectedRepos[] = new String[] {
TEST_DEFAULT_REPO_ID
};
String expectedResults[] = new String[] {
// Nothing.
};
assertSearchResults( expectedRepos, expectedResults, search, "monosodium" );
}
private void assertSearchResults( String expectedRepos[], String expectedResults[], CrossRepositorySearch search, String term )
throws Exception
{
SearchResultLimits limits = new SearchResultLimits( 0 );
limits.setPageSize( 20 );
List<String> selectedRepos = new ArrayList<String>();
selectedRepos.addAll( Arrays.asList( expectedRepos ) );
SearchResults results = search.searchForTerm( "guest", selectedRepos, term, limits );
assertNotNull( "Search Results should not be null.", results );
assertEquals( "Repository Hits", expectedRepos.length, results.getRepositories().size() );
// TODO: test the repository ids returned.
assertEquals( "Search Result Hits", expectedResults.length, results.getHits().size() );
// TODO: test the order of hits.
// TODO: test the value of the hits.
}
protected ManagedRepositoryConfiguration createRepository( String id, String name, File location )
{
ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();

View File

@@ -21,6 +21,7 @@ package org.apache.maven.archiva.indexer.search;
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.io.File;
import java.io.IOException;
@@ -38,14 +39,14 @@ import junit.framework.AssertionFailedError;
public class FileContentIndexPopulator
implements IndexPopulator
{
public Map getObjectMap()
public Map<String, ArchivaArtifact> getObjectMap()
{
return null;
}
public Map populate( File basedir )
public Map<String, FileContentRecord> populate( File basedir )
{
Map map = new HashMap();
Map<String, FileContentRecord> map = new HashMap<String, FileContentRecord>();
File repoDir = new File( basedir, "src/test/managed-repository" );

View File

@@ -1,22 +1,48 @@
package org.apache.maven.archiva.indexer.search;
import junit.framework.AssertionFailedError;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecordLoader;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.io.File;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import junit.framework.AssertionFailedError;
/**
* HashcodesIndexPopulator
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public class HashcodesIndexPopulator
implements IndexPopulator
{
public Map getObjectMap()
public Map<String, ArchivaArtifact> getObjectMap()
{
Map dumps = new HashMap();
Map<String, ArchivaArtifact> dumps = new HashMap<String, ArchivaArtifact>();
// archiva-common-1.0.jar.txt
dumps.put( "archiva-common",
@@ -52,15 +78,13 @@ public class HashcodesIndexPopulator
return dumps;
}
public Map populate( File basedir )
public Map<String, HashcodesRecord> populate( File basedir )
{
Map records = new HashMap();
Map<String, HashcodesRecord> records = new HashMap<String, HashcodesRecord>();
Map artifactDumps = getObjectMap();
for ( Iterator iter = artifactDumps.entrySet().iterator(); iter.hasNext(); )
for ( Entry<String, ArchivaArtifact> entry : getObjectMap().entrySet() )
{
Map.Entry entry = (Map.Entry) iter.next();
ArchivaArtifact artifact = (ArchivaArtifact) entry.getValue();
ArchivaArtifact artifact = entry.getValue();
File dumpFile = getDumpFile( basedir, artifact );
HashcodesRecord record = HashcodesRecordLoader.loadRecord( dumpFile, artifact );
record.setRepositoryId( "test-repo" );

View File

@@ -19,6 +19,9 @@ package org.apache.maven.archiva.indexer.search;
* under the License.
*/
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.io.File;
import java.util.Map;
@@ -30,7 +33,7 @@ import java.util.Map;
*/
public interface IndexPopulator
{
public Map getObjectMap();
public Map<String, ArchivaArtifact> getObjectMap();
public Map populate( File basedir );
public Map<String, ? extends LuceneRepositoryContentRecord> populate( File basedir );
}

View File

@@ -25,29 +25,9 @@
<description>DefaultCrossRepositorySearch</description>
<requirements>
<requirement>
<role>org.apache.commons.collections.Transformer</role>
<role-hint>bytecode</role-hint>
<field-name>bytecodeIndexTransformer</field-name>
</requirement>
<requirement>
<role>org.apache.commons.collections.Transformer</role>
<role-hint>filecontent</role-hint>
<field-name>filecontentIndexTransformer</field-name>
</requirement>
<requirement>
<role>org.apache.commons.collections.Transformer</role>
<role-hint>hashcodes</role-hint>
<field-name>hashcodesIndexTransformer</field-name>
</requirement>
<requirement>
<role>org.apache.commons.collections.Transformer</role>
<role-hint>searchable</role-hint>
<field-name>searchableTransformer</field-name>
</requirement>
<requirement>
<role>org.apache.commons.collections.Predicate</role>
<role-hint>index-exists</role-hint>
<field-name>indexExistsPredicate</field-name>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
<field-name>indexFactory</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>

View File

@@ -28,9 +28,15 @@ import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.search.CrossRepositorySearch;
import org.apache.maven.archiva.indexer.search.SearchResultLimits;
import org.apache.maven.archiva.indexer.search.SearchResults;
import org.apache.maven.archiva.security.AccessDeniedException;
import org.apache.maven.archiva.security.ArchivaSecurityException;
import org.apache.maven.archiva.security.ArchivaUser;
import org.apache.maven.archiva.security.PrincipalNotFoundException;
import org.apache.maven.archiva.security.UserRepositories;
import org.codehaus.plexus.xwork.action.PlexusActionSupport;
import java.net.MalformedURLException;
import java.util.Collections;
import java.util.List;
/**
@@ -60,6 +66,16 @@ public class SearchAction
* @plexus.requirement role-hint="default"
*/
private CrossRepositorySearch crossRepoSearch;
/**
* @plexus.requirement
*/
private UserRepositories userRepositories;
/**
* @plexus.requirement role-hint="xwork"
*/
private ArchivaUser archivaUser;
private static final String RESULTS = "results";
@@ -80,7 +96,7 @@ public class SearchAction
SearchResultLimits limits = new SearchResultLimits( 0 );
results = crossRepoSearch.searchForTerm( q, limits );
results = crossRepoSearch.searchForTerm( getPrincipal(), getObservableRepos(), q, limits );
if ( results.isEmpty() )
{
@@ -125,16 +141,42 @@ public class SearchAction
// 1 hit? return it's information directly!
return ARTIFACT;
}
else
{
return RESULTS;
}
return RESULTS;
}
@Override
public String doInput()
{
return INPUT;
}
private String getPrincipal()
{
return archivaUser.getActivePrincipal();
}
private List<String> getObservableRepos()
{
try
{
return userRepositories.getObservableRepositoryIds( getPrincipal() );
}
catch ( PrincipalNotFoundException e )
{
getLogger().warn( e.getMessage(), e );
}
catch ( AccessDeniedException e )
{
getLogger().warn( e.getMessage(), e );
// TODO: pass this onto the screen.
}
catch ( ArchivaSecurityException e )
{
getLogger().warn( e.getMessage(), e );
}
return Collections.emptyList();
}
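Worth noting: every security exception above falls through to Collections.emptyList(), and an empty selectedRepos list matches no repository ids in DefaultCrossRepositorySearch, so the action fails closed rather than exposing all repositories. A hypothetical caller-side guard that makes this explicit, shown as a sketch only and not part of this commit:

import java.util.List;

import org.apache.maven.archiva.indexer.search.CrossRepositorySearch;
import org.apache.maven.archiva.indexer.search.SearchResultLimits;
import org.apache.maven.archiva.indexer.search.SearchResults;

public class GuardedSearchSketch
{
    public SearchResults search( CrossRepositorySearch crossRepoSearch, String principal,
                                 List<String> observableRepos, String term )
    {
        if ( observableRepos.isEmpty() )
        {
            // Fail closed: the user can observe no repositories, so skip the
            // Lucene work and return an empty result set directly.
            return new SearchResults();
        }

        SearchResultLimits limits = new SearchResultLimits( 0 );
        return crossRepoSearch.searchForTerm( principal, observableRepos, term, limits );
    }
}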
public String getQ()
{