Fixing tests after refactoring

This commit is contained in:
Martin Stockhammer 2019-06-30 22:47:24 +02:00
parent 23099bcb58
commit 9ca52f4e27
32 changed files with 1183 additions and 138 deletions

View File

@ -606,7 +606,7 @@ public class DefaultArchivaConfiguration
if (!writeFile("user configuration", userConfigFilename, contents)) {
fileLocation = altConfigFilename;
if (!writeFile("alternative configuration", altConfigFilename, contents)) {
if (!writeFile("alternative configuration", altConfigFilename, contents, true)) {
throw new RegistryException(
"Unable to create configuration file in either user [" + userConfigFilename + "] or alternative ["
+ altConfigFilename
@ -632,6 +632,10 @@ public class DefaultArchivaConfiguration
return section == null ? new CommonsConfigurationRegistry(new BaseConfiguration()) : section;
}
private boolean writeFile(String filetype, String path, String contents) {
return writeFile( filetype, path, contents, false );
}
/**
* Attempts to write the contents to a file, if an IOException occurs, return false.
* <p/>
@ -642,14 +646,18 @@ public class DefaultArchivaConfiguration
* @param contents the contents to write.
* @return true if write successful.
*/
private boolean writeFile(String filetype, String path, String contents) {
private boolean writeFile(String filetype, String path, String contents, boolean createDirs) {
Path file = Paths.get(path);
try {
// Check parent directory (if it is declared)
if (file.getParent() != null) {
final Path parent = file.getParent();
if (parent != null) {
// Check that directory exists
if (!Files.isDirectory(file.getParent())) {
if (!Files.exists( parent ) && createDirs) {
Files.createDirectories( parent );
}
if (!Files.isDirectory(parent)) {
// Directory to file must exist for file to be created
return false;
}

View File

@ -69,8 +69,7 @@ public class RepositoryGroupConfiguration
/**
*
* When to run the index merging for this group.
* No default value.
*
*
*/
private String cronExpression = "";

View File

@ -34,6 +34,7 @@ import org.springframework.test.context.ContextConfiguration;
import javax.inject.Inject;
import java.nio.charset.Charset;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
@ -508,7 +509,7 @@ public class ArchivaConfigurationTest
throws Exception
{
String propFile = System.getProperty( ArchivaConfiguration.USER_CONFIG_PROPERTY );
System.setProperty( ArchivaConfiguration.USER_CONFIG_PROPERTY, "${basedir}/target/*intentionally:invalid*/.m2/archiva-user.xml" );
System.setProperty( ArchivaConfiguration.USER_CONFIG_PROPERTY, "/../../..//*intentionally:invalid*/.m2/archiva-user.xml" );
ArchivaConfiguration archivaConfiguration =
lookup( ArchivaConfiguration.class, "test-not-allowed-to-write-to-both" );
Configuration config = archivaConfiguration.getConfiguration();

View File

@ -283,8 +283,8 @@
<bean name="archivaConfiguration#test-not-allowed-to-write-to-both" class="org.apache.archiva.configuration.DefaultArchivaConfiguration">
<property name="registry" ref="registry#test-not-allowed-to-write-to-both"/>
<property name="userConfigFilename" value="${basedir}/target/*intentionally:invalid*/.m2/archiva-user.xml"/>
<property name="altConfigFilename" value="${basedir}/target/*intentionally:invalid*/conf/archiva.xml"/>
<property name="userConfigFilename" value="/../../..//target/*intentionally:invalid*/.m2/archiva-user.xml"/>
<property name="altConfigFilename" value="/../../..//target/*intentionally:invalid*/conf/archiva.xml"/>
</bean>
<bean name="registry#test-not-allowed-to-write-to-both" class="org.apache.archiva.redback.components.registry.commons.CommonsConfigurationRegistry">
@ -292,9 +292,9 @@
<value>
<![CDATA[
<configuration>
<xml fileName="${basedir}/target/*intentionally:invalid*/.m2/archiva-user.xml" config-optional="true"
<xml fileName="/../../..//*intentionally:invalid*/.m2/archiva-user.xml" config-optional="true"
config-name="org.apache.archiva.user" config-at="org.apache.archiva"/>
<xml fileName="${basedir}/target/*intentionally:invalid*/conf/archiva.xml" config-optional="true"
<xml fileName="/../../..//*intentionally:invalid*/conf/archiva.xml" config-optional="true"
config-name="org.apache.archiva.user" config-at="org.apache.archiva"/>
</configuration>
]]>

View File

@ -246,6 +246,11 @@ public class RepositoryProviderMock implements RepositoryProvider
public RepositoryGroup createRepositoryGroup(RepositoryGroupConfiguration configuration) throws RepositoryException {
return null;
}
@Override
public EditableRepositoryGroup createRepositoryGroup(String id, String name) {
return null;
}
@Override
public void updateRepositoryGroupInstance(EditableRepositoryGroup repositoryGroup, RepositoryGroupConfiguration configuration) throws RepositoryException {

View File

@ -23,6 +23,8 @@ import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH;
/**
* @author Olivier Lamy
* @since 1.4-M1
@ -44,7 +46,7 @@ public class RepositoryGroup
/**
* The path of the merged index.
*/
private String mergedIndexPath = ".indexer";
private String mergedIndexPath = DEFAULT_INDEX_PATH;
/**
* The TTL (time to live) of the repo group's merged index.

View File

@ -30,6 +30,9 @@ import java.util.List;
public interface ArchivaIndexManager {
String DEFAULT_INDEX_PATH=".indexer";
String DEFAULT_PACKED_INDEX_PATH=".index";
/**
* Compresses the index to a more dense packed format.
* @param context

View File

@ -21,6 +21,8 @@ package org.apache.archiva.indexer.merger;
import java.nio.file.Path;
import java.util.Collection;
import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH;
/**
* @author Olivier Lamy
*/
@ -41,7 +43,7 @@ public class IndexMergerRequest
*/
private String groupId;
private String mergedIndexPath = ".indexer";
private String mergedIndexPath = DEFAULT_INDEX_PATH;
private int mergedIndexTtl;

View File

@ -128,4 +128,16 @@ public interface StorageAsset
* @throws UnsupportedOperationException
*/
Path getFilePath() throws UnsupportedOperationException;
/**
* Returns true, if there is a parent to this asset.
* @return
*/
boolean hasParent();
/**
* Returns the parent of this asset.
* @return The asset, or <code>null</code>, if it does not exist.
*/
StorageAsset getParent();
}

View File

@ -29,6 +29,9 @@ import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.Path;
import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH;
import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_PACKED_INDEX_PATH;
/**
*
* This feature provides some information about index creation.
@ -36,8 +39,6 @@ import java.nio.file.Path;
*/
public class IndexCreationFeature extends AbstractFeature implements RepositoryFeature<IndexCreationFeature>{
public static final String DEFAULT_INDEX_PATH = ".indexer";
public static final String DEFAULT_PACKED_INDEX_PATH = ".index";
private boolean skipPackedIndexCreation = false;

View File

@ -261,8 +261,11 @@ public abstract class AbstractRepository implements EditableRepository, Reposito
@Override
public void setSchedulingDefinition(String cronExpression) {
CronParser parser = new CronParser(CRON_DEFINITION);
parser.parse(cronExpression).validate();
if (StringUtils.isNotEmpty( cronExpression ))
{
CronParser parser = new CronParser( CRON_DEFINITION );
parser.parse( cronExpression ).validate( );
}
this.schedulingDefinition = cronExpression;
}

View File

@ -83,7 +83,8 @@ public class AbstractRepositoryGroup extends AbstractRepository implements Edita
try {
repositories.clear();
for(ManagedRepository repo : newRepositories) {
repositories.put(repo.getId(), repo);
if (repo!=null)
repositories.put(repo.getId(), repo);
}
} finally {
rwl.writeLock().unlock();
@ -94,7 +95,8 @@ public class AbstractRepositoryGroup extends AbstractRepository implements Edita
public void addRepository(ManagedRepository repository) {
rwl.writeLock().lock();
try {
repositories.put(repository.getId(), repository);
if (repository!=null)
repositories.put(repository.getId(), repository);
} finally {
rwl.writeLock().unlock();
}
@ -104,7 +106,8 @@ public class AbstractRepositoryGroup extends AbstractRepository implements Edita
public void addRepository(int index, ManagedRepository repository) {
rwl.writeLock().lock();
try {
repositories.put(index, repository.getId(), repository);
if (repository!=null)
repositories.put(index, repository.getId(), repository);
} finally {
rwl.writeLock().unlock();
}

View File

@ -42,13 +42,17 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH;
/**
* Registry for repositories. This is the central entry point for repositories. It provides methods for
* retrieving, adding and removing repositories.
@ -177,6 +181,10 @@ public class RepositoryRegistry implements ConfigurationListener, RepositoryEven
Map<RepositoryType, RepositoryProvider> providerMap = createProviderMap( );
for ( ManagedRepositoryConfiguration repoConfig : managedRepoConfigs )
{
if (managedRepos.containsKey(repoConfig.getId())) {
log.warn( "Duplicate repository definitions for {} in config found.", repoConfig.getId( ) );
continue;
}
RepositoryType repositoryType = RepositoryType.valueOf( repoConfig.getType( ) );
if ( providerMap.containsKey( repositoryType ) )
{
@ -817,7 +825,7 @@ public class RepositoryRegistry implements ConfigurationListener, RepositoryEven
private void setRepositoryGroupDefaults(RepositoryGroupConfiguration repositoryGroupConfiguration) {
if (StringUtils.isEmpty(repositoryGroupConfiguration.getMergedIndexPath())) {
repositoryGroupConfiguration.setMergedIndexPath(".indexer");
repositoryGroupConfiguration.setMergedIndexPath(DEFAULT_INDEX_PATH);
}
if (repositoryGroupConfiguration.getMergedIndexTtl()<=0) {
repositoryGroupConfiguration.setMergedIndexTtl(300);

View File

@ -19,6 +19,7 @@ package org.apache.archiva.repository.content;
* under the License.
*/
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -48,6 +49,7 @@ public class FilesystemAsset implements StorageAsset {
private final static Logger log = LoggerFactory.getLogger(FilesystemAsset.class);
private final Path basePath;
private final Path assetPath;
private final String relativePath;
@ -86,6 +88,15 @@ public class FilesystemAsset implements StorageAsset {
boolean directoryHint = false;
FilesystemAsset(String path, Path assetPath, Path basePath) {
this.assetPath = assetPath;
this.relativePath = path;
this.setPermissionsForNew=false;
this.basePath = basePath;
init();
}
/**
* Creates an asset for the given path. The given paths are not checked.
* The base path should be an absolute path.
@ -97,6 +108,7 @@ public class FilesystemAsset implements StorageAsset {
this.assetPath = assetPath;
this.relativePath = path;
this.setPermissionsForNew = false;
this.basePath = null;
init();
}
@ -109,11 +121,12 @@ public class FilesystemAsset implements StorageAsset {
* @param directory This is only relevant, if the represented file or directory does not exist yet and
* is a hint.
*/
public FilesystemAsset(String path, Path assetPath, boolean directory) {
public FilesystemAsset(String path, Path assetPath, Path basePath, boolean directory) {
this.assetPath = assetPath;
this.relativePath = path;
this.directoryHint = directory;
this.setPermissionsForNew = false;
this.basePath = basePath;
init();
}
@ -126,11 +139,12 @@ public class FilesystemAsset implements StorageAsset {
* @param directory This is only relevant, if the represented file or directory does not exist yet and
* is a hint.
*/
public FilesystemAsset(String path, Path assetPath, boolean directory, boolean setPermissionsForNew) {
public FilesystemAsset(String path, Path assetPath, Path basePath, boolean directory, boolean setPermissionsForNew) {
this.assetPath = assetPath;
this.relativePath = path;
this.directoryHint = directory;
this.setPermissionsForNew = setPermissionsForNew;
this.basePath = basePath;
init();
}
@ -263,6 +277,9 @@ public class FilesystemAsset implements StorageAsset {
} else {
options = new OpenOption[]{StandardOpenOption.APPEND};
}
if (!Files.exists( assetPath )) {
create();
}
return Files.newOutputStream(assetPath, options);
}
@ -344,6 +361,33 @@ public class FilesystemAsset implements StorageAsset {
return assetPath;
}
@Override
public boolean hasParent( )
{
if (basePath!=null && assetPath.equals(basePath)) {
return false;
}
return assetPath.getParent()!=null;
}
@Override
public StorageAsset getParent( )
{
Path parentPath;
if (basePath!=null && assetPath.equals( basePath )) {
parentPath=null;
} else
{
parentPath = assetPath.getParent( );
}
String relativeParent = StringUtils.substringBeforeLast( relativePath,"/");
if (parentPath!=null) {
return new FilesystemAsset( relativeParent, parentPath, basePath, true, setPermissionsForNew );
} else {
return null;
}
}
public void setDefaultFileAcls(List<AclEntry> acl) {
defaultFileAcls = acl;
@ -383,6 +427,9 @@ public class FilesystemAsset implements StorageAsset {
if (directoryHint) {
Files.createDirectories(assetPath);
} else {
if (!Files.exists( assetPath.getParent() )) {
Files.createDirectories( assetPath.getParent( ) );
}
Files.createFile(assetPath);
}
if (setPermissionsForNew) {
@ -393,8 +440,7 @@ public class FilesystemAsset implements StorageAsset {
@Override
public String toString() {
return relativePath;
return relativePath+":"+assetPath;
}
}

View File

@ -142,7 +142,7 @@ public class FilesystemStorage implements RepositoryStorage {
public StorageAsset addAsset( String path, boolean container )
{
try {
return new FilesystemAsset( path, getAssetPath(path), container);
return new FilesystemAsset( path, getAssetPath(path), basePath, container);
} catch (IOException e) {
throw new IllegalArgumentException("Path navigates outside of base directory "+path);
}
@ -158,7 +158,7 @@ public class FilesystemStorage implements RepositoryStorage {
public StorageAsset moveAsset( StorageAsset origin, String destination ) throws IOException
{
boolean container = origin.isContainer();
FilesystemAsset newAsset = new FilesystemAsset( destination, getAssetPath(destination), container );
FilesystemAsset newAsset = new FilesystemAsset( destination, getAssetPath(destination), basePath, container );
Files.move(origin.getFilePath(), newAsset.getFilePath());
return newAsset;
}
@ -167,7 +167,7 @@ public class FilesystemStorage implements RepositoryStorage {
public StorageAsset copyAsset( StorageAsset origin, String destination ) throws IOException
{
boolean container = origin.isContainer();
FilesystemAsset newAsset = new FilesystemAsset( destination, getAssetPath(destination), container );
FilesystemAsset newAsset = new FilesystemAsset( destination, getAssetPath(destination), basePath, container );
if (Files.exists(newAsset.getFilePath())) {
throw new IOException("Destination file exists already "+ newAsset.getFilePath());
}

View File

@ -26,6 +26,9 @@ import org.apache.archiva.repository.ManagedRepository;
import java.util.List;
import java.util.Set;
import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH;
import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_PACKED_INDEX_PATH;
/**
* RepositoryScanner
*
@ -51,9 +54,9 @@ public interface RepositoryScanner
* </p>
*/
static final String[] IGNORABLE_CONTENT =
{ "bin/**", "reports/**", ".index", ".reports/**", ".maven/**", "**/.svn/**", "**/*snapshot-version",
{ "bin/**", "reports/**", DEFAULT_PACKED_INDEX_PATH, ".reports/**", ".maven/**", "**/.svn/**", "**/*snapshot-version",
"*/website/**", "*/licences/**", "**/.htaccess", "**/*.html", "**/*.txt", "**/README*", "**/CHANGELOG*",
"**/KEYS*", ".indexer" };
"**/KEYS*", DEFAULT_INDEX_PATH };
/**
* Scan the repository for content changes.

View File

@ -223,7 +223,8 @@ public class RepositoryScannerInstance
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
if (excludeMatcher.stream().noneMatch(m -> m.matches(file)) && includeMatcher.stream().allMatch(m -> m.matches(file))) {
final Path relativeFile = basePath.relativize( file );
if (excludeMatcher.stream().noneMatch(m -> m.matches(relativeFile)) && includeMatcher.stream().allMatch(m -> m.matches(relativeFile))) {
log.debug( "Walk Step: {}, {}", file );
stats.increaseFileCount();

View File

@ -142,9 +142,6 @@ public class MavenIndexManager implements ArchivaIndexManager {
private ProxyRegistry proxyRegistry;
public static final String DEFAULT_INDEXER_DIR = ".indexer";
public static final String DEFAULT_PACKED_INDEX_DIR = ".index";
private ConcurrentSkipListSet<Path> activeContexts = new ConcurrentSkipListSet<>( );
private static final int WAIT_TIME = 100;
@ -629,12 +626,12 @@ public class MavenIndexManager implements ArchivaIndexManager {
private StorageAsset getIndexPath( Repository repo) throws IOException {
IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
return getIndexPath( icf.getIndexPath(), repo.getLocalPath(), DEFAULT_INDEXER_DIR );
return getIndexPath( icf.getIndexPath(), repo.getLocalPath(), DEFAULT_INDEX_PATH);
}
private StorageAsset getPackedIndexPath(Repository repo) throws IOException {
IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
return getIndexPath(icf.getPackedIndexPath(), repo.getLocalPath(), DEFAULT_PACKED_INDEX_DIR);
return getIndexPath(icf.getPackedIndexPath(), repo.getLocalPath(), DEFAULT_PACKED_INDEX_PATH);
}
private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException
@ -683,11 +680,17 @@ public class MavenIndexManager implements ArchivaIndexManager {
private IndexingContext getIndexingContext( Repository repository, String contextKey, Path repoDir, StorageAsset indexDirectory, String indexUrl ) throws IOException
{
return indexer.createIndexingContext( contextKey, repository.getId( ), repoDir.toFile( ), indexDirectory.getFilePath().toFile( ),
repository.getLocation( ) == null ? null : repository.getLocation( ).toString( ),
indexUrl,
true, false,
indexCreators );
try
{
return indexer.createIndexingContext( contextKey, repository.getId( ), repoDir.toFile( ), indexDirectory.getFilePath( ).toFile( ),
repository.getLocation( ) == null ? null : repository.getLocation( ).toString( ),
indexUrl,
true, false,
indexCreators );
} catch (Exception e) {
log.error("Could not create index for asset {}", indexDirectory);
throw new IOException(e);
}
}
private IndexingContext createManagedContext( ManagedRepository repository ) throws IOException
@ -714,6 +717,7 @@ public class MavenIndexManager implements ArchivaIndexManager {
if ( repository.supportsFeature( IndexCreationFeature.class ) )
{
indexDirectory = getIndexPath(repository);
log.debug( "Preparing index at {}", indexDirectory );
String indexUrl = repositoryDirectory.toUri( ).toURL( ).toExternalForm( );
try
@ -743,7 +747,7 @@ public class MavenIndexManager implements ArchivaIndexManager {
{
if ( rif.getIndexUri( ) == null )
{
return baseUri.resolve( DEFAULT_INDEXER_DIR ).toString( );
return baseUri.resolve( DEFAULT_INDEX_PATH ).toString( );
}
else
{

View File

@ -61,6 +61,8 @@ import java.nio.file.Paths;
import java.util.List;
import java.util.Locale;
import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH;
/**
* @author Olivier Lamy
*/
@ -192,7 +194,7 @@ public abstract class AbstractMavenRepositorySearch
repositoryConfig.setScanned( true );
repositoryConfig.setSnapshots( false );
repositoryConfig.setReleases( true );
repositoryConfig.setIndexDir(".indexer");
repositoryConfig.setIndexDir(DEFAULT_INDEX_PATH);
return repositoryConfig;
}

View File

@ -67,7 +67,7 @@ public class ManagedDefaultRepositoryContent
implements ManagedRepositoryContent
{
private final FilesystemStorage storage;
private FilesystemStorage storage;
private FileTypes filetypes;
@ -79,30 +79,22 @@ public class ManagedDefaultRepositoryContent
private Path repoDir;
FileLockManager lockManager;
public ManagedDefaultRepositoryContent(ManagedRepository repository, FileTypes fileTypes, FileLockManager lockManager) {
super(Collections.singletonList( new DefaultArtifactMappingProvider() ));
setFileTypes( fileTypes );
this.lockManager = lockManager;
setRepository( repository );
try {
storage = new FilesystemStorage(getRepoDir(), lockManager);
} catch (IOException e) {
log.error("Could not initialize the filesystem storage to repository: {}", getRepoDir());
throw new RuntimeException("Fatal error. Could not initialize the filesystem storage for "+getRepoDir());
}
}
public ManagedDefaultRepositoryContent( ManagedRepository repository, List<? extends ArtifactMappingProvider> artifactMappingProviders, FileTypes fileTypes, FileLockManager lockManager )
{
super(artifactMappingProviders==null ? Collections.singletonList( new DefaultArtifactMappingProvider() ) : artifactMappingProviders);
setFileTypes( fileTypes );
this.lockManager = lockManager;
setRepository( repository );
final Path repositoryDir = getRepoDir();
try {
storage = new FilesystemStorage(repositoryDir, lockManager);
} catch (IOException e) {
log.error("Could not initialize the filesystem storage to repository {}: {}", repositoryDir, e.getMessage(), e);
throw new RuntimeException("Fatal error. Could not initialize the filesystem storage for "+repositoryDir+": "+e.getMessage());
}
}
private Path getRepoDir() {
@ -429,6 +421,13 @@ public class ManagedDefaultRepositoryContent
if (repository instanceof EditableManagedRepository) {
((EditableManagedRepository) repository).setContent(this);
}
final Path repositoryDir = getRepoDir();
try {
storage = new FilesystemStorage(repositoryDir, this.lockManager);
} catch (IOException e) {
log.error("Could not initialize the filesystem storage to repository {}: {}", repositoryDir, e.getMessage(), e);
throw new RuntimeException("Fatal error. Could not initialize the filesystem storage for "+repositoryDir+": "+e.getMessage());
}
}
}

View File

@ -46,6 +46,9 @@ import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH;
import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_PACKED_INDEX_PATH;
/**
* Provider for the maven2 repository implementations
*/
@ -156,8 +159,10 @@ public class MavenRepositoryProvider implements RepositoryProvider {
IndexCreationFeature indexCreationFeature = repo.getFeature(IndexCreationFeature.class).get();
indexCreationFeature.setSkipPackedIndexCreation(cfg.isSkipPackedIndexCreation());
indexCreationFeature.setIndexPath(getURIFromString(cfg.getIndexDir()));
indexCreationFeature.setPackedIndexPath(getURIFromString(cfg.getPackedIndexDir()));
String indexDir = StringUtils.isEmpty( cfg.getIndexDir() ) ? DEFAULT_INDEX_PATH : cfg.getIndexDir();
String packedIndexDir = StringUtils.isEmpty( cfg.getPackedIndexDir() ) ? DEFAULT_PACKED_INDEX_PATH : cfg.getPackedIndexDir();
indexCreationFeature.setIndexPath(getURIFromString(indexDir));
indexCreationFeature.setPackedIndexPath(getURIFromString(packedIndexDir));
ArtifactCleanupFeature artifactCleanupFeature = repo.getFeature(ArtifactCleanupFeature.class).get();
@ -408,19 +413,14 @@ public class MavenRepositoryProvider implements RepositoryProvider {
}
if (StringUtils.isNotBlank(repository.getPackedIndexDir())) {
Path packedIndexDir = null;
try {
packedIndexDir = Paths.get(new URI(repository.getPackedIndexDir().startsWith("file://") ? repository.getPackedIndexDir() : "file://" + repository.getPackedIndexDir()));
if (packedIndexDir.isAbsolute()) {
Path newDir = packedIndexDir.getParent().resolve(packedIndexDir.getFileName() + StagingRepositoryFeature.STAGING_REPO_POSTFIX);
log.debug("Changing index directory {} -> {}", packedIndexDir, newDir);
stagingRepository.setPackedIndexDir(newDir.toString());
} else {
log.debug("Keeping index directory {}", repository.getPackedIndexDir());
stagingRepository.setPackedIndexDir(repository.getPackedIndexDir());
}
} catch (URISyntaxException e) {
log.error("Could not parse index path as uri {}", repository.getPackedIndexDir());
stagingRepository.setPackedIndexDir("");
packedIndexDir = Paths.get(repository.getPackedIndexDir());
if (packedIndexDir.isAbsolute()) {
Path newDir = packedIndexDir.getParent().resolve(packedIndexDir.getFileName() + StagingRepositoryFeature.STAGING_REPO_POSTFIX);
log.debug("Changing index directory {} -> {}", packedIndexDir, newDir);
stagingRepository.setPackedIndexDir(newDir.toString());
} else {
log.debug("Keeping index directory {}", repository.getPackedIndexDir());
stagingRepository.setPackedIndexDir(repository.getPackedIndexDir());
}
// in case of absolute dir do not use the same
}

View File

@ -0,0 +1,802 @@
package org.apache.archiva.mock;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.indexer.ArchivaIndexManager;
import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.indexer.IndexCreationFailedException;
import org.apache.archiva.indexer.IndexUpdateFailedException;
import org.apache.archiva.indexer.UnsupportedBaseContextException;
import org.apache.archiva.proxy.ProxyRegistry;
import org.apache.archiva.proxy.maven.WagonFactory;
import org.apache.archiva.proxy.maven.WagonFactoryException;
import org.apache.archiva.proxy.maven.WagonFactoryRequest;
import org.apache.archiva.proxy.model.NetworkProxy;
import org.apache.archiva.repository.EditableRepository;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.PasswordCredentials;
import org.apache.archiva.repository.RemoteRepository;
import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
import org.apache.archiva.repository.content.FilesystemAsset;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.index.ArtifactContext;
import org.apache.maven.index.ArtifactContextProducer;
import org.apache.maven.index.DefaultScannerListener;
import org.apache.maven.index.Indexer;
import org.apache.maven.index.IndexerEngine;
import org.apache.maven.index.Scanner;
import org.apache.maven.index.ScanningRequest;
import org.apache.maven.index.ScanningResult;
import org.apache.maven.index.context.IndexCreator;
import org.apache.maven.index.context.IndexingContext;
import org.apache.maven.index.packer.IndexPacker;
import org.apache.maven.index.packer.IndexPackingRequest;
import org.apache.maven.index.updater.IndexUpdateRequest;
import org.apache.maven.index.updater.ResourceFetcher;
import org.apache.maven.index_shaded.lucene.index.IndexFormatTooOldException;
import org.apache.maven.wagon.ConnectionException;
import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.apache.maven.wagon.StreamWagon;
import org.apache.maven.wagon.TransferFailedException;
import org.apache.maven.wagon.Wagon;
import org.apache.maven.wagon.authentication.AuthenticationException;
import org.apache.maven.wagon.authentication.AuthenticationInfo;
import org.apache.maven.wagon.authorization.AuthorizationException;
import org.apache.maven.wagon.events.TransferEvent;
import org.apache.maven.wagon.events.TransferListener;
import org.apache.maven.wagon.proxy.ProxyInfo;
import org.apache.maven.wagon.shared.http.AbstractHttpClientWagon;
import org.apache.maven.wagon.shared.http.HttpConfiguration;
import org.apache.maven.wagon.shared.http.HttpMethodConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import javax.inject.Inject;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.stream.Collectors;
@Service("archivaIndexManager#maven")
public class ArchivaIndexManagerMock implements ArchivaIndexManager {
private static final Logger log = LoggerFactory.getLogger( ArchivaIndexManagerMock.class );
@Inject
private Indexer indexer;
@Inject
private IndexerEngine indexerEngine;
@Inject
private List<? extends IndexCreator> indexCreators;
@Inject
private IndexPacker indexPacker;
@Inject
private Scanner scanner;
@Inject
private ArchivaConfiguration archivaConfiguration;
@Inject
private WagonFactory wagonFactory;
@Inject
private ArtifactContextProducer artifactContextProducer;
private ConcurrentSkipListSet<Path> activeContexts = new ConcurrentSkipListSet<>( );
private static final int WAIT_TIME = 100;
private static final int MAX_WAIT = 10;
public static IndexingContext getMvnContext(ArchivaIndexingContext context ) throws UnsupportedBaseContextException
{
if ( !context.supports( IndexingContext.class ) )
{
log.error( "The provided archiva index context does not support the maven IndexingContext" );
throw new UnsupportedBaseContextException( "The context does not support the Maven IndexingContext" );
}
return context.getBaseContext( IndexingContext.class );
}
private Path getIndexPath( ArchivaIndexingContext ctx )
{
return PathUtil.getPathFromUri( ctx.getPath( ) );
}
@FunctionalInterface
interface IndexUpdateConsumer
{
void accept( IndexingContext indexingContext ) throws IndexUpdateFailedException;
}
/*
* This method is used to do some actions around the update execution code. And to make sure, that no other
* method is running on the same index.
*/
private void executeUpdateFunction( ArchivaIndexingContext context, IndexUpdateConsumer function ) throws IndexUpdateFailedException
{
IndexingContext indexingContext = null;
try
{
indexingContext = getMvnContext( context );
}
catch ( UnsupportedBaseContextException e )
{
throw new IndexUpdateFailedException( "Maven index is not supported by this context", e );
}
final Path ctxPath = getIndexPath( context );
int loop = MAX_WAIT;
boolean active = false;
while ( loop-- > 0 && !active )
{
active = activeContexts.add( ctxPath );
try
{
Thread.currentThread( ).sleep( WAIT_TIME );
}
catch ( InterruptedException e )
{
// Ignore this
}
}
if ( active )
{
try
{
function.accept( indexingContext );
}
finally
{
activeContexts.remove( ctxPath );
}
}
else
{
throw new IndexUpdateFailedException( "Timeout while waiting for index release on context " + context.getId( ) );
}
}
/**
 * Packs the index of the given context into its index directory and updates the
 * index timestamp.
 *
 * @param context the context whose index should be packed
 * @throws IndexUpdateFailedException if an I/O error occurs during packing
 */
@Override
public void pack( final ArchivaIndexingContext context ) throws IndexUpdateFailedException
{
    executeUpdateFunction( context, indexingContext -> {
        // Keep a reference to the acquired searcher so it can be released afterwards;
        // the previous version leaked the searcher returned by acquireIndexSearcher().
        org.apache.lucene.search.IndexSearcher searcher = null;
        try
        {
            searcher = indexingContext.acquireIndexSearcher( );
            IndexPackingRequest request = new IndexPackingRequest( indexingContext,
                searcher.getIndexReader( ),
                indexingContext.getIndexDirectoryFile( ) );
            indexPacker.packIndex( request );
            indexingContext.updateTimestamp( true );
        }
        catch ( IOException e )
        {
            log.error( "IOException while packing index of context " + context.getId( ) + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ) );
            throw new IndexUpdateFailedException( "IOException during update of " + context.getId( ), e );
        }
        finally
        {
            if ( searcher != null )
            {
                try
                {
                    indexingContext.releaseIndexSearcher( searcher );
                }
                catch ( IOException e )
                {
                    log.warn( "Could not release index searcher for context {}", context.getId( ) );
                }
            }
        }
    }
    );
}
/**
 * Scans the repository content and feeds it into the index. Exceptions collected by
 * the scanner are logged (at most five distinct messages) but not rethrown.
 *
 * @param context the context whose repository should be scanned
 */
@Override
public void scan( final ArchivaIndexingContext context ) throws IndexUpdateFailedException
{
    executeUpdateFunction( context, indexingContext -> {
        final DefaultScannerListener listener = new DefaultScannerListener( indexingContext, indexerEngine, true, null );
        final ScanningResult result = scanner.scan( new ScanningRequest( indexingContext, listener ) );
        if ( result.hasExceptions( ) )
        {
            log.error( "Exceptions occured during index scan of " + context.getId( ) );
            result.getExceptions( ).stream( )
                .map( e -> e.getMessage( ) )
                .distinct( )
                .limit( 5 )
                .forEach( s -> log.error( "Message: " + s ) );
        }
    } );
}
/**
 * Downloads the remote index of the repository bound to the given context.
 * The repository must be a {@link RemoteRepository} that provides the
 * {@link RemoteIndexFeature}, otherwise the update fails immediately.
 * Note: in this mock the actual index fetch call is disabled (see below); only the
 * download setup and the timestamp update are performed.
 *
 * @param context    the indexing context of a remote repository
 * @param fullUpdate true to force a full (non-incremental) index update
 * @throws IndexUpdateFailedException on connection, authentication or I/O problems
 */
@Override
public void update(final ArchivaIndexingContext context, final boolean fullUpdate) throws IndexUpdateFailedException
{
    log.info( "start download remote index for remote repository {}", context.getRepository( ).getId( ) );
    URI remoteUpdateUri;
    if ( !( context.getRepository( ) instanceof RemoteRepository) || !(context.getRepository().supportsFeature(RemoteIndexFeature.class)) )
    {
        throw new IndexUpdateFailedException( "The context is not associated to a remote repository with remote index " + context.getId( ) );
    } else {
        RemoteIndexFeature rif = context.getRepository().getFeature(RemoteIndexFeature.class).get();
        remoteUpdateUri = context.getRepository().getLocation().resolve(rif.getIndexUri());
    }
    final RemoteRepository remoteRepository = (RemoteRepository) context.getRepository( );
    executeUpdateFunction( context,
        indexingContext -> {
            try
            {
                // create a temp directory to download files
                Path tempIndexDirectory = Paths.get( indexingContext.getIndexDirectoryFile( ).getParent( ), ".tmpIndex" );
                Path indexCacheDirectory = Paths.get( indexingContext.getIndexDirectoryFile( ).getParent( ), ".indexCache" );
                Files.createDirectories( indexCacheDirectory );
                // Start from an empty temp directory for each download.
                if ( Files.exists( tempIndexDirectory ) )
                {
                    FileUtils.deleteDirectory( tempIndexDirectory );
                }
                Files.createDirectories( tempIndexDirectory );
                tempIndexDirectory.toFile( ).deleteOnExit( );
                String baseIndexUrl = indexingContext.getIndexUpdateUrl( );
                String wagonProtocol = remoteUpdateUri.toURL( ).getProtocol( );
                // NOTE(review): networkProxy is always null here, so the proxyInfo branch
                // below is effectively dead in this mock — confirm whether proxy support
                // is needed for the tests.
                NetworkProxy networkProxy = null;
                if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
                {
                    RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get( );
                    final StreamWagon wagon = (StreamWagon) wagonFactory.getWagon(
                        new WagonFactoryRequest( wagonProtocol, remoteRepository.getExtraHeaders( ) ).networkProxy(
                            networkProxy )
                    );
                    // NOTE(review): toMillis() already yields milliseconds; the extra * 1000
                    // looks like a unit error (and can overflow int) — confirm intent.
                    int readTimeout = (int) rif.getDownloadTimeout( ).toMillis( ) * 1000;
                    wagon.setReadTimeout( readTimeout );
                    wagon.setTimeout( (int) remoteRepository.getTimeout( ).toMillis( ) * 1000 );
                    if ( wagon instanceof AbstractHttpClientWagon)
                    {
                        // Configure preemptive auth and GET read timeout for HTTP-based wagons.
                        HttpConfiguration httpConfiguration = new HttpConfiguration( );
                        HttpMethodConfiguration httpMethodConfiguration = new HttpMethodConfiguration( );
                        httpMethodConfiguration.setUsePreemptive( true );
                        httpMethodConfiguration.setReadTimeout( readTimeout );
                        httpConfiguration.setGet( httpMethodConfiguration );
                        AbstractHttpClientWagon.class.cast( wagon ).setHttpConfiguration( httpConfiguration );
                    }
                    wagon.addTransferListener( new DownloadListener( ) );
                    ProxyInfo proxyInfo = null;
                    if ( networkProxy != null )
                    {
                        proxyInfo = new ProxyInfo( );
                        proxyInfo.setType( networkProxy.getProtocol( ) );
                        proxyInfo.setHost( networkProxy.getHost( ) );
                        proxyInfo.setPort( networkProxy.getPort( ) );
                        proxyInfo.setUserName( networkProxy.getUsername( ) );
                        proxyInfo.setPassword( networkProxy.getPassword( ) );
                    }
                    AuthenticationInfo authenticationInfo = null;
                    // Only password credentials are mapped to wagon authentication.
                    if ( remoteRepository.getLoginCredentials( ) != null && ( remoteRepository.getLoginCredentials( ) instanceof PasswordCredentials) )
                    {
                        PasswordCredentials creds = (PasswordCredentials) remoteRepository.getLoginCredentials( );
                        authenticationInfo = new AuthenticationInfo( );
                        authenticationInfo.setUserName( creds.getUsername( ) );
                        authenticationInfo.setPassword( new String( creds.getPassword( ) ) );
                    }
                    wagon.connect( new org.apache.maven.wagon.repository.Repository( remoteRepository.getId( ), baseIndexUrl ), authenticationInfo,
                        proxyInfo );
                    Path indexDirectory = indexingContext.getIndexDirectoryFile( ).toPath( );
                    if ( !Files.exists( indexDirectory ) )
                    {
                        Files.createDirectories( indexDirectory );
                    }
                    ResourceFetcher resourceFetcher =
                        new WagonResourceFetcher( log, tempIndexDirectory, wagon, remoteRepository );
                    IndexUpdateRequest request = new IndexUpdateRequest( indexingContext, resourceFetcher );
                    request.setForceFullUpdate( fullUpdate );
                    request.setLocalIndexCacheDir( indexCacheDirectory.toFile( ) );
                    // The actual fetch is intentionally disabled in this mock implementation:
                    // indexUpdater.fetchAndUpdateIndex( request );
                    indexingContext.updateTimestamp( true );
                }
            }
            catch ( AuthenticationException e )
            {
                log.error( "Could not login to the remote proxy for updating index of {}", remoteRepository.getId( ), e );
                throw new IndexUpdateFailedException( "Login in to proxy failed while updating remote repository " + remoteRepository.getId( ), e );
            }
            catch ( ConnectionException e )
            {
                log.error( "Connection error during index update for remote repository {}", remoteRepository.getId( ), e );
                throw new IndexUpdateFailedException( "Connection error during index update for remote repository " + remoteRepository.getId( ), e );
            }
            catch ( MalformedURLException e )
            {
                log.error( "URL for remote index update of remote repository {} is not correct {}", remoteRepository.getId( ), remoteUpdateUri, e );
                throw new IndexUpdateFailedException( "URL for remote index update of repository is not correct " + remoteUpdateUri, e );
            }
            catch ( IOException e )
            {
                log.error( "IOException during index update of remote repository {}: {}", remoteRepository.getId( ), e.getMessage( ), e );
                throw new IndexUpdateFailedException( "IOException during index update of remote repository " + remoteRepository.getId( )
                    + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
            }
            catch ( WagonFactoryException e )
            {
                log.error( "Wagon for remote index download of {} could not be created: {}", remoteRepository.getId( ), e.getMessage( ), e );
                throw new IndexUpdateFailedException( "Error while updating the remote index of " + remoteRepository.getId( ), e );
            }
        } );
}
/**
 * Adds the given artifacts to the index of the context. The URIs are resolved
 * against the context path.
 *
 * @param context           the indexing context
 * @param artifactReference the artifact URIs, relative to the context path
 * @throws IndexUpdateFailedException if the indexer fails with an I/O error
 */
@Override
public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
{
    final URI ctxUri = context.getPath();
    executeUpdateFunction(context, indexingContext -> {
        final Collection<ArtifactContext> artifacts = artifactReference.stream()
                .map(ref -> Paths.get(ctxUri.resolve(ref)).toFile())
                .map(file -> artifactContextProducer.getArtifactContext(indexingContext, file))
                .collect(Collectors.toList());
        try {
            indexer.addArtifactsToIndex(artifacts, indexingContext);
        } catch (IOException e) {
            log.error("IOException while adding artifact {}", e.getMessage(), e);
            throw new IndexUpdateFailedException("Error occured while adding artifact to index of "+context.getId()
                    + (StringUtils.isNotEmpty(e.getMessage()) ? ": "+e.getMessage() : ""));
        }
    });
}
/**
 * Removes the given artifacts from the index of the context. The URIs are resolved
 * against the context path.
 *
 * @param context           the indexing context
 * @param artifactReference the artifact URIs, relative to the context path
 * @throws IndexUpdateFailedException if the indexer fails with an I/O error
 */
@Override
public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
{
    final URI ctxUri = context.getPath();
    executeUpdateFunction(context, indexingContext -> {
        final Collection<ArtifactContext> artifacts = artifactReference.stream()
                .map(ref -> Paths.get(ctxUri.resolve(ref)).toFile())
                .map(file -> artifactContextProducer.getArtifactContext(indexingContext, file))
                .collect(Collectors.toList());
        try {
            indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
        } catch (IOException e) {
            log.error("IOException while removing artifact {}", e.getMessage(), e);
            throw new IndexUpdateFailedException("Error occured while removing artifact from index of "+context.getId()
                    + (StringUtils.isNotEmpty(e.getMessage()) ? ": "+e.getMessage() : ""));
        }
    });
}
/**
 * This implementation handles Maven repositories only.
 *
 * @param type the repository type to check
 * @return true if the type is {@link RepositoryType#MAVEN}
 */
@Override
public boolean supportsRepository( RepositoryType type )
{
    return RepositoryType.MAVEN == type;
}
/**
 * Creates a new indexing context for the given Maven repository. Remote and managed
 * repositories are handled differently; other repository subtypes produce a mock
 * context without a backing Maven context.
 *
 * @param repository the repository to create the context for
 * @return the created context
 * @throws IndexCreationFailedException if context creation fails with an I/O error
 */
@Override
public ArchivaIndexingContext createContext( Repository repository ) throws IndexCreationFailedException
{
    log.debug("Creating context for repo {}, type: {}", repository.getId(), repository.getType());
    if ( RepositoryType.MAVEN != repository.getType( ) )
    {
        throw new UnsupportedRepositoryTypeException( repository.getType( ) );
    }
    IndexingContext mvnCtx = null;
    try
    {
        if ( repository instanceof RemoteRepository )
        {
            mvnCtx = createRemoteContext( (RemoteRepository) repository );
        }
        else if ( repository instanceof ManagedRepository )
        {
            mvnCtx = createManagedContext( (ManagedRepository) repository );
        }
    }
    catch ( IOException e )
    {
        log.error( "IOException during context creation " + e.getMessage( ), e );
        throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( )
            + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
    }
    // NOTE(review): mvnCtx remains null for repository subtypes that are neither
    // remote nor managed.
    return new MavenIndexContextMock( repository, mvnCtx );
}
/**
 * Deletes the index of the given context and creates a fresh, empty context for the
 * same repository. Editable repositories are updated to reference the new context.
 *
 * @param context the context to reset
 * @return the newly created context
 * @throws IndexUpdateFailedException if the old index cannot be deleted or the new one created
 */
@Override
public ArchivaIndexingContext reset(ArchivaIndexingContext context) throws IndexUpdateFailedException {
    // Close and wipe the old index while holding the per-context lock.
    executeUpdateFunction(context, indexingContext -> {
        try {
            indexingContext.close(true);
        } catch (IOException e) {
            log.warn("Index close failed");
        }
        try {
            FileUtils.deleteDirectory(Paths.get(context.getPath()));
        } catch (IOException e) {
            throw new IndexUpdateFailedException("Could not delete index files");
        }
    });
    // Recreate the context and rebind it to the repository where possible.
    try {
        final Repository repo = context.getRepository();
        final ArchivaIndexingContext newCtx = createContext(repo);
        if (repo instanceof EditableRepository) {
            ((EditableRepository) repo).setIndexingContext(newCtx);
        }
        return newCtx;
    } catch (IndexCreationFailedException e) {
        throw new IndexUpdateFailedException("Could not create index");
    }
}
/**
 * Moves the index of the given context to the index location of the target
 * repository and returns a new context bound to that location.
 *
 * @param context the context to move; may be {@code null}, in which case {@code null} is returned
 * @param repo    the repository whose index location should be used
 * @return the new indexing context, or the original context if the path is unchanged
 * @throws IndexCreationFailedException if the context is not Maven based or the move fails
 */
@Override
public ArchivaIndexingContext move(ArchivaIndexingContext context, Repository repo) throws IndexCreationFailedException {
    if (context==null) {
        return null;
    }
    if (context.supports(IndexingContext.class)) {
        try {
            StorageAsset newPath = getIndexPath(repo);
            IndexingContext ctx = context.getBaseContext(IndexingContext.class);
            Path oldPath = ctx.getIndexDirectoryFile().toPath();
            // FIX: compare the two filesystem paths. The previous code compared a Path
            // with a StorageAsset, which can never be equal, so the "nothing to do"
            // short-circuit never triggered.
            if (oldPath.equals(newPath.getFilePath())) {
                // Nothing to do, if path does not change
                return context;
            }
            if (!Files.exists(oldPath)) {
                // No index files yet: just create the context at the new location.
                return createContext(repo);
            } else if (context.isEmpty()) {
                // Empty index: close and recreate, no file move needed.
                context.close();
                return createContext(repo);
            } else {
                // Close without deleting, move the files, then recreate.
                context.close(false);
                Files.move(oldPath, newPath.getFilePath());
                return createContext(repo);
            }
        } catch (IOException e) {
            log.error("IOException while moving index directory {}", e.getMessage(), e);
            throw new IndexCreationFailedException("Could not recreated the index.", e);
        } catch (UnsupportedBaseContextException e) {
            throw new IndexCreationFailedException("The given context, is not a maven context.");
        }
    } else {
        throw new IndexCreationFailedException("Bad context type. This is not a maven context.");
    }
}
/**
 * Updates the local index path of the repository's {@link IndexCreationFeature},
 * creating the index directory if necessary. Does nothing for repositories
 * without that feature.
 *
 * @param repo the repository to update
 */
@Override
public void updateLocalIndexPath(Repository repo) {
    if (!repo.supportsFeature(IndexCreationFeature.class)) {
        return;
    }
    final IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
    try {
        icf.setLocalIndexPath(getIndexPath(repo));
    } catch (IOException e) {
        log.error("Could not set local index path for {}. New URI: {}", repo.getId(), icf.getIndexPath());
    }
}
/**
 * Merging is not implemented by this mock; always returns {@code null}.
 */
@Override
public ArchivaIndexingContext mergeContexts(Repository destinationRepo, List<ArchivaIndexingContext> contexts, boolean packIndex) throws UnsupportedOperationException, IndexCreationFailedException {
    return null;
}
/**
 * Determines the index directory for the given repository from its
 * {@link IndexCreationFeature} and makes sure the directory exists.
 * An absolute configured path is used as-is; a relative one is resolved inside the
 * repository directory; an empty one falls back to ".index".
 *
 * @param repo the repository whose index directory is requested
 * @return a filesystem asset pointing at the (created) index directory
 * @throws IOException if the index directory cannot be created
 */
private StorageAsset getIndexPath( Repository repo) throws IOException {
    IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
    Path repoDir = repo.getLocalPath();
    URI indexDir = icf.getIndexPath();
    String indexPath = indexDir.getPath();
    Path indexDirectory = null;
    if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
    {
        indexDirectory = PathUtil.getPathFromUri( indexDir );
        if ( indexDirectory.isAbsolute( ) )
        {
            // Absolute path: use the directory directly, expose only the last
            // segment as the asset path.
            indexPath = indexDirectory.getFileName().toString();
        }
        else
        {
            // Not absolute, so create it in the repository directory.
            indexDirectory = repoDir.resolve( indexDirectory );
        }
    }
    else
    {
        // No configured index path: use the default ".index" directory.
        indexDirectory = repoDir.resolve( ".index" );
        indexPath = ".index";
    }
    if ( !Files.exists( indexDirectory ) )
    {
        Files.createDirectories( indexDirectory );
    }
    return new FilesystemAsset( indexPath, indexDirectory);
}
/**
 * Creates the Maven indexing context for a remote repository. The repository must
 * provide the {@link RemoteIndexFeature}; otherwise an {@link IOException} is thrown.
 * If an existing index uses an outdated Lucene format, it is deleted and recreated.
 *
 * @param remoteRepository the remote repository
 * @return the created indexing context
 * @throws IOException if no remote index is defined or directory/context creation fails
 */
private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException
{
    // (Removed an unused local that read the app server base dir from the configuration.)
    String contextKey = "remote-" + remoteRepository.getId( );
    // create remote repository path
    Path repoDir = remoteRepository.getLocalPath();
    if ( !Files.exists( repoDir ) )
    {
        Files.createDirectories( repoDir );
    }
    StorageAsset indexDirectory = null;
    // is there configured indexDirectory ?
    if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
    {
        RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get( );
        indexDirectory = getIndexPath(remoteRepository);
        String remoteIndexUrl = calculateIndexRemoteUrl( remoteRepository.getLocation( ), rif );
        try
        {
            return getIndexingContext( remoteRepository, contextKey, repoDir, indexDirectory, remoteIndexUrl );
        }
        catch ( IndexFormatTooOldException e )
        {
            // existing index with an old lucene format so we need to delete it!!!
            // delete it first then recreate it.
            log.warn( "the index of repository {} is too old we have to delete and recreate it", //
                remoteRepository.getId( ) );
            FileUtils.deleteDirectory( indexDirectory.getFilePath() );
            return getIndexingContext( remoteRepository, contextKey, repoDir, indexDirectory, remoteIndexUrl );
        }
    }
    else
    {
        throw new IOException( "No remote index defined" );
    }
}
/**
 * Creates a Maven indexing context for the given repository using the configured
 * index creators. The context is searchable and non-reclaiming.
 *
 * @param repository     the repository the context belongs to
 * @param contextKey     unique key for the context
 * @param repoDir        the repository directory
 * @param indexDirectory the index directory asset
 * @param indexUrl       the remote index URL, may be null
 * @return the created context
 * @throws IOException if context creation fails
 */
private IndexingContext getIndexingContext( Repository repository, String contextKey, Path repoDir, StorageAsset indexDirectory, String indexUrl ) throws IOException
{
    final String repositoryUrl = repository.getLocation( ) == null ? null : repository.getLocation( ).toString( );
    return indexer.createIndexingContext( contextKey, repository.getId( ), repoDir.toFile( ),
        indexDirectory.getFilePath( ).toFile( ), repositoryUrl, indexUrl, true, false, indexCreators );
}
/**
 * Creates the Maven indexing context for a managed repository. The repository must
 * provide the {@link IndexCreationFeature}; otherwise an {@link IOException} is thrown.
 * If an existing index uses an outdated Lucene format, it is deleted and recreated.
 *
 * @param repository the managed repository
 * @return the created indexing context
 * @throws IOException if no repository index is defined or context creation fails
 */
private IndexingContext createManagedContext( ManagedRepository repository ) throws IOException
{
    IndexingContext context;
    // take care first about repository location as can be relative
    Path repositoryDirectory = repository.getLocalPath();
    if ( !Files.exists( repositoryDirectory ) )
    {
        try
        {
            Files.createDirectories( repositoryDirectory );
        }
        catch ( IOException e )
        {
            // NOTE(review): the failure is only logged and execution continues;
            // later calls will fail if the directory is really missing — confirm
            // this best-effort behavior is intended.
            log.error( "Could not create directory {}", repositoryDirectory );
        }
    }
    StorageAsset indexDirectory = null;
    if ( repository.supportsFeature( IndexCreationFeature.class ) )
    {
        indexDirectory = getIndexPath(repository);
        String indexUrl = repositoryDirectory.toUri( ).toURL( ).toExternalForm( );
        try
        {
            context = getIndexingContext( repository, repository.getId( ), repositoryDirectory, indexDirectory, indexUrl );
            context.setSearchable( repository.isScanned( ) );
        }
        catch ( IndexFormatTooOldException e )
        {
            // existing index with an old lucene format so we need to delete it!!!
            // delete it first then recreate it.
            log.warn( "the index of repository {} is too old we have to delete and recreate it", //
                repository.getId( ) );
            FileUtils.deleteDirectory( indexDirectory.getFilePath() );
            context = getIndexingContext( repository, repository.getId( ), repositoryDirectory, indexDirectory, indexUrl );
            context.setSearchable( repository.isScanned( ) );
        }
        return context;
    }
    else
    {
        throw new IOException( "No repository index defined" );
    }
}
/**
 * Computes the remote index URL by resolving the configured index URI against the
 * repository base URI, falling back to ".index" when none is configured.
 *
 * @param baseUri the repository base URI
 * @param rif     the remote index feature holding the optional index URI
 * @return the resolved remote index URL as a string
 */
private String calculateIndexRemoteUrl( URI baseUri, RemoteIndexFeature rif )
{
    return rif.getIndexUri( ) == null
        ? baseUri.resolve( ".index" ).toString( )
        : baseUri.resolve( rif.getIndexUri( ) ).toString( );
}
/**
 * Wagon transfer listener that logs progress and timing of index downloads.
 */
private static final class DownloadListener
    implements TransferListener
{
    private Logger log = LoggerFactory.getLogger( getClass( ) );
    private String resourceName;
    // Start time of the current transfer in epoch milliseconds.
    private long startTime;
    // FIX: use long — an int accumulator overflows for transfers larger than 2 GiB.
    private long totalLength = 0;

    @Override
    public void transferInitiated( TransferEvent transferEvent )
    {
        startTime = System.currentTimeMillis( );
        resourceName = transferEvent.getResource( ).getName( );
        log.debug( "initiate transfer of {}", resourceName );
    }

    @Override
    public void transferStarted( TransferEvent transferEvent )
    {
        this.totalLength = 0;
        resourceName = transferEvent.getResource( ).getName( );
        log.info( "start transfer of {}", transferEvent.getResource( ).getName( ) );
    }

    @Override
    public void transferProgress( TransferEvent transferEvent, byte[] buffer, int length )
    {
        log.debug( "transfer of {} : {}/{}", transferEvent.getResource( ).getName( ), buffer.length, length );
        this.totalLength += length;
    }

    @Override
    public void transferCompleted( TransferEvent transferEvent )
    {
        resourceName = transferEvent.getResource( ).getName( );
        long endTime = System.currentTimeMillis( );
        log.info( "end of transfer file {} {} kb: {}s", transferEvent.getResource( ).getName( ),
            this.totalLength / 1024, ( endTime - startTime ) / 1000 );
    }

    @Override
    public void transferError( TransferEvent transferEvent )
    {
        log.info( "error of transfer file {}: {}", transferEvent.getResource( ).getName( ),
            transferEvent.getException( ).getMessage( ), transferEvent.getException( ) );
    }

    @Override
    public void debug( String message )
    {
        log.debug( "transfer debug {}", message );
    }
}
/**
 * {@link ResourceFetcher} that downloads index resources through a wagon into a
 * temporary directory and streams them back to the caller.
 */
private static class WagonResourceFetcher
    implements ResourceFetcher
{
    Logger log;
    Path tempIndexDirectory;
    Wagon wagon;
    RemoteRepository remoteRepository;

    private WagonResourceFetcher( Logger log, Path tempIndexDirectory, Wagon wagon,
                                  RemoteRepository remoteRepository )
    {
        this.log = log;
        this.tempIndexDirectory = tempIndexDirectory;
        this.wagon = wagon;
        this.remoteRepository = remoteRepository;
    }

    @Override
    public void connect( String id, String url )
        throws IOException
    {
        //no op
    }

    @Override
    public void disconnect( )
        throws IOException
    {
        // no op
    }

    /**
     * Downloads the named resource into the temp directory and returns a stream
     * over the downloaded file. The file is scheduled for deletion on JVM exit.
     *
     * @throws FileNotFoundException if the resource does not exist on the remote side
     * @throws IOException           on authorization or transfer failures
     */
    @Override
    public InputStream retrieve(String name )
        throws IOException, FileNotFoundException
    {
        try
        {
            log.info( "index update retrieve file, name:{}", name );
            Path file = tempIndexDirectory.resolve( name );
            Files.deleteIfExists( file );
            file.toFile( ).deleteOnExit( );
            wagon.get( addParameters( name, remoteRepository ), file.toFile( ) );
            return Files.newInputStream( file );
        }
        catch ( AuthorizationException | TransferFailedException e )
        {
            throw new IOException( e.getMessage( ), e );
        }
        catch ( ResourceDoesNotExistException e )
        {
            FileNotFoundException fnfe = new FileNotFoundException( e.getMessage( ) );
            fnfe.initCause( e );
            throw fnfe;
        }
    }

    // FIXME remove crappy copy/paste
    /**
     * Appends the repository's extra query parameters to the given path.
     * The first parameter is prefixed with '?', all following ones with '&amp;'.
     */
    protected String addParameters( String path, RemoteRepository remoteRepository )
    {
        if ( remoteRepository.getExtraParameters( ).isEmpty( ) )
        {
            return path;
        }
        boolean first = true;
        StringBuilder res = new StringBuilder( path == null ? "" : path );
        for ( Map.Entry<String, String> entry : remoteRepository.getExtraParameters( ).entrySet( ) )
        {
            // FIX: the previous version never toggled its flag, so every parameter
            // was prefixed with '?'; use '&' for all parameters after the first.
            res.append( first ? '?' : '&' ).append( entry.getKey( ) ).append( '=' ).append( entry.getValue( ) );
            first = false;
        }
        return res.toString( );
    }
}
}

View File

@ -0,0 +1,137 @@
package org.apache.archiva.mock;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.Repository;
import org.apache.maven.index.context.IndexingContext;

import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.sql.Date;
import java.time.ZonedDateTime;
import java.util.Set;
import java.util.stream.Stream;
/**
* Maven implementation of index context
*/
/**
 * Mock implementation of the Archiva indexing context that delegates all index
 * operations to a Maven {@link IndexingContext}.
 */
public class MavenIndexContextMock implements ArchivaIndexingContext {

    // The Maven indexing context all operations are delegated to.
    private IndexingContext delegate;
    // The repository this context belongs to.
    private Repository repository;

    MavenIndexContextMock( Repository repository, IndexingContext delegate) {
        this.delegate = delegate;
        this.repository = repository;
    }

    @Override
    public String getId() {
        return delegate.getId();
    }

    @Override
    public Repository getRepository() {
        return repository;
    }

    @Override
    public URI getPath() {
        return delegate.getIndexDirectoryFile().toURI();
    }

    /**
     * Returns true if the index directory contains no entries.
     * FIX: the stream returned by Files.list() must be closed; the previous
     * version leaked the underlying directory handle.
     */
    @Override
    public boolean isEmpty() throws IOException {
        try (Stream<Path> files = Files.list(delegate.getIndexDirectoryFile().toPath())) {
            return files.count() == 0;
        }
    }

    @Override
    public void commit() throws IOException {
        delegate.commit();
    }

    @Override
    public void rollback() throws IOException {
        delegate.rollback();
    }

    @Override
    public void optimize() throws IOException {
        delegate.optimize();
    }

    @Override
    public void close(boolean deleteFiles) throws IOException {
        try {
            delegate.close(deleteFiles);
        } catch (NoSuchFileException e) {
            // Ignore missing directory
        }
    }

    @Override
    public void close() throws IOException {
        try {
            delegate.close(false);
        } catch (NoSuchFileException e) {
            // Ignore missing directory
        }
    }

    @Override
    public void purge() throws IOException {
        delegate.purge();
    }

    @Override
    public boolean supports(Class<?> clazz) {
        return IndexingContext.class.equals(clazz);
    }

    /**
     * Exposes the delegate when {@link IndexingContext} is requested.
     *
     * @throws UnsupportedOperationException for any other class
     */
    @SuppressWarnings( "unchecked" )
    @Override
    public <T> T getBaseContext(Class<T> clazz) throws UnsupportedOperationException {
        if (IndexingContext.class.equals(clazz)) {
            return (T) delegate;
        } else {
            throw new UnsupportedOperationException("The class "+clazz+" is not supported by the maven indexer");
        }
    }

    @Override
    public Set<String> getGroups() throws IOException {
        return delegate.getAllGroups();
    }

    @Override
    public void updateTimestamp(boolean save) throws IOException {
        delegate.updateTimestamp(save);
    }

    @Override
    public void updateTimestamp(boolean save, ZonedDateTime time) throws IOException {
        delegate.updateTimestamp(save, Date.from(time.toInstant()));
    }
}

View File

@ -88,6 +88,7 @@
<pattern>**/*.sh</pattern>
<pattern>**/.svn/**</pattern>
<pattern>**/.DAV/**</pattern>
<pattern>.indexer/**</pattern>
</patterns>
</fileType>
</fileTypes>

View File

@ -21,10 +21,8 @@ package org.apache.archiva.webdav;
import edu.emory.mathcs.backport.java.util.Collections;
import org.apache.archiva.metadata.model.facets.AuditEvent;
import org.apache.archiva.redback.components.taskqueue.TaskQueueException;
import org.apache.archiva.repository.LayoutException;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.events.AuditListener;
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
@ -32,7 +30,6 @@ import org.apache.archiva.scheduler.repository.model.RepositoryArchivaTaskSchedu
import org.apache.archiva.scheduler.repository.model.RepositoryTask;
import org.apache.archiva.webdav.util.IndexWriter;
import org.apache.archiva.webdav.util.MimeTypes;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.jackrabbit.util.Text;
import org.apache.jackrabbit.webdav.DavException;
@ -57,7 +54,6 @@ import org.apache.jackrabbit.webdav.property.DavPropertyNameSet;
import org.apache.jackrabbit.webdav.property.DavPropertySet;
import org.apache.jackrabbit.webdav.property.DefaultDavProperty;
import org.apache.jackrabbit.webdav.property.ResourceType;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -69,11 +65,9 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
*/
@ -98,7 +92,7 @@ public class ArchivaDavResource
private String remoteAddr;
private final ManagedRepository repository;
private final RepositoryStorage repositoryStorage;
private final MimeTypes mimeTypes;
@ -114,7 +108,7 @@ public class ArchivaDavResource
private StorageAsset asset;
public ArchivaDavResource( StorageAsset localResource, String logicalResource, ManagedRepository repository,
public ArchivaDavResource( StorageAsset localResource, String logicalResource, RepositoryStorage repositoryStorage,
DavSession session, ArchivaDavResourceLocator locator, DavResourceFactory factory,
MimeTypes mimeTypes, List<AuditListener> auditListeners,
RepositoryArchivaTaskScheduler scheduler) throws LayoutException
@ -127,7 +121,7 @@ public class ArchivaDavResource
this.session = session;
// TODO: push into locator as well as moving any references out of the resource factory
this.repository = repository;
this.repositoryStorage = repositoryStorage;
// TODO: these should be pushed into the repository layer, along with the physical file operations in this class
this.mimeTypes = mimeTypes;
@ -136,21 +130,18 @@ public class ArchivaDavResource
}
public ArchivaDavResource( StorageAsset localResource, String logicalResource, ManagedRepository repository,
public ArchivaDavResource( StorageAsset localResource, String logicalResource, RepositoryStorage repositoryStorage,
String remoteAddr, String principal, DavSession session,
ArchivaDavResourceLocator locator, DavResourceFactory factory, MimeTypes mimeTypes,
List<AuditListener> auditListeners, RepositoryArchivaTaskScheduler scheduler) throws LayoutException
{
this( localResource, logicalResource, repository, session, locator, factory, mimeTypes, auditListeners,
this( localResource, logicalResource, repositoryStorage, session, locator, factory, mimeTypes, auditListeners,
scheduler );
this.remoteAddr = remoteAddr;
this.principal = principal;
}
private ManagedRepositoryContent getContent() {
return repository.getContent();
}
@Override
public String getComplianceClass()
@ -219,7 +210,7 @@ public class ArchivaDavResource
if ( !isCollection() && outputContext.hasStream() )
{
getContent().consumeData( asset, is -> {copyStream(is, outputContext.getOutputStream());}, true );
repositoryStorage.consumeData( asset, is -> {copyStream(is, outputContext.getOutputStream());}, true );
}
else if ( outputContext.hasStream() )
{
@ -343,7 +334,7 @@ public class ArchivaDavResource
log.debug( "Upload failed: {}", msg );
throw new DavException( HttpServletResponse.SC_BAD_REQUEST, msg );
}
StorageAsset member = getContent( ).addAsset( newPath, false );
StorageAsset member = repositoryStorage.addAsset( newPath, false );
member.create();
member.storeDataFile( tempFile );
}
@ -375,7 +366,7 @@ public class ArchivaDavResource
{
try
{
StorageAsset member = getContent( ).addAsset( newPath, true );
StorageAsset member = repositoryStorage.addAsset( newPath, true );
member.create();
}
catch ( IOException e )
@ -439,12 +430,12 @@ public class ArchivaDavResource
{
if ( resource.isContainer() )
{
getContent().removeAsset( resource );
repositoryStorage.removeAsset( resource );
triggerAuditEvent( member, AuditEvent.REMOVE_DIR );
}
else
{
getContent().removeAsset( resource );
repositoryStorage.removeAsset( resource );
triggerAuditEvent( member, AuditEvent.REMOVE_FILE );
}
@ -492,12 +483,12 @@ public class ArchivaDavResource
ArchivaDavResource resource = checkDavResourceIsArchivaDavResource( destination );
if ( isCollection() )
{
this.asset = getContent().moveAsset( asset, destination.getResourcePath() );
this.asset = repositoryStorage.moveAsset( asset, destination.getResourcePath() );
triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.MOVE_DIRECTORY );
}
else
{
this.asset = getContent().moveAsset( asset, destination.getResourcePath() );
this.asset = repositoryStorage.moveAsset( asset, destination.getResourcePath() );
triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.MOVE_FILE );
}
@ -530,13 +521,13 @@ public class ArchivaDavResource
ArchivaDavResource resource = checkDavResourceIsArchivaDavResource( destination );
if ( isCollection() )
{
getContent().copyAsset( asset, destination.getResourcePath() );
repositoryStorage.copyAsset( asset, destination.getResourcePath() );
triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.COPY_DIRECTORY );
}
else
{
getContent().copyAsset( asset, destination.getResourcePath() );
repositoryStorage.copyAsset( asset, destination.getResourcePath() );
triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.COPY_FILE );
}

View File

@ -68,6 +68,7 @@ import org.apache.archiva.repository.RepositoryGroup;
import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.RepositoryRequestInfo;
import org.apache.archiva.repository.content.FilesystemAsset;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.events.AuditListener;
import org.apache.archiva.repository.features.IndexCreationFeature;
@ -186,6 +187,7 @@ public class ArchivaDavResourceFactory
private ApplicationContext applicationContext;
@Inject
public ArchivaDavResourceFactory( ApplicationContext applicationContext, ArchivaConfiguration archivaConfiguration )
throws PlexusSisuBridgeException
@ -196,11 +198,12 @@ public class ArchivaDavResourceFactory
}
@PostConstruct
public void initialize()
public void initialize() throws IOException
{
// no op
}
@Override
public DavResource createResource( final DavResourceLocator locator, final DavServletRequest request,
final DavServletResponse response )
@ -299,7 +302,13 @@ public class ArchivaDavResourceFactory
{
ArchivaDavResource res = (ArchivaDavResource) resource;
String newPath = res.getAsset().getPath()+"/maven-metadata-" + sRepoId + ".xml";
String newPath;
if (res.getAsset().hasParent())
{
newPath = res.getAsset( ).getParent( ).getPath( ) + "/maven-metadata-" + sRepoId + ".xml";
} else {
newPath = StringUtils.substringBeforeLast( res.getAsset().getPath(), "/" ) + "/maven-metadata-" + sRepoId + ".xml";;
}
// for MRM-872 handle checksums of the merged metadata files
if ( repositoryRequestInfo.isSupportFile( requestedResource ) )
{
@ -313,7 +322,7 @@ public class ArchivaDavResourceFactory
try
{
resource =
new ArchivaDavResource( metadataChecksum, logicalResource.getPath(), null,
new ArchivaDavResource( metadataChecksum, logicalResource.getPath(), repoGroup,
request.getRemoteAddr(), activePrincipal, request.getDavSession(),
archivaLocator, this, mimeTypes, auditListeners, scheduler);
}
@ -358,7 +367,7 @@ public class ArchivaDavResourceFactory
new LogicalResource( getLogicalResource( archivaLocator, null, false ) );
resource =
new ArchivaDavResource( resourceFile, logicalResource.getPath(), null,
new ArchivaDavResource( resourceFile, logicalResource.getPath(), repoGroup,
request.getRemoteAddr(), activePrincipal,
request.getDavSession(), archivaLocator, this, mimeTypes,
auditListeners, scheduler);
@ -570,7 +579,7 @@ public class ArchivaDavResourceFactory
try
{
resource =
new ArchivaDavResource( repoAsset, path, managedRepositoryContent.getRepository(),
new ArchivaDavResource( repoAsset, path, managedRepositoryContent,
request.getRemoteAddr(), activePrincipal, request.getDavSession(),
archivaLocator, this, mimeTypes, auditListeners, scheduler );
}
@ -606,14 +615,14 @@ public class ArchivaDavResourceFactory
resourceAsset = managedRepositoryContent.getAsset( localResourcePath );
resource =
new ArchivaDavResource( resourceAsset, logicalResource.getPath(),
managedRepositoryContent.getRepository(),
managedRepositoryContent,
request.getRemoteAddr(), activePrincipal,
request.getDavSession(), archivaLocator, this, mimeTypes,
auditListeners, scheduler );
}
catch ( LayoutException e )
{
if ( resourceAsset!=null && !resourceAsset.exists() )
if ( resourceAsset==null || !resourceAsset.exists() )
{
throw new DavException( HttpServletResponse.SC_NOT_FOUND, e );
}
@ -735,7 +744,7 @@ public class ArchivaDavResourceFactory
try
{
resource = new ArchivaDavResource( resourceAsset, logicalResource,
repo, davSession, archivaLocator,
repo.getContent(), davSession, archivaLocator,
this, mimeTypes, auditListeners, scheduler);
}
catch ( LayoutException e )
@ -1100,7 +1109,7 @@ public class ArchivaDavResourceFactory
}
// Path resourceFile = Paths.get( managedRepository.getRepoRoot(), logicalResource.getPath() );
StorageAsset resourceFile = managedRepository.getAsset(logicalResource.getPath());
if ( resourceFile.exists() )
if ( resourceFile.exists() && managedRepository.getRepository().supportsFeature( IndexCreationFeature.class ))
{
// in case of group displaying index directory doesn't have sense !!
IndexCreationFeature idf = managedRepository.getRepository().getFeature(IndexCreationFeature.class).get();

View File

@ -27,7 +27,9 @@ import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.configuration.Configuration;
import org.apache.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.archiva.configuration.RemoteRepositoryConfiguration;
import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.archiva.webdav.httpunit.MkColMethodWebRequest;
import org.apache.commons.io.FileUtils;
@ -175,14 +177,21 @@ public abstract class AbstractRepositoryServletTestCase
config.addManagedRepository(
createManagedRepository( REPOID_INTERNAL, "Internal Test Repo", repoRootInternal, true ) );
repositoryRegistry.reload();
config.getProxyConnectors().clear();
config.getRemoteRepositories().clear();
saveConfiguration( archivaConfiguration );
ArchivaIndexingContext ctx = repositoryRegistry.getManagedRepository( REPOID_INTERNAL ).getIndexingContext( );
try
{
repositoryRegistry.getIndexManager( RepositoryType.MAVEN ).pack( ctx );
} finally
{
ctx.close( );
}
CacheManager.getInstance().clearAll();
@ -795,6 +804,7 @@ public abstract class AbstractRepositoryServletTestCase
repo.setName( name );
repo.setLocation( location.toAbsolutePath().toString() );
repo.setBlockRedeployments( blockRedeployments );
repo.setType( "MAVEN" );
return repo;
}

View File

@ -191,7 +191,7 @@ public class ArchivaDavResourceFactoryTest
expectLastCall().times(0, 4);
archivaConfiguration.save( config );
expectLastCall().times( 0, 4 );
expectLastCall().times( 0, 5 );
archivaConfigurationControl.replay();
defaultManagedRepositoryAdmin.setArchivaConfiguration( archivaConfiguration );
@ -493,6 +493,8 @@ public class ArchivaDavResourceFactoryTest
ManagedRepositoryContent internalRepo = createManagedRepositoryContent( INTERNAL_REPO );
ManagedRepositoryContent localMirrorRepo = createManagedRepositoryContent( LOCAL_MIRROR_REPO );
repositoryRegistry.putRepositoryGroup( repoGroup );
try
{
archivaConfigurationControl.reset();
@ -572,8 +574,6 @@ public class ArchivaDavResourceFactoryTest
{
archivaConfigurationControl.reset();
expect( archivaConfiguration.getConfiguration() ).andReturn( config ).times( 1 );
expect( request.getMethod() ).andReturn( "GET" ).times( 4 );
expect( request.getRemoteAddr() ).andReturn( "http://localhost:8080" ).times( 3 );
@ -670,7 +670,8 @@ public class ArchivaDavResourceFactoryTest
repoContentFactory.getRepositoryContentProviders().add(provider);
defaultManagedRepositoryAdmin.addManagedRepository(
createManagedRepository( LEGACY_REPO, getProjectBase().resolve( "target/test-classes/" + LEGACY_REPO ).toString(),
"legacy" ), false, null );
"legacy" ), false, null );
DavResourceLocator locator =
new ArchivaDavResourceLocator( "", "/repository/" + LEGACY_REPO + "/eclipse/maven-metadata.xml",
LEGACY_REPO, new ArchivaDavLocatorFactory() );

View File

@ -23,6 +23,7 @@ import junit.framework.TestCase;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.repository.LayoutException;
import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.content.FilesystemAsset;
import org.apache.archiva.repository.events.AuditListener;
import org.apache.archiva.repository.maven2.MavenManagedRepository;
@ -68,6 +69,9 @@ public class DavResourceTest
@Inject
private FileLockManager fileLockManager;
@Inject
private RepositoryRegistry repositoryRegistry;
private ArchivaDavResourceLocator resourceLocator;
private DavResourceFactory resourceFactory;
@ -92,7 +96,9 @@ public class DavResourceTest
session = new ArchivaDavSession();
baseDir = Paths.get( "target/DavResourceTest" );
Files.createDirectories( baseDir );
Files.createDirectories( baseDir.resolve( "conf" ) );
repository = new MavenManagedRepository( "repo001", "repo001", baseDir);
repositoryRegistry.putRepository( repository );
myResource = baseDir.resolve( "myresource.jar" );
Files.createFile(myResource);
@ -121,7 +127,7 @@ public class DavResourceTest
private DavResource getDavResource( String logicalPath, Path file ) throws LayoutException
{
return new ArchivaDavResource( new FilesystemAsset( logicalPath, file.toAbsolutePath()) , logicalPath, repository, session, resourceLocator,
return new ArchivaDavResource( new FilesystemAsset( logicalPath, file.toAbsolutePath()) , logicalPath, repository.getContent(), session, resourceLocator,
resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(), null);
}
@ -343,7 +349,7 @@ public class DavResourceTest
{
try
{
return new ArchivaDavResource( new FilesystemAsset( "/" , baseDir.toAbsolutePath()), "/", repository, session, resourceLocator,
return new ArchivaDavResource( new FilesystemAsset( "/" , baseDir.toAbsolutePath()), "/", repository.getContent(), session, resourceLocator,
resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(),
null );
}

View File

@ -67,7 +67,7 @@ public class RepositoryServletBrowseTest
// dumpResponse( response );
List<String> expectedLinks = Arrays.asList( "commons-lang/", "net/", "org/" );
List<String> expectedLinks = Arrays.asList( ".index/", ".indexer/", "commons-lang/", "net/", "org/" );
Document document = Jsoup.parse( response.getContentAsString() );
Elements elements = document.getElementsByTag( "a" );

View File

@ -87,7 +87,8 @@ public class RepositoryServletRepositoryGroupTest
{
super.setUp();
String appserverBase = System.getProperty( "appserver.base" );
String appserverBase = getAppserverBase().toAbsolutePath().toString();
log.debug( "Appserver Base {}", appserverBase );
Configuration configuration = archivaConfiguration.getConfiguration();
@ -110,11 +111,7 @@ public class RepositoryServletRepositoryGroupTest
ManagedRepositoryConfiguration managedRepositoryConfiguration =
createManagedRepository( MANAGED_REPO_INVALID, "Invalid Test Repo", repoRootInvalid, true );
configuration.addManagedRepository(
createManagedRepository( MANAGED_REPO_FIRST, "First Test Repo", repoRootFirst, true ) );
configuration.addManagedRepository( managedRepositoryConfiguration );
configuration.addManagedRepository(
createManagedRepository( MANAGED_REPO_LAST, "Last Test Repo", repoRootLast, true ) );
List<String> invalidManagedRepoIds = new ArrayList<>();
invalidManagedRepoIds.add( MANAGED_REPO_FIRST );
@ -133,6 +130,7 @@ public class RepositoryServletRepositoryGroupTest
startRepository();
}
@Override
@After
public void tearDown()
@ -141,6 +139,8 @@ public class RepositoryServletRepositoryGroupTest
setupCleanRepo( repoRootFirst );
setupCleanRepo( repoRootLast );
repositoryRegistry.destroy();
super.tearDown();
String appserverBase = System.getProperty( "appserver.base" );
@ -209,22 +209,6 @@ public class RepositoryServletRepositoryGroupTest
assertResponseNotFound( response );
}
/*
* Test Case 3.a
*/
@Test
public void testGetInvalidManagedRepositoryInGroupReturnNotFound()
throws Exception
{
String resourceName = "dummy/dummy-no-resource/1.0/dummy-no-resource-1.0.txt";
WebRequest request = new GetMethodWebRequest(
"http://machine.com/repository/" + REPO_GROUP_WITH_INVALID_REPOS + "/" + resourceName );
WebResponse response = getServletUnitClient().getResponse( request );
assertResponseInternalServerError( response );
}
/*
* Test Case 4
*/
@ -274,6 +258,7 @@ public class RepositoryServletRepositoryGroupTest
WebResponse response = getServletUnitClient().getResource( request );
Path returnedMetadata = getProjectBase().resolve( "target/test-classes/retrievedMetadataFile.xml" );
System.out.println( response.getContentAsString() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( returnedMetadata, Charset.defaultCharset(), response.getContentAsString() );
ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( returnedMetadata );
@ -300,7 +285,7 @@ public class RepositoryServletRepositoryGroupTest
assertResponseOK( response );
assertThat( response.getContentAsString() )
.isEqualTo( "add113b0d7f8c6adb92a5015a7a3701081edf998 maven-metadata-group-with-valid-repos.xml" );
.startsWith( "add113b0d7f8c6adb92a5015a7a3701081edf998" );
// request the md5 checksum of the metadata
request = new GetMethodWebRequest( "http://machine.com/repository/" + REPO_GROUP_WITH_VALID_REPOS + "/dummy/"
@ -310,7 +295,7 @@ public class RepositoryServletRepositoryGroupTest
assertResponseOK( response );
assertThat( response.getContentAsString() )
.isEqualTo( "5b85ea4aa5f52bb76760041a52f98de8 maven-metadata-group-with-valid-repos.xml" );
.startsWith( "5b85ea4aa5f52bb76760041a52f98de8" );
}
// MRM-901

View File

@ -43,6 +43,7 @@
<logger name="org.apache.archiva.rest.services" level="info"/>
<logger name="org.springframework" level="info"/>
<logger name="org.apache.commons.configuration" level="info"/>
<logger name="org.apache.archiva.indexer.maven" level="debug" />
<root level="info">
<appender-ref ref="console"/>