start legacy discoverer tests

PR: MRM-9

git-svn-id: https://svn.apache.org/repos/asf/maven/repository-manager/trunk@349625 13f79535-47bb-0310-9956-ffa450edef68
Brett Porter 2005-11-29 04:32:11 +00:00
parent 9c694d371a
commit 5cfe8a688e
5 changed files with 396 additions and 252 deletions

AbstractArtifactDiscoverer.java

@@ -50,9 +50,9 @@ public abstract class AbstractArtifactDiscoverer
private List kickedOutPaths = new ArrayList();
/**
- * Scan the repository for artifact paths
+ * Scan the repository for artifact paths.
*
* @todo operate on better parameters, more collections, less arrays
* @todo replace blacklisted patterns by an artifact filter
*/
protected String[] scanForArtifactPaths( File repositoryBase, String blacklistedPatterns )
{
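
For context, scanForArtifactPaths( repositoryBase, blacklistedPatterns ) walks the repository tree and filters paths before any artifact parsing happens. A minimal sketch of that walk, assuming the plexus-utils DirectoryScanner and a comma-separated blacklist of Ant-style patterns (the class name ArtifactPathScanner and the split on ',' are illustrative assumptions, not the committed implementation; the real discoverer also applies its own standard excludes such as KEYS, which the new tests below exercise):

import org.codehaus.plexus.util.DirectoryScanner;
import org.codehaus.plexus.util.StringUtils;

import java.io.File;

public class ArtifactPathScanner
{
    public String[] scanForArtifactPaths( File repositoryBase, String blacklistedPatterns )
    {
        DirectoryScanner scanner = new DirectoryScanner();
        scanner.setBasedir( repositoryBase );
        scanner.setIncludes( new String[]{"**/**"} );

        if ( StringUtils.isNotEmpty( blacklistedPatterns ) )
        {
            // Assumption: the blacklist is a comma-separated list of Ant-style patterns.
            scanner.setExcludes( blacklistedPatterns.split( "," ) );
        }

        // Skips SCM metadata such as .svn and CVS directories.
        scanner.addDefaultExcludes();
        scanner.scan();

        // Relative paths, later handed to buildArtifact() one by one.
        return scanner.getIncludedFiles();
    }
}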

LegacyArtifactDiscoverer.java

@@ -64,261 +64,248 @@ public class LegacyArtifactDiscoverer
private Artifact buildArtifact( String path )
{
try
{
StringTokenizer tokens = new StringTokenizer( path, "/\\" );
int numberOfTokens = tokens.countTokens();
if ( numberOfTokens != 3 )
{
addKickedOutPath( path );
return null;
}
String groupId = tokens.nextToken();
String type = tokens.nextToken();
if ( type.endsWith( "s" ) )
{
type = type.substring( 0, type.length() - 1 );
}
// contains artifactId, version, classifier, and extension.
String avceGlob = tokens.nextToken();
LinkedList avceTokenList = new LinkedList();
StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" );
while ( avceTokenizer.hasMoreTokens() )
{
avceTokenList.addLast( avceTokenizer.nextToken() );
}
String lastAvceToken = (String) avceTokenList.removeLast();
if ( lastAvceToken.endsWith( ".tar.gz" ) )
{
type = "distribution-tgz";
lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() );
avceTokenList.addLast( lastAvceToken );
}
else if ( lastAvceToken.endsWith( "sources.jar" ) )
{
type = "java-source";
lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
avceTokenList.addLast( lastAvceToken );
}
else if ( lastAvceToken.endsWith( ".zip" ) )
{
type = "distribution-zip";
lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() );
avceTokenList.addLast( lastAvceToken );
}
else
{
int extPos = lastAvceToken.lastIndexOf( '.' );
if ( extPos > 0 )
{
String ext = lastAvceToken.substring( extPos + 1 );
if ( type.equals( ext ) )
{
lastAvceToken = lastAvceToken.substring( 0, extPos );
avceTokenList.addLast( lastAvceToken );
}
else
{
addKickedOutPath( path );
return null;
}
}
}
// TODO: this is obscene - surely a better way?
String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|" +
"([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|" +
"([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|" +
"([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|" +
"([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|" +
"([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|" +
"([Nn][Ii][Gg][Hh][Tt][Ll][Yy])|" + "([AaBb][_.0-9]*)";
// let's discover the version, and whatever's leftover will be either
// a classifier, or part of the artifactId, depending on position.
// Since version is at the end, we have to move in from the back.
Collections.reverse( avceTokenList );
StringBuffer classifierBuffer = new StringBuffer();
StringBuffer versionBuffer = new StringBuffer();
boolean firstVersionTokenEncountered = false;
boolean firstToken = true;
int tokensIterated = 0;
for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
{
String token = (String) it.next();
boolean tokenIsVersionPart = token.matches( validVersionParts );
StringBuffer bufferToUpdate;
// NOTE: logic in code is reversed, since we're peeling off the back
// Any token after the last versionPart will be in the classifier.
// Any token UP TO first non-versionPart is part of the version.
if ( !tokenIsVersionPart )
{
if ( firstVersionTokenEncountered )
{
break;
}
else
{
bufferToUpdate = classifierBuffer;
}
}
else
{
firstVersionTokenEncountered = true;
bufferToUpdate = versionBuffer;
}
if ( firstToken )
{
firstToken = false;
}
else
{
bufferToUpdate.insert( 0, '-' );
}
bufferToUpdate.insert( 0, token );
tokensIterated++;
}
getLogger().debug( "After parsing loop, state of buffers:\no Version Buffer: \'" + versionBuffer +
"\'\no Classifier Buffer: \'" + classifierBuffer + "\'\no Number of Tokens Iterated: " +
tokensIterated );
// Now, restore the proper ordering so we can build the artifactId.
Collections.reverse( avceTokenList );
getLogger().debug(
"Before repairing bad version and/or cleaning up used tokens, avce token list is:\n" + avceTokenList );
// if we didn't find a version, then punt. Use the last token
// as the version, and set the classifier empty.
if ( versionBuffer.length() < 1 )
{
if ( avceTokenList.size() > 1 )
{
int lastIdx = avceTokenList.size() - 1;
versionBuffer.append( avceTokenList.get( lastIdx ) );
avceTokenList.remove( lastIdx );
}
else
{
getLogger().debug( "Cannot parse version from artifact path: \'" + path + "\'." );
getLogger().debug(
"artifact-version-classifier-extension remaining tokens is: \'" + avceTokenList + "\'" );
}
classifierBuffer.setLength( 0 );
}
else
{
getLogger().debug( "Removing " + tokensIterated + " tokens from avce token list." );
// if everything is kosher, then pop off all the classifier and
// version tokens, leaving the naked artifact id in the list.
avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - tokensIterated ) );
}
getLogger().debug( "Now, remainder of avce token list is:\n" + avceTokenList );
StringBuffer artifactIdBuffer = new StringBuffer();
firstToken = true;
for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
{
String token = (String) it.next();
if ( firstToken )
{
firstToken = false;
}
else
{
artifactIdBuffer.append( '-' );
}
artifactIdBuffer.append( token );
}
String artifactId = artifactIdBuffer.toString();
int lastVersionCharIdx = versionBuffer.length() - 1;
if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' )
{
versionBuffer.setLength( lastVersionCharIdx );
}
String version = versionBuffer.toString();
if ( version.length() < 1 )
{
version = null;
}
getLogger().debug( "Extracted artifact information from path:\n" + "groupId: \'" + groupId + "\'\n" +
"artifactId: \'" + artifactId + "\'\n" + "type: \'" + type + "\'\n" + "version: \'" + version + "\'\n" +
"classifier: \'" + classifierBuffer + "\'" );
Artifact result = null;
if ( classifierBuffer.length() > 0 )
{
getLogger().debug( "Creating artifact with classifier." );
result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
classifierBuffer.toString() );
}
else
{
if ( StringUtils.isNotEmpty( groupId ) && StringUtils.isNotEmpty( artifactId ) &&
StringUtils.isNotEmpty( version ) && StringUtils.isNotEmpty( type ) )
{
result =
artifactFactory.createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type );
}
}
// getLogger().debug(
// "Resulting artifact is: " + result + " and has classifier of: "
// + result.getClassifier() + "\n\n" );
return result;
}
catch ( RuntimeException e )
{
getLogger().error( "While parsing artifact path: \'" + path + "\'...", e );
throw e;
}
}
StringTokenizer tokens = new StringTokenizer( path, "/\\" );
int numberOfTokens = tokens.countTokens();
if ( numberOfTokens != 3 )
{
addKickedOutPath( path );
return null;
}
String groupId = tokens.nextToken();
String type = tokens.nextToken();
if ( type.endsWith( "s" ) )
{
type = type.substring( 0, type.length() - 1 );
}
// contains artifactId, version, classifier, and extension.
String avceGlob = tokens.nextToken();
LinkedList avceTokenList = new LinkedList();
StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" );
while ( avceTokenizer.hasMoreTokens() )
{
avceTokenList.addLast( avceTokenizer.nextToken() );
}
String lastAvceToken = (String) avceTokenList.removeLast();
if ( lastAvceToken.endsWith( ".tar.gz" ) )
{
type = "distribution-tgz";
lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() );
avceTokenList.addLast( lastAvceToken );
}
else if ( lastAvceToken.endsWith( "sources.jar" ) )
{
type = "java-source";
lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
avceTokenList.addLast( lastAvceToken );
}
else if ( lastAvceToken.endsWith( ".zip" ) )
{
type = "distribution-zip";
lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() );
avceTokenList.addLast( lastAvceToken );
}
else
{
int extPos = lastAvceToken.lastIndexOf( '.' );
if ( extPos > 0 )
{
String ext = lastAvceToken.substring( extPos + 1 );
if ( type.equals( ext ) )
{
lastAvceToken = lastAvceToken.substring( 0, extPos );
avceTokenList.addLast( lastAvceToken );
}
else
{
addKickedOutPath( path );
return null;
}
}
}
// TODO: this is obscene - surely a better way?
String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|" +
"([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|" +
"([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|" +
"([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|" +
"([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|" +
"([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|" +
"([Nn][Ii][Gg][Hh][Tt][Ll][Yy])|" + "([AaBb][_.0-9]*)";
// let's discover the version, and whatever's leftover will be either
// a classifier, or part of the artifactId, depending on position.
// Since version is at the end, we have to move in from the back.
Collections.reverse( avceTokenList );
StringBuffer classifierBuffer = new StringBuffer();
StringBuffer versionBuffer = new StringBuffer();
boolean firstVersionTokenEncountered = false;
boolean firstToken = true;
int tokensIterated = 0;
for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
{
String token = (String) it.next();
boolean tokenIsVersionPart = token.matches( validVersionParts );
StringBuffer bufferToUpdate;
// NOTE: logic in code is reversed, since we're peeling off the back
// Any token after the last versionPart will be in the classifier.
// Any token UP TO first non-versionPart is part of the version.
if ( !tokenIsVersionPart )
{
if ( firstVersionTokenEncountered )
{
break;
}
else
{
bufferToUpdate = classifierBuffer;
}
}
else
{
firstVersionTokenEncountered = true;
bufferToUpdate = versionBuffer;
}
if ( firstToken )
{
firstToken = false;
}
else
{
bufferToUpdate.insert( 0, '-' );
}
bufferToUpdate.insert( 0, token );
tokensIterated++;
}
getLogger().debug( "After parsing loop, state of buffers:\no Version Buffer: \'" + versionBuffer +
"\'\no Classifier Buffer: \'" + classifierBuffer + "\'\no Number of Tokens Iterated: " + tokensIterated );
// Now, restore the proper ordering so we can build the artifactId.
Collections.reverse( avceTokenList );
getLogger().debug(
"Before repairing bad version and/or cleaning up used tokens, avce token list is:\n" + avceTokenList );
// if we didn't find a version, then punt. Use the last token
// as the version, and set the classifier empty.
if ( versionBuffer.length() < 1 )
{
if ( avceTokenList.size() > 1 )
{
int lastIdx = avceTokenList.size() - 1;
versionBuffer.append( avceTokenList.get( lastIdx ) );
avceTokenList.remove( lastIdx );
}
else
{
getLogger().debug( "Cannot parse version from artifact path: \'" + path + "\'." );
getLogger().debug(
"artifact-version-classifier-extension remaining tokens is: \'" + avceTokenList + "\'" );
}
classifierBuffer.setLength( 0 );
}
else
{
getLogger().debug( "Removing " + tokensIterated + " tokens from avce token list." );
// if everything is kosher, then pop off all the classifier and
// version tokens, leaving the naked artifact id in the list.
avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - tokensIterated ) );
}
getLogger().debug( "Now, remainder of avce token list is:\n" + avceTokenList );
StringBuffer artifactIdBuffer = new StringBuffer();
firstToken = true;
for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
{
String token = (String) it.next();
if ( firstToken )
{
firstToken = false;
}
else
{
artifactIdBuffer.append( '-' );
}
artifactIdBuffer.append( token );
}
String artifactId = artifactIdBuffer.toString();
int lastVersionCharIdx = versionBuffer.length() - 1;
if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' )
{
versionBuffer.setLength( lastVersionCharIdx );
}
String version = versionBuffer.toString();
if ( version.length() < 1 )
{
version = null;
}
getLogger().debug( "Extracted artifact information from path:\n" + "groupId: \'" + groupId + "\'\n" +
"artifactId: \'" + artifactId + "\'\n" + "type: \'" + type + "\'\n" + "version: \'" + version + "\'\n" +
"classifier: \'" + classifierBuffer + "\'" );
Artifact result = null;
if ( classifierBuffer.length() > 0 )
{
getLogger().debug( "Creating artifact with classifier." );
result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
classifierBuffer.toString() );
}
else
{
if ( StringUtils.isNotEmpty( groupId ) && StringUtils.isNotEmpty( artifactId ) &&
StringUtils.isNotEmpty( version ) && StringUtils.isNotEmpty( type ) )
{
result = artifactFactory.createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type );
}
}
result.setFile( new File( path ) );
return result;
}
}
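
To make the parsing above concrete, here is a standalone walk-through of how a legacy path decomposes: the first path segment is the groupId, the second is the type directory with a trailing "s" stripped ("jars" becomes "jar"), and the filename is split on '-', with trailing tokens that look like version parts forming the version and the remainder the artifactId. LegacyPathExample is a hypothetical illustration that uses a deliberately simplified stand-in for the validVersionParts pattern and ignores classifiers and the .tar.gz/.zip/sources.jar special cases:

import java.util.StringTokenizer;

public class LegacyPathExample
{
    public static void main( String[] args )
    {
        String path = args.length > 0 ? args[0] : "org.apache.maven/jars/test-1.0-SNAPSHOT.jar";

        StringTokenizer tokens = new StringTokenizer( path, "/\\" );
        String groupId = tokens.nextToken();                  // "org.apache.maven"
        String type = tokens.nextToken();                     // "jars" -> "jar"
        if ( type.endsWith( "s" ) )
        {
            type = type.substring( 0, type.length() - 1 );
        }

        String filename = tokens.nextToken();                 // "test-1.0-SNAPSHOT.jar"
        String avce = filename.substring( 0, filename.lastIndexOf( '.' ) );

        // Walk the '-' separated tokens from the back: tokens matching the
        // (simplified) version pattern belong to the version, the rest is the artifactId.
        String[] parts = avce.split( "-" );
        int firstVersionToken = parts.length;
        while ( firstVersionToken > 0
            && parts[firstVersionToken - 1].matches( "([0-9][_.0-9a-zA-Z]*)|([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])" ) )
        {
            firstVersionToken--;
        }

        StringBuffer artifactId = new StringBuffer();
        StringBuffer version = new StringBuffer();
        for ( int i = 0; i < parts.length; i++ )
        {
            StringBuffer target = i < firstVersionToken ? artifactId : version;
            if ( target.length() > 0 )
            {
                target.append( '-' );
            }
            target.append( parts[i] );
        }

        // Prints: org.apache.maven:test:1.0-SNAPSHOT:jar
        System.out.println( groupId + ":" + artifactId + ":" + version + ":" + type );
    }
}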

LegacyArtifactDiscovererTest.java

@@ -0,0 +1,157 @@
package org.apache.maven.repository.discovery;
/*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.codehaus.plexus.PlexusTestCase;
import java.io.File;
import java.util.Iterator;
import java.util.List;
/**
* Test the legacy artifact discoverer.
*
* @author <a href="mailto:brett@apache.org">Brett Porter</a>
* @version $Id$
*/
public class LegacyArtifactDiscovererTest
extends PlexusTestCase
{
private ArtifactDiscoverer discoverer;
private ArtifactFactory factory;
private File repositoryLocation;
protected void setUp()
throws Exception
{
super.setUp();
discoverer = (ArtifactDiscoverer) lookup( ArtifactDiscoverer.ROLE, "legacy" );
factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
repositoryLocation = getTestFile( "src/test/legacy-repository" );
}
public void testDefaultExcludes()
{
List artifacts = discoverer.discoverArtifacts( repositoryLocation, null, false );
assertNotNull( "Check artifacts not null", artifacts );
boolean found = false;
for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
{
String path = (String) i.next();
found = path.indexOf( ".svn" ) >= 0;
}
assertTrue( "Check exclusion was found", found );
for ( Iterator i = artifacts.iterator(); i.hasNext(); )
{
Artifact a = (Artifact) i.next();
assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
}
}
public void testStandardExcludes()
{
List artifacts = discoverer.discoverArtifacts( repositoryLocation, null, false );
assertNotNull( "Check artifacts not null", artifacts );
boolean found = false;
for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
{
String path = (String) i.next();
found = path.equals( "KEYS" );
}
assertTrue( "Check exclusion was found", found );
for ( Iterator i = artifacts.iterator(); i.hasNext(); )
{
Artifact a = (Artifact) i.next();
assertFalse( "Check not KEYS", a.getFile().getName().equals( "KEYS" ) );
}
}
public void testBlacklistedExclude()
{
List artifacts = discoverer.discoverArtifacts( repositoryLocation, "javax.sql/**", false );
assertNotNull( "Check artifacts not null", artifacts );
boolean found = false;
for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
{
String path = (String) i.next();
found = path.replace( '\\', '/' ).equals( "javax.sql/jars/jdbc-2.0.jar" );
}
assertTrue( "Check exclusion was found", found );
assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
}
/*
public void testKickoutWithShortPath()
{
List artifacts = discoverer.discoverArtifacts( repositoryLocation, null, false );
assertNotNull( "Check artifacts not null", artifacts );
boolean found = false;
for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
{
String path = (String) i.next();
found = path.replace( '\\', '/' ).equals( "invalid/invalid-1.0.jar" );
}
assertTrue( "Check exclusion was found", found );
for ( Iterator i = artifacts.iterator(); i.hasNext(); )
{
Artifact a = (Artifact) i.next();
assertFalse( "Check not invalid-1.0.jar", a.getFile().getName().equals( "invalid-1.0.jar" ) );
}
}
public void testSnapshotInclusion()
{
List artifacts = discoverer.discoverArtifacts( repositoryLocation, null, true );
assertNotNull( "Check artifacts not null", artifacts );
assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
assertTrue( "Check snapshot included",
artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ) ) );
}
public void testSnapshotExclusion()
{
List artifacts = discoverer.discoverArtifacts( repositoryLocation, null, false );
assertNotNull( "Check artifacts not null", artifacts );
assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
assertFalse( "Check snapshot included",
artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ) ) );
}
*/
private Artifact createArtifact( String groupId, String artifactId, String version )
{
return factory.createArtifact( groupId, artifactId, version, null, "jar" );
}
}
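
The new test looks up the discoverer with the "legacy" role hint and points it at src/test/legacy-repository. The fixture itself is not part of this excerpt, but the paths the assertions reference imply a layout roughly like the following (inferred from the test code, not the actual fixture listing):

src/test/legacy-repository/
    KEYS                                          (standard exclude)
    .svn/                                         (default exclude)
    javax.sql/jars/jdbc-2.0.jar                   (blacklisted via "javax.sql/**")
    org.apache.maven/jars/test-1.0-SNAPSHOT.jar   (snapshot inclusion/exclusion)
    invalid/invalid-1.0.jar                       (kicked out: too few path segments)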