Removed dynamic injection of the POM build section during project construction. Deleted the AspectJ aspects.

git-svn-id: https://svn.apache.org/repos/asf/maven/components/trunk@694109 13f79535-47bb-0310-9956-ffa450edef68
Britton Isbell 2008-09-11 04:16:40 +00:00
parent 38cc075d65
commit cacc78b1d0
15 changed files with 0 additions and 2176 deletions
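For context, the "dynamic injection of the POM build section" removed here is the pattern visible in the DefaultMavenProjectBuilder hunks near the end of this diff: the un-interpolated ("dynamic") build was cloned aside, the model was interpolated, and the interpolated plugin coordinates were merged back before the dynamic build was reinstated on the model. The following is a minimal sketch of that removed flow, reconstructed from the deleted lines; the wrapping method name is illustrative only, while ModelUtils.cloneBuild, modelInterpolator.interpolate, getLogger and mergeDeterministicBuildElements are the calls that appear in the deleted code.

// Hedged sketch, not the actual DefaultMavenProjectBuilder method.
private Model interpolateKeepingDynamicBuild( Model model, File projectDir, ProjectBuilderConfiguration config )
    throws ModelInterpolationException
{
    Build dynamicBuild = model.getBuild();
    if ( dynamicBuild != null )
    {
        // interpolate a clone so the original expressions (e.g. ${project.groupId}) survive
        model.setBuild( ModelUtils.cloneBuild( dynamicBuild ) );
    }

    model = modelInterpolator.interpolate( model, projectDir, config, getLogger().isDebugEnabled() );

    if ( dynamicBuild != null && model.getBuild() != null )
    {
        // copy the interpolated groupId/artifactId/version/dependencies of each plugin
        // back onto the dynamic build, then reinstate the dynamic build on the model
        mergeDeterministicBuildElements( model.getBuild(), dynamicBuild );
        model.setBuild( dynamicBuild );
    }
    return model;
}

After this commit the model is interpolated directly, with no clone-and-merge step, and the aspectj-maven-plugin executions and aspectjrt dependency that wove the compatibility and error-reporting aspects are removed from the POMs in this diff.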

View File

@@ -104,12 +104,6 @@ under the License.
<version>1.0-alpha-1</version>
<scope>test</scope>
</dependency>
<!-- Needed for backward compat aspect. -->
<dependency>
<groupId>aspectj</groupId>
<artifactId>aspectjrt</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
@@ -129,19 +123,6 @@ under the License.
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>aspectj-maven-plugin</artifactId>
<executions>
<execution>
<id>weave-compat</id>
<goals>
<goal>compile</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -1,82 +0,0 @@
package org.apache.maven;
import org.apache.maven.execution.MavenExecutionRequest;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.PluginParameterExpressionEvaluator;
import org.codehaus.plexus.classworlds.realm.ClassRealm;
import org.apache.maven.plugin.Mojo;
import org.apache.maven.plugin.DefaultPluginManager;
import org.apache.maven.plugin.descriptor.PluginDescriptor;
import org.aspectj.lang.JoinPoint;
import java.util.Iterator;
import java.util.List;
public aspect CoreDebuggingAspect
{
// after() throwing ( RuntimeException e ):
//// adviceexecution( )
//// && args( jp )
// call( * *..*.*(..))
// && !within( CoreDebuggingAspect+ )
// && !handler( * )
// {
// System.out.println( "Error: " + e.getClass().getName() + "\nwas in join point: " + thisJoinPoint.toLongString() + "\n(at: " + thisJoinPoint.getSourceLocation() + ")" );
// }
// after( MavenExecutionRequest request ) returning( List projects ):
// call( List DefaultMaven.getProjects( MavenExecutionRequest ) )
// && args( request )
// {
// System.out.println( "Got projects-list of size " + ( projects == null ? "null" : "" + projects.size() ) + ":\n\n" + projects );
// }
// private ClassRealm pluginRealm;
//
// after() returning( ClassRealm realm ):
// call( ClassRealm PluginDescriptor.getClassRealm() )
// && cflow( execution( * DefaultPluginManager.executeMojo( .. ) ) )
// {
// pluginRealm = realm;
// }
//
// after():
// execution( * DefaultPluginManager.executeMojo( .. ) )
// {
// pluginRealm = null;
// }
//
// void around():
// call( void Mojo+.execute( .. ) )
// {
// try
// {
// proceed();
// }
// catch( Error err )
// {
// System.out.println( "Plugin realm was " + pluginRealm + ":\n\n\n" );
// pluginRealm.display();
//
// throw err;
// }
// }
//
// after() returning( List reports ):
// cflow( execution( * PluginParameterExpressionEvaluator.evaluate( .. ) ) )
// && call( List MavenSession.getReports() )
// {
// System.out.println( "Injecting reports for ${reports} expression.\n\n" );
// if ( reports != null && !reports.isEmpty() )
// {
// for ( Iterator it = reports.iterator(); it.hasNext(); )
// {
// Object report = it.next();
// System.out.println( "Report: " + report + " has classloader:\n" + report.getClass().getClassLoader() );
// }
// }
// System.out.println( "\n\n" );
// }
}

View File

@@ -1,388 +0,0 @@
package org.apache.maven.compat;
import org.apache.maven.lifecycle.binding.DefaultLifecycleBindingManager;
import org.apache.maven.lifecycle.binding.LifecycleBindingManager;
import org.codehaus.plexus.PlexusContainer;
import org.apache.maven.DefaultMaven;
import org.apache.maven.lifecycle.DefaultLifecycleExecutor;
import org.apache.maven.lifecycle.MojoBindingUtils;
import org.apache.maven.lifecycle.LifecycleUtils;
import org.apache.maven.lifecycle.NoSuchPhaseException;
import org.apache.maven.execution.MavenExecutionRequest;
import org.apache.maven.execution.MavenExecutionResult;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.extension.DefaultExtensionManager;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.metadata.ResolutionGroup;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactNotFoundException;
import org.apache.maven.artifact.resolver.ArtifactResolutionException;
import org.apache.maven.artifact.versioning.DefaultArtifactVersion;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
import org.apache.maven.artifact.versioning.VersionRange;
import org.apache.maven.lifecycle.model.MojoBinding;
import org.apache.maven.model.Plugin;
import org.apache.maven.plugin.DefaultPluginManager;
import org.apache.maven.plugin.InvalidPluginException;
import org.apache.maven.plugin.PluginManager;
import org.apache.maven.plugin.PluginManagerException;
import org.apache.maven.plugin.PluginNotFoundException;
import org.apache.maven.plugin.descriptor.PluginDescriptor;
import org.apache.maven.plugin.version.PluginVersionNotFoundException;
import org.apache.maven.plugin.version.PluginVersionResolutionException;
import org.apache.maven.project.MavenProject;
import org.apache.maven.realm.MavenRealmManager;
import org.apache.maven.settings.MavenSettingsBuilder;
import org.apache.maven.settings.DefaultMavenSettingsBuilder;
import org.apache.maven.settings.Settings;
import org.codehaus.plexus.classworlds.realm.ClassRealm;
import org.codehaus.plexus.classworlds.realm.NoSuchRealmException;
import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.codehaus.plexus.logging.Logger;
import java.util.List;
import java.io.IOException;
import java.util.Iterator;
import java.util.Set;
import java.util.LinkedHashSet;
public privileged aspect Maven20xCompatAspect
{
// GRAB Session as soon as it's constructed.
private MavenSession session;
// GRAB the request when it's passed into a method that returns a corresponding result.
// NOTE: We'll use this in multiple places below...
private MavenExecutionRequest request;
// Grab this so we have a voice!
private Logger logger;
// used for injecting plexus-utils into extension and plugin artifact sets.
private VersionRange vr = null;
private Artifact plexusUtilsArtifact = null;
private pointcut mavenEmbedderStop():
execution( * org.apache.maven.embedder.MavenEmbedder.stop( .. ) );
// TODO: There must be a more elegant way to release these resources than depending on MavenEmbedder.stop().
after(): mavenEmbedderStop()
{
session = null;
request = null;
logger = null;
vr = null;
plexusUtilsArtifact = null;
}
// pointcut to avoid recursive matching on behavior injected by this aspect.
private pointcut notHere(): !within( Maven20xCompatAspect );
private pointcut sessionCreation( MavenSession session ):
execution( public MavenSession+.new(..) )
&& this( session )
&& notHere();
// capture the session instance.
after( MavenSession session ): sessionCreation( session )
{
if ( logger != null && logger.isDebugEnabled() )
{
logger.debug( "Capturing session for backward compatibility aspect: " + session );
}
this.session = session;
}
// Re-Introduce old verifyPlugin(..) API.
public PluginDescriptor PluginManager.verifyPlugin( Plugin plugin,
MavenProject project,
Settings settings,
ArtifactRepository localRepository )
throws ArtifactResolutionException, ArtifactNotFoundException, PluginNotFoundException,
PluginVersionResolutionException, InvalidPluginException, PluginManagerException,
PluginVersionNotFoundException
{
// this will always be diverted, so no need to do anything.
throw new IllegalStateException( "This introduced method should ALWAYS be intercepted by backward compatibility aspect." );
}
// USE Session to compensate for old verifyPlugin(..) API.
private pointcut verifyPlugin( Plugin plugin, MavenProject project, PluginManager manager ):
execution( PluginDescriptor PluginManager+.verifyPlugin( Plugin, MavenProject, Settings, ArtifactRepository+ ) )
&& args( plugin, project, .. )
&& this( manager );
// redirect the old verifyPlugin(..) call to the new one, using the captured session instance above.
PluginDescriptor around( Plugin plugin,
MavenProject project,
PluginManager manager )
throws ArtifactResolutionException, ArtifactNotFoundException, PluginNotFoundException,
PluginVersionResolutionException, InvalidPluginException, PluginManagerException,
PluginVersionNotFoundException:
verifyPlugin( plugin, project, manager )
{
if ( logger != null && logger.isDebugEnabled() )
{
logger.debug( "Diverting legacy PluginManager.verifyPlugin(..) call to replacement method using session: " + session );
}
return manager.verifyPlugin( plugin, project, session );
}
private pointcut getPluginDescriptorForPrefix( String prefix, PluginManager manager ):
execution( public PluginDescriptor PluginManager+.getPluginDescriptorForPrefix( String ) )
&& args( prefix )
&& this( manager )
&& notHere();
PluginDescriptor around( String prefix, PluginManager manager ): getPluginDescriptorForPrefix( prefix, manager )
{
// TODO: Implement Me!
throw new UnsupportedOperationException( "This method has not yet been implemented in Maven's backward-compatibility library." );
}
public PluginDescriptor PluginManager.getPluginDescriptorForPrefix( String prefix )
{
return null;
}
// Intercept retrieval of artifact dependencies of an extension, inject plexus-utils if it's not there.
private pointcut extDepArtifactsResolved( DefaultExtensionManager mgr ):
call( public Set<Artifact> ResolutionGroup+.getArtifacts() )
&& within( DefaultExtensionManager+ )
&& this( mgr )
&& notHere();
// We use the same hack here to make sure that plexus 1.1 is available for extensions that do
// not declare plexus-utils but need it. MNG-2900
Set<Artifact> around( DefaultExtensionManager mgr ): extDepArtifactsResolved( mgr )
{
Set<Artifact> result = proceed( mgr );
result = checkPlexusUtils( result, mgr.artifactFactory );
return result;
}
// Intercept retrieval of artifact dependencies of a plugin, inject plexus-utils if it's not there.
private pointcut pluginDepArtifactsResolved( DefaultPluginManager mgr ):
call( public Set<Artifact> ResolutionGroup+.getArtifacts() )
&& cflow( execution( List<Artifact> DefaultPluginManager+.getPluginArtifacts(..) ) )
&& this( mgr )
&& notHere();
Set<Artifact> around( DefaultPluginManager mgr ): pluginDepArtifactsResolved( mgr )
{
Set<Artifact> result = proceed( mgr );
result = checkPlexusUtils( result, mgr.artifactFactory );
return result;
}
private pointcut methodsTakingRequest( MavenExecutionRequest request ):
execution( MavenExecutionResult *.*( MavenExecutionRequest ) )
&& !withincode( * *.*( MavenExecutionRequest ) )
&& args( request )
&& notHere();
// capture the request instance before it's passed into any method that returns a corresponding MavenExecutionResult.
Object around( MavenExecutionRequest request ): methodsTakingRequest( request )
{
this.request = request;
try
{
return proceed( request );
}
finally
{
this.request = null;
}
}
// Re-Introduce old buildSettings() API.
public Settings MavenSettingsBuilder.buildSettings()
throws IOException, XmlPullParserException
{
return null;
}
public Settings DefaultMavenSettingsBuilder.buildSettings()
throws IOException, XmlPullParserException
{
return null;
}
// USE Request to compensate for old buildSettings() API.
private pointcut buildSettings( MavenSettingsBuilder builder ):
execution( public Settings MavenSettingsBuilder+.buildSettings() )
&& target( builder );
// redirect old buildSettings() call to the new one, using the request captured above.
Settings around( MavenSettingsBuilder builder )
throws IOException, XmlPullParserException:
buildSettings( builder )
{
return builder.buildSettings( request );
}
private pointcut pluginManager( DefaultPluginManager manager ):
execution( * DefaultPluginManager.*( .. ) )
&& this( manager );
private pointcut pluginRealmCreation( Plugin plugin, DefaultPluginManager manager ):
call( ClassRealm MavenRealmManager+.createPluginRealm( Plugin, Artifact, List, .. ) )
&& cflow( pluginManager( manager ) )
&& args( plugin, .. );
// Add various imports for Xpp3 stuff back into the core realm every time a plugin realm is created.
ClassRealm around( Plugin plugin, DefaultPluginManager manager ): pluginRealmCreation( plugin, manager )
{
ClassRealm pluginRealm = proceed( plugin, manager );
try
{
String parentRealmId = manager.container.getContainerRealm().getId();
// adding for MNG-3012 to try to work around problems with Xpp3Dom (from plexus-utils)
// spawning a ClassCastException when a mojo calls plugin.getConfiguration() from maven-model...
pluginRealm.importFrom( parentRealmId, Xpp3Dom.class.getName() );
pluginRealm.importFrom( parentRealmId, "org.codehaus.plexus.util.xml.pull" );
// Adding for MNG-2878, since maven-reporting-impl was removed from the
// internal list of artifacts managed by maven, the classloader is different
// between maven-reporting-impl and maven-reporting-api...so this resource
// is not available from the AbstractMavenReport since it uses:
// getClass().getResourceAsStream( "/default-report.xml" )
// (maven-reporting-impl version 2.0; line 134; affects: checkstyle plugin, and probably others)
pluginRealm.importFrom( parentRealmId, "/default-report.xml" );
}
catch ( NoSuchRealmException e )
{
// can't happen here. All realms are concretely resolved by now.
}
return pluginRealm;
}
before( DefaultMaven maven ):
execution( MavenExecutionResult DefaultMaven.execute( MavenExecutionRequest ) )
&& this( maven )
{
if ( this.logger == null )
{
this.logger = maven.getLogger();
}
}
private pointcut addMojoBindingCall( String phase, MojoBinding binding ):
call( void LifecycleUtils.addMojoBinding( String, MojoBinding, .. ) )
&& args( phase, binding, .. );
void around( String phase, MojoBinding binding ): addMojoBindingCall( phase, binding )
{
try
{
proceed( phase, binding );
}
catch ( NoSuchPhaseException e )
{
logger.debug( "Mojo execution: " + MojoBindingUtils.toString( binding )
+ " cannot be attached to lifecycle phase: " + phase
+ "; it does not exist. Ignoring this binding." );
}
}
// --------------------------
// UTILITIES
// --------------------------
private Set<Artifact> checkPlexusUtils( Set<Artifact> dependencyArtifacts, ArtifactFactory artifactFactory )
{
// ----------------------------------------------------------------------------
// If the plugin already declares a dependency on plexus-utils then we're all
// set as the plugin author is aware of its use. If we don't have a dependency
// on plexus-utils then we must protect users from stupid plugin authors who
// did not declare a direct dependency on plexus-utils because the version
// Maven uses is hidden from downstream use. We will also bump up
// anything below 1.1 to 1.1 as this mimics the behaviour in 2.0.5 where
// plexus-utils 1.1 was being forced into use.
// ----------------------------------------------------------------------------
if ( vr == null )
{
try
{
vr = VersionRange.createFromVersionSpec( "[1.1,)" );
}
catch ( InvalidVersionSpecificationException e )
{
// Won't happen
}
}
for ( Iterator i = dependencyArtifacts.iterator(); i.hasNext(); )
{
Artifact a = (Artifact) i.next();
if ( a.getArtifactId().equals( "plexus-utils" )
&& vr.containsVersion( new DefaultArtifactVersion( a.getVersion() ) ) )
{
return dependencyArtifacts;
}
}
// We will add plexus-utils as every plugin was getting this anyway from Maven itself. We will set the
// version to the latest version we know that works as of the 2.0.6 release. We set the scope to runtime
// as this is what's implicitly happening in 2.0.6.
if ( plexusUtilsArtifact == null )
{
plexusUtilsArtifact = artifactFactory.createArtifact( "org.codehaus.plexus",
"plexus-utils",
"1.1",
Artifact.SCOPE_RUNTIME,
"jar" );
}
Set<Artifact> result = new LinkedHashSet<Artifact>( dependencyArtifacts );
result.add( plexusUtilsArtifact );
return result;
}
// This is to support the maven-enforcer-plugin.
private List DefaultLifecycleExecutor.lifecycles;
private pointcut lifecycleExecutorExecute( DefaultLifecycleExecutor executor ):
execution( * DefaultLifecycleExecutor.execute( .. ) )
&& this( executor );
before( DefaultLifecycleExecutor executor ): lifecycleExecutorExecute( executor )
{
PlexusContainer container = executor.container;
DefaultLifecycleBindingManager bindingMgr;
try
{
bindingMgr = (DefaultLifecycleBindingManager) container.lookup( LifecycleBindingManager.ROLE, "default" );
}
catch ( ComponentLookupException e )
{
IllegalStateException err = new IllegalStateException( "Cannot lookup default role-hint for: " + LifecycleBindingManager.ROLE );
err.initCause( e );
throw err;
}
executor.lifecycles = bindingMgr.lifecycles;
}
}

View File

@@ -1,14 +0,0 @@
package org.apache.maven.errors;
public abstract aspect AbstractCoreReporterAspect
{
protected pointcut notWithinAspect():
!within( *.*Aspect+ );
protected CoreErrorReporter getReporter()
{
return CoreReporterManager.getReporter();
}
}

View File

@@ -1,168 +0,0 @@
package org.apache.maven.errors;
import org.apache.maven.project.MavenProject;
import org.apache.maven.lifecycle.LifecycleLoaderException;
import org.apache.maven.lifecycle.LifecycleSpecificationException;
import org.apache.maven.plugin.loader.PluginLoaderException;
import org.apache.maven.ProjectCycleException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.lifecycle.model.MojoBinding;
import org.apache.maven.lifecycle.LifecycleExecutor;
import org.apache.maven.lifecycle.DefaultLifecycleExecutor;
import org.apache.maven.NoGoalsSpecifiedException;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.execution.ReactorManager;
import org.apache.maven.AggregatedBuildFailureException;
import org.apache.maven.ProjectBuildFailureException;
import org.apache.maven.Maven;
import org.apache.maven.plugin.loader.PluginLoader;
import org.apache.maven.lifecycle.binding.MojoBindingFactory;
public privileged aspect BuildFailureReporterAspect
extends AbstractCoreReporterAspect
{
private pointcut within_le_execute( MavenSession session, ReactorManager reactorManager ):
withincode( void LifecycleExecutor+.execute( MavenSession, ReactorManager, .. ) )
&& args( session, reactorManager, .. );
private pointcut le_execute( MavenSession session, ReactorManager reactorManager ):
execution( void LifecycleExecutor+.execute( MavenSession, ReactorManager, .. ) )
&& args( session, reactorManager, .. );
/**
* Call stack is:
* <br/>
* <pre>
* <code>
* DefaultMaven.execute(MavenExecutionRequest)
* --&gt; DefaultLifecycleExecutor.execute(MavenSession, ReactorManager, EventDispatcher)
* &lt;-- NoGoalsSpecifiedException
* </code>
* </pre>
*/
NoGoalsSpecifiedException around( ReactorManager reactorManager ):
cflow( le_execute( MavenSession, reactorManager ) )
&& call( NoGoalsSpecifiedException.new( .. ) )
{
NoGoalsSpecifiedException err = proceed( reactorManager );
getReporter().reportNoGoalsSpecifiedException( reactorManager.getTopLevelProject(), err );
return err;
}
private pointcut aggregatedBuildFailureException_ctor( MojoBinding binding, MojoFailureException cause ):
call( AggregatedBuildFailureException.new( .., MojoBinding, MojoFailureException ) )
&& args( .., binding, cause );
/**
* Call stack is:
* <br/>
* <pre>
* <code>
* DefaultMaven.execute(MavenExecutionRequest)
* --&gt; DefaultLifecycleExecutor.execute(MavenSession, ReactorManager, EventDispatcher)
* --&gt; DefaultLifecycleExecutor.executeTaskSegments(List, ReactorManager, MavenSession, MavenProject, EventDispatcher)
* --&gt; (@aggregator plugin execution)
* &lt;---------------- AggregatedBuildFailureException
* </code>
* </pre>
*/
after( MavenSession session, MojoBinding binding, MojoFailureException cause ):
cflow( le_execute( session, ReactorManager ) )
&& aggregatedBuildFailureException_ctor( binding, cause )
{
getReporter().reportAggregatedMojoFailureException( session, binding, cause );
}
private pointcut projectBuildFailureException_ctor( MojoBinding binding, MojoFailureException cause ):
call( ProjectBuildFailureException.new( .., MojoBinding, MojoFailureException ) )
&& args( .., binding, cause );
/**
* Call stack is:
* <br/>
* <pre>
* <code>
* DefaultMaven.execute(MavenExecutionRequest)
* --&gt; DefaultLifecycleExecutor.execute(MavenSession, ReactorManager, EventDispatcher)
* --&gt; DefaultLifecycleExecutor.executeTaskSegments(List, ReactorManager, MavenSession, MavenProject, EventDispatcher)
* --&gt; (normal plugin execution)
* &lt;---------------- ProjectBuildFailureException
* </code>
* </pre>
*/
after( MavenSession session, MojoBinding binding, MojoFailureException cause ):
cflow( le_execute( session, ReactorManager ) )
&& projectBuildFailureException_ctor( binding, cause )
{
getReporter().reportProjectMojoFailureException( session, binding, cause );
}
private pointcut mvn_createReactorManager():
execution( ReactorManager Maven+.createReactorManager( .. ) );
/**
* Call stack is:
* <br/>
* <pre>
* <code>
* DefaultMaven.execute(MavenExecutionRequest)
* --&gt; DefaultMaven.createReactorManager(MavenExecutionRequest, MavenExecutionResult)
* &lt;-- ProjectCycleException
* </code>
* </pre>
*/
after( ProjectCycleException err ):
cflow( mvn_createReactorManager() )
&& execution( ProjectCycleException.new( .. ) )
&& this( err )
{
getReporter().reportProjectCycle( err );
}
private pointcut within_le_getMojoDescriptorForDirectInvocation():
withincode( * DefaultLifecycleExecutor.getMojoDescriptorForDirectInvocation( String, MavenSession, MavenProject ) );
/**
* Call stack is:
* <br/>
* <pre>
* <code>
* DefaultMaven.execute(MavenExecutionRequest)
* --&gt; DefaultLifecycleExecutor.isTaskValid(String, MavenSession, MavenProject)
* --&gt; catch( LifecycleSpecificationException )
* &lt;-- TaskValidationResult
* </code>
* </pre>
*/
after( String task, MavenSession session, MavenProject rootProject ) throwing ( LifecycleSpecificationException cause ):
within_le_getMojoDescriptorForDirectInvocation()
&& call( * MojoBindingFactory+.parseMojoBinding( String, MavenProject, MavenSession, .. ) )
&& args( task, rootProject, session, .. )
{
getReporter().reportLifecycleSpecErrorWhileValidatingTask( task, session, rootProject, cause );
}
/**
* Call stack is:
* <br/>
* <pre>
* <code>
* DefaultMaven.execute(MavenExecutionRequest)
* --&gt; DefaultLifecycleExecutor.isTaskValid(String, MavenSession, MavenProject)
* --&gt; catch( LifecycleLoaderException )
* &lt;-- TaskValidationResult
* </code>
* </pre>
*/
after( String task, MavenSession session, MavenProject rootProject ) throwing ( LifecycleLoaderException cause ):
within_le_getMojoDescriptorForDirectInvocation()
&& call( * MojoBindingFactory+.parseMojoBinding( String, MavenProject, MavenSession, .. ) )
&& args( task, rootProject, session, .. )
{
getReporter().reportLifecycleLoaderErrorWhileValidatingTask( task, session, rootProject, cause );
}
}

View File

@@ -1,19 +0,0 @@
package org.apache.maven.errors;
import org.apache.maven.errors.DefaultCoreErrorReporter;
import org.apache.maven.errors.CoreErrorReporter;
import org.aspectj.lang.reflect.SourceLocation;
public privileged aspect CoreReporterDebugAspect
{
// before( Throwable key, DefaultCoreErrorReporter reporter ):
// call( void DefaultCoreErrorReporter.registerBuildError( Throwable, .. ) )
// && args( key, .. )
// && target( reporter )
// {
// SourceLocation location = thisJoinPoint.getSourceLocation();
// System.out.println( "Registering: " + key + "\nfrom: " + location.getFileName() + ", line: " + location.getLine() + "\nreporter is: " + reporter );
// }
}

View File

@@ -1,253 +0,0 @@
package org.apache.maven.errors;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactResolutionException;
import org.apache.maven.artifact.resolver.ArtifactNotFoundException;
import org.apache.maven.artifact.resolver.ArtifactResolutionResult;
import org.apache.maven.artifact.versioning.ArtifactVersion;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
import org.apache.maven.artifact.versioning.VersionRange;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.metadata.ArtifactMetadataRetrievalException;
import org.apache.maven.execution.MavenExecutionRequest;
import org.apache.maven.plugin.PluginNotFoundException;
import org.apache.maven.plugin.version.PluginVersionNotFoundException;
import org.apache.maven.plugin.version.PluginVersionResolutionException;
import org.apache.maven.plugin.InvalidPluginException;
import org.apache.maven.plugin.PluginManager;
import org.apache.maven.plugin.PluginManagerSupport;
import org.apache.maven.plugin.PluginManagerException;
import org.apache.maven.project.ProjectBuildingException;
import org.apache.maven.model.Model;
import org.apache.maven.model.Plugin;
import org.apache.maven.extension.ExtensionScanningException;
import org.apache.maven.extension.DefaultBuildExtensionScanner;
import org.apache.maven.project.artifact.InvalidDependencyVersionException;
import org.apache.maven.project.ProjectBuilderConfiguration;
import org.apache.maven.project.interpolation.ModelInterpolator;
import org.apache.maven.project.interpolation.ModelInterpolationException;
import org.apache.maven.extension.ExtensionManagerException;
import org.apache.maven.extension.DefaultExtensionManager;
import org.apache.maven.model.Prerequisites;
import org.apache.maven.plugin.DefaultPluginManager;
import org.apache.maven.plugin.version.DefaultPluginVersionManager;
import org.apache.maven.realm.RealmManagementException;
import org.apache.maven.execution.RuntimeInformation;
import org.apache.maven.artifact.metadata.ArtifactMetadataSource;
import java.io.File;
import java.util.HashMap;
import java.util.List;
public privileged aspect ExtensionErrorReporterAspect
extends AbstractCoreReporterAspect
{
before( ProjectBuildingException cause ):
withincode( List<ArtifactRepository> DefaultBuildExtensionScanner.getInitialRemoteRepositories( ProjectBuilderConfiguration ) )
&& call( ExtensionScanningException.new( String, ProjectBuildingException ) )
&& args( *, cause )
{
getReporter().handleSuperPomBuildingError( cause );
}
private pointcut dbes_scanInternal( File pomFile, MavenExecutionRequest request ):
execution( void DefaultBuildExtensionScanner.scanInternal( File, MavenExecutionRequest, .. ) )
&& args( pomFile, request, .. );
after( File pomFile, MavenExecutionRequest request, Model model, ProjectBuilderConfiguration config )
throwing( ModelInterpolationException cause ):
cflow( dbes_scanInternal( pomFile, request ) )
&& within( DefaultBuildExtensionScanner )
&& call( Model ModelInterpolator+.interpolate( Model, File, ProjectBuilderConfiguration, .. ) )
&& args( model, *, config, .. )
{
getReporter().reportErrorInterpolatingModel( model, new HashMap( config.getExecutionProperties() ), pomFile, request, cause );
}
private pointcut dem_addExtension( Artifact extensionArtifact, Artifact projectArtifact, List remoteRepos, MavenExecutionRequest request ):
execution( * DefaultExtensionManager.addExtension( Artifact, Artifact, List, MavenExecutionRequest, .. ) )
&& args( extensionArtifact, projectArtifact, remoteRepos, request, .. );
before( Artifact extensionArtifact, Artifact projectArtifact, List remoteRepos, MavenExecutionRequest request, ArtifactMetadataRetrievalException cause ):
cflow( dem_addExtension( extensionArtifact, projectArtifact, remoteRepos, request ) )
&& call( ExtensionManagerException.new( .., ArtifactMetadataRetrievalException ) )
&& args( .., cause )
{
getReporter().reportErrorResolvingExtensionDirectDependencies( extensionArtifact, projectArtifact, remoteRepos, request, cause );
}
ExtensionManagerException around( Artifact extensionArtifact, Artifact projectArtifact, List remoteRepos, MavenExecutionRequest request, ArtifactResolutionResult resolutionResult ):
cflow( dem_addExtension( extensionArtifact, projectArtifact, remoteRepos, request ) )
&& call( ExtensionManagerException.new( .., ArtifactResolutionResult ) )
&& args( .., resolutionResult )
{
ExtensionManagerException err = proceed( extensionArtifact, projectArtifact, remoteRepos, request, resolutionResult );
getReporter().reportErrorResolvingExtensionDependencies( extensionArtifact, projectArtifact, remoteRepos, request, resolutionResult, err );
return err;
}
private pointcut call_eme_ctor_RealmManagementException( RealmManagementException cause ):
call( ExtensionManagerException.new( .., RealmManagementException ) )
&& args( .., cause );
private pointcut within_dem_addExtension():
withincode( void DefaultExtensionManager.addExtension( Artifact, Artifact, List, MavenExecutionRequest, .. ) );
before( Artifact extensionArtifact, Artifact projectArtifact, List remoteRepos, MavenExecutionRequest request, RealmManagementException cause ):
cflow( dem_addExtension( extensionArtifact, projectArtifact, remoteRepos, request ) )
&& within_dem_addExtension()
&& call_eme_ctor_RealmManagementException( cause )
{
getReporter().reportErrorManagingRealmForExtension( extensionArtifact, projectArtifact, remoteRepos, request, cause );
}
private pointcut dem_addPluginAsExtension( Plugin plugin, Model originModel, List remoteRepos, MavenExecutionRequest request ):
execution( * DefaultExtensionManager.addPluginAsExtension( Plugin, Model, List, MavenExecutionRequest ) )
&& args( plugin, originModel, remoteRepos, request );
private pointcut within_dem_addPluginAsExtension():
withincode( void DefaultExtensionManager.addPluginAsExtension( Plugin, Model, List, MavenExecutionRequest, .. ) );
before( Plugin plugin, Model originModel, List remoteRepos, MavenExecutionRequest request, RealmManagementException cause ):
cflow( dem_addPluginAsExtension( plugin, originModel, remoteRepos, request ) )
&& within_dem_addPluginAsExtension()
&& call_eme_ctor_RealmManagementException( cause )
{
getReporter().reportErrorConfiguringExtensionPluginRealm( plugin, originModel, remoteRepos, request, cause );
}
before( Plugin plugin, Model originModel, List remoteRepos, MavenExecutionRequest request, ArtifactNotFoundException cause ):
cflow( dem_addPluginAsExtension( plugin, originModel, remoteRepos, request ) )
&& within_dem_addPluginAsExtension()
&& call( ExtensionManagerException.new( .., ArtifactNotFoundException ) )
&& args( .., cause )
{
getReporter().reportExtensionPluginArtifactNotFound( plugin, originModel, remoteRepos, request, cause );
}
before( Plugin plugin, Model originModel, List remoteRepos, MavenExecutionRequest request, ArtifactResolutionException cause ):
cflow( dem_addPluginAsExtension( plugin, originModel, remoteRepos, request ) )
&& within_dem_addPluginAsExtension()
&& call( ExtensionManagerException.new( .., ArtifactResolutionException ) )
&& args( .., cause )
{
getReporter().reportUnresolvableArtifactWhileAddingExtensionPlugin( plugin, originModel, remoteRepos, request, cause );
}
before( Plugin plugin, Model originModel, List remoteRepos, MavenExecutionRequest request, PluginNotFoundException cause ):
cflow( dem_addPluginAsExtension( plugin, originModel, remoteRepos, request ) )
&& within_dem_addPluginAsExtension()
&& call( ExtensionManagerException.new( .., PluginNotFoundException ) )
&& args( .., cause )
{
getReporter().reportExtensionPluginArtifactNotFound( plugin, originModel, remoteRepos, request, cause );
}
private pointcut within_dpvm_resolveMetaVersion():
withincode( * DefaultPluginVersionManager.resolveMetaVersion( .. ) );
after( Plugin plugin, Model originModel, List remoteRepos, MavenExecutionRequest request ) throwing ( ArtifactMetadataRetrievalException cause ):
cflow( dem_addPluginAsExtension( plugin, originModel, remoteRepos, request ) )
&& cflow( execution( * PluginManager+.verifyPlugin( .. ) ) )
&& within_dpvm_resolveMetaVersion()
&& call( * ArtifactMetadataSource+.retrieve( .. ) )
{
getReporter().reportUnresolvableExtensionPluginPOM( plugin, originModel, remoteRepos, request, cause );
}
String requiredVersion = null;
String currentVersion = null;
after() returning( String mavenVersion ):
( withincode( * DefaultPluginManager.checkRequiredMavenVersion( .. ) )
|| within_dpvm_resolveMetaVersion() )
&& call( * Prerequisites.getMaven() )
{
requiredVersion = mavenVersion;
}
after() returning( ArtifactVersion mavenVersion ):
( withincode( * DefaultPluginManager.checkRequiredMavenVersion( .. ) )
|| within_dpvm_resolveMetaVersion() )
&& call( * RuntimeInformation+.getApplicationVersion() )
{
currentVersion = mavenVersion.toString();
}
after():
execution( * DefaultPluginManager.verifyVersionedPlugin( .. ) )
{
requiredVersion = null;
currentVersion = null;
}
after():
execution( * DefaultPluginVersionManager.resolveMetaVersion( .. ) )
{
requiredVersion = null;
currentVersion = null;
}
after( Plugin plugin, Model originModel, List remoteRepos, MavenExecutionRequest request ) throwing ( InvalidVersionSpecificationException cause ):
cflow( dem_addPluginAsExtension( plugin, originModel, remoteRepos, request ) )
&& cflow( execution( * PluginManager+.verifyPlugin( .. ) ) )
&& withincode( * DefaultPluginVersionManager.resolveMetaVersion( .. ) )
&& call( VersionRange VersionRange.createFromVersionSpec( .. ) )
{
getReporter().reportErrorSearchingforCompatibleExtensionPluginVersion( plugin, originModel, remoteRepos, request, requiredVersion, currentVersion, cause );
}
after( Plugin plugin, Model originModel, List remoteRepos, MavenExecutionRequest request ) throwing ( ArtifactMetadataRetrievalException cause ):
cflow( dem_addPluginAsExtension( plugin, originModel, remoteRepos, request ) )
&& cflow( execution( * PluginManager+.verifyPlugin( .. ) ) )
&& withincode( * DefaultPluginVersionManager.resolveMetaVersion( .. ) )
&& call( * ArtifactMetadataSource+.retrieveAvailableVersions( .. ) )
{
getReporter().reportErrorSearchingforCompatibleExtensionPluginVersion( plugin, originModel, remoteRepos, request, requiredVersion, currentVersion, cause );
}
private pointcut dpm_verifyVersionedPlugin( Plugin plugin ):
execution( * DefaultPluginManager.verifyVersionedPlugin( Plugin, .. ) )
&& args( plugin, .. );
after( Plugin plugin, Model originModel, List remoteRepos, MavenExecutionRequest request ) throwing ( PluginVersionResolutionException err ):
cflow( dem_addPluginAsExtension( Plugin, originModel, remoteRepos, request ) )
&& cflow( execution( * PluginManager+.verifyPlugin( .. ) ) )
&& cflow( dpm_verifyVersionedPlugin( plugin ) )
&& call( void PluginManagerSupport+.checkRequiredMavenVersion( .. ) )
{
getReporter().reportIncompatibleMavenVersionForExtensionPlugin( plugin, originModel, remoteRepos, request, requiredVersion, currentVersion, err );
}
before( Plugin plugin, Model originModel, List remoteRepos, MavenExecutionRequest request, InvalidDependencyVersionException cause ):
cflow( dem_addPluginAsExtension( Plugin, originModel, remoteRepos, request ) )
&& cflow( execution( * PluginManager+.verifyPlugin( .. ) ) )
&& cflow( dpm_verifyVersionedPlugin( plugin ) )
&& call( InvalidPluginException.new( .., InvalidDependencyVersionException ) )
&& args( .., cause )
{
getReporter().reportInvalidDependencyVersionInExtensionPluginPOM( plugin, originModel, remoteRepos, request, cause );
}
before( Plugin plugin, Model originModel, List remoteRepos, MavenExecutionRequest request, PluginVersionNotFoundException cause ):
cflow( dem_addPluginAsExtension( plugin, originModel, remoteRepos, request ) )
&& within_dem_addPluginAsExtension()
&& call( ExtensionManagerException.new( .., PluginVersionNotFoundException ) )
&& args( .., cause )
{
getReporter().reportExtensionPluginVersionNotFound( plugin, originModel, remoteRepos, request, cause );
}
before( Plugin plugin, Model originModel, List remoteRepos, MavenExecutionRequest request, PluginManagerException cause ):
cflow( dem_addPluginAsExtension( plugin, originModel, remoteRepos, request ) )
&& within_dem_addPluginAsExtension()
&& call( ExtensionManagerException.new( .., PluginManagerException+ ) )
&& args( .., cause )
{
getReporter().reportErrorConfiguringExtensionPluginRealm( plugin, originModel, remoteRepos, request, cause );
}
}

View File

@@ -1,322 +0,0 @@
package org.apache.maven.errors;
import org.codehaus.plexus.logging.console.ConsoleLogger;
import org.codehaus.plexus.logging.Logger;
import org.codehaus.plexus.util.introspection.ReflectionValueExtractor;
import org.apache.maven.project.path.PathTranslator;
import org.apache.maven.plugin.MojoExecution;
import org.apache.maven.artifact.resolver.ArtifactResolutionException;
import org.apache.maven.artifact.resolver.MultipleArtifactsNotFoundException;
import org.apache.maven.plugin.PluginConfigurationException;
import org.apache.maven.plugin.descriptor.Parameter;
import org.apache.maven.plugin.loader.PluginLoaderException;
import org.apache.maven.plugin.loader.PluginLoader;
import org.apache.maven.plugin.PluginExecutionException;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.InvalidPluginException;
import org.apache.maven.artifact.resolver.ArtifactNotFoundException;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.lifecycle.plan.BuildPlanner;
import org.apache.maven.lifecycle.plan.BuildPlan;
import org.apache.maven.lifecycle.model.MojoBinding;
import org.apache.maven.lifecycle.statemgmt.StateManagementUtils;
import org.apache.maven.lifecycle.DefaultLifecycleExecutor;
import org.apache.maven.lifecycle.LifecycleExecutor;
import org.apache.maven.lifecycle.LifecycleLoaderException;
import org.apache.maven.lifecycle.LifecycleSpecificationException;
import org.apache.maven.lifecycle.plan.LifecyclePlannerException;
import org.apache.maven.project.DuplicateArtifactAttachmentException;
import org.apache.maven.project.MavenProject;
import org.apache.maven.plugin.DefaultPluginManager;
import org.apache.maven.plugin.PluginManager;
import org.apache.maven.plugin.PluginParameterExpressionEvaluator;
import org.apache.maven.plugin.PluginParameterException;
import org.apache.maven.plugin.Mojo;
import org.codehaus.plexus.component.configurator.expression.ExpressionEvaluationException;
import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
import org.codehaus.plexus.configuration.PlexusConfiguration;
import org.codehaus.plexus.PlexusContainer;
import java.util.List;
public privileged aspect LifecycleErrorReporterAspect
extends AbstractCoreReporterAspect
{
private pointcut le_executeGoalAndHandleFailures( MojoBinding binding ):
execution( void DefaultLifecycleExecutor.executeGoalAndHandleFailures( MojoBinding, .. ) )
&& args( binding, .. );
private pointcut le_executeGoalAndHandleFailures_withSession( MojoBinding binding,
MavenSession session ):
execution( void DefaultLifecycleExecutor.executeGoalAndHandleFailures( MojoBinding, MavenSession, .. ) )
&& args( binding, session, .. );
private pointcut pm_executeMojo( MavenProject project ):
execution( void PluginManager+.executeMojo( MavenProject, .. ) )
&& args( project, .. );
private pointcut within_pm_executeMojo( MavenProject project ):
withincode( void PluginManager+.executeMojo( MavenProject, .. ) )
&& args( project, .. );
after( MojoBinding binding,
MavenProject project) throwing ( PluginLoaderException cause ):
( cflow( le_executeGoalAndHandleFailures( MojoBinding ) )
|| cflow( execution( * LifecycleExecutor+.isTaskValid( .. ) ) ) )
&& execution( * PluginLoader+.loadPlugin( MojoBinding, MavenProject, .. ) )
&& args( binding, project, .. )
{
getReporter().reportErrorLoadingPlugin( binding, project, cause );
}
after( String task,
MavenSession session,
MavenProject project) throwing ( InvalidPluginException cause ):
execution( private * DefaultLifecycleExecutor.getMojoDescriptorForDirectInvocation( String, MavenSession, MavenProject ) )
&& args( task, session, project )
{
getReporter().reportInvalidPluginForDirectInvocation( task, session, project, cause );
}
after( MojoBinding binding,
MavenProject project) throwing ( DuplicateArtifactAttachmentException cause ):
cflow( le_executeGoalAndHandleFailures( binding ) )
&& cflow( pm_executeMojo( project ) )
&& call( void Mojo+.execute() )
{
getReporter().reportDuplicateAttachmentException( binding, project, cause );
}
after( MojoBinding binding,
MavenProject project) throwing ( MojoExecutionException cause ):
cflow( le_executeGoalAndHandleFailures( binding ) )
&& cflow( pm_executeMojo( project ) )
&& call( void Mojo+.execute() )
{
// this will be covered by the reportErrorLoadingPlugin(..) method.
if ( !StateManagementUtils.RESOLVE_LATE_BOUND_PLUGIN_GOAL.equals( binding.getGoal() ) )
{
getReporter().reportMojoExecutionException( binding, project, cause );
}
}
PluginExecutionException around( MojoBinding binding,
MavenProject project ):
cflow( le_executeGoalAndHandleFailures( binding ) )
&& cflow( pm_executeMojo( project ) )
&& call( PluginExecutionException.new( .., String ) )
{
PluginExecutionException cause = proceed( binding, project );
getReporter().reportInvalidPluginExecutionEnvironment( binding, project, cause );
return cause;
}
after( MojoBinding binding,
MavenProject project) throwing ( ComponentLookupException cause ):
cflow( le_executeGoalAndHandleFailures( binding ) )
&& cflow( pm_executeMojo( project ) )
&& withincode( Mojo DefaultPluginManager.getConfiguredMojo( .. ) )
&& call( Object PlexusContainer+.lookup( .. ) )
{
getReporter().reportMojoLookupError( binding, project, cause );
}
Parameter currentParameter;
private pointcut paramGetName( Parameter parameter ):
call( String Parameter.getName() )
&& target( parameter );
private pointcut pm_validatePomConfig():
execution( void DefaultPluginManager.validatePomConfiguration( .. ) );
private pointcut within_pm_validatePomConfig():
withincode( void DefaultPluginManager.validatePomConfiguration( .. ) );
private pointcut pm_checkRequiredParameters():
execution( void DefaultPluginManager.checkRequiredParameters( .. ) );
private pointcut within_pm_checkRequiredParameters():
withincode( void DefaultPluginManager.checkRequiredParameters( .. ) );
before( Parameter parameter ):
( within_pm_validatePomConfig()
|| within_pm_checkRequiredParameters() )
&& paramGetName( parameter )
{
currentParameter = parameter;
}
after() returning:
pm_validatePomConfig() ||
pm_checkRequiredParameters()
{
currentParameter = null;
}
private pointcut pm_executeMojoWithSessionAndExec( MavenProject project,
MojoExecution exec,
MavenSession session,
DefaultPluginManager manager ):
execution( void DefaultPluginManager.executeMojo( MavenProject, MojoExecution, MavenSession ) )
&& args( project, exec, session )
&& this( manager );
after( MojoBinding binding,
MavenProject project,
MojoExecution exec,
MavenSession session,
DefaultPluginManager manager) throwing( PluginConfigurationException cause ):
cflow( le_executeGoalAndHandleFailures( binding ) )
&& cflow( pm_executeMojoWithSessionAndExec( project, exec, session, manager ) )
&& pm_validatePomConfig()
{
PathTranslator translator = manager.pathTranslator;
Logger logger = new ConsoleLogger( Logger.LEVEL_INFO, "error reporting" );
getReporter().reportAttemptToOverrideUneditableMojoParameter( currentParameter,
binding,
project,
session,
exec,
translator,
logger,
cause );
}
PluginParameterException around( MojoBinding binding,
MavenProject project,
List invalidParameters ):
cflow( le_executeGoalAndHandleFailures( binding ) )
&& cflow( pm_executeMojo( project ) )
&& cflow( pm_checkRequiredParameters() )
&& call( PluginParameterException.new( .., List ) )
&& args( .., invalidParameters )
{
PluginParameterException err = proceed( binding, project, invalidParameters );
getReporter().reportMissingRequiredMojoParameter( binding, project, invalidParameters, err );
return err;
}
private pointcut ppee_evaluate( String expression ):
execution( Object PluginParameterExpressionEvaluator.evaluate( String ) )
&& args( expression );
private pointcut within_ppee_evaluate( String expression ):
withincode( Object PluginParameterExpressionEvaluator.evaluate( String ) )
&& args( expression );
before( MojoBinding binding,
MavenProject project,
String expression,
ExpressionEvaluationException err ):
cflow( le_executeGoalAndHandleFailures( binding ) )
&& cflow( pm_executeMojo( project ) )
&& cflow( pm_checkRequiredParameters() )
&& cflow( ppee_evaluate( expression ) )
&& execution( ExpressionEvaluationException.new( String ) )
&& this( err )
{
getReporter().reportUseOfBannedMojoParameter( currentParameter,
binding,
project,
expression,
(String) PluginParameterExpressionEvaluator.BANNED_EXPRESSIONS.get( expression ),
err );
}
after( MojoBinding binding,
MavenProject project,
String expression) throwing ( Exception cause ):
cflow( le_executeGoalAndHandleFailures( binding ) )
&& cflow( pm_executeMojo( project ) )
&& cflow( pm_checkRequiredParameters() )
&& cflow( ppee_evaluate( expression ) )
&& within( PluginParameterExpressionEvaluator )
&& call( Object ReflectionValueExtractor.evaluate( String, Object ) )
{
getReporter().reportReflectionErrorWhileEvaluatingMojoParameter( currentParameter,
binding,
project,
expression,
cause );
}
after( MojoBinding binding,
MavenProject project,
PlexusConfiguration config) throwing( PluginConfigurationException cause ):
cflow( le_executeGoalAndHandleFailures( binding ) )
&& cflow( pm_executeMojo( project ) )
&& execution( void DefaultPluginManager.populatePluginFields( *, *, PlexusConfiguration, .. ) )
&& args( *, *, config, .. )
{
getReporter().reportErrorApplyingMojoConfiguration( binding, project, config, cause );
}
private pointcut pm_resolveTransitiveDependencies( MavenProject project,
String scope ):
execution( void DefaultPluginManager.resolveTransitiveDependencies( *, *, String, *, MavenProject, * ) )
&& args( *, *, scope, *, project, * );
after( MavenProject project,
String scope) throwing( ArtifactNotFoundException cause ):
pm_resolveTransitiveDependencies( project, scope )
{
getReporter().reportProjectDependenciesNotFound( project, scope, cause );
}
after( MavenProject project,
String scope) throwing( ArtifactResolutionException cause ):
pm_resolveTransitiveDependencies( project, scope )
{
if ( cause instanceof MultipleArtifactsNotFoundException )
{
getReporter().reportProjectDependenciesNotFound( project,
scope,
(MultipleArtifactsNotFoundException) cause );
}
else
{
getReporter().reportProjectDependenciesUnresolvable( project, scope, cause );
}
}
private pointcut le_getLifecycleBindings( List tasks,
MavenProject configuringProject,
String targetDescription ):
execution( List DefaultLifecycleExecutor.getLifecycleBindings( List, MavenProject, *, String ) )
&& args( tasks, configuringProject, *, targetDescription );
BuildPlan around( List tasks,
MavenProject project,
MavenSession session )
throws LifecycleLoaderException, LifecycleSpecificationException, LifecyclePlannerException:
cflow( execution( * DefaultLifecycleExecutor.*( .. ) ) )
&& execution( BuildPlan BuildPlanner+.constructBuildPlan( List, MavenProject, MavenSession, * ) )
&& args( tasks, project, session, * )
{
try
{
return proceed( tasks, project, session );
}
catch ( LifecycleLoaderException cause )
{
getReporter().reportErrorFormulatingBuildPlan( tasks, project, session, cause );
throw cause;
}
catch ( LifecyclePlannerException cause )
{
getReporter().reportErrorFormulatingBuildPlan( tasks, project, session, cause );
throw cause;
}
catch ( LifecycleSpecificationException cause )
{
getReporter().reportErrorFormulatingBuildPlan( tasks, project, session, cause );
throw cause;
}
}
}

View File

@@ -1,75 +0,0 @@
package org.apache.maven.errors;
import org.apache.maven.reactor.MavenExecutionException;
import org.apache.maven.reactor.MissingModuleException;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.versioning.ArtifactVersion;
import org.apache.maven.execution.MavenExecutionRequest;
import org.apache.maven.execution.RuntimeInformation;
import org.apache.maven.profiles.ProfileManager;
import org.apache.maven.DefaultMaven;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectBuilder;
import org.apache.maven.project.ProjectBuildingException;
import java.io.File;
import java.util.List;
public aspect MavenExecErrorReporterAspect
extends AbstractCoreReporterAspect
{
private pointcut dm_getProjects( MavenExecutionRequest request ):
execution( List DefaultMaven.getProjects( MavenExecutionRequest ) )
&& args( request );
private pointcut dm_collectProjects( MavenExecutionRequest request ):
execution( List DefaultMaven.collectProjects( List, MavenExecutionRequest, boolean ) )
&& args( *, request, * );
private MavenProject currentProject;
private ArtifactVersion mavenVersion;
before( RuntimeInformation ri ):
call( * RuntimeInformation+.getApplicationVersion() )
&& within( DefaultMaven )
&& target( ri )
{
mavenVersion = ri.getApplicationVersion();
}
MavenProject around()
throws ProjectBuildingException:
cflow( dm_collectProjects( MavenExecutionRequest ) )
&& within( DefaultMaven )
&& call( MavenProject MavenProjectBuilder+.build( .. ) )
{
currentProject = proceed();
return currentProject;
}
MavenExecutionException around():
cflow( dm_getProjects( MavenExecutionRequest ) )
&& cflow( dm_collectProjects( MavenExecutionRequest ) )
&& call( MavenExecutionException.new( String, File ) )
{
MavenExecutionException err = proceed();
getReporter().reportInvalidMavenVersion( currentProject, mavenVersion, err );
return err;
}
after( MissingModuleException err ):
execution( MissingModuleException.new( String, File, File ) )
&& this( err )
{
getReporter().reportMissingModulePom( err );
}
after(): dm_collectProjects( MavenExecutionRequest )
{
currentProject = null;
}
}

View File

@@ -78,29 +78,6 @@
<filtering>true</filtering>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>aspectj-maven-plugin</artifactId>
<executions>
<execution>
<id>compile-aspects</id>
<phase>compile</phase>
<goals>
<goal>compile</goal>
</goals>
<configuration>
<aspectLibraries>
<aspectLibrary>
<groupId>org.apache.maven</groupId>
<artifactId>maven-core</artifactId>
</aspectLibrary>
</aspectLibraries>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<profiles>
<profile>

View File

@@ -1,36 +0,0 @@
package org.apache.maven.embedder.cache;
import org.apache.maven.execution.MavenExecutionRequest;
import org.apache.maven.embedder.MavenEmbedder;
public privileged aspect CacheCleanerAspect
{
private pointcut requestAsLastParam( MavenExecutionRequest request, MavenEmbedder embedder ):
execution( * MavenEmbedder.*( .., MavenExecutionRequest ) )
&& args( .., request )
&& this( embedder );
private pointcut requestAsOnlyParam( MavenExecutionRequest request, MavenEmbedder embedder ):
execution( * MavenEmbedder.*( MavenExecutionRequest ) )
&& args( request )
&& this( embedder );
after( MavenExecutionRequest request, MavenEmbedder embedder ): requestAsLastParam( request, embedder )
{
cleanup( request, embedder );
}
after( MavenExecutionRequest request, MavenEmbedder embedder ): requestAsOnlyParam( request, embedder )
{
cleanup( request, embedder );
}
private void cleanup( MavenExecutionRequest request, MavenEmbedder embedder )
{
// TODO: Add this to the eventing-control mechanism that the workspace uses now,
// once we can accommodate context in the event method calls.
request.clearAccumulatedBuildState();
}
}

View File

@@ -1,61 +0,0 @@
package org.apache.maven.embedder.event;
import org.apache.maven.monitor.event.MavenEvents;
import org.apache.maven.embedder.MavenEmbedder;
import org.apache.maven.execution.MavenExecutionRequest;
public privileged aspect EmbedderEventDispatcherAspect
{
after( MavenEmbedder embedder ): execution( * MavenEmbedder.start( .. ) ) && this( embedder )
{
if ( embedder.dispatcher != null )
{
embedder.dispatcher.dispatchStart( MavenEvents.EMBEDDER_LIFECYCLE, "" );
}
}
before( MavenEmbedder embedder ): execution( * MavenEmbedder.stop( .. ) ) && this( embedder )
{
if ( embedder.dispatcher != null )
{
embedder.dispatcher.dispatchEnd( MavenEvents.EMBEDDER_LIFECYCLE, "" );
}
}
private pointcut eventedMethods( MavenEmbedder embedder ):
(
execution( * MavenEmbedder.*( .., MavenExecutionRequest ) )
|| execution( * MavenEmbedder.*( MavenExecutionRequest ) )
|| execution( * MavenEmbedder.*( MavenExecutionRequest, .. ) )
|| execution( * MavenEmbedder.resolve( .. ) )
|| execution( * MavenEmbedder.readProject( .. ) )
|| execution( * MavenEmbedder.verifyPlugin( .. ) )
)
&& this( embedder );
before( MavenEmbedder embedder ):
eventedMethods( embedder )
&& !cflowbelow( eventedMethods( MavenEmbedder ) )
{
if ( embedder.dispatcher != null )
{
String target = thisJoinPointStaticPart.getSignature().getName();
embedder.dispatcher.dispatchStart( MavenEvents.EMBEDDER_METHOD, target );
}
}
after( MavenEmbedder embedder ):
eventedMethods( embedder )
&& !cflowbelow( eventedMethods( MavenEmbedder ) )
{
if ( embedder.dispatcher != null )
{
String target = thisJoinPointStaticPart.getSignature().getName();
embedder.dispatcher.dispatchEnd( MavenEvents.EMBEDDER_METHOD, target );
}
}
}

View File

@@ -184,13 +184,6 @@ public class DefaultMavenProjectBuilder
project.setFile( projectDescriptor );
project = buildInternal( project.getModel(), config, projectDescriptor, project.getParentFile(), true );
Build build = project.getBuild();
// NOTE: setting this script-source root before path translation, because
// the plugin tools compose basedir and scriptSourceRoot into a single file.
project.addScriptSourceRoot( build.getScriptSourceDirectory() );
project.addCompileSourceRoot( build.getSourceDirectory() );
project.addTestCompileSourceRoot( build.getTestSourceDirectory() );
project.setFile( projectDescriptor );
setBuildOutputDirectoryOnParent( project );
@@ -479,19 +472,8 @@ public class DefaultMavenProjectBuilder
projectDir = pomFile.getAbsoluteFile().getParentFile();
}
Build dynamicBuild = model.getBuild();
if ( dynamicBuild != null )
{
model.setBuild( ModelUtils.cloneBuild( dynamicBuild ) );
}
model = modelInterpolator.interpolate( model, projectDir, config, getLogger().isDebugEnabled() );
if ( dynamicBuild != null && model.getBuild() != null )
{
mergeDeterministicBuildElements( model.getBuild(), dynamicBuild );
model.setBuild( dynamicBuild );
}
// We will return a different project object using the new model (hence the need to return a project, not just modify the parameter)
MavenProject project = new MavenProject( model, artifactFactory, mavenTools, repositoryHelper, this, config );
@@ -504,59 +486,6 @@ public class DefaultMavenProjectBuilder
return project;
}
// TODO: Remove this!
@SuppressWarnings("unchecked")
private void mergeDeterministicBuildElements( Build interpolatedBuild, Build dynamicBuild )
{
List<Plugin> dPlugins = dynamicBuild.getPlugins();
if ( dPlugins != null )
{
List<Plugin> iPlugins = interpolatedBuild.getPlugins();
for ( int i = 0; i < dPlugins.size(); i++ )
{
Plugin dPlugin = dPlugins.get( i );
Plugin iPlugin = iPlugins.get( i );
dPlugin.setGroupId( iPlugin.getGroupId() );
dPlugin.setArtifactId( iPlugin.getArtifactId() );
dPlugin.setVersion( iPlugin.getVersion() );
dPlugin.setDependencies( iPlugin.getDependencies() );
}
}
PluginManagement dPluginMgmt = dynamicBuild.getPluginManagement();
if ( dPluginMgmt != null )
{
PluginManagement iPluginMgmt = interpolatedBuild.getPluginManagement();
dPlugins = dPluginMgmt.getPlugins();
if ( dPlugins != null )
{
List<Plugin> iPlugins = iPluginMgmt.getPlugins();
for ( int i = 0; i < dPlugins.size(); i++ )
{
Plugin dPlugin = dPlugins.get( i );
Plugin iPlugin = iPlugins.get( i );
dPlugin.setGroupId( iPlugin.getGroupId() );
dPlugin.setArtifactId( iPlugin.getArtifactId() );
dPlugin.setVersion( iPlugin.getVersion() );
dPlugin.setDependencies( iPlugin.getDependencies() );
}
}
}
if ( dynamicBuild.getExtensions() != null )
{
dynamicBuild.setExtensions( interpolatedBuild.getExtensions() );
}
}
private MavenProject getSuperProject( ProjectBuilderConfiguration config, File projectDescriptor,
boolean isReactorProject )
throws ProjectBuildingException

View File

@@ -1,589 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.maven.project;
import org.apache.maven.model.Build;
import org.apache.maven.model.Plugin;
import org.apache.maven.model.Resource;
import org.apache.maven.project.interpolation.ModelInterpolationException;
import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Iterator;
import java.util.List;
public class MavenProjectDynamismTest
extends PlexusTestCase
{
private MavenProjectBuilder projectBuilder;
@Override
public void setUp()
throws Exception
{
super.setUp();
projectBuilder = (MavenProjectBuilder) lookup( MavenProjectBuilder.class.getName() );
}
public void testBuildSectionGroupIdInterpolation()
throws IOException, XmlPullParserException, URISyntaxException, ProjectBuildingException,
ModelInterpolationException
{
MavenProject project = buildProject( "pom-interp.xml" );
projectBuilder.calculateConcreteState( project, new DefaultProjectBuilderConfiguration() );
String basepath = "/" + project.getGroupId();
Build build = project.getBuild();
assertTrue( build.getSourceDirectory().startsWith( basepath ) );
assertTrue( build.getTestSourceDirectory().startsWith( basepath ) );
assertTrue( build.getScriptSourceDirectory().startsWith( basepath ) );
List plugins = build.getPlugins();
assertNotNull( plugins );
assertEquals( 1, plugins.size() );
Plugin plugin = (Plugin) plugins.get( 0 );
assertEquals( "my-plugin", plugin.getArtifactId() );
Xpp3Dom conf = (Xpp3Dom) plugin.getConfiguration();
assertNotNull( conf );
Xpp3Dom[] children = conf.getChildren();
assertEquals( 3, children.length );
for ( int i = 0; i < children.length; i++ )
{
assertEquals( "Configuration parameter: "
+ children[i].getName()
+ " should have a an interpolated POM groupId as its value.",
children[i].getValue(),
project.getGroupId() );
}
projectBuilder.restoreDynamicState( project, new DefaultProjectBuilderConfiguration() );
String projectGidExpr = "${project.groupId}";
String pomGidExpr = "${pom.groupId}";
String nakedGidExpr = "${groupId}";
build = project.getBuild();
assertTrue( build.getSourceDirectory().startsWith( "/" + projectGidExpr ) );
assertTrue( build.getTestSourceDirectory().startsWith( "/" + pomGidExpr ) );
assertTrue( build.getScriptSourceDirectory().startsWith( "/" + nakedGidExpr ) );
plugins = build.getPlugins();
assertNotNull( plugins );
assertEquals( 1, plugins.size() );
plugin = (Plugin) plugins.get( 0 );
assertEquals( "my-plugin", plugin.getArtifactId() );
conf = (Xpp3Dom) plugin.getConfiguration();
assertNotNull( conf );
children = conf.getChildren();
assertEquals( 3, children.length );
assertEquals( "Configuration parameter: " + children[0].getName() + " should have "
+ projectGidExpr + " as its value.", children[0].getValue(), projectGidExpr );
assertEquals( "Configuration parameter: " + children[1].getName() + " should have "
+ pomGidExpr + " as its value.", children[1].getValue(), pomGidExpr );
assertEquals( "Configuration parameter: " + children[2].getName() + " should have "
+ nakedGidExpr + " as its value.", children[2].getValue(), nakedGidExpr );
}
public void testRoundTrip()
throws IOException, XmlPullParserException, URISyntaxException,
ModelInterpolationException, ProjectBuildingException
{
MavenProject project = buildProject( "pom.xml" );
ProjectBuilderConfiguration config = new DefaultProjectBuilderConfiguration();
projectBuilder.calculateConcreteState( project, config );
File baseDir = project.getBasedir();
File buildDir = new File( baseDir, "target" );
String basedirExpr = "${pom.basedir}";
String buildDirExpr = "${pom.build.directory}";
assertTrue( project.isConcrete() );
Build build = project.getBuild();
assertEquals( "Concrete source directory should be absolute.",
new File( baseDir, "/src/main/java" ).getAbsolutePath(),
new File( build.getSourceDirectory() ).getAbsolutePath() );
assertEquals( "Concrete test-source directory should be absolute.",
new File( baseDir, "/src/test/java" ).getAbsolutePath(),
new File( build.getTestSourceDirectory() ).getAbsolutePath() );
assertEquals( "Concrete script-source directory should be absolute.",
new File( baseDir, "/src/main/scripts" ).getAbsolutePath(),
new File( build.getScriptSourceDirectory() ).getAbsolutePath() );
List compileSourceRoots = project.getCompileSourceRoots();
assertNotNull( "Concrete compile-source roots should not be null.", compileSourceRoots );
assertEquals( "Concrete compile-source roots should contain one entry.",
1,
compileSourceRoots.size() );
assertEquals( "Concrete compile-source roots should contain interpolated source-directory value.",
new File( baseDir, "/src/main/java" ).getAbsolutePath(),
new File( (String) compileSourceRoots.get( 0 ) ).getAbsolutePath() );
List testCompileSourceRoots = project.getTestCompileSourceRoots();
assertNotNull( "Concrete test-compile-source roots should not be null.",
testCompileSourceRoots );
assertEquals( "Concrete test-compile-source roots should contain one entry.",
1,
testCompileSourceRoots.size() );
assertEquals( "Concrete test-compile-source roots should contain interpolated test-source-directory value.",
new File( baseDir, "/src/test/java" ).getAbsolutePath(),
new File( (String) testCompileSourceRoots.get( 0 ) ).getAbsolutePath() );
List scriptSourceRoots = project.getScriptSourceRoots();
assertNotNull( "Concrete script-source roots should not be null.", scriptSourceRoots );
assertEquals( "Concrete script-source roots should contain one entry.",
1,
scriptSourceRoots.size() );
assertEquals( "Concrete script-source roots should contain interpolated script-source-directory value.",
new File( baseDir, "/src/main/scripts" ).getAbsolutePath(),
new File( (String) scriptSourceRoots.get( 0 ) ).getAbsolutePath() );
List resources = build.getResources();
assertNotNull( "Concrete resources should not be null.", resources );
assertEquals( "Concrete resources should contain one entry.", 1, resources.size() );
assertEquals( "Concrete resource should contain absolute path.",
new File( buildDir, "generated-resources/plexus" ).getAbsolutePath(),
new File( ( (Resource) resources.get( 0 ) ).getDirectory() ).getAbsolutePath() );
List filters = build.getFilters();
assertNotNull( "Concrete filters should not be null.", filters );
assertEquals( "Concrete filters should contain one entry.", 1, filters.size() );
assertEquals( "Concrete filter entry should contain absolute path.",
new File( buildDir, "/generated-filters.properties" ).getAbsolutePath(),
new File( (String) filters.get( 0 ) ).getAbsolutePath() );
assertEquals( "Concrete output-directory should be absolute.",
new File( buildDir, "/classes" ).getAbsolutePath(),
new File( build.getOutputDirectory() ).getAbsolutePath() );
assertEquals( "Concrete test-output-directory should be absolute.",
new File( buildDir, "/test-classes" ).getAbsolutePath(),
new File( build.getTestOutputDirectory() ).getAbsolutePath() );
assertEquals( "Concrete build directory should be absolute.",
new File( baseDir, "target" ).getAbsolutePath(),
new File( build.getDirectory() ).getAbsolutePath() );
// --------------------------------------------------------------------
// NOW, RESTORE THE DYNAMIC STATE FOR THE BUILD SECTION AND
// ASSOCIATED DIRECTORIES ATTACHED TO THE PROJECT INSTANCE.
// --------------------------------------------------------------------
projectBuilder.restoreDynamicState( project, config );
assertFalse( project.isConcrete() );
build = project.getBuild();
assertEquals( "Restored source directory should be expressed in terms of the basedir.",
basedirExpr + "/src/main/java",
build.getSourceDirectory() );
assertEquals( "Restored test-source directory should be expressed in terms of the basedir.",
basedirExpr + "/src/test/java",
build.getTestSourceDirectory() );
assertEquals( "Restored script-source directory should be expressed in terms of the basedir.",
basedirExpr + "/src/main/scripts",
build.getScriptSourceDirectory() );
compileSourceRoots = project.getCompileSourceRoots();
assertNotNull( "Restored compile-source roots should not be null.", compileSourceRoots );
assertEquals( "Restored compile-source roots should contain one entry.",
1,
compileSourceRoots.size() );
assertEquals( "Restored compile-source roots should contain uninterpolated source-directory value.",
"${pom.basedir}/src/main/java",
compileSourceRoots.get( 0 ) );
testCompileSourceRoots = project.getTestCompileSourceRoots();
assertNotNull( "Restored test-compile-source roots should not be null.",
testCompileSourceRoots );
assertEquals( "Restored test-compile-source roots should contain one entry.",
1,
testCompileSourceRoots.size() );
assertEquals( "Restored test-compile-source roots should contain uninterpolated test-source-directory value.",
"${pom.basedir}/src/test/java",
testCompileSourceRoots.get( 0 ) );
scriptSourceRoots = project.getScriptSourceRoots();
assertNotNull( "Restored script-source roots should not be null.", scriptSourceRoots );
assertEquals( "Restored script-source roots should contain one entry.",
1,
scriptSourceRoots.size() );
assertEquals( "Restored script-source roots should contain uninterpolated script-source-directory value.",
"${pom.basedir}/src/main/scripts",
scriptSourceRoots.get( 0 ) );
resources = build.getResources();
assertNotNull( "Restored resources should not be null.", resources );
assertEquals( "Restored resources should contain one entry.", 1, resources.size() );
assertEquals( "Restored resource should contain uninterpolated reference to build directory.",
buildDirExpr + "/generated-resources/plexus",
( (Resource) resources.get( 0 ) ).getDirectory() );
filters = build.getFilters();
assertNotNull( "Restored filters should not be null.", filters );
assertEquals( "Restored filters should contain one entry.", 1, filters.size() );
assertEquals( "Restored filter entry should contain uninterpolated reference to build directory.",
buildDirExpr + "/generated-filters.properties",
filters.get( 0 ) );
assertEquals( "Restored output-directory should be expressed in terms of the build-directory.",
buildDirExpr + "/classes",
build.getOutputDirectory() );
assertEquals( "Restored test-output-directory should be expressed in terms of the build-directory.",
buildDirExpr + "/test-classes",
build.getTestOutputDirectory() );
assertEquals( "Restored build directory should be relative.",
"target",
build.getDirectory() );
}
public void testShouldPreserveAddedResourceInRestoredState()
throws IOException, XmlPullParserException, URISyntaxException, ProjectBuildingException,
ModelInterpolationException
{
MavenProject project = buildProject( "pom.xml" );
ProjectBuilderConfiguration config = new DefaultProjectBuilderConfiguration();
projectBuilder.calculateConcreteState( project, config );
Build build = project.getBuild();
Resource r = new Resource();
r.setDirectory( "myDir" );
build.addResource( r );
List resources = build.getResources();
assertNotNull( "Concrete resources should not be null.", resources );
assertEquals( "Concrete resources should contain two entries.", 2, resources.size() );
assertResourcePresent( "concrete resources",
new File( build.getDirectory(), "generated-resources/plexus" ).getAbsolutePath(),
resources );
assertResourcePresent( "concrete resources", "myDir", resources );
projectBuilder.restoreDynamicState( project, config );
build = project.getBuild();
resources = build.getResources();
assertNotNull( "Restored resources should not be null.", resources );
assertEquals( "Restored resources should contain two entries.", 2, resources.size() );
assertResourcePresent( "restored resources",
"${pom.build.directory}/generated-resources/plexus",
resources );
assertResourcePresent( "restored resources", "myDir", resources );
}
public void testShouldPreserveAddedFilterInRestoredState()
throws IOException, XmlPullParserException, URISyntaxException, ProjectBuildingException,
ModelInterpolationException
{
MavenProject project = buildProject( "pom.xml" );
ProjectBuilderConfiguration config = new DefaultProjectBuilderConfiguration();
projectBuilder.calculateConcreteState( project, config );
Build build = project.getBuild();
build.addFilter( "myDir/filters.properties" );
List filters = build.getFilters();
assertNotNull( "Concrete filters should not be null.", filters );
assertEquals( "Concrete filters should contain two entries.", 2, filters.size() );
assertFilterPresent( "concrete filters",
new File( build.getDirectory(), "generated-filters.properties" ).getAbsolutePath(),
filters );
assertFilterPresent( "concrete filters", "myDir/filters.properties", filters );
projectBuilder.restoreDynamicState( project, config );
build = project.getBuild();
filters = build.getFilters();
assertNotNull( "Restored filters should not be null.", filters );
assertEquals( "Restored filters should contain two entries.", 2, filters.size() );
assertFilterPresent( "restored filters",
"${pom.build.directory}/generated-filters.properties",
filters );
assertFilterPresent( "restored filters", "myDir/filters.properties", filters );
}
public void testShouldIncorporateChangedBuildDirectoryViaExpressionsOnNextConcreteCalculation()
throws IOException, XmlPullParserException, URISyntaxException, ProjectBuildingException,
ModelInterpolationException
{
MavenProject project = buildProject( "pom.xml" );
ProjectBuilderConfiguration config = new DefaultProjectBuilderConfiguration();
projectBuilder.calculateConcreteState( project, config );
Build build = project.getBuild();
assertEquals( "First concrete build directory should be absolute and point to target dir.",
new File( project.getBasedir(), "target" ).getAbsolutePath(),
build.getDirectory() );
assertEquals( "First concrete build output-directory should be absolute and point to target/classes dir.",
new File( project.getBasedir(), "target/classes" ).getAbsolutePath(),
new File( build.getOutputDirectory() ).getAbsolutePath() );
build.setDirectory( "target2" );
assertEquals( "AFTER CHANGING BUILD DIRECTORY, build directory should be relative and point to target2 dir.",
"target2",
build.getDirectory() );
assertEquals( "AFTER CHANGING BUILD DIRECTORY, build output-directory should be absolute and still point to target/classes dir.",
new File( project.getBasedir(), "target/classes" ).getAbsolutePath(),
new File( build.getOutputDirectory() ).getAbsolutePath() );
projectBuilder.restoreDynamicState( project, config );
projectBuilder.calculateConcreteState( project, config );
build = project.getBuild();
assertEquals( "Second concrete build directory should be absolute and point to target2 dir.",
new File( project.getBasedir(), "target2" ).getAbsolutePath(),
build.getDirectory() );
assertEquals( "Second concrete build output-directory should be absolute and point to target2/classes dir.",
new File( project.getBasedir(), "target2/classes" ).getAbsolutePath(),
new File( build.getOutputDirectory() ).getAbsolutePath() );
}
public void testShouldPreserveInitialValuesForPropertiesReferencingBuildPaths()
throws IOException, XmlPullParserException, URISyntaxException, ProjectBuildingException,
ModelInterpolationException
{
MavenProject project = buildProject( "pom.xml" );
ProjectBuilderConfiguration config = new DefaultProjectBuilderConfiguration();
projectBuilder.calculateConcreteState( project, config );
project.getBuild().setDirectory( "target2" );
String originalValue = project.getProperties().getProperty( "myProperty" );
projectBuilder.restoreDynamicState( project, config );
projectBuilder.calculateConcreteState( project, config );
assertEquals( "After resetting build-directory and going through a recalculation phase for the project, "
+ "property value for 'myProperty' should STILL be the absolute initial build directory.",
originalValue,
project.getProperties().getProperty( "myProperty" ) );
}
public void testShouldAlignCompileSourceRootsInConcreteState()
throws IOException, XmlPullParserException, URISyntaxException, ProjectBuildingException,
ModelInterpolationException
{
MavenProject project = buildProject( "pom-relative.xml" );
ProjectBuilderConfiguration config = new DefaultProjectBuilderConfiguration();
projectBuilder.calculateConcreteState( project, config );
List compileSourceRoots = project.getCompileSourceRoots();
assertNotNull( "First concrete state compile-source roots should not be null.",
compileSourceRoots );
assertEquals( "First concrete state should contain one compile-source root.",
1,
compileSourceRoots.size() );
assertEquals( "First concrete state should have an absolute path for compile-source root.",
new File( project.getBasedir(), "src/main/java" ).getAbsolutePath(),
compileSourceRoots.get( 0 ) );
String newSourceRoot = new File( project.getBuild().getDirectory(),
"generated-sources/modello" ).getAbsolutePath();
project.addCompileSourceRoot( newSourceRoot );
projectBuilder.restoreDynamicState( project, config );
compileSourceRoots = project.getCompileSourceRoots();
assertNotNull( "Restored dynamic state compile-source roots should not be null.",
compileSourceRoots );
assertEquals( "Restored dynamic state should contain two compile-source roots.",
2,
compileSourceRoots.size() );
assertEquals( "Restored dynamic state should have a relative path for original compile-source root.",
"src/main/java",
compileSourceRoots.get( 0 ) );
assertEquals( "Restored dynamic state should have a relative path for new compile-source root.",
"target/generated-sources/modello",
compileSourceRoots.get( 1 ) );
projectBuilder.calculateConcreteState( project, config );
compileSourceRoots = project.getCompileSourceRoots();
assertNotNull( "Second concrete state compile-source roots should not be null.",
compileSourceRoots );
assertEquals( "Second concrete state should contain two compile-source roots.",
2,
compileSourceRoots.size() );
assertEquals( "Second concrete state should have an absolute path for original compile-source root.",
new File( project.getBasedir(), "src/main/java" ).getAbsolutePath(),
compileSourceRoots.get( 0 ) );
assertEquals( "Second concrete state should have an absolute path for new compile-source root.",
newSourceRoot,
compileSourceRoots.get( 1 ) );
}
// Useful for diagnostics.
// private void displayPOM( Model model )
// throws IOException
// {
// StringWriter writer = new StringWriter();
// new MavenXpp3Writer().write( writer, model );
//
// System.out.println( writer.toString() );
// }
private void assertResourcePresent( String testLabel,
String directory,
List resources )
{
boolean found = false;
if ( resources != null )
{
for ( Iterator it = resources.iterator(); it.hasNext(); )
{
Resource resource = (Resource) it.next();
if ( new File( directory ).getAbsolutePath().equals( new File( resource.getDirectory() ).getAbsolutePath() ) )
{
found = true;
break;
}
}
}
if ( !found )
{
fail( "Missing resource with directory: " + directory + " in " + testLabel );
}
}
private void assertFilterPresent( String testLabel,
String path,
List filters )
{
boolean found = false;
if ( filters != null )
{
for ( Iterator it = filters.iterator(); it.hasNext(); )
{
String filterPath = (String) it.next();
if ( new File( path ).getAbsolutePath().equals( new File( filterPath ).getAbsolutePath() ) )
{
found = true;
break;
}
}
}
if ( !found )
{
fail( "Missing filter with path: " + path + " in " + testLabel );
}
}
private MavenProject buildProject( String path )
throws IOException, XmlPullParserException, URISyntaxException, ProjectBuildingException
{
ClassLoader cloader = Thread.currentThread().getContextClassLoader();
URL resource = cloader.getResource( "project-dynamism/" + path );
if ( resource == null )
{
fail( "Cannot find classpath resource for POM: " + path );
}
File pomFile = new File( resource.getPath() );
pomFile = pomFile.getAbsoluteFile();
MavenProject project = projectBuilder.build( pomFile,
new DefaultProjectBuilderConfiguration() );
assertEquals( pomFile, project.getFile() );
return project;
}
}
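For reference, a minimal sketch (not part of this commit, assuming only the MavenProjectBuilder calls exercised in the deleted test above: build, calculateConcreteState, restoreDynamicState, isConcrete; pomFile is a hypothetical POM on disk) of the concrete/dynamic round-trip these tests verified:
ProjectBuilderConfiguration config = new DefaultProjectBuilderConfiguration();
MavenProject project = projectBuilder.build( pomFile, config );
// Interpolate ${pom.basedir} / ${pom.build.directory} expressions into absolute paths for plugin execution.
projectBuilder.calculateConcreteState( project, config );
assertTrue( project.isConcrete() );
// Roll the build section back to its expression form so later interpolation passes see the raw POM values.
projectBuilder.restoreDynamicState( project, config );
assertFalse( project.isConcrete() );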

View File

@@ -1,56 +0,0 @@
package org.apache.maven.project;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.model.Build;
import org.apache.maven.model.Resource;
import java.io.File;
public class ProjectBaseDirectoryAlignmentTest
extends AbstractMavenProjectTestCase
{
private String dir = "src/test/resources/projects/base-directory-alignment/";
public void testProjectDirectoryBaseDirectoryAlignment()
throws Exception
{
File f = getTestFile( dir + "project-which-needs-directory-alignment.xml" );
MavenProject project = getProject( f );
projectBuilder.calculateConcreteState( project, new DefaultProjectBuilderConfiguration() );
assertNotNull( "Test project can't be null!", project );
File basedirFile = new File( getBasedir() );
File sourceDirectoryFile = new File( project.getBuild().getSourceDirectory() );
File testSourceDirectoryFile = new File( project.getBuild().getTestSourceDirectory() );
assertEquals( basedirFile.getCanonicalPath(), sourceDirectoryFile.getCanonicalPath().substring( 0, getBasedir().length() ) );
assertEquals( basedirFile.getCanonicalPath(), testSourceDirectoryFile.getCanonicalPath().substring( 0, getBasedir().length() ) );
Build build = project.getBuild();
Resource resource = (Resource) build.getResources().get( 0 );
assertTrue( resource.getDirectory().startsWith( getBasedir() ) );
}
}