[MRM-1301] migrate reports to metadata repository

git-svn-id: https://svn.apache.org/repos/asf/archiva/branches/MRM-1025@891064 13f79535-47bb-0310-9956-ffa450edef68
Brett Porter 2009-12-15 23:07:36 +00:00
parent 7da14962fa
commit 4331620db6
80 changed files with 687 additions and 3961 deletions

View File

@@ -162,7 +162,7 @@ public class ArchivaMetadataCreationConsumer
project.setId( artifact.getArtifactId() );
// TODO: maybe not too efficient since it may have already been read and stored for this artifact
ProjectVersionMetadata versionMetadata = null;
ProjectVersionMetadata versionMetadata;
try
{
versionMetadata =
@@ -174,6 +174,11 @@ public class ArchivaMetadataCreationConsumer
throw new ConsumerException( e.getMessage(), e );
}
if ( versionMetadata == null )
{
throw new ConsumerException( "Unable to read metadata for artifact: " + artifact );
}
ArtifactMetadata artifactMeta = new ArtifactMetadata();
artifactMeta.setRepositoryId( repository.getId() );
artifactMeta.setNamespace( artifact.getGroupId() );
@@ -202,7 +207,6 @@ public class ArchivaMetadataCreationConsumer
log.error( "Error attempting to get SHA-1 checksum for " + file + ": " + e.getMessage() );
}
// TODO: transaction
// read the metadata and update it if it is newer or doesn't exist
metadataRepository.updateArtifact( repository.getId(), project.getNamespace(), project.getId(),

View File

@@ -1,125 +0,0 @@
package org.apache.maven.archiva.model;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* RepositoryProblemReport
*
* @version $Id$
*/
public class RepositoryProblemReport
extends RepositoryProblem
{
private static final long serialVersionUID = 4990893576717148324L;
protected String groupURL;
protected String artifactURL;
protected String versionURL;
public RepositoryProblemReport( RepositoryProblem repositoryProblem )
{
setGroupId( repositoryProblem.getGroupId() );
setArtifactId( repositoryProblem.getArtifactId() );
setVersion( repositoryProblem.getVersion() );
setMessage( repositoryProblem.getMessage() );
setOrigin( repositoryProblem.getOrigin() );
setPath( repositoryProblem.getPath() );
setType( repositoryProblem.getType() );
setRepositoryId( repositoryProblem.getRepositoryId() );
}
public void setGroupURL( String groupURL )
{
this.groupURL = groupURL;
}
public String getGroupURL()
{
return groupURL;
}
public void setArtifactURL( String artifactURL )
{
this.artifactURL = artifactURL;
}
public String getArtifactURL()
{
return artifactURL;
}
public void setVersionURL( String versionURL )
{
this.versionURL = versionURL;
}
public String getVersionURL()
{
return versionURL;
}
@Override
public boolean equals( Object o )
{
if ( this == o )
{
return true;
}
if ( o == null || getClass() != o.getClass() )
{
return false;
}
RepositoryProblemReport that = (RepositoryProblemReport) o;
if ( artifactURL != null ? !artifactURL.equals( that.artifactURL ) : that.artifactURL != null )
{
return false;
}
if ( groupURL != null ? !groupURL.equals( that.groupURL ) : that.groupURL != null )
{
return false;
}
if ( versionURL != null ? !versionURL.equals( that.versionURL ) : that.versionURL != null )
{
return false;
}
return true;
}
@Override
public int hashCode()
{
int result = groupURL != null ? groupURL.hashCode() : 0;
result = 31 * result + ( artifactURL != null ? artifactURL.hashCode() : 0 );
result = 31 * result + ( versionURL != null ? versionURL.hashCode() : 0 );
return result;
}
@Override
public String toString()
{
return "RepositoryProblemReport{" + "groupURL='" + groupURL + '\'' + ", artifactURL='" + artifactURL + '\'' +
", versionURL='" + versionURL + '\'' + '}';
}
}

View File

@@ -44,14 +44,6 @@
<multiplicity>*</multiplicity>
</association>
</field>
<field>
<name>repositoryProblems</name>
<version>1.0.0+</version>
<association>
<type>RepositoryProblem</type>
<multiplicity>*</multiplicity>
</association>
</field>
</fields>
<codeSegments>
<codeSegment>
@@ -838,107 +830,5 @@
</codeSegment>
</codeSegments>
</class>
<!-- _______________________________________________________________
_ _ _ _ _
| | | | ___ __ _| | |_| |__
| |_| |/ _ \/ _` | | __| '_ \
| _ | __/ (_| | | |_| | | |
|_| |_|\___|\__,_|_|\__|_| |_|
-->
<class stash.storable="true"
jpox.table="REPOSITORY_PROBLEMS">
<name>RepositoryProblem</name>
<version>1.0.0+</version>
<fields>
<field stash.maxSize="50">
<name>repositoryId</name>
<version>1.0.0+</version>
<required>true</required>
<type>String</type>
<description>
The repository associated with this path and problem.
</description>
</field>
<field stash.maxSize="250"
jpox.column="REPO_PATH">
<name>path</name>
<version>1.0.0+</version>
<type>String</type>
<required>true</required>
<description>
The path into the repository for the problem.
</description>
</field>
<field stash.maxSize="250">
<name>groupId</name>
<version>1.0.0+</version>
<type>String</type>
<required>false</required>
<description>
The Group ID of the repository content with the problem.
</description>
</field>
<field stash.maxSize="80">
<name>artifactId</name>
<version>1.0.0+</version>
<type>String</type>
<required>false</required>
<description>
The Artifact ID of the repository content with the problem.
</description>
</field>
<field stash.maxSize="50">
<name>version</name>
<version>1.0.0+</version>
<type>String</type>
<required>false</required>
<description>
The version of the repository content with the problem.
</description>
</field>
<field stash.maxSize="50"
jpox.column="PROBLEM_TYPE">
<name>type</name>
<identifier>false</identifier>
<version>1.0.0+</version>
<required>true</required>
<type>String</type>
<description>
The type of health problem.
</description>
</field>
<field stash.maxSize="20"
jpox.column="PROBLEM_ORIGIN">
<name>origin</name>
<identifier>false</identifier>
<version>1.0.0+</version>
<required>true</required>
<type>String</type>
<description>
The origin of the health problem.
</description>
</field>
<field stash.maxSize="4096">
<name>message</name>
<identifier>false</identifier>
<version>1.0.0+</version>
<required>true</required>
<type>String</type>
<description>
The message describing the health problem.
</description>
</field>
</fields>
<codeSegments>
<codeSegment>
<version>1.0.0+</version>
<code><![CDATA[
private static final long serialVersionUID = -3426958367216659565L;
]]></code>
</codeSegment>
</codeSegments>
</class>
</classes>
</model>

View File

@@ -116,7 +116,7 @@ public class TestMetadataRepository
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public void addMetadataFacet( String repositoryId, String facetId, String name, MetadataFacet metadataFacet )
public void addMetadataFacet( String repositoryId, String facetId, MetadataFacet metadataFacet )
{
//To change body of implemented methods use File | Settings | File Templates.
}
@@ -126,6 +126,11 @@ public class TestMetadataRepository
//To change body of implemented methods use File | Settings | File Templates.
}
public void removeMetadataFacet( String repoId, String facetId, String name )
{
//To change body of implemented methods use File | Settings | File Templates.
}
public List<ArtifactMetadata> getArtifactsByDateRange( String repoId, Date startTime, Date endTime )
{
return null; //To change body of implemented methods use File | Settings | File Templates.

View File

@@ -40,6 +40,4 @@ public interface ArchivaDAO
ArtifactDAO getArtifactDAO();
RepositoryProblemDAO getRepositoryProblemDAO();
}

View File

@@ -19,11 +19,7 @@ package org.apache.maven.archiva.database;
* under the License.
*/
import java.util.List;
import org.apache.maven.archiva.database.constraints.RepositoryProblemByArtifactConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.RepositoryProblem;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;
@@ -40,11 +36,6 @@ public class RepositoryDatabaseEventListener
*/
private ArtifactDAO artifactDAO;
/**
* @plexus.requirement role-hint="jdo"
*/
private RepositoryProblemDAO repositoryProblemDAO;
public void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact )
{
try
@@ -58,25 +49,5 @@ public class RepositoryDatabaseEventListener
{
// ignored
}
try
{
// Remove all repository problems related to this artifact
Constraint artifactConstraint = new RepositoryProblemByArtifactConstraint( artifact );
List<RepositoryProblem> repositoryProblems =
repositoryProblemDAO.queryRepositoryProblems( artifactConstraint );
if ( repositoryProblems != null )
{
for ( RepositoryProblem repositoryProblem : repositoryProblems )
{
repositoryProblemDAO.deleteRepositoryProblem( repositoryProblem );
}
}
}
catch ( ArchivaDatabaseException e )
{
// ignored
}
}
}

View File

@@ -1,62 +0,0 @@
package org.apache.maven.archiva.database;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.model.RepositoryProblem;
import java.util.List;
/**
* RepositoryProblemDAO
*
* @version $Id$
*/
public interface RepositoryProblemDAO
{
/* NOTE TO ARCHIVA DEVELOPERS.
*
* Please keep this interface clean and lean.
* We don't want a repeat of the Continuum Store.
* You should have the following methods per object type ...
*
* (Required Methods)
*
* List .queryDatabaseObject( Constraint ) throws ObjectNotFoundException, DatabaseException;
* DatabaseObject .saveDatabaseObject( DatabaseObject ) throws DatabaseException;
*
* (Optional Methods)
*
* DatabaseObject .createDatabaseObject( Required Params ) ;
* DatabaseObject .getDatabaseObject( Id ) throws ObjectNotFoundException, DatabaseException;
* List .getDatabaseObjects() throws ObjectNotFoundException, DatabaseException;
* void .deleteDatabaseObject( DatabaseObject ) throws DatabaseException;
*
* This is the only list of options created in this DAO.
*/
public List <RepositoryProblem> queryRepositoryProblems( Constraint constraint )
throws ObjectNotFoundException, ArchivaDatabaseException;
public RepositoryProblem saveRepositoryProblem( RepositoryProblem problem )
throws ArchivaDatabaseException;
public void deleteRepositoryProblem( RepositoryProblem problem )
throws ArchivaDatabaseException;
}
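The RepositoryDatabaseEventListener hunk above exercises exactly this query/delete convention. As a minimal sketch of the pattern the note describes (the helper class and method name are hypothetical; the interfaces, exceptions and constraint all appear elsewhere in this commit):

package org.apache.maven.archiva.database;

import java.util.List;

import org.apache.maven.archiva.database.constraints.RepositoryProblemByArtifactConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.RepositoryProblem;

public class RepositoryProblemDaoSketch
{
    /**
     * Query, then delete, every recorded problem for the given artifact,
     * using only the "required" DAO methods listed in the note above.
     */
    public void purgeProblemsFor( RepositoryProblemDAO repositoryProblemDAO, ArchivaArtifact artifact )
        throws ObjectNotFoundException, ArchivaDatabaseException
    {
        Constraint byArtifact = new RepositoryProblemByArtifactConstraint( artifact );
        List<RepositoryProblem> problems = repositoryProblemDAO.queryRepositoryProblems( byArtifact );
        if ( problems != null )
        {
            for ( RepositoryProblem problem : problems )
            {
                repositoryProblemDAO.deleteRepositoryProblem( problem );
            }
        }
    }
}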

View File

@@ -1,87 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.database.Constraint;
/**
* Constraint for retrieving artifacts whose sha1 or md5 checksum matches the
* specified value.
*
*/
public class ArtifactsByChecksumConstraint
extends AbstractDeclarativeConstraint
implements Constraint
{
private String whereClause;
public static final String SHA1 = "SHA1";
public static final String MD5 = "MD5";
/**
* Create constraint for checksum (without providing type)
*
* @param desiredChecksum the checksum (either SHA1 or MD5)
*/
public ArtifactsByChecksumConstraint( String desiredChecksum )
{
this( desiredChecksum, null );
}
/**
* Create constraint for specific checksum.
*
* @param desiredChecksum the checksum (either SHA1 or MD5)
* @param type the type of checksum (either {@link #SHA1} or {@link #MD5})
*/
public ArtifactsByChecksumConstraint( String desiredChecksum, String type )
{
if ( StringUtils.isEmpty( type ) )
{
// default for no specified type.
whereClause = "this.checksumSHA1 == desiredChecksum || this.checksumMD5 == desiredChecksum";
}
else if ( !type.equals( SHA1 ) && !type.equals( MD5 ) )
{
// default for type that isn't recognized.
whereClause = "this.checksumSHA1 == desiredChecksum || this.checksumMD5 == desiredChecksum";
}
else if ( type.equals( SHA1 ) || type.equals( MD5 ) )
{
// specific type.
whereClause = "this.checksum" + type.trim() + " == desiredChecksum";
}
declParams = new String[]{"String desiredChecksum"};
params = new Object[]{desiredChecksum.toLowerCase()};
}
public String getSortColumn()
{
return "groupId";
}
public String getWhereCondition()
{
return whereClause;
}
}
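To illustrate the three constructor branches above, here is a small hypothetical driver (the checksum literals are the same values reused by the test class deleted later in this commit); the comments show the where clause each call produces:

import org.apache.maven.archiva.database.constraints.ArtifactsByChecksumConstraint;

public class ChecksumConstraintExample
{
    public static void main( String[] args )
    {
        // Explicit type: query only the matching checksum column.
        ArtifactsByChecksumConstraint sha1Only = new ArtifactsByChecksumConstraint(
            "232f01b24b1617c46a3d4b0ab3415bc9237dcdec", ArtifactsByChecksumConstraint.SHA1 );
        // getWhereCondition() -> this.checksumSHA1 == desiredChecksum
        System.out.println( sha1Only.getWhereCondition() );

        // No type given (or an unrecognised one): fall back to matching either column.
        ArtifactsByChecksumConstraint eitherColumn =
            new ArtifactsByChecksumConstraint( "53e3b856aa1a3f3cb7fe0f7ac6163aaf" );
        // getWhereCondition() -> this.checksumSHA1 == desiredChecksum || this.checksumMD5 == desiredChecksum
        System.out.println( eitherColumn.getWhereCondition() );
    }
}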

View File

@@ -1,59 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.Constraint;
import java.util.Calendar;
import java.util.Date;
/**
* Constraint for artifacts that are of a certain age (in days) or older.
*
* @version $Id$
*/
public class OlderArtifactsByAgeConstraint
extends AbstractDeclarativeConstraint
implements Constraint
{
private String whereClause;
public OlderArtifactsByAgeConstraint( int daysOld )
{
Calendar cal = Calendar.getInstance();
cal.add( Calendar.DAY_OF_MONTH, ( -1 ) * daysOld );
Date cutoffDate = cal.getTime();
whereClause = "this.lastModified <= cutoffDate";
declImports = new String[] { "import java.util.Date" };
declParams = new String[] { "java.util.Date cutoffDate" };
params = new Object[] { cutoffDate };
}
public String getSortColumn()
{
return "groupId";
}
public String getWhereCondition()
{
return whereClause;
}
}
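Combined with ArtifactDAO.queryArtifacts (as the deleted constraint tests below do), the class above supports a small cleanup-style query; this helper is hypothetical and only sketches that usage:

import java.util.List;

import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.constraints.OlderArtifactsByAgeConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;

public class StaleArtifactFinder
{
    /** Returns artifacts whose lastModified is 90 days ago or earlier. */
    public List<ArchivaArtifact> findStale( ArtifactDAO artifactDao )
        throws Exception
    {
        return artifactDao.queryArtifacts( new OlderArtifactsByAgeConstraint( 90 ) );
    }
}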

View File

@@ -1,59 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.Constraint;
import java.util.Calendar;
import java.util.Date;
/**
* Constraint for snapshot artifacts that are of a certain age (in days) or older.
*
* @version $Id$
*/
public class OlderSnapshotArtifactsByAgeConstraint
extends AbstractDeclarativeConstraint
implements Constraint
{
private String whereClause;
public OlderSnapshotArtifactsByAgeConstraint( int daysOld )
{
Calendar cal = Calendar.getInstance();
cal.add( Calendar.DAY_OF_MONTH, ( -1 ) * daysOld );
Date cutoffDate = cal.getTime();
whereClause = "this.lastModified <= cutoffDate && this.snapshot == true";
declImports = new String[] { "import java.util.Date" };
declParams = new String[] { "java.util.Date cutoffDate" };
params = new Object[] { cutoffDate };
}
public String getSortColumn()
{
return "groupId";
}
public String getWhereCondition()
{
return whereClause;
}
}

View File

@@ -1,60 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.Constraint;
/**
* RangeConstraint
*/
public class RangeConstraint
extends AbstractDeclarativeConstraint
implements Constraint
{
private String sortColumn;
public RangeConstraint()
{
this.range = null;
this.sortColumn = null;
}
public RangeConstraint( int[] range )
{
this.range = range;
this.sortColumn = null;
}
public RangeConstraint( int[] range, String sortColumn )
{
this.range = range;
this.sortColumn = sortColumn;
}
public String getSortColumn()
{
return sortColumn;
}
public String getWhereCondition()
{
return null;
}
}

View File

@@ -1,57 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
/**
* RepositoryProblemByArtifactConstraint
*/
public class RepositoryProblemByArtifactConstraint
extends AbstractDeclarativeConstraint
implements Constraint
{
private String whereClause;
private void createWhereClause( ArchivaArtifact artifact )
{
whereClause =
"groupId.like(desiredGroupId) && artifactId.like(desiredArtifactId) && version.like(desiredVersion)";
declParams = new String[] { "String desiredGroupId" , "String desiredArtifactId" , "String desiredVersion"};
params = new Object[] { artifact.getGroupId() + "%" , artifact.getArtifactId() + "%", artifact.getVersion() + "%"};
}
public RepositoryProblemByArtifactConstraint( ArchivaArtifact desiredArtifact )
{
super();
createWhereClause( desiredArtifact );
}
public String getSortColumn()
{
return "artifactId";
}
public String getWhereCondition()
{
return whereClause;
}
}

View File

@@ -1,61 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.Constraint;
/**
* RepositoryProblemByGroupIdConstraint
*/
public class RepositoryProblemByGroupIdConstraint
extends RangeConstraint
implements Constraint
{
private String whereClause;
private void createWhereClause( String desiredGroupId )
{
whereClause = "groupId.like(desiredGroupId)";
declParams = new String[]{"String desiredGroupId"};
params = new Object[]{desiredGroupId + "%"};
}
public RepositoryProblemByGroupIdConstraint( String desiredGroupId )
{
super();
createWhereClause( desiredGroupId );
}
public RepositoryProblemByGroupIdConstraint( int[] range, String desiredGroupId )
{
super( range );
createWhereClause( desiredGroupId );
}
public String getSortColumn()
{
return "repositoryId";
}
public String getWhereCondition()
{
return whereClause;
}
}

View File

@@ -1,61 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.Constraint;
/**
* RepositoryProblemByRepositoryIdConstraint
*/
public class RepositoryProblemByRepositoryIdConstraint
extends RangeConstraint
implements Constraint
{
private String whereClause;
private void createWhereClause( String desiredRepositoryId )
{
whereClause = "repositoryId == desiredRepositoryId";
declParams = new String[]{"String desiredRepositoryId"};
params = new Object[]{desiredRepositoryId};
}
public RepositoryProblemByRepositoryIdConstraint( String desiredRepositoryId )
{
super();
createWhereClause( desiredRepositoryId );
}
public RepositoryProblemByRepositoryIdConstraint( int[] range, String desiredRepositoryId )
{
super( range );
createWhereClause( desiredRepositoryId );
}
public String getSortColumn()
{
return "groupId";
}
public String getWhereCondition()
{
return whereClause;
}
}

View File

@@ -1,51 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.Constraint;
/**
* RepositoryProblemByTypeConstraint
*
* @version $Id$
*/
public class RepositoryProblemByTypeConstraint
extends AbstractDeclarativeConstraint
implements Constraint
{
private String whereClause;
public RepositoryProblemByTypeConstraint( String desiredType )
{
whereClause = "type == desiredType";
declParams = new String[] { "String desiredType" };
params = new Object[] { desiredType };
}
public String getSortColumn()
{
return "groupId";
}
public String getWhereCondition()
{
return whereClause;
}
}

View File

@@ -1,61 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.Constraint;
/**
* RepositoryProblemConstraint
*/
public class RepositoryProblemConstraint
extends RangeConstraint
implements Constraint
{
private String whereClause;
private void createWhereClause( String desiredGroupId, String desiredRepositoryId )
{
whereClause = "groupId.like(desiredGroupId) && repositoryId == desiredRepositoryId";
declParams = new String[]{"String desiredGroupId", "String desiredRepositoryId"};
params = new Object[]{desiredGroupId + "%", desiredRepositoryId};
}
public RepositoryProblemConstraint( String desiredGroupId, String desiredRepositoryId )
{
super();
createWhereClause( desiredGroupId, desiredRepositoryId );
}
public RepositoryProblemConstraint( int[] range, String desiredGroupId, String desiredRepositoryId )
{
super( range );
createWhereClause( desiredGroupId, desiredRepositoryId );
}
public String getSortColumn()
{
return "artifactId";
}
public String getWhereCondition()
{
return whereClause;
}
}

View File

@@ -1,79 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.lang.StringUtils;
import java.util.List;
/**
* SqlBuilder - common sql building mechanisms.
*
* @version $Id$
*/
public class SqlBuilder
{
/**
* Append an SQL where clause within <code>"()"</code> braces that selects the specific
* repository ids provided.
*
* NOTE: This does not append the "WHERE" statement itself.
*
* @param sql the sql buffer to append to.
* @param fieldId the field id for the repository Id.
* @param selectedRepositoryIds the list of repository ids to provide.
*/
public static void appendWhereSelectedRepositories( StringBuffer sql, String fieldId,
List<String> selectedRepositoryIds )
{
if ( fieldId == null )
{
throw new NullPointerException( "Null field id is not allowed." );
}
if ( StringUtils.isBlank( fieldId ) )
{
throw new IllegalArgumentException( "Blank field id is not allowed." );
}
if ( selectedRepositoryIds == null )
{
throw new NullPointerException( "Selected repositories cannot be null." );
}
if ( selectedRepositoryIds.isEmpty() )
{
throw new IllegalArgumentException( "Selected repositories cannot be null." );
}
sql.append( " (" );
boolean multiple = false;
for ( String repo : selectedRepositoryIds )
{
if ( multiple )
{
sql.append( " || " );
}
sql.append( " " ).append( fieldId ).append( " == \"" ).append( repo ).append( "\"" );
multiple = true;
}
sql.append( " )" );
}
}
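A quick usage sketch for the helper above; the repository ids and the surrounding SELECT text are made up, and the trailing comment shows the fragment the loop appends:

import java.util.Arrays;
import java.util.List;

import org.apache.maven.archiva.database.constraints.SqlBuilder;

public class SqlBuilderExample
{
    public static void main( String[] args )
    {
        List<String> repositoryIds = Arrays.asList( "internal", "snapshots" );
        StringBuffer sql = new StringBuffer( "SELECT FROM ArchivaArtifactModel WHERE" );
        SqlBuilder.appendWhereSelectedRepositories( sql, "repositoryId", repositoryIds );
        System.out.println( sql );
        // SELECT FROM ArchivaArtifactModel WHERE ( repositoryId == "internal" ||  repositoryId == "snapshots" )
    }
}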

View File

@@ -1,57 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.Constraint;
/**
* UniqueFieldConstraint
*/
public class UniqueFieldConstraint
extends AbstractSimpleConstraint
implements Constraint
{
private String sql;
public UniqueFieldConstraint( String className, String fieldName )
{
sql = "SELECT " + fieldName + " FROM " + className + " GROUP BY " + fieldName + " ORDER BY " + fieldName +
" ASCENDING";
}
public UniqueFieldConstraint( String className, String fieldName, String fieldNamePrefix )
{
sql = "SELECT " + fieldName + " FROM " + className + " WHERE " + fieldName +
".startsWith( fieldPrefix ) PARAMETERS String fieldPrefix GROUP BY " + fieldName + " ORDER BY " +
fieldName + " ASCENDING";
super.params = new Object[]{fieldNamePrefix};
}
public Class<?> getResultClass()
{
return String.class;
}
public String getSelectSql()
{
return sql;
}
}
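The two constructors above differ only in whether a startsWith filter is added. In this hypothetical snippet the class and field names are purely illustrative, and the comments show the generated JDOQL-style select:

import org.apache.maven.archiva.database.constraints.UniqueFieldConstraint;

public class UniqueFieldConstraintExample
{
    public static void main( String[] args )
    {
        UniqueFieldConstraint allGroupIds =
            new UniqueFieldConstraint( "org.apache.maven.archiva.model.ArchivaArtifactModel", "groupId" );
        // SELECT groupId FROM org.apache.maven.archiva.model.ArchivaArtifactModel
        //     GROUP BY groupId ORDER BY groupId ASCENDING
        System.out.println( allGroupIds.getSelectSql() );

        UniqueFieldConstraint orgGroupIds =
            new UniqueFieldConstraint( "org.apache.maven.archiva.model.ArchivaArtifactModel", "groupId", "org." );
        // Same select, plus: WHERE groupId.startsWith( fieldPrefix ) PARAMETERS String fieldPrefix,
        // with fieldPrefix bound to "org."
        System.out.println( orgGroupIds.getSelectSql() );
    }
}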

View File

@@ -23,7 +23,6 @@ import java.util.List;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.database.SimpleConstraint;
/**
@@ -46,11 +45,6 @@ public class JdoArchivaDAO
*/
private ArtifactDAO artifactDAO;
/**
* @plexus.requirement role-hint="jdo"
*/
private RepositoryProblemDAO repositoryProblemDAO;
public JdoArchivaDAO()
{
super(); //To change body of overridden methods use File | Settings | File Templates.
@@ -66,9 +60,4 @@ public class JdoArchivaDAO
return artifactDAO;
}
public RepositoryProblemDAO getRepositoryProblemDAO()
{
return repositoryProblemDAO;
}
}

View File

@@ -1,63 +0,0 @@
package org.apache.maven.archiva.database.jdo;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.model.RepositoryProblem;
import java.util.List;
/**
* JdoRepositoryProblemDAO
*
* @version $Id$
*
* @plexus.component role-hint="jdo"
*/
public class JdoRepositoryProblemDAO
implements RepositoryProblemDAO
{
/**
* @plexus.requirement role-hint="archiva"
*/
private JdoAccess jdo;
@SuppressWarnings("unchecked")
public List<RepositoryProblem> queryRepositoryProblems( Constraint constraint )
throws ObjectNotFoundException, ArchivaDatabaseException
{
return (List<RepositoryProblem>) jdo.queryObjects( RepositoryProblem.class, constraint );
}
public RepositoryProblem saveRepositoryProblem( RepositoryProblem problem )
throws ArchivaDatabaseException
{
return (RepositoryProblem) jdo.saveObject( problem );
}
public void deleteRepositoryProblem( RepositoryProblem problem )
throws ArchivaDatabaseException
{
jdo.removeObject( problem );
}
}

View File

@@ -1,44 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import junit.framework.Test;
import junit.framework.TestSuite;
/**
* IDE Provided Utility Class for all tests.
*
* @version $Id$
*/
public class AllTests
{
public static Test suite()
{
TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.database.constraints" );
//$JUnit-BEGIN$
suite.addTestSuite( ArtifactsByChecksumConstraintTest.class );
suite.addTestSuite( OlderArtifactsByAgeConstraintTest.class );
suite.addTestSuite( OlderSnapshotArtifactsByAgeConstraintTest.class );
//$JUnit-END$
return suite;
}
}

View File

@@ -1,200 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.AbstractArchivaDatabaseTestCase;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.util.Date;
import java.util.List;
/**
* ArtifactsByChecksumConstraintTest
*
* @version
*/
public class ArtifactsByChecksumConstraintTest
extends AbstractArchivaDatabaseTestCase
{
private static final String SHA1_HASH3 = "f3f653289f3217c65324830ab3415bc92feddefa";
private static final String SHA1_HASH2 = "a49810ad3eba8651677ab57cd40a0f76fdef9538";
private static final String SHA1_HASH1 = "232f01b24b1617c46a3d4b0ab3415bc9237dcdec";
private static final String MD5_HASH3 = "5440efd724c9a5246ddc148662a4f20a";
private static final String MD5_HASH2 = "4685525525d82dea68c6a6cd5a08f726";
private static final String MD5_HASH1 = "53e3b856aa1a3f3cb7fe0f7ac6163aaf";
private ArtifactDAO artifactDao;
@Override
protected void setUp()
throws Exception
{
super.setUp();
ArchivaDAO dao = (ArchivaDAO) lookup( ArchivaDAO.ROLE, "jdo" );
artifactDao = dao.getArtifactDAO();
}
public ArchivaArtifact createArtifact( String artifactId, String version )
{
ArchivaArtifact artifact =
artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar", "testable_repo" );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setRepositoryId( "testable_repo" );
return artifact;
}
public void testConstraintSHA1()
throws Exception
{
ArchivaArtifact artifact;
// Setup artifacts in fresh DB.
artifact = createArtifact( "test-sha1-one", "1.0" );
artifact.getModel().setChecksumSHA1( SHA1_HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-sha1-one", "1.1" );
artifact.getModel().setChecksumSHA1( SHA1_HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-sha1-one", "1.2" );
artifact.getModel().setChecksumSHA1( SHA1_HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-sha1-two", "1.0" );
artifact.getModel().setChecksumSHA1( SHA1_HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-sha1-two", "2.0" );
artifact.getModel().setChecksumSHA1( SHA1_HASH3 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-sha1-two", "2.1" );
artifact.getModel().setChecksumSHA1( SHA1_HASH2 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-sha1-two", "3.0" );
artifact.getModel().setChecksumSHA1( SHA1_HASH2 );
artifactDao.saveArtifact( artifact );
assertConstraint( "Artifacts by SHA1 Checksum", 4,
new ArtifactsByChecksumConstraint( SHA1_HASH1, ArtifactsByChecksumConstraint.SHA1 ) );
assertConstraint( "Artifacts by SHA1 Checksum", 2,
new ArtifactsByChecksumConstraint( SHA1_HASH2, ArtifactsByChecksumConstraint.SHA1 ) );
assertConstraint( "Artifacts by SHA1 Checksum", 1,
new ArtifactsByChecksumConstraint( SHA1_HASH3, ArtifactsByChecksumConstraint.SHA1 ) );
}
public void testConstraintMD5()
throws Exception
{
ArchivaArtifact artifact;
artifact = createArtifact( "test-md5-one", "1.0" );
artifact.getModel().setChecksumMD5( MD5_HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-md5-one", "1.1" );
artifact.getModel().setChecksumMD5( MD5_HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-md5-one", "1.2" );
artifact.getModel().setChecksumMD5( MD5_HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-md5-two", "1.0" );
artifact.getModel().setChecksumMD5( MD5_HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-md5-two", "2.0" );
artifact.getModel().setChecksumMD5( MD5_HASH3 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-md5-two", "2.1" );
artifact.getModel().setChecksumMD5( MD5_HASH2 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-md5-two", "3.0" );
artifact.getModel().setChecksumMD5( MD5_HASH2 );
artifactDao.saveArtifact( artifact );
assertConstraint( "Artifacts by MD5 Checksum", 4,
new ArtifactsByChecksumConstraint( MD5_HASH1, ArtifactsByChecksumConstraint.MD5 ) );
assertConstraint( "Artifacts by MD5 Checksum", 2,
new ArtifactsByChecksumConstraint( MD5_HASH2, ArtifactsByChecksumConstraint.MD5 ) );
assertConstraint( "Artifacts by MD5 Checksum", 1,
new ArtifactsByChecksumConstraint( MD5_HASH3, ArtifactsByChecksumConstraint.MD5 ) );
}
public void testConstraintOR()
throws Exception
{
ArchivaArtifact artifact;
artifact = createArtifact( "test-one", "1.0" );
artifact.getModel().setChecksumMD5( MD5_HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-one", "1.1" );
artifact.getModel().setChecksumMD5( MD5_HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-one", "1.2" );
artifact.getModel().setChecksumMD5( MD5_HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "1.0" );
artifact.getModel().setChecksumMD5( MD5_HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "2.0" );
artifact.getModel().setChecksumMD5( MD5_HASH3 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "2.1" );
artifact.getModel().setChecksumMD5( MD5_HASH2 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "3.0" );
artifact.getModel().setChecksumMD5( MD5_HASH2 );
artifactDao.saveArtifact( artifact );
assertConstraint( "Artifacts by MD5 Checksum", 4, new ArtifactsByChecksumConstraint( MD5_HASH1 ) );
assertConstraint( "Artifacts by MD5 Checksum", 2, new ArtifactsByChecksumConstraint( MD5_HASH2 ) );
assertConstraint( "Artifacts by MD5 Checksum", 1, new ArtifactsByChecksumConstraint( MD5_HASH3 ) );
}
private void assertConstraint( String msg, int count, ArtifactsByChecksumConstraint constraint )
throws Exception
{
List<ArchivaArtifact> results = artifactDao.queryArtifacts( constraint );
assertNotNull( msg + ": Not Null", results );
assertEquals( msg + ": Results.size", count, results.size() );
}
}

View File

@@ -1,102 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.AbstractArchivaDatabaseTestCase;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.util.Calendar;
import java.util.List;
/**
* OlderArtifactsByAgeConstraintTest
*
* @version $Id$
*/
public class OlderArtifactsByAgeConstraintTest
extends AbstractArchivaDatabaseTestCase
{
private ArtifactDAO artifactDao;
protected void setUp()
throws Exception
{
super.setUp();
ArchivaDAO dao = (ArchivaDAO) lookup( ArchivaDAO.ROLE, "jdo" );
artifactDao = dao.getArtifactDAO();
}
public ArchivaArtifact createArtifact( String artifactId, String version, int daysOld )
{
ArchivaArtifact artifact = artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version,
"", "jar", "testable_repo" );
Calendar cal = Calendar.getInstance();
cal.add( Calendar.DAY_OF_MONTH, ( -1 ) * daysOld );
artifact.getModel().setLastModified( cal.getTime() );
artifact.getModel().setRepositoryId( "testable_repo" );
return artifact;
}
public void testConstraint()
throws Exception
{
ArchivaArtifact artifact;
// Setup artifacts in fresh DB.
artifact = createArtifact( "test-one", "1.0", 200 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-one", "1.1", 100 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-one", "1.2", 50 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "1.0", 200 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "2.0", 150 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "2.1", 100 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "3.0", 5 );
artifactDao.saveArtifact( artifact );
assertConstraint( 6, new OlderArtifactsByAgeConstraint( 7 ) );
assertConstraint( 5, new OlderArtifactsByAgeConstraint( 90 ) );
assertConstraint( 5, new OlderArtifactsByAgeConstraint( 100 ) );
assertConstraint( 3, new OlderArtifactsByAgeConstraint( 150 ) );
assertConstraint( 0, new OlderArtifactsByAgeConstraint( 9000 ) );
}
private void assertConstraint( int expectedHits, Constraint constraint )
throws Exception
{
List<ArchivaArtifact> results = artifactDao.queryArtifacts( constraint );
assertNotNull( "Older Artifacts By Age: Not Null", results );
assertEquals( "Older Artifacts By Age: Results.size", expectedHits, results.size() );
}
}

View File

@@ -1,117 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.AbstractArchivaDatabaseTestCase;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.util.Calendar;
import java.util.List;
/**
* OlderSnapshotArtifactsByAgeConstraintTest
*
* @version $Id$
*/
public class OlderSnapshotArtifactsByAgeConstraintTest
extends AbstractArchivaDatabaseTestCase
{
private ArtifactDAO artifactDao;
protected void setUp()
throws Exception
{
super.setUp();
ArchivaDAO dao = (ArchivaDAO) lookup( ArchivaDAO.ROLE, "jdo" );
artifactDao = dao.getArtifactDAO();
}
public ArchivaArtifact createArtifact( String artifactId, String version, int daysOld )
{
ArchivaArtifact artifact = artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version,
"", "jar", "testable_repo" );
Calendar cal = Calendar.getInstance();
cal.add( Calendar.DAY_OF_MONTH, ( -1 ) * daysOld );
artifact.getModel().setLastModified( cal.getTime() );
artifact.getModel().setRepositoryId( "testable_repo" );
return artifact;
}
public void testConstraint()
throws Exception
{
ArchivaArtifact artifact;
// Setup artifacts in fresh DB.
artifact = createArtifact( "test-one", "1.0", 200 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-one", "1.1-SNAPSHOT", 110 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-one", "1.1", 100 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-one", "1.2-20060923.005752-2", 55 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-one", "1.2-SNAPSHOT", 52 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-one", "1.2", 50 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "1.0-20060828.144210-1", 220 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "1.0-SNAPSHOT", 210 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "1.0", 200 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "2.0", 150 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "2.1", 100 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "3.0", 5 );
artifactDao.saveArtifact( artifact );
assertConstraint( 5, new OlderSnapshotArtifactsByAgeConstraint( 7 ) );
assertConstraint( 3, new OlderSnapshotArtifactsByAgeConstraint( 90 ) );
assertConstraint( 3, new OlderSnapshotArtifactsByAgeConstraint( 100 ) );
assertConstraint( 2, new OlderSnapshotArtifactsByAgeConstraint( 150 ) );
assertConstraint( 0, new OlderSnapshotArtifactsByAgeConstraint( 500 ) );
}
private void assertConstraint( int expectedHits, Constraint constraint )
throws Exception
{
List<ArchivaArtifact> results = artifactDao.queryArtifacts( constraint );
assertNotNull( "Older Snapshot Artifacts By Age: Not Null", results );
assertEquals( "Older Snapshot Artifacts By Age: Results.size", expectedHits, results.size() );
}
}

View File

@@ -1,88 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.AbstractArchivaDatabaseTestCase;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.model.RepositoryProblem;
import java.util.List;
/**
* RangeConstraintTest
*/
public class RangeConstraintTest
extends AbstractArchivaDatabaseTestCase
{
private RepositoryProblemDAO repoProblemDao;
protected void setUp()
throws Exception
{
super.setUp();
ArchivaDAO dao = (ArchivaDAO) lookup( ArchivaDAO.ROLE, "jdo" );
repoProblemDao = dao.getRepositoryProblemDAO();
}
public RepositoryProblem createRepoProblem()
{
RepositoryProblem repoProblem = new RepositoryProblem();
repoProblem.setGroupId( "groupId" );
repoProblem.setArtifactId( "artifactId" );
repoProblem.setMessage( "message" );
repoProblem.setOrigin( "origin" );
repoProblem.setPath( "path" );
repoProblem.setRepositoryId( "repositoryId" );
repoProblem.setType( "type" );
repoProblem.setVersion( "version" );
return repoProblem;
}
public void testConstraint()
throws Exception
{
repoProblemDao.saveRepositoryProblem( createRepoProblem() );
repoProblemDao.saveRepositoryProblem( createRepoProblem() );
repoProblemDao.saveRepositoryProblem( createRepoProblem() );
repoProblemDao.saveRepositoryProblem( createRepoProblem() );
repoProblemDao.saveRepositoryProblem( createRepoProblem() );
assertConstraint( 0, new RangeConstraint( new int[]{5, 10} ) );
assertConstraint( 1, new RangeConstraint( new int[]{0, 1} ) );
assertConstraint( 2, new RangeConstraint( new int[]{0, 2} ) );
assertConstraint( 3, new RangeConstraint( new int[]{0, 3} ) );
assertConstraint( 4, new RangeConstraint( new int[]{0, 4} ) );
assertConstraint( 5, new RangeConstraint( new int[]{0, 5} ) );
assertConstraint( 5, new RangeConstraint() );
}
private void assertConstraint( int expectedHits, Constraint constraint )
throws Exception
{
List<RepositoryProblem> results = repoProblemDao.queryRepositoryProblems( constraint );
assertNotNull( "Range Constraint: Not Null", results );
assertEquals( "Range Constraint: Results.size", expectedHits, results.size() );
}
}

View File

@@ -1,96 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.List;
import org.apache.maven.archiva.database.AbstractArchivaDatabaseTestCase;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.model.RepositoryProblem;
/**
* RepositoryProblemByGroupIdConstraintTest
*/
public class RepositoryProblemByGroupIdConstraintTest
extends AbstractArchivaDatabaseTestCase
{
private static final String GROUP_ID_1 = "org.apache.maven.archiva.test.1";
private static final String GROUP_ID_2 = "org.apache.maven.archiva.test.2";
private static final String GROUP_ID_3 = "org.apache.maven.archiva.test.3";
private static final String GROUP_ID_PARTIAL = "org.apache.maven.archiva";
private RepositoryProblemDAO repoProblemDao;
protected void setUp()
throws Exception
{
super.setUp();
ArchivaDAO dao = (ArchivaDAO) lookup( ArchivaDAO.ROLE, "jdo" );
repoProblemDao = dao.getRepositoryProblemDAO();
}
public RepositoryProblem createRepoProblem( String groupId )
{
RepositoryProblem repoProblem = new RepositoryProblem();
repoProblem.setGroupId( groupId );
repoProblem.setArtifactId( "artifactId" );
repoProblem.setMessage( "message" );
repoProblem.setOrigin( "origin" );
repoProblem.setPath( "path" );
repoProblem.setRepositoryId( "repositoryId" );
repoProblem.setType( "type" );
repoProblem.setVersion( "version" );
return repoProblem;
}
public void testConstraint()
throws Exception
{
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_1 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_2 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_2 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_3 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_3 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_3 ) );
assertConstraint( 1, new RepositoryProblemByGroupIdConstraint( GROUP_ID_1 ) );
assertConstraint( 2, new RepositoryProblemByGroupIdConstraint( GROUP_ID_2 ) );
assertConstraint( 3, new RepositoryProblemByGroupIdConstraint( GROUP_ID_3 ) );
assertConstraint( 6, new RepositoryProblemByGroupIdConstraint( GROUP_ID_PARTIAL ) );
}
private void assertConstraint( int expectedHits, Constraint constraint )
throws Exception
{
List<RepositoryProblem> results = repoProblemDao.queryRepositoryProblems( constraint );
assertNotNull( "Repository Problems by Group Id: Not Null", results );
assertEquals( "Repository Problems by Group Id: Results.size", expectedHits, results.size() );
}
}

View File

@@ -1,93 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.AbstractArchivaDatabaseTestCase;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.model.RepositoryProblem;
import java.util.List;
/**
* RepositoryProblemByRepositoryIdConstraintTest
*/
public class RepositoryProblemByRepositoryIdConstraintTest
extends AbstractArchivaDatabaseTestCase
{
private static final String REPO_ID_1 = "test-repo-1";
private static final String REPO_ID_2 = "test-repo-2";
private static final String REPO_ID_3 = "test-repo-3";
private RepositoryProblemDAO repoProblemDao;
protected void setUp()
throws Exception
{
super.setUp();
ArchivaDAO dao = (ArchivaDAO) lookup( ArchivaDAO.ROLE, "jdo" );
repoProblemDao = dao.getRepositoryProblemDAO();
}
public RepositoryProblem createRepoProblem( String repoId )
{
RepositoryProblem repoProblem = new RepositoryProblem();
repoProblem.setGroupId( "groupId" );
repoProblem.setArtifactId( "artifactId" );
repoProblem.setMessage( "message" );
repoProblem.setOrigin( "origin" );
repoProblem.setPath( "path" );
repoProblem.setRepositoryId( repoId );
repoProblem.setType( "type" );
repoProblem.setVersion( "version" );
return repoProblem;
}
public void testConstraint()
throws Exception
{
repoProblemDao.saveRepositoryProblem( createRepoProblem( REPO_ID_1 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( REPO_ID_2 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( REPO_ID_2 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( REPO_ID_3 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( REPO_ID_3 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( REPO_ID_3 ) );
assertConstraint( 1, new RepositoryProblemByRepositoryIdConstraint( REPO_ID_1 ) );
assertConstraint( 2, new RepositoryProblemByRepositoryIdConstraint( REPO_ID_2 ) );
assertConstraint( 3, new RepositoryProblemByRepositoryIdConstraint( REPO_ID_3 ) );
}
private void assertConstraint( int expectedHits, Constraint constraint )
throws Exception
{
List<RepositoryProblem> results = repoProblemDao.queryRepositoryProblems( constraint );
assertNotNull( "Repository Problems by Repository Id: Not Null", results );
assertEquals( "Repository Problems by Repository Id: Results.size", expectedHits, results.size() );
}
}

View File

@ -1,129 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.AbstractArchivaDatabaseTestCase;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.model.RepositoryProblem;
import java.util.List;
/**
* RepositoryProblemConstraintTest
*/
public class RepositoryProblemConstraintTest
extends AbstractArchivaDatabaseTestCase
{
private static final String GROUP_ID_1 = "org.apache.maven.archiva.test.1";
private static final String GROUP_ID_2 = "org.apache.maven.archiva.test.2";
private static final String GROUP_ID_3 = "org.apache.maven.archiva.test.3";
private static final String GROUP_ID_4 = "org.apache.maven.archiva.test.4";
private static final String GROUP_ID_PARTIAL = "org.apache.maven.archiva";
private static final String REPO_ID_1 = "test-repo-1";
private static final String REPO_ID_2 = "test-repo-2";
private static final String REPO_ID_3 = "test-repo-3";
private static final String REPO_ID_4 = "test-repo-4";
private RepositoryProblemDAO repoProblemDao;
protected void setUp()
throws Exception
{
super.setUp();
ArchivaDAO dao = (ArchivaDAO) lookup( ArchivaDAO.ROLE, "jdo" );
repoProblemDao = dao.getRepositoryProblemDAO();
}
public RepositoryProblem createRepoProblem( String groupId, String repoId )
{
RepositoryProblem repoProblem = new RepositoryProblem();
repoProblem.setGroupId( groupId );
repoProblem.setArtifactId( "artifactId" );
repoProblem.setMessage( "message" );
repoProblem.setOrigin( "origin" );
repoProblem.setPath( "path" );
repoProblem.setRepositoryId( repoId );
repoProblem.setType( "type" );
repoProblem.setVersion( "version" );
return repoProblem;
}
public void testGroupIdConstraint()
throws Exception
{
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_1, REPO_ID_1 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_2, REPO_ID_1 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_2, REPO_ID_1 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_3, REPO_ID_1 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_3, REPO_ID_1 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_3, REPO_ID_1 ) );
assertConstraint( 1, new RepositoryProblemConstraint( GROUP_ID_1, REPO_ID_1 ) );
assertConstraint( 2, new RepositoryProblemConstraint( GROUP_ID_2, REPO_ID_1 ) );
assertConstraint( 3, new RepositoryProblemConstraint( GROUP_ID_3, REPO_ID_1 ) );
assertConstraint( 0, new RepositoryProblemConstraint( GROUP_ID_4, REPO_ID_1 ) );
assertConstraint( 6, new RepositoryProblemConstraint( GROUP_ID_PARTIAL, REPO_ID_1 ) );
}
public void testRepoIdConstraint()
throws Exception
{
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_1, REPO_ID_1 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_1, REPO_ID_2 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_1, REPO_ID_2 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_1, REPO_ID_3 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_1, REPO_ID_3 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_1, REPO_ID_3 ) );
assertConstraint( 1, new RepositoryProblemConstraint( GROUP_ID_1, REPO_ID_1 ) );
assertConstraint( 2, new RepositoryProblemConstraint( GROUP_ID_1, REPO_ID_2 ) );
assertConstraint( 3, new RepositoryProblemConstraint( GROUP_ID_1, REPO_ID_3 ) );
assertConstraint( 0, new RepositoryProblemConstraint( GROUP_ID_1, REPO_ID_4 ) );
assertConstraint( 1, new RepositoryProblemConstraint( GROUP_ID_PARTIAL, REPO_ID_1 ) );
assertConstraint( 2, new RepositoryProblemConstraint( GROUP_ID_PARTIAL, REPO_ID_2 ) );
assertConstraint( 3, new RepositoryProblemConstraint( GROUP_ID_PARTIAL, REPO_ID_3 ) );
assertConstraint( 0, new RepositoryProblemConstraint( GROUP_ID_PARTIAL, REPO_ID_4 ) );
}
private void assertConstraint( int expectedHits, Constraint constraint )
throws Exception
{
List<RepositoryProblem> results = repoProblemDao.queryRepositoryProblems( constraint );
assertNotNull( "Repository Problems: Not Null", results );
assertEquals( "Repository Problems: Results.size", expectedHits, results.size() );
}
}

View File

@ -1,117 +0,0 @@
package org.apache.maven.archiva.database.constraints;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.AbstractArchivaDatabaseTestCase;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.database.SimpleConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaArtifactModel;
import org.apache.maven.archiva.model.RepositoryProblem;
import java.util.Date;
import java.util.List;
/**
* UniqueFieldConstraintTest
*/
public class UniqueFieldConstraintTest
extends AbstractArchivaDatabaseTestCase
{
private static final String GROUP_ID_1 = "org.apache.maven.archiva.test.1";
private static final String GROUP_ID_2 = "org.apache.maven.archiva.test.2";
private static final String GROUP_ID_3 = "org.apache.maven.archiva.test.3";
private ArchivaDAO archivaDao;
private ArtifactDAO artifactDao;
private RepositoryProblemDAO repoProblemDao;
protected void setUp()
throws Exception
{
super.setUp();
archivaDao = (ArchivaDAO) lookup( ArchivaDAO.ROLE, "jdo" );
artifactDao = archivaDao.getArtifactDAO();
repoProblemDao = archivaDao.getRepositoryProblemDAO();
}
public ArchivaArtifact createArtifact( String groupId )
{
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, "artifactId", "version", "classifier", "jar", "testrepo" );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setRepositoryId( "repoId" );
return artifact;
}
public RepositoryProblem createRepoProblem( String groupId )
{
RepositoryProblem repoProblem = new RepositoryProblem();
repoProblem.setGroupId( groupId );
repoProblem.setArtifactId( "artifactId" );
repoProblem.setMessage( "message" );
repoProblem.setOrigin( "origin" );
repoProblem.setPath( "path" );
repoProblem.setRepositoryId( "repoId" );
repoProblem.setType( "type" );
repoProblem.setVersion( "version" );
return repoProblem;
}
public void testArtifact()
throws Exception
{
artifactDao.saveArtifact( createArtifact( GROUP_ID_1 ) );
artifactDao.saveArtifact( createArtifact( GROUP_ID_2 ) );
artifactDao.saveArtifact( createArtifact( GROUP_ID_3 ) );
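// All three artifacts share the artifactId "artifactId" but carry three distinct groupIds,
// so the unique-field queries below should find 1 distinct artifactId and 3 distinct groupIds.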
assertConstraint( 1, new UniqueFieldConstraint( ArchivaArtifactModel.class.getName(), "artifactId" ) );
assertConstraint( 3, new UniqueFieldConstraint( ArchivaArtifactModel.class.getName(), "groupId" ) );
}
public void testRepoProblem()
throws Exception
{
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_1 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_2 ) );
repoProblemDao.saveRepositoryProblem( createRepoProblem( GROUP_ID_3 ) );
assertConstraint( 1, new UniqueFieldConstraint( RepositoryProblem.class.getName(), "artifactId" ) );
assertConstraint( 3, new UniqueFieldConstraint( RepositoryProblem.class.getName(), "groupId" ) );
}
private void assertConstraint( int expectedHits, SimpleConstraint constraint )
throws Exception
{
List<?> results = archivaDao.query( constraint );
assertNotNull( "Repository Problems: Not Null", results );
assertEquals( "Repository Problems: Results.size", expectedHits, results.size() );
}
}

View File

@ -32,6 +32,5 @@ public class JdoArchivaDAOTest
public void testSubDAOs()
{
assertNotNull( "Artifact DAO", dao.getArtifactDAO() );
assertNotNull( "Repository Problem DAO", dao.getRepositoryProblemDAO() );
}
}

View File

@ -1,72 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-reporting</artifactId>
<version>1.3-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>archiva-artifact-reports</artifactId>
<name>Archiva Reporting :: Artifact Reports</name>
<dependencies>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-report-manager</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-database</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-layer</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-digest</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-utils</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.plexus.registry</groupId>
<artifactId>plexus-registry-commons</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-spring</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -1,76 +0,0 @@
package org.apache.maven.archiva.reporting.artifact;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.model.RepositoryProblem;
import org.apache.maven.archiva.reporting.DynamicReportSource;
import org.apache.maven.archiva.reporting.DataLimits;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.constraints.RepositoryProblemByTypeConstraint;
import java.util.List;
/**
* Report for corrupt artifacts
* <p/>
 * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
*/
public class CorruptArtifactReport
implements DynamicReportSource<RepositoryProblem>
{
public static final String PROBLEM_TYPE_CORRUPT_ARTIFACT = "corrupt-artifact";
/**
* @plexus.configuration default-value="Corrupt Artifact Report"
*/
private String name;
/**
* @plexus.requirement role-hint="jdo"
*/
private ArchivaDAO dao;
private Constraint constraint;
public CorruptArtifactReport()
{
constraint = new RepositoryProblemByTypeConstraint( PROBLEM_TYPE_CORRUPT_ARTIFACT );
}
public List<RepositoryProblem> getData()
throws ObjectNotFoundException, ArchivaDatabaseException
{
return dao.getRepositoryProblemDAO().queryRepositoryProblems( constraint );
}
public List<RepositoryProblem> getData( DataLimits limits )
throws ObjectNotFoundException, ArchivaDatabaseException
{
return dao.getRepositoryProblemDAO().queryRepositoryProblems( constraint );
}
public String getName()
{
return name;
}
}

View File

@ -1,80 +0,0 @@
package org.apache.maven.archiva.reporting.artifact;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.database.constraints.RepositoryProblemByTypeConstraint;
import org.apache.maven.archiva.model.RepositoryProblem;
import org.apache.maven.archiva.reporting.DataLimits;
import org.apache.maven.archiva.reporting.DynamicReportSource;
import java.util.List;
/**
* DuplicateArtifactReport
*
* @version $Id$
*
* @plexus.component role="org.apache.maven.archiva.reporting.DynamicReportSource"
* role-hint="duplicate-artifacts"
*/
public class DuplicateArtifactReport
implements DynamicReportSource<RepositoryProblem>
{
public static final String PROBLEM_TYPE_DUPLICATE_ARTIFACTS = "duplicate-artifacts";
/**
* @plexus.configuration default-value="Duplicate Artifact Report"
*/
private String name;
/**
* @plexus.requirement role-hint="jdo"
*/
private ArchivaDAO dao;
private Constraint constraint;
public DuplicateArtifactReport()
{
constraint = new RepositoryProblemByTypeConstraint( PROBLEM_TYPE_DUPLICATE_ARTIFACTS );
}
public List<RepositoryProblem> getData()
throws ObjectNotFoundException, ArchivaDatabaseException
{
return dao.getRepositoryProblemDAO().queryRepositoryProblems( constraint );
}
public List<RepositoryProblem> getData( DataLimits limits )
throws ObjectNotFoundException, ArchivaDatabaseException
{
// TODO: implement limits.
return dao.getRepositoryProblemDAO().queryRepositoryProblems( constraint );
}
public String getName()
{
return name;
}
}
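The getData( DataLimits ) variant above still ignores its limits argument, per the TODO. A minimal sketch of how the limits could be applied to the fetched list, assuming a simple subList window; this is an illustration, not the project's implementation:
// Hypothetical paging of the already-fetched problem list.
List<RepositoryProblem> problems = dao.getRepositoryProblemDAO().queryRepositoryProblems( constraint );
int from = Math.min( limits.getCurrentPage() * limits.getPerPageCount(), problems.size() );
int to = Math.min( from + limits.getPerPageCount(), problems.size() );
List<RepositoryProblem> page = problems.subList( from, to );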

View File

@ -1,81 +0,0 @@
package org.apache.maven.archiva.reporting.artifact;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.database.constraints.RepositoryProblemByTypeConstraint;
import org.apache.maven.archiva.model.RepositoryProblem;
import org.apache.maven.archiva.reporting.DataLimits;
import org.apache.maven.archiva.reporting.DynamicReportSource;
import java.util.List;
/**
* LocationArtifactsReport
*
* @version $Id$
*
* @plexus.component role="org.apache.maven.archiva.reporting.DynamicReportSource"
* role-hint="artifact-location"
*/
public class LocationArtifactsReport
implements DynamicReportSource<RepositoryProblem>
{
public static final String PROBLEM_TYPE_BAD_ARTIFACT_LOCATION = "bad-artifact-location";
/**
* @plexus.configuration default-value="Artifact Locations Report"
*/
private String name;
/**
* @plexus.requirement role-hint="jdo"
*/
private ArchivaDAO dao;
private Constraint constraint;
public LocationArtifactsReport()
{
constraint = new RepositoryProblemByTypeConstraint( PROBLEM_TYPE_BAD_ARTIFACT_LOCATION );
}
public List<RepositoryProblem> getData()
throws ObjectNotFoundException, ArchivaDatabaseException
{
return dao.getRepositoryProblemDAO().queryRepositoryProblems( constraint );
}
public List<RepositoryProblem> getData( DataLimits limits )
throws ObjectNotFoundException, ArchivaDatabaseException
{
// TODO: implement limits.
return dao.getRepositoryProblemDAO().queryRepositoryProblems( constraint );
}
public String getName()
{
return name;
}
}

View File

@ -1,76 +0,0 @@
package org.apache.maven.archiva.reporting.artifact;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.List;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.database.constraints.OlderArtifactsByAgeConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.reporting.DataLimits;
import org.apache.maven.archiva.reporting.DynamicReportSource;
/**
* OldArtifactReport
*
* @version $Id$
*
* @plexus.component role="org.apache.maven.archiva.reporting.DynamicReportSource"
* role-hint="old-artifacts"
*/
public class OldArtifactReport
implements DynamicReportSource<ArchivaArtifact>
{
/**
* @plexus.configuration default-value="Old Artifacts Report"
*/
private String name;
/**
* @plexus.requirement role-hint="jdo"
*/
private ArchivaDAO dao;
/**
* The maximum age of an artifact before it is reported old, specified in days. The default is 1 year.
*
* @plexus.configuration default-value="365"
*/
private int cutoffDays;
public List<ArchivaArtifact> getData()
throws ObjectNotFoundException, ArchivaDatabaseException
{
return dao.getArtifactDAO().queryArtifacts( new OlderArtifactsByAgeConstraint( cutoffDays ) );
}
public List<ArchivaArtifact> getData( DataLimits limits )
throws ObjectNotFoundException, ArchivaDatabaseException
{
return dao.getArtifactDAO().queryArtifacts( new OlderArtifactsByAgeConstraint( cutoffDays ) );
}
public String getName()
{
return name;
}
}
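The cutoffDays value above is expressed in days; the age constraint turns it into a cutoff date and reports artifacts whose last-modified time falls before it. A rough sketch of that kind of calculation, illustrative only and not the constraint's actual code:
// Hypothetical cutoff check: anything last modified before the cutoff counts as old.
Calendar cutoff = Calendar.getInstance();
cutoff.add( Calendar.DAY_OF_MONTH, -cutoffDays );
boolean isOld = artifact.getModel().getLastModified().before( cutoff.getTime() );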

View File

@ -1,76 +0,0 @@
package org.apache.maven.archiva.reporting.artifact;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.List;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.database.constraints.OlderSnapshotArtifactsByAgeConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.reporting.DataLimits;
import org.apache.maven.archiva.reporting.DynamicReportSource;
/**
* OldSnapshotArtifactReport
*
* @version $Id$
*
* @plexus.component role="org.apache.maven.archiva.reporting.DynamicReportSource"
* role-hint="old-snapshots"
*/
public class OldSnapshotArtifactReport
implements DynamicReportSource<ArchivaArtifact>
{
/**
* @plexus.configuration default-value="Old Snapshots Report"
*/
private String name;
/**
* @plexus.requirement role-hint="jdo"
*/
private ArchivaDAO dao;
/**
* The maximum age of a snapshot before it is reported old, specified in days. The default is 1 year.
*
* @plexus.configuration default-value="365"
*/
private int cutoffDays;
public List<ArchivaArtifact> getData()
throws ObjectNotFoundException, ArchivaDatabaseException
{
return dao.getArtifactDAO().queryArtifacts( new OlderSnapshotArtifactsByAgeConstraint( cutoffDays ) );
}
public List<ArchivaArtifact> getData( DataLimits limits )
throws ObjectNotFoundException, ArchivaDatabaseException
{
return dao.getArtifactDAO().queryArtifacts( new OlderSnapshotArtifactsByAgeConstraint( cutoffDays ) );
}
public String getName()
{
return name;
}
}

View File

@ -1,122 +0,0 @@
package org.apache.maven.archiva.reporting.artifact;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.net.URL;
import java.util.Properties;
import java.util.Map.Entry;
import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
import org.codehaus.plexus.jdo.JdoFactory;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.jpox.SchemaTool;
/**
* AbstractArtifactReportsTestCase
*
* @version $Id$
*/
public abstract class AbstractArtifactReportsTestCase
extends PlexusInSpringTestCase
{
protected ArchivaDAO dao;
protected void setUp()
throws Exception
{
super.setUp();
DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
assertEquals( DefaultConfigurableJdoFactory.class.getName(), jdoFactory.getClass().getName() );
jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" );
/* derby version
File derbyDbDir = new File( "target/plexus-home/testdb" );
if ( derbyDbDir.exists() )
{
FileUtils.deleteDirectory( derbyDbDir );
}
jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.apache.derby.jdbc.EmbeddedDriver" ) );
jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:derby:" + derbyDbDir.getAbsolutePath() + ";create=true" ) );
*/
jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );
jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:hsqldb:mem:" + getName() ) );
jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );
jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) );
jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );
jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );
jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );
jdoFactory.setProperty( "javax.jdo.option.RetainValues", "true" );
jdoFactory.setProperty( "javax.jdo.option.RestoreValues", "true" );
// jdoFactory.setProperty( "org.jpox.autoCreateColumns", "true" );
jdoFactory.setProperty( "org.jpox.validateTables", "true" );
jdoFactory.setProperty( "org.jpox.validateColumns", "true" );
jdoFactory.setProperty( "org.jpox.validateConstraints", "true" );
Properties properties = jdoFactory.getProperties();
for ( Entry<Object, Object> entry : properties.entrySet() )
{
System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
}
URL jdoFileUrls[] = new URL[] { getClass().getResource( "/org/apache/maven/archiva/model/package.jdo" ) };
if ( ( jdoFileUrls == null ) || ( jdoFileUrls[0] == null ) )
{
fail( "Unable to process test " + getName() + " - missing package.jdo." );
}
File propsFile = null; // intentional
boolean verbose = true;
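// Drop and recreate the schema described by package.jdo so each in-memory HSQLDB test starts with empty tables.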
SchemaTool.deleteSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose );
SchemaTool.createSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose, null );
PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();
assertNotNull( pmf );
PersistenceManager pm = pmf.getPersistenceManager();
pm.close();
this.dao = (ArchivaDAO) lookup( ArchivaDAO.class.getName(), "jdo" );
}
}

View File

@ -1,190 +0,0 @@
package org.apache.maven.archiva.reporting.artifact;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.RepositoryProblem;
import org.apache.maven.archiva.reporting.DynamicReportSource;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
/**
* DuplicateArtifactReportTest
*
* @version $Id$
*/
public class DuplicateArtifactReportTest
extends AbstractArtifactReportsTestCase
{
private static final String TESTABLE_REPO = "testable";
private static final String HASH3 = "94ca33031e37aa3f3b67e5b921c729f08a6bba75";
private static final String HASH2 = "43f7aa390f1a0265fc2de7010133951c0718a67e";
private static final String HASH1 = "8107759ababcbfa34bcb02bc4309caf6354982ab";
private ArtifactDAO artifactDao;
private ManagedRepositoryConfiguration repoConfig;
private ManagedRepositoryContent content;
@Override
protected void setUp()
throws Exception
{
super.setUp();
artifactDao = dao.getArtifactDAO();
ArchivaConfiguration config = (ArchivaConfiguration) lookup( ArchivaConfiguration.class.getName(), "default" );
repoConfig = new ManagedRepositoryConfiguration();
repoConfig.setId( TESTABLE_REPO );
repoConfig.setLayout( "default" );
File testRepoDir = new File( getBasedir(), "target/test-repository" );
FileUtils.forceMkdir( testRepoDir );
repoConfig.setLocation( testRepoDir.getAbsolutePath() );
config.getConfiguration().addManagedRepository( repoConfig );
RepositoryContentFactory factory = (RepositoryContentFactory) lookup( RepositoryContentFactory.class );
content = factory.getManagedRepositoryContent( TESTABLE_REPO );
createArtifactFile( testRepoDir, "test-one", "1.0", "value1" );
createArtifactFile( testRepoDir, "test-one", "1.1", "value1" );
createArtifactFile( testRepoDir, "test-one", "1.2", "value1" );
createArtifactFile( testRepoDir, "test-two", "1.0", "value1" );
createArtifactFile( testRepoDir, "test-two", "2.0", "value3" );
createArtifactFile( testRepoDir, "test-two", "2.1", "value2" );
createArtifactFile( testRepoDir, "test-two", "3.0", "value2" );
}
private void createArtifactFile( File testRepoDir, String artifactId, String version, String value )
throws IOException
{
File file = new File( testRepoDir,
"org/apache/maven/archiva/test/" + artifactId + "/" + version + "/" + artifactId + "-" +
version + ".jar" );
file.getParentFile().mkdirs();
FileUtils.writeStringToFile( file, value );
}
public ArchivaArtifact createArtifact( String artifactId, String version )
{
ArchivaArtifact artifact =
artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar",
TESTABLE_REPO );
artifact.getModel().setLastModified( new Date() );
return artifact;
}
public void testSimpleReport()
throws Exception
{
ArchivaArtifact artifact;
// Setup artifacts in fresh DB.
artifact = createArtifact( "test-one", "1.0" );
artifact.getModel().setChecksumSHA1( HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-one", "1.1" );
artifact.getModel().setChecksumSHA1( HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-one", "1.2" );
artifact.getModel().setChecksumSHA1( HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "1.0" );
artifact.getModel().setChecksumSHA1( HASH1 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "2.0" );
artifact.getModel().setChecksumSHA1( HASH3 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "2.1" );
artifact.getModel().setChecksumSHA1( HASH2 );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "test-two", "3.0" );
artifact.getModel().setChecksumSHA1( HASH2 );
artifactDao.saveArtifact( artifact );
// Setup entries for bad/duplicate in problem DB.
pretendToRunDuplicateArtifactsConsumer();
List<ArchivaArtifact> allArtifacts = artifactDao.queryArtifacts( null );
assertEquals( "Total Artifact Count", 7, allArtifacts.size() );
DuplicateArtifactReport report =
(DuplicateArtifactReport) lookup( DynamicReportSource.class.getName(), "duplicate-artifacts" );
List<RepositoryProblem> results = report.getData();
System.out.println( "Results.size: " + results.size() );
int i = 0;
for ( RepositoryProblem problem : results )
{
System.out.println( "[" + ( i++ ) + "] " + problem.getMessage() );
}
int hash1Count = 4;
int hash2Count = 2;
int hash3Count = 1;
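// Each group of n artifacts sharing a SHA-1 yields ( n * n ) - n = n * ( n - 1 ) reported problems,
// i.e. 4 * 3 + 2 * 1 + 1 * 0 = 14 expected hits.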
int totals = ( ( hash1Count * hash1Count ) - hash1Count ) + ( ( hash2Count * hash2Count ) - hash2Count ) +
( ( hash3Count * hash3Count ) - hash3Count );
assertEquals( "Total report hits.", totals, results.size() );
}
private void pretendToRunDuplicateArtifactsConsumer()
throws Exception
{
List<ArchivaArtifact> artifacts = dao.getArtifactDAO().queryArtifacts( null );
KnownRepositoryContentConsumer consumer =
(KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class.getName(),
"duplicate-artifacts" );
consumer.beginScan( repoConfig, new Date() );
try
{
for ( ArchivaArtifact artifact : artifacts )
{
consumer.processFile( content.toPath( artifact ) );
}
}
finally
{
consumer.completeScan();
}
}
}

View File

@ -1,70 +0,0 @@
<component-set>
<components>
<component>
<role>org.codehaus.plexus.jdo.JdoFactory</role>
<role-hint>archiva</role-hint>
<implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
<configuration>
<persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
<otherProperties>
<property>
<name>javax.jdo.PersistenceManagerFactoryClass</name>
<value>org.jpox.PersistenceManagerFactoryImpl</value>
</property>
</otherProperties>
</configuration>
</component>
<component>
<role>org.codehaus.plexus.registry.Registry</role>
<implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
<role-hint>commons-configuration</role-hint>
<configuration>
<properties>
<system/>
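<!-- Configuration is assembled from several archiva.xml sources: the app server base and home, the user's ~/.m2, and the bundled default-archiva.xml. -->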
<xml fileName="${appserver.base}/conf/archiva.xml" config-optional="true"
config-name="org.apache.maven.archiva.base" config-at="org.apache.maven.archiva"/>
<xml fileName="${appserver.home}/conf/archiva.xml" config-optional="true"
config-at="org.apache.maven.archiva"/>
<xml fileName="${user.home}/.m2/archiva.xml" config-optional="true"
config-name="org.apache.maven.archiva.user" config-at="org.apache.maven.archiva"/>
<xml fileName="org/apache/maven/archiva/configuration/default-archiva.xml"
config-at="org.apache.maven.archiva"/>
</properties>
</configuration>
</component>
</components>
<lifecycle-handler-manager implementation="org.codehaus.plexus.lifecycle.DefaultLifecycleHandlerManager">
<default-lifecycle-handler-id>plexus</default-lifecycle-handler-id>
<lifecycle-handlers>
<lifecycle-handler implementation="org.codehaus.plexus.personality.plexus.PlexusLifecycleHandler">
<id>plexus</id>
<name>Plexus Lifecycle Handler</name>
<begin-segment>
<phase implementation="org.codehaus.plexus.personality.plexus.lifecycle.phase.LogEnablePhase"/>
<phase implementation="org.codehaus.plexus.personality.plexus.lifecycle.phase.CompositionPhase"/>
<phase implementation="org.codehaus.plexus.personality.plexus.lifecycle.phase.ContextualizePhase"/>
<phase implementation="org.codehaus.plexus.personality.plexus.lifecycle.phase.AutoConfigurePhase"/>
<phase implementation="org.codehaus.plexus.registry.RegistryConfigurePhase"/>
<phase implementation="org.codehaus.plexus.personality.plexus.lifecycle.phase.ServiceablePhase"/>
<phase implementation="org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializePhase"/>
<phase implementation="org.codehaus.plexus.personality.plexus.lifecycle.phase.StartPhase"/>
</begin-segment>
<suspend-segment>
<phase implementation="org.codehaus.plexus.personality.plexus.lifecycle.phase.SuspendPhase"/>
</suspend-segment>
<resume-segment>
<phase implementation="org.codehaus.plexus.personality.plexus.lifecycle.phase.ResumePhase"/>
</resume-segment>
<end-segment>
<phase implementation="org.codehaus.plexus.personality.plexus.lifecycle.phase.StopPhase"/>
<phase implementation="org.codehaus.plexus.personality.plexus.lifecycle.phase.DisposePhase"/>
<phase implementation="org.codehaus.plexus.personality.plexus.lifecycle.phase.LogDisablePhase"/>
</end-segment>
</lifecycle-handler>
</lifecycle-handlers>
</lifecycle-handler-manager>
</component-set>

View File

@ -1,76 +0,0 @@
package org.apache.maven.archiva.reporting;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Limits on how much data should be returned by the report sources.
*
* @version $Id$
*/
public class DataLimits
{
private int currentPage = 0;
private int perPageCount = 25;
private int countOfPages = 1;
private int totalCount = 0;
public int getCountOfPages()
{
return countOfPages;
}
public void setCountOfPages( int countOfPages )
{
this.countOfPages = countOfPages;
}
public int getCurrentPage()
{
return currentPage;
}
public void setCurrentPage( int currentPage )
{
this.currentPage = currentPage;
}
public int getPerPageCount()
{
return perPageCount;
}
public void setPerPageCount( int perPageCount )
{
this.perPageCount = perPageCount;
}
public int getTotalCount()
{
return totalCount;
}
public void setTotalCount( int totalCount )
{
this.totalCount = totalCount;
}
}

View File

@ -1,49 +0,0 @@
package org.apache.maven.archiva.reporting;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Map;
/**
* DefaultReportingManager
*
* @version $Id$
*
* @plexus.component role="org.apache.maven.archiva.reporting.ReportingManager"
* role-hint="default"
*/
public class DefaultReportingManager
implements ReportingManager
{
/**
* @plexus.requirement role="org.apache.maven.archiva.reporting.DynamicReportSource"
*/
private Map<String, DynamicReportSource<?>> reportSourceMap;
public DynamicReportSource<?> getReport( String id )
{
return reportSourceMap.get( id );
}
public Map<String, DynamicReportSource<?>> getAvailableReports()
{
return reportSourceMap;
}
}

View File

@ -1,61 +0,0 @@
package org.apache.maven.archiva.reporting;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.List;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ObjectNotFoundException;
/**
* DynamicReportSource
*
* @version $Id$
*/
public interface DynamicReportSource<T>
{
/**
 * The human-readable name of this report.
*
* @return the name of the report.
*/
public String getName();
/**
* Get the entire list of values for this report.
*
* @return the complete List of objects for this report.
* @throws ArchivaDatabaseException if there was a fundamental issue with accessing the database.
* @throws ObjectNotFoundException if no records were found.
*/
public List<T> getData() throws ObjectNotFoundException, ArchivaDatabaseException;
/**
* Get the entire list of values for this report.
*
 * @param limits the limits on the data to fetch. (NOTE: the underlying
 *            implementation of this interface updates this object with the
 *            values that are current for the data it returns.)
* @return the complete List of objects for this report.
* @throws ArchivaDatabaseException if there was a fundamental issue with accessing the database.
* @throws ObjectNotFoundException if no records were found.
*/
public List<T> getData( DataLimits limits ) throws ObjectNotFoundException, ArchivaDatabaseException;
}
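A short usage sketch for this interface, mirroring how the tests in this change set obtain a report through a Plexus lookup; the role hint and cast are taken from the duplicate-artifacts report, everything else is illustrative:
// Hypothetical caller in a Plexus-aware context (e.g. a PlexusInSpringTestCase).
DynamicReportSource<RepositoryProblem> report =
    (DynamicReportSource<RepositoryProblem>) lookup( DynamicReportSource.class.getName(), "duplicate-artifacts" );
DataLimits limits = new DataLimits();
limits.setPerPageCount( 25 );
List<RepositoryProblem> firstPage = report.getData( limits );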

View File

@ -1,49 +0,0 @@
package org.apache.maven.archiva.reporting;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* ReportingException
*
* @version $Id$
*/
public class ReportingException
extends Exception
{
public ReportingException()
{
}
public ReportingException( String message )
{
super( message );
}
public ReportingException( Throwable cause )
{
super( cause );
}
public ReportingException( String message, Throwable cause )
{
super( message, cause );
}
}

View File

@ -1,34 +0,0 @@
package org.apache.maven.archiva.reporting;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Map;
/**
* ReportingManager
*
* @version $Id$
*/
public interface ReportingManager
{
public DynamicReportSource<?> getReport( String id );
public Map<String,DynamicReportSource<?>> getAvailableReports();
}

View File

@ -1,9 +0,0 @@
<component-set>
<components>
<component>
<role>org.codehaus.plexus.jdo.JdoFactory</role>
<role-hint>archiva</role-hint>
<implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
</component>
</components>
</component-set>

View File

@ -1,47 +0,0 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<component-set>
<components>
<component>
<role>org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor</role>
<role-hint>duplicate</role-hint>
<implementation>org.apache.maven.archiva.reporting.processor.DuplicateArtifactFileReportProcessor</implementation>
<requirements>
<requirement>
<role>org.codehaus.plexus.digest.Digester</role>
<role-hint>md5</role-hint>
<field-name>digester</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory</role>
<field-name>indexFactory</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
<field-name>database</field-name>
</requirement>
</requirements>
<configuration>
<indexDirectory>${basedir}/target/indexDirectory</indexDirectory>
</configuration>
</component>
</components>
</component-set>

View File

@ -1,38 +0,0 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<component-set>
<components>
<component>
<role>org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor</role>
<role-hint>old-artifact</role-hint>
<implementation>org.apache.maven.archiva.reporting.processor.OldArtifactReportProcessor</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
<field-name>database</field-name>
</requirement>
</requirements>
<configuration>
<maxAge>10</maxAge>
</configuration>
</component>
</components>
</component-set>

View File

@ -1,39 +0,0 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<component-set>
<components>
<component>
<role>org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor</role>
<role-hint>old-snapshot-artifact</role-hint>
<implementation>org.apache.maven.archiva.reporting.processor.OldSnapshotArtifactReportProcessor</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
<field-name>database</field-name>
</requirement>
</requirements>
<configuration>
<maxAge>3600</maxAge>
<maxSnapshots>2</maxSnapshots>
</configuration>
</component>
</components>
</component-set>

View File

@ -1,34 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Copyright 2005-2006 The Apache Software Foundation.
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-modules</artifactId>
<version>1.3-SNAPSHOT</version>
</parent>
<artifactId>archiva-reporting</artifactId>
<name>Archiva Reporting</name>
<packaging>pom</packaging>
<modules>
<module>archiva-report-manager</module>
<module>archiva-artifact-reports</module>
</modules>
</project>

View File

@ -29,18 +29,13 @@
<packaging>war</packaging>
<name>Archiva Web :: Application</name>
<dependencies>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-report-manager</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>repository-statistics</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-artifact-reports</artifactId>
<scope>runtime</scope>
<artifactId>problem-reports</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>

View File

@ -0,0 +1,63 @@
package org.apache.maven.archiva.web.action;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Collections;
import java.util.List;
import org.apache.maven.archiva.security.AccessDeniedException;
import org.apache.maven.archiva.security.ArchivaSecurityException;
import org.apache.maven.archiva.security.PrincipalNotFoundException;
import org.apache.maven.archiva.security.UserRepositories;
public class AbstractRepositoryBasedAction
extends PlexusActionSupport
{
/**
* @plexus.requirement
*/
private UserRepositories userRepositories;
protected List<String> getObservableRepos()
{
try
{
return userRepositories.getObservableRepositoryIds( getPrincipal() );
}
catch ( PrincipalNotFoundException e )
{
log.warn( e.getMessage(), e );
}
catch ( AccessDeniedException e )
{
log.warn( e.getMessage(), e );
}
catch ( ArchivaSecurityException e )
{
log.warn( e.getMessage(), e );
}
return Collections.emptyList();
}
public void setUserRepositories( UserRepositories userRepositories )
{
this.userRepositories = userRepositories;
}
}
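BrowseAction and SearchAction below now extend this class instead of each carrying its own copy of the repository lookup. A minimal sketch of how a subclass would use the shared helper; the action name and result handling here are illustrative, not part of this change:
// Hypothetical subclass: only query repositories the current principal may observe.
public class ExampleRepositoryAction
    extends AbstractRepositoryBasedAction
{
    public String execute()
    {
        List<String> observableRepos = getObservableRepos();
        if ( observableRepos.isEmpty() )
        {
            return INPUT; // nothing the user is allowed to see
        }
        // ... restrict the query to the ids in observableRepos ...
        return SUCCESS;
    }
}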

View File

@ -32,31 +32,21 @@ import org.apache.archiva.metadata.repository.MetadataResolverException;
import org.apache.archiva.metadata.repository.storage.maven2.MavenProjectFacet;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.security.AccessDeniedException;
import org.apache.maven.archiva.security.ArchivaSecurityException;
import org.apache.maven.archiva.security.PrincipalNotFoundException;
import org.apache.maven.archiva.security.UserRepositories;
/**
* Browse the repository.
*
* @todo cache browsing results.
* @todo implement repository selectors (all or specific repository)
* @plexus.component role="com.opensymphony.xwork2.Action" role-hint="browseAction" instantiation-strategy="per-lookup"
*/
public class BrowseAction
extends PlexusActionSupport
extends AbstractRepositoryBasedAction
{
/**
* @plexus.requirement
*/
private MetadataResolver metadataResolver;
/**
* @plexus.requirement
*/
private UserRepositories userRepositories;
private String groupId;
private String artifactId;
@ -289,28 +279,6 @@ public class BrowseAction
}
}
private List<String> getObservableRepos()
{
try
{
return userRepositories.getObservableRepositoryIds( getPrincipal() );
}
catch ( PrincipalNotFoundException e )
{
log.warn( e.getMessage(), e );
}
catch ( AccessDeniedException e )
{
log.warn( e.getMessage(), e );
// TODO: pass this onto the screen.
}
catch ( ArchivaSecurityException e )
{
log.warn( e.getMessage(), e );
}
return Collections.emptyList();
}
public String getGroupId()
{
return groupId;

View File

@ -21,7 +21,6 @@ package org.apache.maven.archiva.web.action;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@ -40,10 +39,6 @@ import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.security.AccessDeniedException;
import org.apache.maven.archiva.security.ArchivaSecurityException;
import org.apache.maven.archiva.security.PrincipalNotFoundException;
import org.apache.maven.archiva.security.UserRepositories;
import org.apache.struts2.ServletActionContext;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;
@ -54,7 +49,7 @@ import org.springframework.web.context.support.WebApplicationContextUtils;
* @plexus.component role="com.opensymphony.xwork2.Action" role-hint="searchAction" instantiation-strategy="per-lookup"
*/
public class SearchAction
extends PlexusActionSupport
extends AbstractRepositoryBasedAction
implements Preparable
{
/**
@ -70,11 +65,6 @@ public class SearchAction
*/
private SearchResults results;
/**
* @plexus.requirement
*/
private UserRepositories userRepositories;
private static final String RESULTS = "results";
private static final String ARTIFACT = "artifact";
@ -303,14 +293,6 @@ public class SearchAction
{
totalPages = totalPages + 1;
}
// TODO: filter / combine the artifacts by version? (is that even possible with non-artifact hits?)
/* I don't think that we should, as I expect us to utilize the 'score' system in lucene in
* the future to return relevant links better.
* I expect the lucene scoring system to take multiple hits on different areas of a single document
* to result in a higher score.
* - Joakim
*/
if( !isEqualToPreviousSearchTerm( q ) )
{
@ -357,27 +339,6 @@ public class SearchAction
return INPUT;
}
private List<String> getObservableRepos()
{
try
{
return userRepositories.getObservableRepositoryIds( getPrincipal() );
}
catch ( PrincipalNotFoundException e )
{
log.warn( e.getMessage(), e );
}
catch ( AccessDeniedException e )
{
log.warn( e.getMessage(), e );
}
catch ( ArchivaSecurityException e )
{
log.warn( e.getMessage(), e );
}
return Collections.emptyList();
}
private void buildCompleteQueryString( String searchTerm )
{
if ( searchTerm.indexOf( COMPLETE_QUERY_STRING_SEPARATOR ) != -1 )
@ -594,16 +555,6 @@ public class SearchAction
this.nexusSearch = nexusSearch;
}
public UserRepositories getUserRepositories()
{
return userRepositories;
}
public void setUserRepositories( UserRepositories userRepositories )
{
this.userRepositories = userRepositories;
}
public Map<String, String> getSearchFields()
{
return searchFields;

View File

@ -20,7 +20,6 @@ package org.apache.maven.archiva.web.action;
*/
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import com.opensymphony.xwork2.Validateable;
@ -31,10 +30,6 @@ import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.repository.MetadataResolver;
import org.apache.archiva.metadata.repository.MetadataResolverException;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.security.AccessDeniedException;
import org.apache.maven.archiva.security.ArchivaSecurityException;
import org.apache.maven.archiva.security.PrincipalNotFoundException;
import org.apache.maven.archiva.security.UserRepositories;
/**
* Browse the repository.
@ -44,16 +39,11 @@ import org.apache.maven.archiva.security.UserRepositories;
* @plexus.component role="com.opensymphony.xwork2.Action" role-hint="showArtifactAction" instantiation-strategy="per-lookup"
*/
public class ShowArtifactAction
extends PlexusActionSupport
extends AbstractRepositoryBasedAction
implements Validateable
{
/* .\ Not Exposed \._____________________________________________ */
/**
* @plexus.requirement
*/
private UserRepositories userRepositories;
/**
* @plexus.requirement
*/
@ -209,8 +199,7 @@ public class ShowArtifactAction
*/
public String reports()
{
// TODO: hook up reports on project - this.reports = artifactsDatabase.findArtifactResults( groupId, artifactId,
// version );
// TODO: hook up reports on project
return SUCCESS;
}
@ -274,28 +263,6 @@ public class ShowArtifactAction
return artifact();
}
private List<String> getObservableRepos()
{
try
{
return userRepositories.getObservableRepositoryIds( getPrincipal() );
}
catch ( PrincipalNotFoundException e )
{
log.warn( e.getMessage(), e );
}
catch ( AccessDeniedException e )
{
log.warn( e.getMessage(), e );
// TODO: pass this onto the screen.
}
catch ( ArchivaSecurityException e )
{
log.warn( e.getMessage(), e );
}
return Collections.emptyList();
}
@Override
public void validate()
{

View File

@ -27,31 +27,23 @@ import java.text.ParseException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import javax.servlet.http.HttpServletRequest;
import com.opensymphony.xwork2.Preparable;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.archiva.reports.RepositoryProblemFacet;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.constraints.RangeConstraint;
import org.apache.maven.archiva.database.constraints.RepositoryProblemByGroupIdConstraint;
import org.apache.maven.archiva.database.constraints.RepositoryProblemByRepositoryIdConstraint;
import org.apache.maven.archiva.database.constraints.RepositoryProblemConstraint;
import org.apache.maven.archiva.database.constraints.UniqueFieldConstraint;
import org.apache.maven.archiva.model.RepositoryProblem;
import org.apache.maven.archiva.model.RepositoryProblemReport;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
import org.apache.maven.archiva.web.action.PlexusActionSupport;
import org.apache.struts2.interceptor.ServletRequestAware;
import org.apache.maven.archiva.web.action.AbstractRepositoryBasedAction;
import org.codehaus.plexus.redback.rbac.Resource;
import org.codehaus.redback.integration.interceptor.SecureAction;
import org.codehaus.redback.integration.interceptor.SecureActionBundle;
@ -63,8 +55,8 @@ import org.slf4j.LoggerFactory;
* @plexus.component role="com.opensymphony.xwork2.Action" role-hint="generateReport" instantiation-strategy="per-lookup"
*/
public class GenerateReportAction
extends PlexusActionSupport
implements SecureAction, ServletRequestAware, Preparable
extends AbstractRepositoryBasedAction
implements SecureAction, Preparable
{
public static final String ALL_REPOSITORIES = "All Repositories";
@ -78,11 +70,6 @@ public class GenerateReportAction
private Logger log = LoggerFactory.getLogger( GenerateReportAction.class );
/**
* @plexus.requirement role-hint="jdo"
*/
private ArchivaDAO dao;
/**
* @plexus.requirement
*/
@ -93,8 +80,6 @@ public class GenerateReportAction
*/
private RepositoryStatisticsManager repositoryStatisticsManager;
private HttpServletRequest request;
private String groupId;
private String repositoryId;
@ -113,8 +98,8 @@ public class GenerateReportAction
private Collection<String> repositoryIds;
private Map<String, List<RepositoryProblemReport>> repositoriesMap =
new TreeMap<String, List<RepositoryProblemReport>>();
private Map<String, List<RepositoryProblemFacet>> repositoriesMap =
new TreeMap<String, List<RepositoryProblemFacet>>();
private List<String> availableRepositories;
@ -124,13 +109,17 @@ public class GenerateReportAction
private boolean lastPage;
/**
* @plexus.requirement
*/
private MetadataRepository metadataRepository;
@SuppressWarnings("unchecked")
public void prepare()
{
repositoryIds = new ArrayList<String>();
repositoryIds.add( ALL_REPOSITORIES ); // comes first to be first in the list
repositoryIds.addAll( (List<String>) dao.query(
new UniqueFieldConstraint( RepositoryProblem.class.getName(), "repositoryId" ) ) );
repositoryIds.addAll( getObservableRepos() );
availableRepositories = new ArrayList<String>();
@ -228,8 +217,7 @@ public class GenerateReportAction
if ( stats.isEmpty() )
{
addActionError(
"No statistics available for repository. Repository might not have been scanned." );
addActionError( "No statistics available for repository. Repository might not have been scanned." );
return ERROR;
}
@ -289,7 +277,7 @@ public class GenerateReportAction
selectedRepositories = parseSelectedRepositories();
List<RepositoryStatistics> repositoryStatistics = new ArrayList<RepositoryStatistics>();
StringBuffer input = null;
StringBuffer input;
if ( selectedRepositories.size() > 1 )
{
try
@ -425,6 +413,7 @@ public class GenerateReportAction
}
// hack for parsing the struts list passed as param in <s:url ../>
private List<String> parseSelectedRepositories()
{
List<String> parsedSelectedRepos = new ArrayList<String>();
@ -499,31 +488,65 @@ public class GenerateReportAction
return INPUT;
}
List<RepositoryProblem> problemArtifacts =
dao.getRepositoryProblemDAO().queryRepositoryProblems( configureConstraint() );
String contextPath =
request.getRequestURL().substring( 0, request.getRequestURL().indexOf( request.getRequestURI() ) );
for ( RepositoryProblem problem : problemArtifacts )
List<String> observableRepos = getObservableRepos();
Collection<String> repoIds;
if ( StringUtils.isEmpty( repositoryId ) || ALL_REPOSITORIES.equals( repositoryId ) )
{
RepositoryProblemReport problemArtifactReport = new RepositoryProblemReport( problem );
repoIds = observableRepos;
}
else if ( observableRepos.contains( repositoryId ) )
{
repoIds = Collections.singletonList( repositoryId );
}
else
{
repoIds = Collections.emptyList();
}
problemArtifactReport.setGroupURL( contextPath + "/browse/" + problem.getGroupId() );
problemArtifactReport.setArtifactURL(
contextPath + "/browse/" + problem.getGroupId() + "/" + problem.getArtifactId() );
List<RepositoryProblemReport> problemsList;
if ( repositoriesMap.containsKey( problemArtifactReport.getRepositoryId() ) )
List<RepositoryProblemFacet> problemArtifacts = new ArrayList<RepositoryProblemFacet>();
for ( String repoId : repoIds )
{
// TODO: improve performance by navigating into a group subtree. Currently group is property, not part of name of item
for ( String name : metadataRepository.getMetadataFacets( repoId, RepositoryProblemFacet.FACET_ID ) )
{
problemsList = repositoriesMap.get( problemArtifactReport.getRepositoryId() );
RepositoryProblemFacet metadataFacet =
(RepositoryProblemFacet) metadataRepository.getMetadataFacet( repoId,
RepositoryProblemFacet.FACET_ID,
name );
if ( StringUtils.isEmpty( groupId ) || groupId.equals( metadataFacet.getNamespace() ) )
{
problemArtifacts.add( metadataFacet );
}
}
}
// TODO: getting range only after reading is not efficient for a large number of artifacts
int lowerBound = ( page - 1 ) * rowCount;
int upperBound = ( page * rowCount ) + 1; // Add 1 to check if it's the last page or not.
if ( upperBound <= problemArtifacts.size() )
{
problemArtifacts = problemArtifacts.subList( lowerBound, upperBound );
}
else
{
problemArtifacts = problemArtifacts.subList( lowerBound, problemArtifacts.size() );
}
for ( RepositoryProblemFacet problem : problemArtifacts )
{
List<RepositoryProblemFacet> problemsList;
if ( repositoriesMap.containsKey( problem.getRepositoryId() ) )
{
problemsList = repositoriesMap.get( problem.getRepositoryId() );
}
else
{
problemsList = new ArrayList<RepositoryProblemReport>();
repositoriesMap.put( problemArtifactReport.getRepositoryId(), problemsList );
problemsList = new ArrayList<RepositoryProblemFacet>();
repositoriesMap.put( problem.getRepositoryId(), problemsList );
}
problemsList.add( problemArtifactReport );
problemsList.add( problem );
}
// TODO: handling should be improved
@ -542,36 +565,6 @@ public class GenerateReportAction
}
}
private Constraint configureConstraint()
{
Constraint constraint;
int[] range =
new int[]{( page - 1 ) * rowCount, ( page * rowCount ) + 1}; // Add 1 to check if it's the last page or not.
if ( groupId != null && ( !groupId.equals( "" ) ) )
{
if ( repositoryId != null && ( !repositoryId.equals( "" ) && !repositoryId.equals( ALL_REPOSITORIES ) ) )
{
constraint = new RepositoryProblemConstraint( range, groupId, repositoryId );
}
else
{
constraint = new RepositoryProblemByGroupIdConstraint( range, groupId );
}
}
else if ( repositoryId != null && ( !repositoryId.equals( "" ) && !repositoryId.equals( ALL_REPOSITORIES ) ) )
{
constraint = new RepositoryProblemByRepositoryIdConstraint( range, repositoryId );
}
else
{
constraint = new RangeConstraint( range, "repositoryId" );
}
return constraint;
}
public SecureActionBundle getSecureActionBundle()
throws SecureActionException
{
@ -588,11 +581,6 @@ public class GenerateReportAction
return repositoryIds;
}
public void setServletRequest( HttpServletRequest request )
{
this.request = request;
}
public String getGroupId()
{
return groupId;
@ -633,12 +621,12 @@ public class GenerateReportAction
this.rowCount = rowCount;
}
public void setRepositoriesMap( Map<String, List<RepositoryProblemReport>> repositoriesMap )
public void setRepositoriesMap( Map<String, List<RepositoryProblemFacet>> repositoriesMap )
{
this.repositoriesMap = repositoriesMap;
}
public Map<String, List<RepositoryProblemReport>> getRepositoriesMap()
public Map<String, List<RepositoryProblemFacet>> getRepositoriesMap()
{
return repositoriesMap;
}
@ -717,4 +705,9 @@ public class GenerateReportAction
{
this.repositoryStatisticsManager = repositoryStatisticsManager;
}
public void setMetadataRepository( MetadataRepository metadataRepository )
{
this.metadataRepository = metadataRepository;
}
}
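
Note: as the TODO above says, the health report now reads every matching problem facet before applying the page range. A hedged sketch of the paging arithmetic used in execute(); the wrapper class and method are illustrative, the bounds mirror the code above:

import java.util.List;

// Illustrative sketch only: mirrors the sublist bounds computed in GenerateReportAction.
public class ProblemReportPagingExample
{
    static List<String> pageOf( List<String> allProblems, int page, int rowCount )
    {
        int lowerBound = ( page - 1 ) * rowCount;      // first item of the requested page
        int upperBound = ( page * rowCount ) + 1;      // one extra row, used to detect the last page
        if ( upperBound <= allProblems.size() )
        {
            return allProblems.subList( lowerBound, upperBound );
        }
        return allProblems.subList( lowerBound, allProblems.size() );
    }
}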

View File

@ -42,8 +42,8 @@
<!-- Web Services : Search Service -->
<bean name="searchService" lazy-init="true" scope="singleton" class="org.apache.archiva.web.xmlrpc.services.SearchServiceImpl">
<constructor-arg ref="xmlRpcUserRepositories"/>
<constructor-arg ref="archivaDAO#jdo"/>
<constructor-arg ref="metadataResolver"/>
<constructor-arg ref="metadataRepository"/>
<constructor-arg ref="nexusSearch"/>
</bean>

View File

@ -37,14 +37,14 @@
<c:forEach var="report" items='${repository.value}'>
<p>
<archiva:groupIdLink var="${report.groupId}" includeTop="true"/>
<archiva:groupIdLink var="${report.namespace}" includeTop="true"/>
<c:set var="url">
<s:url action="browseArtifact" namespace="/">
<s:param name="groupId" value="%{#attr.report.groupId}"/>
<s:param name="artifactId" value="%{#attr.report.artifactId}"/>
<s:param name="groupId" value="%{#attr.report.namespace}"/>
<s:param name="artifactId" value="%{#attr.report.project}"/>
</s:url>
</c:set>
<a href="${url}">${report.artifactId}</a> /
<a href="${url}">${report.project}</a> /
<strong>${report.version}</strong>
</p>
@ -54,18 +54,18 @@
<c:set var="prevPageUrl">
<s:url action="generateReport" namespace="/">
<s:param name="groupId" value="${groupId}"/>
<s:param name="repositoryId" value="${repositoryId}"/>
<s:param name="rowCount" value="${rowCount}"/>
<s:param name="page" value="${page - 1}"/>
<s:param name="groupId" />
<s:param name="repositoryId" />
<s:param name="rowCount" />
<s:param name="page" value="%{#attr.page - 1}"/>
</s:url>
</c:set>
<c:set var="nextPageUrl">
<s:url action="generateReport" namespace="/">
<s:param name="groupId" value="${groupId}"/>
<s:param name="repositoryId" value="${repositoryId}"/>
<s:param name="rowCount" value="${rowCount}"/>
<s:param name="page" value="${page + 1}"/>
<s:param name="groupId" />
<s:param name="repositoryId" />
<s:param name="rowCount" />
<s:param name="page" value="%{#attr.page + 1}"/>
</s:url>
</c:set>
<s:set name="page" value="page"/>

View File

@ -156,7 +156,7 @@ public class TestMetadataRepository
throw new UnsupportedOperationException();
}
public void addMetadataFacet( String repositoryId, String facetId, String name, MetadataFacet metadataFacet )
public void addMetadataFacet( String repositoryId, String facetId, MetadataFacet metadataFacet )
{
throw new UnsupportedOperationException();
}
@ -166,6 +166,11 @@ public class TestMetadataRepository
throw new UnsupportedOperationException();
}
public void removeMetadataFacet( String repoId, String facetId, String name )
{
// no-op: facet removal is not exercised by these tests
}
public List<ArtifactMetadata> getArtifactsByDateRange( String repoId, Date startTime, Date endTime )
{
return artifacts;

View File

@ -23,9 +23,7 @@ import java.util.List;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.database.SimpleConstraint;
import org.apache.maven.archiva.database.constraints.UniqueFieldConstraint;
/**
* Stub class for Archiva DAO to avoid having to set up a database for tests.
@ -38,16 +36,8 @@ public class ArchivaDAOStub
private ArtifactDAO artifactDao;
private List<String> repositoryIds;
private RepositoryProblemDAO repositoryProblemDAO;
public List<?> query( SimpleConstraint constraint )
{
if ( constraint instanceof UniqueFieldConstraint )
{
return repositoryIds;
}
throw new UnsupportedOperationException();
}
@ -56,23 +46,9 @@ public class ArchivaDAOStub
return artifactDao;
}
public RepositoryProblemDAO getRepositoryProblemDAO()
{
return repositoryProblemDAO;
}
public void setArtifactDao( ArtifactDAO artifactDao )
{
this.artifactDao = artifactDao;
}
public void setRepositoryIds( List<String> repositoryIds )
{
this.repositoryIds = repositoryIds;
}
public void setRepositoryProblemDAO( RepositoryProblemDAO repositoryProblemDAO )
{
this.repositoryProblemDAO = repositoryProblemDAO;
}
}

View File

@ -1,55 +0,0 @@
package org.apache.maven.archiva.web.action.admin.repositories;
import java.util.List;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.model.RepositoryProblem;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Stub class for Archiva DAO to avoid having to set up a database for tests.
*
* @todo a mock would be better, but that won't play nicely with Plexus injection.
*/
public class RepositoryProblemDAOStub
implements RepositoryProblemDAO
{
public List<RepositoryProblem> queryRepositoryProblems( Constraint constraint )
throws ObjectNotFoundException, ArchivaDatabaseException
{
throw new UnsupportedOperationException( "not implemented for stub" );
}
public RepositoryProblem saveRepositoryProblem( RepositoryProblem problem )
throws ArchivaDatabaseException
{
throw new UnsupportedOperationException( "not implemented for stub" );
}
public void deleteRepositoryProblem( RepositoryProblem problem )
throws ArchivaDatabaseException
{
throw new UnsupportedOperationException( "not implemented for stub" );
}
}

View File

@ -26,22 +26,14 @@ import java.util.Collections;
import java.util.Date;
import java.util.List;
import com.meterware.servletunit.ServletRunner;
import com.meterware.servletunit.ServletUnitClient;
import com.opensymphony.xwork2.Action;
import org.apache.archiva.metadata.model.MetadataFacet;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.archiva.reports.RepositoryProblemFacet;
import org.apache.commons.io.IOUtils;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.database.constraints.RangeConstraint;
import org.apache.maven.archiva.database.constraints.RepositoryProblemByGroupIdConstraint;
import org.apache.maven.archiva.database.constraints.RepositoryProblemByRepositoryIdConstraint;
import org.apache.maven.archiva.database.constraints.RepositoryProblemConstraint;
import org.apache.maven.archiva.model.RepositoryProblem;
import org.apache.maven.archiva.model.RepositoryProblemReport;
import org.apache.maven.archiva.web.action.admin.repositories.ArchivaDAOStub;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.easymock.MockControl;
@ -59,36 +51,33 @@ public class GenerateReportActionTest
private static final String INTERNAL = "internal";
private RepositoryProblemDAO repositoryProblemDAO;
private MockControl repositoryProblemDAOControl;
private static final String GROUP_ID = "groupId";
private static final String URL = "http://localhost/reports/generateReport.action";
private RepositoryStatisticsManager repositoryStatisticsManager;
private MockControl repositoryStatisticsManagerControl;
private MockControl metadataRepositoryControl;
private MetadataRepository metadataRepository;
private static final String PROBLEM = "problem";
@Override
protected void setUp()
throws Exception
{
super.setUp();
ArchivaDAOStub archivaDAOStub = (ArchivaDAOStub) lookup( ArchivaDAO.class, "jdo" );
archivaDAOStub.setRepositoryIds( Arrays.asList( "repo1", "repo2" ) );
repositoryProblemDAOControl = MockControl.createControl( RepositoryProblemDAO.class );
repositoryProblemDAO = (RepositoryProblemDAO) repositoryProblemDAOControl.getMock();
archivaDAOStub.setRepositoryProblemDAO( repositoryProblemDAO );
action = (GenerateReportAction) lookup( Action.class, "generateReport" );
repositoryStatisticsManagerControl = MockControl.createControl( RepositoryStatisticsManager.class );
repositoryStatisticsManager = (RepositoryStatisticsManager) repositoryStatisticsManagerControl.getMock();
action.setRepositoryStatisticsManager( repositoryStatisticsManager );
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
action.setMetadataRepository( metadataRepository );
}
private void prepareAction( List<String> selectedRepositories, List<String> availableRepositories )
@ -96,7 +85,7 @@ public class GenerateReportActionTest
action.setSelectedRepositories( selectedRepositories );
action.prepare();
assertEquals( Arrays.asList( GenerateReportAction.ALL_REPOSITORIES, "repo1", "repo2" ),
assertEquals( Arrays.asList( GenerateReportAction.ALL_REPOSITORIES, INTERNAL, SNAPSHOTS ),
action.getRepositoryIds() );
assertEquals( availableRepositories, action.getAvailableRepositories() );
}
@ -424,42 +413,41 @@ public class GenerateReportActionTest
public void testHealthReportSingleRepo()
throws Exception
{
RepositoryProblem problem1 = createProblem( GROUP_ID, "artifactId", INTERNAL );
RepositoryProblem problem2 = createProblem( GROUP_ID, "artifactId-2", INTERNAL );
repositoryProblemDAOControl.expectAndReturn( repositoryProblemDAO.queryRepositoryProblems(
new RepositoryProblemByRepositoryIdConstraint( new int[]{0, 101}, INTERNAL ) ),
Arrays.asList( problem1, problem2 ) );
repositoryProblemDAOControl.replay();
RepositoryProblemFacet problem1 = createProblem( GROUP_ID, "artifactId", INTERNAL );
RepositoryProblemFacet problem2 = createProblem( GROUP_ID, "artifactId-2", INTERNAL );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacets( INTERNAL, RepositoryProblemFacet.FACET_ID ),
Arrays.asList( problem1.getName(), problem2.getName() ) );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacet( INTERNAL, RepositoryProblemFacet.FACET_ID, problem1.getName() ),
problem1 );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacet( INTERNAL, RepositoryProblemFacet.FACET_ID, problem2.getName() ),
problem2 );
metadataRepositoryControl.replay();
action.setRepositoryId( INTERNAL );
ServletRunner sr = new ServletRunner();
ServletUnitClient sc = sr.newClient();
action.setServletRequest( sc.newInvocation( URL ).getRequest() );
prepareAction( Collections.<String>emptyList(), Arrays.asList( SNAPSHOTS, INTERNAL ) );
String result = action.execute();
assertSuccessResult( result );
RepositoryProblemReport problemReport1 = createProblemReport( problem1 );
RepositoryProblemReport problemReport2 = createProblemReport( problem2 );
assertEquals( Collections.singleton( INTERNAL ), action.getRepositoriesMap().keySet() );
assertEquals( Arrays.asList( problemReport1, problemReport2 ), action.getRepositoriesMap().get( INTERNAL ) );
assertEquals( Arrays.asList( problem1, problem2 ), action.getRepositoriesMap().get( INTERNAL ) );
repositoryProblemDAOControl.verify();
metadataRepositoryControl.verify();
}
public void testHealthReportInvalidRowCount()
throws Exception
{
repositoryProblemDAOControl.replay();
metadataRepositoryControl.replay();
action.setRowCount( 0 );
action.setRepositoryId( INTERNAL );
ServletRunner sr = new ServletRunner();
ServletUnitClient sc = sr.newClient();
action.setServletRequest( sc.newInvocation( URL ).getRequest() );
prepareAction( Collections.<String>emptyList(), Arrays.asList( SNAPSHOTS, INTERNAL ) );
String result = action.execute();
@ -467,113 +455,119 @@ public class GenerateReportActionTest
assertFalse( action.hasActionErrors() );
assertTrue( action.hasFieldErrors() );
repositoryProblemDAOControl.verify();
metadataRepositoryControl.verify();
}
public void testHealthReportAllRepos()
throws Exception
{
RepositoryProblem problem1 = createProblem( GROUP_ID, "artifactId", INTERNAL );
RepositoryProblem problem2 = createProblem( GROUP_ID, "artifactId-2", SNAPSHOTS );
repositoryProblemDAOControl.expectAndReturn(
repositoryProblemDAO.queryRepositoryProblems( new RangeConstraint( new int[]{0, 101} ) ),
Arrays.asList( problem1, problem2 ) );
repositoryProblemDAOControl.replay();
RepositoryProblemFacet problem1 = createProblem( GROUP_ID, "artifactId", INTERNAL );
RepositoryProblemFacet problem2 = createProblem( GROUP_ID, "artifactId-2", SNAPSHOTS );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacets( INTERNAL, RepositoryProblemFacet.FACET_ID ),
Arrays.asList( problem1.getName() ) );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacets( SNAPSHOTS, RepositoryProblemFacet.FACET_ID ),
Arrays.asList( problem2.getName() ) );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacet( INTERNAL, RepositoryProblemFacet.FACET_ID, problem1.getName() ),
problem1 );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacet( SNAPSHOTS, RepositoryProblemFacet.FACET_ID, problem2.getName() ),
problem2 );
metadataRepositoryControl.replay();
action.setRepositoryId( GenerateReportAction.ALL_REPOSITORIES );
ServletRunner sr = new ServletRunner();
ServletUnitClient sc = sr.newClient();
action.setServletRequest( sc.newInvocation( URL ).getRequest() );
prepareAction( Collections.<String>emptyList(), Arrays.asList( SNAPSHOTS, INTERNAL ) );
String result = action.execute();
assertSuccessResult( result );
RepositoryProblemReport problemReport1 = createProblemReport( problem1 );
RepositoryProblemReport problemReport2 = createProblemReport( problem2 );
assertEquals( Arrays.asList( INTERNAL, SNAPSHOTS ),
new ArrayList<String>( action.getRepositoriesMap().keySet() ) );
assertEquals( Arrays.asList( problemReport1 ), action.getRepositoriesMap().get( INTERNAL ) );
assertEquals( Arrays.asList( problemReport2 ), action.getRepositoriesMap().get( SNAPSHOTS ) );
assertEquals( Arrays.asList( problem1 ), action.getRepositoriesMap().get( INTERNAL ) );
assertEquals( Arrays.asList( problem2 ), action.getRepositoriesMap().get( SNAPSHOTS ) );
repositoryProblemDAOControl.verify();
metadataRepositoryControl.verify();
}
public void testHealthReportSingleRepoByCorrectGroupId()
throws Exception
{
RepositoryProblem problem1 = createProblem( GROUP_ID, "artifactId", INTERNAL );
RepositoryProblem problem2 = createProblem( GROUP_ID, "artifactId-2", INTERNAL );
repositoryProblemDAOControl.expectAndReturn( repositoryProblemDAO.queryRepositoryProblems(
new RepositoryProblemConstraint( new int[]{0, 101}, GROUP_ID, INTERNAL ) ),
Arrays.asList( problem1, problem2 ) );
repositoryProblemDAOControl.replay();
RepositoryProblemFacet problem1 = createProblem( GROUP_ID, "artifactId", INTERNAL );
RepositoryProblemFacet problem2 = createProblem( GROUP_ID, "artifactId-2", INTERNAL );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacets( INTERNAL, RepositoryProblemFacet.FACET_ID ),
Arrays.asList( problem1.getName(), problem2.getName() ) );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacet( INTERNAL, RepositoryProblemFacet.FACET_ID, problem1.getName() ),
problem1 );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacet( INTERNAL, RepositoryProblemFacet.FACET_ID, problem2.getName() ),
problem2 );
metadataRepositoryControl.replay();
action.setGroupId( GROUP_ID );
action.setRepositoryId( INTERNAL );
ServletRunner sr = new ServletRunner();
ServletUnitClient sc = sr.newClient();
action.setServletRequest( sc.newInvocation( URL ).getRequest() );
prepareAction( Collections.<String>emptyList(), Arrays.asList( SNAPSHOTS, INTERNAL ) );
String result = action.execute();
assertSuccessResult( result );
RepositoryProblemReport problemReport1 = createProblemReport( problem1 );
RepositoryProblemReport problemReport2 = createProblemReport( problem2 );
assertEquals( Collections.singleton( INTERNAL ), action.getRepositoriesMap().keySet() );
assertEquals( Arrays.asList( problemReport1, problemReport2 ), action.getRepositoriesMap().get( INTERNAL ) );
assertEquals( Arrays.asList( problem1, problem2 ), action.getRepositoriesMap().get( INTERNAL ) );
repositoryProblemDAOControl.verify();
metadataRepositoryControl.verify();
}
public void testHealthReportSingleRepoByCorrectGroupIdAllRepositories()
throws Exception
{
RepositoryProblem problem1 = createProblem( GROUP_ID, "artifactId", INTERNAL );
RepositoryProblem problem2 = createProblem( GROUP_ID, "artifactId-2", SNAPSHOTS );
repositoryProblemDAOControl.expectAndReturn( repositoryProblemDAO.queryRepositoryProblems(
new RepositoryProblemByGroupIdConstraint( new int[]{0, 101}, GROUP_ID ) ),
Arrays.asList( problem1, problem2 ) );
repositoryProblemDAOControl.replay();
RepositoryProblemFacet problem1 = createProblem( GROUP_ID, "artifactId", INTERNAL );
RepositoryProblemFacet problem2 = createProblem( GROUP_ID, "artifactId-2", SNAPSHOTS );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacets( INTERNAL, RepositoryProblemFacet.FACET_ID ),
Arrays.asList( problem1.getName() ) );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacets( SNAPSHOTS, RepositoryProblemFacet.FACET_ID ),
Arrays.asList( problem2.getName() ) );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacet( INTERNAL, RepositoryProblemFacet.FACET_ID, problem1.getName() ),
problem1 );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacet( SNAPSHOTS, RepositoryProblemFacet.FACET_ID, problem2.getName() ),
problem2 );
metadataRepositoryControl.replay();
action.setGroupId( GROUP_ID );
action.setRepositoryId( GenerateReportAction.ALL_REPOSITORIES );
ServletRunner sr = new ServletRunner();
ServletUnitClient sc = sr.newClient();
action.setServletRequest( sc.newInvocation( URL ).getRequest() );
prepareAction( Collections.<String>emptyList(), Arrays.asList( SNAPSHOTS, INTERNAL ) );
String result = action.execute();
assertSuccessResult( result );
RepositoryProblemReport problemReport1 = createProblemReport( problem1 );
RepositoryProblemReport problemReport2 = createProblemReport( problem2 );
assertEquals( Arrays.asList( INTERNAL, SNAPSHOTS ),
new ArrayList<String>( action.getRepositoriesMap().keySet() ) );
assertEquals( Arrays.asList( problemReport1 ), action.getRepositoriesMap().get( INTERNAL ) );
assertEquals( Arrays.asList( problemReport2 ), action.getRepositoriesMap().get( SNAPSHOTS ) );
assertEquals( Arrays.asList( problem1 ), action.getRepositoriesMap().get( INTERNAL ) );
assertEquals( Arrays.asList( problem2 ), action.getRepositoriesMap().get( SNAPSHOTS ) );
repositoryProblemDAOControl.verify();
metadataRepositoryControl.verify();
}
public void testHealthReportSingleRepoByIncorrectGroupId()
throws Exception
{
repositoryProblemDAOControl.expectAndReturn( repositoryProblemDAO.queryRepositoryProblems(
new RepositoryProblemConstraint( new int[]{0, 101}, "not.it", INTERNAL ) ),
Collections.<Object>emptyList() );
repositoryProblemDAOControl.replay();
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacets( INTERNAL, RepositoryProblemFacet.FACET_ID ),
Collections.<MetadataFacet>emptyList() );
metadataRepositoryControl.replay();
action.setGroupId( "not.it" );
action.setRepositoryId( INTERNAL );
ServletRunner sr = new ServletRunner();
ServletUnitClient sc = sr.newClient();
action.setServletRequest( sc.newInvocation( URL ).getRequest() );
prepareAction( Collections.<String>emptyList(), Arrays.asList( SNAPSHOTS, INTERNAL ) );
String result = action.execute();
@ -581,7 +575,7 @@ public class GenerateReportActionTest
assertFalse( action.hasActionErrors() );
assertFalse( action.hasFieldErrors() );
repositoryProblemDAOControl.verify();
metadataRepositoryControl.verify();
}
private void assertMultiRepoCsvResult()
@ -595,20 +589,13 @@ public class GenerateReportActionTest
"snapshots,0,0,0,0,0\n" + "internal,0,0,0,0,0\n", IOUtils.toString( action.getInputStream() ) );
}
private RepositoryProblemReport createProblemReport( RepositoryProblem problem )
private RepositoryProblemFacet createProblem( String groupId, String artifactId, String repoId )
{
RepositoryProblemReport problemReport = new RepositoryProblemReport( problem );
problemReport.setGroupURL( "http://localhost/browse/" + problem.getGroupId() );
problemReport.setArtifactURL( problemReport.getGroupURL() + "/" + problem.getArtifactId() );
return problemReport;
}
private RepositoryProblem createProblem( String groupId, String artifactId, String repoId )
{
RepositoryProblem problem = new RepositoryProblem();
RepositoryProblemFacet problem = new RepositoryProblemFacet();
problem.setRepositoryId( repoId );
problem.setGroupId( groupId );
problem.setArtifactId( artifactId );
problem.setNamespace( groupId );
problem.setProject( artifactId );
problem.setProblem( PROBLEM );
return problem;
}

View File

@ -30,11 +30,6 @@
</requirement>
</requirements>
</component>
<component>
<role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.web.action.admin.repositories.RepositoryProblemDAOStub</implementation>
</component>
<component>
<role>org.apache.maven.archiva.database.ArtifactDAO</role>
<role-hint>jdo</role-hint>

View File

@ -20,20 +20,15 @@
<component-set>
<components>
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.web.action.admin.repositories.ArchivaDAOStub</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.ArtifactDAO</role>
<role-hint>jdo</role-hint>
</requirement>
</requirements>
</component>
<component>
<role>org.apache.maven.archiva.database.ArtifactDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.web.action.admin.repositories.ArtifactDAOStub</implementation>
<role>org.apache.maven.archiva.security.UserRepositories</role>
<role-hint>default</role-hint>
<implementation>org.apache.maven.archiva.security.UserRepositoriesStub</implementation>
<configuration>
<repoIds>
<repoId>internal</repoId>
<repoId>snapshots</repoId>
</repoIds>
</configuration>
</component>
</components>
</component-set>

View File

@ -116,7 +116,7 @@ public class TestMetadataRepository
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public void addMetadataFacet( String repositoryId, String facetId, String name, MetadataFacet metadataFacet )
public void addMetadataFacet( String repositoryId, String facetId, MetadataFacet metadataFacet )
{
//To change body of implemented methods use File | Settings | File Templates.
}
@ -126,6 +126,11 @@ public class TestMetadataRepository
//To change body of implemented methods use File | Settings | File Templates.
}
public void removeMetadataFacet( String repoId, String facetId, String name )
{
//To change body of implemented methods use File | Settings | File Templates.
}
public List<ArtifactMetadata> getArtifactsByDateRange( String repoId, Date startTime, Date endTime )
{
return null; //To change body of implemented methods use File | Settings | File Templates.

View File

@ -25,6 +25,8 @@ public interface MetadataFacet
{
String getFacetId();
String getName();
Map<String, String> toProperties();
void fromProperties( Map<String, String> properties );

View File

@ -55,10 +55,12 @@ public interface MetadataRepository
MetadataFacet getMetadataFacet( String repositoryId, String facetId, String name );
void addMetadataFacet( String repositoryId, String facetId, String name, MetadataFacet metadataFacet );
void addMetadataFacet( String repositoryId, String facetId, MetadataFacet metadataFacet );
void removeMetadataFacets( String repositoryId, String facetId );
void removeMetadataFacet( String repoId, String facetId, String name );
List<ArtifactMetadata> getArtifactsByDateRange( String repoId, Date startTime, Date endTime );
Collection<String> getRepositories();
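
Note: the facet API above now derives the storage name from MetadataFacet.getName() instead of a separate parameter, and gains a single-facet removeMetadataFacet. A hedged sketch of a caller adapting to the change; the wrapper class and arguments are placeholders:

import org.apache.archiva.metadata.model.MetadataFacet;
import org.apache.archiva.metadata.repository.MetadataRepository;

// Illustrative sketch only; repoId and facet are supplied by the caller.
public class FacetApiExample
{
    void storeAndRemove( MetadataRepository metadataRepository, String repoId, MetadataFacet facet )
    {
        // Previously: addMetadataFacet( repoId, facetId, name, facet ).
        // Now the repository derives the name from the facet itself.
        metadataRepository.addMetadataFacet( repoId, facet.getFacetId(), facet );

        MetadataFacet stored =
            metadataRepository.getMetadataFacet( repoId, facet.getFacetId(), facet.getName() );

        // New single-facet removal, keyed by the same name.
        metadataRepository.removeMetadataFacet( repoId, facet.getFacetId(), facet.getName() );
    }
}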

View File

@ -82,6 +82,12 @@ public class MavenProjectFacet
return FACET_ID;
}
public String getName()
{
// TODO: not needed, perhaps version metadata facet should be separate interface?
return null;
}
public Map<String, String> toProperties()
{
HashMap<String, String> properties = new HashMap<String, String>();

View File

@ -246,8 +246,28 @@ public class FileMetadataRepository
public List<String> getMetadataFacets( String repoId, String facetId )
{
File directory = getMetadataDirectory( repoId, facetId );
String[] list = directory.list();
return list != null ? Arrays.asList( list ) : Collections.<String>emptyList();
List<String> facets = new ArrayList<String>();
recurse( facets, "", directory );
return facets;
}
private void recurse( List<String> facets, String prefix, File directory )
{
File[] list = directory.listFiles();
if ( list != null )
{
for ( File dir : list )
{
if ( dir.isDirectory() )
{
recurse( facets, prefix + "/" + dir.getName(), dir );
}
else if ( dir.getName().equals( METADATA_KEY + ".properties" ) )
{
facets.add( prefix.substring( 1 ) );
}
}
}
}
public MetadataFacet getMetadataFacet( String repositoryId, String facetId, String name )
@ -283,14 +303,15 @@ public class FileMetadataRepository
return metadataFacet;
}
public void addMetadataFacet( String repositoryId, String facetId, String name, MetadataFacet metadataFacet )
public void addMetadataFacet( String repositoryId, String facetId, MetadataFacet metadataFacet )
{
Properties properties = new Properties();
properties.putAll( metadataFacet.toProperties() );
try
{
writeProperties( properties, new File( getMetadataDirectory( repositoryId, facetId ), name ),
writeProperties( properties,
new File( getMetadataDirectory( repositoryId, facetId ), metadataFacet.getName() ),
METADATA_KEY );
}
catch ( IOException e )
@ -313,6 +334,20 @@ public class FileMetadataRepository
}
}
public void removeMetadataFacet( String repoId, String facetId, String name )
{
File dir = new File( getMetadataDirectory( repoId, facetId ), name );
try
{
FileUtils.deleteDirectory( dir );
}
catch ( IOException e )
{
// TODO: log or propagate the failure instead of printing the stack trace
e.printStackTrace();
}
}
public List<ArtifactMetadata> getArtifactsByDateRange( String repoId, Date startTime, Date endTime )
{
// TODO: this is quite slow - if we are to persist with this repository implementation we should build an index
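
Note: facet names may now contain slashes (the test below uses "test/name"), so each facet is written under a nested directory per name segment and getMetadataFacets() walks that tree to rebuild the names. A hedged sketch of the resulting layout; the properties file name here stands in for the METADATA_KEY constant, whose value is not shown in this hunk:

import java.io.File;

// Illustrative only: how a facet name with slashes maps to the nested layout that recurse() walks.
public class FacetLayoutExample
{
    static File facetFile( File facetIdDirectory, String facetName )
    {
        // e.g. facetName = "org.example/my-project/1.0/my-project-1.0.jar" becomes
        // <facetIdDirectory>/org.example/my-project/1.0/my-project-1.0.jar/<METADATA_KEY>.properties
        String metadataKey = "metadata";   // assumption: stands in for the real METADATA_KEY constant
        return new File( new File( facetIdDirectory, facetName ), metadataKey + ".properties" );
    }
}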

View File

@ -53,7 +53,7 @@ public class FileMetadataRepositoryTest
private static final String TEST_FACET_ID = "test-facet-id";
private static final String TEST_NAME = "test-name";
private static final String TEST_NAME = "test/name";
private static final String TEST_VALUE = "test-value";
@ -125,7 +125,7 @@ public class FileMetadataRepositoryTest
public void testGetMetadataFacet()
{
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME, new TestMetadataFacet( TEST_VALUE ) );
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, new TestMetadataFacet( TEST_VALUE ) );
assertEquals( new TestMetadataFacet( TEST_VALUE ),
repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
@ -138,14 +138,14 @@ public class FileMetadataRepositoryTest
public void testGetMetadataFacetWhenUnknownName()
{
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME, new TestMetadataFacet( TEST_VALUE ) );
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, new TestMetadataFacet( TEST_VALUE ) );
assertNull( repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, UNKNOWN ) );
}
public void testGetMetadataFacetWhenDefaultValue()
{
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME, new TestMetadataFacet( null ) );
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, new TestMetadataFacet( null ) );
assertEquals( new TestMetadataFacet( "test-metadata" ),
repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
@ -153,14 +153,14 @@ public class FileMetadataRepositoryTest
public void testGetMetadataFacetWhenUnknownFacetId()
{
repository.addMetadataFacet( TEST_REPO_ID, UNKNOWN, TEST_NAME, new TestMetadataFacet( TEST_VALUE ) );
repository.addMetadataFacet( TEST_REPO_ID, UNKNOWN, new TestMetadataFacet( TEST_VALUE ) );
assertNull( repository.getMetadataFacet( TEST_REPO_ID, UNKNOWN, TEST_NAME ) );
}
public void testGetMetadataFacets()
{
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME, new TestMetadataFacet( TEST_VALUE ) );
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, new TestMetadataFacet( TEST_VALUE ) );
assertEquals( Collections.singletonList( TEST_NAME ),
repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID ) );
@ -174,18 +174,7 @@ public class FileMetadataRepositoryTest
public void testRemoveFacets()
{
List<String> facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
assertTrue( facets.isEmpty() );
repository.removeMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
assertTrue( facets.isEmpty() );
}
public void testRemoveFacetsWhenEmpty()
{
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME, new TestMetadataFacet( TEST_VALUE ) );
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, new TestMetadataFacet( TEST_VALUE ) );
List<String> facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
assertFalse( facets.isEmpty() );
@ -196,11 +185,56 @@ public class FileMetadataRepositoryTest
assertTrue( facets.isEmpty() );
}
public void testRemoveFacetsWhenEmpty()
{
List<String> facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
assertTrue( facets.isEmpty() );
repository.removeMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
assertTrue( facets.isEmpty() );
}
public void testRemoveFacetsWhenUnknown()
{
repository.removeMetadataFacets( TEST_REPO_ID, UNKNOWN );
}
public void testRemoveFacet()
{
TestMetadataFacet metadataFacet = new TestMetadataFacet( TEST_VALUE );
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, metadataFacet );
assertEquals( metadataFacet, repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
List<String> facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
assertFalse( facets.isEmpty() );
repository.removeMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME );
assertNull( repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
assertTrue( facets.isEmpty() );
}
public void testRemoveFacetWhenEmpty()
{
List<String> facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
assertTrue( facets.isEmpty() );
assertNull( repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
repository.removeMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME );
facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
assertTrue( facets.isEmpty() );
assertNull( repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
}
public void testRemoveFacetWhenUnknown()
{
repository.removeMetadataFacet( TEST_REPO_ID, UNKNOWN, TEST_NAME );
}
public void testGetArtifacts()
{
ArtifactMetadata artifact1 = createArtifact();
@ -246,8 +280,8 @@ public class FileMetadataRepositoryTest
public void testRepositories()
{
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME, new TestMetadataFacet( TEST_VALUE ) );
repository.addMetadataFacet( OTHER_REPO, TEST_FACET_ID, TEST_NAME, new TestMetadataFacet( TEST_VALUE ) );
repository.addMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, new TestMetadataFacet( TEST_VALUE ) );
repository.addMetadataFacet( OTHER_REPO, TEST_FACET_ID, new TestMetadataFacet( TEST_VALUE ) );
assertEquals( Arrays.asList( OTHER_REPO, TEST_REPO_ID ), repository.getRepositories() );
}
@ -452,6 +486,11 @@ public class FileMetadataRepositoryTest
return TEST_FACET_ID;
}
public String getName()
{
return TEST_NAME;
}
public Map<String, String> toProperties()
{
if ( value != null )

View File

@ -30,5 +30,6 @@
<module>metadata-repository-file</module>
<module>maven2-repository</module>
<module>repository-statistics</module>
<module>problem-reports</module>
</modules>
</project>

View File

@ -8,7 +8,7 @@
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
@ -16,49 +16,30 @@
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-->
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>plugins</artifactId>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-reporting</artifactId>
<version>1.3-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>archiva-report-manager</artifactId>
<name>Archiva Reporting :: Report Manager</name>
<artifactId>problem-reports</artifactId>
<name>Archiva Problem Reporting Plugin</name>
<dependencies>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-database</artifactId>
<artifactId>metadata-repository-api</artifactId>
</dependency>
<!-- TODO: we want to move the event handling, or perhaps centralise under deleteArtifact() in the metadata repository itself -->
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-layer</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-utils</artifactId>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-spring</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<scope>test</scope>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-checksum</artifactId>
</dependency>
</dependencies>
<build>
</build>
</project>

View File

@ -0,0 +1,48 @@
package org.apache.archiva.reports;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;
/**
* Process repository management events and respond appropriately.
*
* @plexus.component role="org.apache.maven.archiva.repository.events.RepositoryListener" role-hint="problem-reports"
*/
public class RepositoryProblemEventListener
implements RepositoryListener
{
/**
* @plexus.requirement
*/
private MetadataRepository metadataRepository;
public void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact )
{
String name =
RepositoryProblemFacet.createName( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
repository.toFile( artifact ).getName() );
metadataRepository.removeMetadataFacet( repository.getId(), RepositoryProblemFacet.FACET_ID, name );
}
}
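
Note: the removal above is keyed by the same composite name that RepositoryProblemFacet.createName() (next file) produces when a problem is recorded. An illustrative value with placeholder coordinates:

// Illustrative only: placeholder coordinates.
String name = RepositoryProblemFacet.createName( "org.example", "my-artifact", "1.0", "my-artifact-1.0.jar" );
// name is "org.example/my-artifact/1.0/my-artifact-1.0.jar", the facet name removed by deleteArtifact().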

View File

@ -0,0 +1,154 @@
package org.apache.archiva.reports;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.HashMap;
import java.util.Map;
import org.apache.archiva.metadata.model.MetadataFacet;
public class RepositoryProblemFacet
implements MetadataFacet
{
public static final String FACET_ID = "org.apache.archiva.reports";
private String repositoryId;
private String namespace;
private String project;
private String version;
private String id;
private String message;
private String problem;
public String getFacetId()
{
return FACET_ID;
}
public String getName()
{
return createName( namespace, project, version, id );
}
public Map<String, String> toProperties()
{
Map<String, String> map = new HashMap<String, String>();
map.put( "repositoryId", repositoryId );
map.put( "namespace", namespace );
map.put( "project", project );
map.put( "version", version );
map.put( "id", id );
map.put( "message", message );
map.put( "problem", problem );
return map;
}
public void fromProperties( Map<String, String> properties )
{
repositoryId = properties.get( "repositoryId" );
namespace = properties.get( "namespace" );
project = properties.get( "project" );
version = properties.get( "version" );
id = properties.get( "id" );
message = properties.get( "message" );
problem = properties.get( "problem" );
}
public void setRepositoryId( String repositoryId )
{
this.repositoryId = repositoryId;
}
public void setNamespace( String namespace )
{
this.namespace = namespace;
}
public String getRepositoryId()
{
return repositoryId;
}
public String getNamespace()
{
return namespace;
}
public void setProject( String project )
{
this.project = project;
}
public String getProject()
{
return project;
}
public void setVersion( String version )
{
this.version = version;
}
public String getVersion()
{
return version;
}
public void setId( String id )
{
this.id = id;
}
public String getId()
{
return id;
}
public void setMessage( String message )
{
this.message = message;
}
public String getMessage()
{
return message;
}
public void setProblem( String problem )
{
this.problem = problem;
}
public String getProblem()
{
return problem;
}
static String createName( String namespace, String project, String projectVersion, String id )
{
return namespace + "/" + project + "/" + projectVersion + "/" + id;
}
}
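
Note: a short, hedged sketch of recording a problem with the new facet and repository API; all coordinate and message values are placeholders, not taken from this commit:

import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.reports.RepositoryProblemFacet;

// Illustrative sketch only.
public class RecordProblemExample
{
    void recordProblem( MetadataRepository metadataRepository )
    {
        RepositoryProblemFacet problem = new RepositoryProblemFacet();
        problem.setRepositoryId( "internal" );
        problem.setNamespace( "org.example" );
        problem.setProject( "my-artifact" );
        problem.setVersion( "1.0" );
        problem.setId( "my-artifact-1.0.jar" );
        problem.setMessage( "example problem message" );
        problem.setProblem( "duplicate-artifact" );

        // Stored under FACET_ID; the name is namespace/project/version/id (see createName()).
        metadataRepository.addMetadataFacet( problem.getRepositoryId(), RepositoryProblemFacet.FACET_ID, problem );
    }
}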

View File

@ -1,4 +1,4 @@
package org.apache.maven.archiva.reporting;
package org.apache.archiva.reports;
/*
* Licensed to the Apache Software Foundation (ASF) under one
@ -9,7 +9,7 @@ package org.apache.maven.archiva.reporting;
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
@ -19,21 +19,17 @@ package org.apache.maven.archiva.reporting;
* under the License.
*/
/**
* ArchivaReportException
*
* @version $Id: ArchivaReportException.java
*/
public class ArchivaReportException
extends Exception
{
public ArchivaReportException( String message, Throwable cause )
{
super( message, cause );
}
import org.apache.archiva.metadata.model.MetadataFacet;
import org.apache.archiva.metadata.model.MetadataFacetFactory;
public ArchivaReportException( String message )
/**
* @plexus.component role="org.apache.archiva.metadata.model.MetadataFacetFactory" role-hint="org.apache.archiva.reports"
*/
public class RepositoryProblemFacetFactory
implements MetadataFacetFactory
{
public MetadataFacet createMetadataFacet()
{
super( message );
return new RepositoryProblemFacet();
}
}

View File

@ -1,4 +1,4 @@
package org.apache.maven.archiva.reporting.artifact;
package org.apache.archiva.reports.consumers;
/*
* Licensed to the Apache Software Foundation (ASF) under one
@ -20,11 +20,17 @@ package org.apache.maven.archiva.reporting.artifact;
*/
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.reports.RepositoryProblemFacet;
import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
@ -33,18 +39,11 @@ import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.database.constraints.ArtifactsByChecksumConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.RepositoryProblem;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.layout.LayoutException;
import org.codehaus.plexus.digest.Digester;
import org.codehaus.plexus.digest.DigesterException;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
@ -88,11 +87,6 @@ public class DuplicateArtifactsConsumer
*/
private FileTypes filetypes;
/**
* @plexus.requirement role-hint="jdo"
*/
private ArchivaDAO dao;
/**
* @plexus.requirement
*/
@ -102,15 +96,15 @@ public class DuplicateArtifactsConsumer
private File repositoryDir;
/**
* @plexus.requirement role-hint="sha1"
*/
private Digester digestSha1;
private String repoId;
private ManagedRepositoryContent repository;
/**
* @plexus.requirement
*/
private MetadataRepository metadataRepository;
public String getId()
{
return id;
@ -159,31 +153,17 @@ public class DuplicateArtifactsConsumer
// TODO: would be quicker to somehow make sure it ran after the update database consumer, or as a part of that
// perhaps could use an artifact context that is retained for all consumers? First in can set the SHA-1
String checksumSha1;
ChecksummedFile checksummedFile = new ChecksummedFile( artifactFile );
try
{
checksumSha1 = digestSha1.calc( artifactFile );
checksumSha1 = checksummedFile.calculateChecksum( ChecksumAlgorithm.SHA1 );
}
catch ( DigesterException e )
catch ( IOException e )
{
throw new ConsumerException( e.getMessage(), e );
}
List<ArchivaArtifact> results;
try
{
results = dao.getArtifactDAO().queryArtifacts(
new ArtifactsByChecksumConstraint( checksumSha1, ArtifactsByChecksumConstraint.SHA1 ) );
}
catch ( ObjectNotFoundException e )
{
log.debug( "No duplicates for artifact: " + path + " (repository " + repoId + ")" );
return;
}
catch ( ArchivaDatabaseException e )
{
log.warn( "Unable to query DB for potential duplicates with: " + path + " (repository " + repoId + "): " + e.getMessage(), e );
return;
}
List<ArtifactMetadata> results = metadataRepository.getArtifactsByChecksum( repoId, checksumSha1 );
if ( CollectionUtils.isNotEmpty( results ) )
{
@ -194,45 +174,42 @@ public class DuplicateArtifactsConsumer
return;
}
ArchivaArtifact artifact;
ArtifactReference artifactReference;
try
{
artifact = new ArchivaArtifact( repository.toArtifactReference( path ), repoId );
artifactReference = repository.toArtifactReference( path );
}
catch ( LayoutException e )
{
log.warn( "Unable to report problem for path: " + path );
return;
}
for ( ArchivaArtifact dupArtifact : results )
for ( ArtifactMetadata dupArtifact : results )
{
if ( dupArtifact.equals( artifact ) )
String id = path.substring( path.lastIndexOf( "/" ) + 1 );
if ( dupArtifact.getId().equals( id ) &&
dupArtifact.getNamespace().equals( artifactReference.getGroupId() ) &&
dupArtifact.getProject().equals( artifactReference.getArtifactId() ) &&
dupArtifact.getVersion().equals( artifactReference.getVersion() ) )
{
// Skip reference to itself.
continue;
}
RepositoryProblem problem = new RepositoryProblem();
problem.setRepositoryId( dupArtifact.getModel().getRepositoryId() );
problem.setPath( path );
problem.setGroupId( artifact.getGroupId() );
problem.setArtifactId( artifact.getArtifactId() );
problem.setVersion( artifact.getVersion() );
problem.setType( DuplicateArtifactReport.PROBLEM_TYPE_DUPLICATE_ARTIFACTS );
problem.setOrigin( getId() );
problem.setMessage( "Duplicate Artifact Detected: " + artifact + " <--> " + dupArtifact );
RepositoryProblemFacet problem = new RepositoryProblemFacet();
problem.setRepositoryId( repoId );
problem.setNamespace( artifactReference.getGroupId() );
problem.setProject( artifactReference.getArtifactId() );
problem.setVersion( artifactReference.getVersion() );
problem.setId( id );
// TODO: proper path conversion for new metadata
problem.setMessage(
"Duplicate Artifact Detected: " + path + " <--> " + dupArtifact.getNamespace().replace( '.', '/' ) +
"/" + dupArtifact.getProject() + "/" + dupArtifact.getVersion() + "/" + dupArtifact.getId() );
problem.setProblem( "duplicate-artifact" );
try
{
log.debug( "Found duplicate artifact: " + problem );
dao.getRepositoryProblemDAO().saveRepositoryProblem( problem );
}
catch ( ArchivaDatabaseException e )
{
String emsg = "Unable to save problem with duplicate artifact to DB: " + e.getMessage();
log.warn( emsg, e );
throw new ConsumerException( emsg, e );
}
metadataRepository.addMetadataFacet( repoId, RepositoryProblemFacet.FACET_ID, problem );
}
}
}
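Stripped of the self-reference check and the error handling, the consumer's rewritten flow is: hash the file with ChecksummedFile, ask the metadata repository for every artifact carrying the same SHA-1, and attach a problem facet for each hit. A condensed sketch of that flow; the method name is invented, and repoId, repository and metadataRepository are assumed to be the consumer's fields shown above:

// condensed sketch only -- not the actual consumer code
private void reportDuplicates( File artifactFile, String path )
    throws IOException, LayoutException
{
    String checksumSha1 = new ChecksummedFile( artifactFile ).calculateChecksum( ChecksumAlgorithm.SHA1 );

    ArtifactReference artifactReference = repository.toArtifactReference( path );
    String id = path.substring( path.lastIndexOf( "/" ) + 1 );

    for ( ArtifactMetadata dupArtifact : metadataRepository.getArtifactsByChecksum( repoId, checksumSha1 ) )
    {
        RepositoryProblemFacet problem = new RepositoryProblemFacet();
        problem.setRepositoryId( repoId );
        problem.setNamespace( artifactReference.getGroupId() );
        problem.setProject( artifactReference.getArtifactId() );
        problem.setVersion( artifactReference.getVersion() );
        problem.setId( id );
        problem.setProblem( "duplicate-artifact" );
        problem.setMessage( "Duplicate Artifact Detected: " + path + " <--> " + dupArtifact.getId() );

        metadataRepository.addMetadataFacet( repoId, RepositoryProblemFacet.FACET_ID, problem );
    }
}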

View File

@ -19,9 +19,7 @@ package org.apache.archiva.metadata.repository.stats;
* under the License.
*/
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
@ -44,8 +42,6 @@ public class DefaultRepositoryStatisticsManager
*/
private MetadataRepository metadataRepository;
static final DateFormat SCAN_TIMESTAMP = new SimpleDateFormat( "yyyyMMdd.HHmmss.SSS" );
public RepositoryStatistics getLastStatistics( String repositoryId )
{
// TODO: consider a more efficient implementation that directly gets the last one from the content repository
@ -81,9 +77,7 @@ public class DefaultRepositoryStatisticsManager
// populate total project count from content repository
// repositoryStatistics.setTotalProjectCount( );
metadataRepository.addMetadataFacet( repositoryId, RepositoryStatistics.FACET_ID,
SCAN_TIMESTAMP.format( repositoryStatistics.getScanStartTime() ),
repositoryStatistics );
metadataRepository.addMetadataFacet( repositoryId, RepositoryStatistics.FACET_ID, repositoryStatistics );
}
public void deleteStatistics( String repositoryId )
@ -100,7 +94,7 @@ public class DefaultRepositoryStatisticsManager
{
try
{
Date date = SCAN_TIMESTAMP.parse( name );
Date date = RepositoryStatistics.SCAN_TIMESTAMP.parse( name );
if ( ( startTime == null || !date.before( startTime ) ) &&
( endTime == null || !date.after( endTime ) ) )
{

View File

@ -19,6 +19,8 @@ package org.apache.archiva.metadata.repository.stats;
* under the License.
*/
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
@ -46,6 +48,8 @@ public class RepositoryStatistics
public static String FACET_ID = "org.apache.archiva.metadata.repository.stats";
static final DateFormat SCAN_TIMESTAMP = new SimpleDateFormat( "yyyyMMdd.HHmmss.SSS" );
public Date getScanEndTime()
{
return scanEndTime;
@ -136,6 +140,11 @@ public class RepositoryStatistics
return FACET_ID;
}
public String getName()
{
return SCAN_TIMESTAMP.format( scanStartTime );
}
public Map<String, String> toProperties()
{
Map<String, String> properties = new HashMap<String, String>();
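The scan timestamp has effectively moved onto the facet itself: getName() formats the scan start time with the yyyyMMdd.HHmmss.SSS pattern, and the statistics manager parses that same name back into a Date when filtering facets by range, so the facet name doubles as its storage key. A small sketch of the naming round trip, with an invented class name and an illustrative timestamp in the comment (SCAN_TIMESTAMP is package-visible, so the sketch sits in the same package):

package org.apache.archiva.metadata.repository.stats;

import java.text.ParseException;
import java.util.Date;

public class RepositoryStatisticsNaming
{
    public static void main( String[] args )
        throws ParseException
    {
        RepositoryStatistics stats = new RepositoryStatistics();
        stats.setScanStartTime( new Date() );

        // the name is the formatted scan start time, e.g. "20091215.230736.123"
        String name = stats.getName();

        // the manager recovers the scan time from a stored facet name when filtering by date range
        Date scanStart = RepositoryStatistics.SCAN_TIMESTAMP.parse( name );

        System.out.println( name + " -> " + scanStart );
    }
}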

View File

@ -65,11 +65,10 @@ public class RepositoryStatisticsManagerTest
public void testGetLatestStats()
throws ParseException
{
Date endTime =
new Date( DefaultRepositoryStatisticsManager.SCAN_TIMESTAMP.parse( SECOND_TEST_SCAN ).getTime() + 60000 );
Date endTime = new Date( RepositoryStatistics.SCAN_TIMESTAMP.parse( SECOND_TEST_SCAN ).getTime() + 60000 );
RepositoryStatistics stats = new RepositoryStatistics();
stats.setScanStartTime( DefaultRepositoryStatisticsManager.SCAN_TIMESTAMP.parse( SECOND_TEST_SCAN ) );
stats.setScanStartTime( RepositoryStatistics.SCAN_TIMESTAMP.parse( SECOND_TEST_SCAN ) );
stats.setScanEndTime( endTime );
stats.setTotalArtifactFileSize( 1314527915L );
stats.setNewFileCount( 123 );
@ -94,8 +93,8 @@ public class RepositoryStatisticsManagerTest
assertEquals( 2031, stats.getTotalProjectCount() );
assertEquals( 529, stats.getTotalGroupCount() );
assertEquals( 56229, stats.getTotalFileCount() );
assertEquals( SECOND_TEST_SCAN,
DefaultRepositoryStatisticsManager.SCAN_TIMESTAMP.format( stats.getScanStartTime() ) );
assertEquals( SECOND_TEST_SCAN, RepositoryStatistics.SCAN_TIMESTAMP.format( stats.getScanStartTime() ) );
assertEquals( SECOND_TEST_SCAN, stats.getName() );
assertEquals( endTime, stats.getScanEndTime() );
metadataRepositoryControl.verify();
@ -119,17 +118,15 @@ public class RepositoryStatisticsManagerTest
Date current = new Date();
Date startTime = new Date( current.getTime() - 12345 );
RepositoryStatistics stats1 = createTestStats( startTime, current );
RepositoryStatistics stats = createTestStats( startTime, current );
String startTimeAsString = DefaultRepositoryStatisticsManager.SCAN_TIMESTAMP.format( startTime );
metadataRepository.addMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, startTimeAsString, stats1 );
metadataRepository.addMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, stats );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ),
Arrays.asList( startTimeAsString ) );
Arrays.asList( stats.getName() ) );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, startTimeAsString ),
stats1 );
RepositoryStatistics stats = stats1;
metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, stats.getName() ),
stats );
metadataRepositoryControl.replay();
@ -155,19 +152,17 @@ public class RepositoryStatisticsManagerTest
Date startTime1 = new Date( current.getTime() - 12345 );
RepositoryStatistics stats1 = createTestStats( startTime1, new Date( current.getTime() - 6000 ) );
String startTimeAsString1 = DefaultRepositoryStatisticsManager.SCAN_TIMESTAMP.format( startTime1 );
metadataRepository.addMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, startTimeAsString1, stats1 );
metadataRepository.addMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, stats1 );
Date startTime2 = new Date( current.getTime() - 3000 );
RepositoryStatistics stats2 = createTestStats( startTime2, current );
String startTimeAsString2 = DefaultRepositoryStatisticsManager.SCAN_TIMESTAMP.format( startTime2 );
metadataRepository.addMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, startTimeAsString2, stats2 );
metadataRepository.addMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, stats2 );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ),
Arrays.asList( startTimeAsString1, startTimeAsString2 ) );
Arrays.asList( stats1.getName(), stats2.getName() ) );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, startTimeAsString2 ),
metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, stats2.getName() ),
stats2 );
metadataRepository.removeMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID );
@ -396,9 +391,8 @@ public class RepositoryStatisticsManagerTest
private void addStats( Date startTime, Date endTime )
{
RepositoryStatistics stats = createTestStats( startTime, endTime );
String startTimeAsString = DefaultRepositoryStatisticsManager.SCAN_TIMESTAMP.format( startTime );
metadataRepository.addMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, startTimeAsString, stats );
statsCreated.put( startTimeAsString, stats );
metadataRepository.addMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, stats );
statsCreated.put( stats.getName(), stats );
}
private RepositoryStatistics createTestStats( Date startTime, Date endTime )

View File

@ -30,7 +30,6 @@
<modules>
<module>archiva-base</module>
<module>archiva-database</module>
<module>archiva-reporting</module>
<module>archiva-scheduler</module>
<module>archiva-web</module>
<module>metadata</module>

pom.xml
View File

@ -309,6 +309,11 @@
<artifactId>repository-statistics</artifactId>
<version>1.3-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>problem-reports</artifactId>
<version>1.3-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>maven2-repository</artifactId>
@ -324,11 +329,6 @@
<artifactId>archiva-artifact-converter</artifactId>
<version>1.3-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-artifact-reports</artifactId>
<version>1.3-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-checksum</artifactId>
@ -394,11 +394,6 @@
<artifactId>archiva-proxy</artifactId>
<version>1.3-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-report-manager</artifactId>
<version>1.3-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-layer</artifactId>