[MRM-773]

-get feed contents from the database
-generate the feed without writing to a file


git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@651956 13f79535-47bb-0310-9956-ffa450edef68
Maria Odea B. Ching 2008-04-27 14:04:28 +00:00
parent 8213830900
commit 36562a6696
11 changed files with 594 additions and 279 deletions
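
The two changes in the commit message show up below as a reworked RssFeedGenerator, which now returns a SyndFeed instead of writing an XML file, and Map-driven RssFeedProcessor implementations that pull artifacts from the database through ArtifactDAO. A minimal sketch of calling the new generator API, based only on the signatures in this diff (the class name is an assumption; the sample values mirror the updated unit test):

import java.util.ArrayList;
import java.util.List;

import org.apache.archiva.rss.RssFeedEntry;
import org.apache.archiva.rss.RssFeedGenerator;

import com.sun.syndication.feed.synd.SyndFeed;

public class GeneratorSketch
{
    public static void main( String[] args )
    {
        // Entries no longer carry a link of their own; just a title, description and guid.
        List<RssFeedEntry> entries = new ArrayList<RssFeedEntry>();
        RssFeedEntry entry = new RssFeedEntry( "Item 1" );
        entry.setDescription( "RSS 2.0 feed item 1." );
        entry.setGuid( "http://rss-2.0-test-feed.com/item1" );
        entries.add( entry );

        // The generator now returns the feed in memory; its link is built from
        // DEFAULT_LINK plus the output filename rather than from a file on disk.
        RssFeedGenerator generator = new RssFeedGenerator();
        SyndFeed feed = generator.generateFeed( "Test Feed", "The test feed from Archiva.",
                                                entries, "generated-rss2.0-feed.xml" );
        System.out.println( feed.getLink() );
    }
}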

File: pom.xml

@@ -18,6 +18,10 @@
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-model</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-database</artifactId>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>

File: RssFeedEntry.java

@@ -50,10 +50,9 @@ public class RssFeedEntry
}
public RssFeedEntry( String title, String link )
public RssFeedEntry( String title )
{
this.title = title;
this.link = link;
}
public String getTitle()

File: RssFeedGenerator.java

@@ -19,10 +19,6 @@ package org.apache.archiva.rss;
* under the License.
*/
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
@@ -36,10 +32,6 @@ import com.sun.syndication.feed.synd.SyndEntry;
import com.sun.syndication.feed.synd.SyndEntryImpl;
import com.sun.syndication.feed.synd.SyndFeed;
import com.sun.syndication.feed.synd.SyndFeedImpl;
import com.sun.syndication.io.FeedException;
import com.sun.syndication.io.SyndFeedInput;
import com.sun.syndication.io.SyndFeedOutput;
import com.sun.syndication.io.XmlReader;
/**
* Generates RSS feeds.
@@ -59,66 +51,27 @@ public class RssFeedGenerator
public static String DEFAULT_LANGUAGE = "en-us";
/**
* @plexus.configuration default-value="./apps/archiva/rss/"
*/
private String rssDirectory;
private String DEFAULT_LINK = "http://localhost:8080/archiva/rss/";
public void generateFeed( String title, String link, String description, List<RssFeedEntry> dataEntries,
String outputFilename )
{
File outputFile = new File( rssDirectory, outputFilename );
public SyndFeed generateFeed( String title, String description, List<RssFeedEntry> dataEntries,
String outputFilename )
{
SyndFeed feed = null;
List<SyndEntry> existingEntries = null;
if ( outputFile.exists() )
{
try
{
SyndFeedInput input = new SyndFeedInput();
feed = input.build( new XmlReader( outputFile ) );
existingEntries = feed.getEntries();
}
catch ( IOException ie )
{
log.error( "Error occurred while reading existing feed : " + ie.getLocalizedMessage() );
}
catch ( FeedException fe )
{
log.error( "Error occurred while reading existing feed : " + fe.getLocalizedMessage() );
}
}
else
{
feed = new SyndFeedImpl();
feed = new SyndFeedImpl();
feed.setTitle( title );
feed.setLink( link );
feed.setDescription( description );
feed.setLanguage( DEFAULT_LANGUAGE );
}
feed.setTitle( title );
feed.setLink( DEFAULT_LINK + outputFilename );
feed.setDescription( description );
feed.setLanguage( DEFAULT_LANGUAGE );
feed.setPublishedDate( Calendar.getInstance().getTime() );
feed.setFeedType( DEFAULT_FEEDTYPE );
feed.setFeedType( DEFAULT_FEEDTYPE );
feed.setEntries( getEntries( dataEntries, existingEntries ) );
try
{
Writer writer = new FileWriter( outputFile );
SyndFeedOutput output = new SyndFeedOutput();
output.output( feed, writer );
writer.close();
log.debug( "Finished writing feed to " + outputFile.getAbsolutePath() );
}
catch ( IOException ie )
{
log.error( "Error occurred while generating the feed : " + ie.getMessage() );
}
catch ( FeedException fe )
{
log.error( "Error occurred while generating the feed : " + fe.getMessage() );
}
log.debug( "Finished generating the feed \'" + title + "\'." );
return feed;
}
private List<SyndEntry> getEntries( List<RssFeedEntry> dataEntries, List<SyndEntry> existingEntries )
@@ -148,10 +101,4 @@ public class RssFeedGenerator
return entries;
}
public void setRssDirectory( String rssDirectory )
{
this.rssDirectory = rssDirectory;
}
}

File: AbstractArtifactsRssFeedProcessor.java (new file)

@@ -0,0 +1,96 @@
package org.apache.archiva.rss.processor;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.apache.archiva.rss.RssFeedEntry;
import org.apache.maven.archiva.model.ArchivaArtifact;
import com.sun.syndication.feed.synd.SyndFeed;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @version
*/
public abstract class AbstractArtifactsRssFeedProcessor
implements RssFeedProcessor
{
public abstract SyndFeed process( Map<String, String> reqParams );
protected List<RssFeedEntry> processData( List<ArchivaArtifact> artifacts, boolean isRepoLevel )
{
long tmp = 0;
RssFeedEntry entry = null;
List<RssFeedEntry> entries = new ArrayList<RssFeedEntry>();
String description = "";
int idx = 0;
for ( ArchivaArtifact artifact : artifacts )
{
long whenGathered = artifact.getModel().getWhenGathered().getTime();
if ( tmp != whenGathered )
{
if ( entry != null )
{
entry.setDescription( description );
entries.add( entry );
}
String repoId = artifact.getModel().getRepositoryId();
if ( !isRepoLevel )
{
entry =
new RssFeedEntry( getTitle() + "\'" + artifact.getGroupId() + ":" + artifact.getArtifactId() +
"\'" + " as of " + new Date( whenGathered ) );
description = getDescription() + "\'" + repoId + "\'" + ": \n" + artifact.toString() + " | ";
}
else
{
entry = new RssFeedEntry( getTitle() + "\'" + repoId + "\'" + " as of " + new Date( whenGathered ) );
description = getDescription() + "\'" + repoId + "\'" + ": \n" + artifact.toString() + " | ";
}
}
else
{
description = description + artifact.toString() + " | ";
}
if ( idx == ( artifacts.size() - 1 ) )
{
entry.setDescription( description );
entries.add( entry );
}
tmp = whenGathered;
idx++;
}
return entries;
}
protected abstract String getTitle();
protected abstract String getDescription();
}
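
processData() above collapses consecutive artifacts that share a whenGathered timestamp into one feed entry. A hedged sketch of that grouping behaviour, using a throwaway subclass placed in the same package so the protected method is reachable (the class name and artifact data are illustrative and mirror the test stubs further down):

package org.apache.archiva.rss.processor;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;

import org.apache.archiva.rss.RssFeedEntry;
import org.apache.maven.archiva.model.ArchivaArtifact;

import com.sun.syndication.feed.synd.SyndFeed;

public class GroupingSketch
    extends AbstractArtifactsRssFeedProcessor
{
    public SyndFeed process( Map<String, String> reqParams )
    {
        return null;
    }

    protected String getTitle()
    {
        return "New Versions of Artifact ";
    }

    protected String getDescription()
    {
        return "New versions of artifact ";
    }

    public static void main( String[] args )
    {
        List<ArchivaArtifact> artifacts = new ArrayList<ArchivaArtifact>();

        // Two artifacts gathered at the same time form one entry; a third gathered later forms a second.
        ArchivaArtifact artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-two", "1.0.1", "", "jar" );
        artifact.getModel().setRepositoryId( "test-repo" );
        artifact.getModel().setWhenGathered( new Date( 123456789 ) );
        artifacts.add( artifact );

        artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-two", "1.0.2", "", "jar" );
        artifact.getModel().setRepositoryId( "test-repo" );
        artifact.getModel().setWhenGathered( new Date( 123456789 ) );
        artifacts.add( artifact );

        artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-two", "1.0.3-SNAPSHOT", "", "jar" );
        artifact.getModel().setRepositoryId( "test-repo" );
        artifact.getModel().setWhenGathered( new Date( 345678912 ) );
        artifacts.add( artifact );

        List<RssFeedEntry> entries = new GroupingSketch().processData( artifacts, false );
        System.out.println( entries.size() ); // expected: 2
    }
}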

File: NewArtifactsRssFeedProcessor.java

@@ -19,35 +19,36 @@ package org.apache.archiva.rss.processor;
* under the License.
*/
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.archiva.rss.RssFeedEntry;
import org.apache.archiva.rss.RssFeedGenerator;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.constraints.ArtifactsByRepositoryConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.sun.syndication.feed.synd.SyndFeed;
/**
* Process new artifacts in the repository and generate RSS feeds.
* Retrieve and process all artifacts of a repository from the database and generate a rss feed.
* The artifacts will be grouped by the date when the artifacts were gathered.
* Each group will appear as one entry in the feed.
*
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @version
* @plexus.component role="org.apache.archiva.rss.processor.RssFeedProcessor" role-hint="new-artifacts"
*/
public class NewArtifactsRssFeedProcessor
implements RssFeedProcessor
extends AbstractArtifactsRssFeedProcessor
{
public static final String NEW_ARTIFACTS_IN_REPO = "New Artifacts in Repository ";
private String title = "New Artifacts in Repository ";
public static final String NEW_VERSIONS_OF_ARTIFACT = "New Versions of Artifact ";
private String desc = "New Versions of Artifact ";
/**
* @plexus.requirement
@@ -55,135 +56,57 @@ public class NewArtifactsRssFeedProcessor
private RssFeedGenerator generator;
private Logger log = LoggerFactory.getLogger( NewArtifactsRssFeedProcessor.class );
/**
* The hostname that will be used in the urls for the feed links.
* @plexus.requirement role-hint="jdo"
*/
private String host = "localhost";
/**
* The port that will be used in the urls for the feed links.
*/
private String port = "8080";
private ArtifactDAO artifactDAO;
/**
* Process the newly discovered artifacts in the repository. Generate feeds for new artifacts in the repository and
* new versions of artifact.
*/
public void process( List<ArchivaArtifact> data )
public SyndFeed process( Map<String, String> reqParams )
{
log.debug( "Process new artifacts into rss feeds." );
if ( System.getProperty( "jetty.host" ) != null )
String repoId = reqParams.get( RssFeedProcessor.KEY_REPO_ID );
if ( repoId != null )
{
host = System.getProperty( "jetty.host" );
return processNewArtifactsInRepo( repoId );
}
if ( System.getProperty( "jetty.port" ) != null )
{
port = System.getProperty( "jetty.port" );
}
processNewArtifactsInRepo( data );
processNewVersionsOfArtifact( data );
return null;
}
private void processNewArtifactsInRepo( List<ArchivaArtifact> data )
private SyndFeed processNewArtifactsInRepo( String repoId )
{
List<RssFeedEntry> entries = new ArrayList<RssFeedEntry>();
String repoId = getRepoId( data );
RssFeedEntry entry =
new RssFeedEntry( NEW_ARTIFACTS_IN_REPO + "\'" + repoId + "\'" + " as of " +
Calendar.getInstance().getTime(), getBaseUrl() + "/archiva/rss/new_artifacts_" + repoId + ".xml" );
String description = "These are the new artifacts found in repository " + "\'" + repoId + "\'" + ": \n";
for ( ArchivaArtifact artifact : data )
try
{
description = description + artifact.toString() + " | ";
}
entry.setDescription( description );
entries.add( entry );
Constraint artifactsByRepo = new ArtifactsByRepositoryConstraint( repoId, "whenGathered" );
List<ArchivaArtifact> artifacts = artifactDAO.queryArtifacts( artifactsByRepo );
generateFeed( "new_artifacts_" + repoId + ".xml", NEW_ARTIFACTS_IN_REPO + "\'" + repoId + "\'",
getBaseUrl() + "/archiva/repository/rss/new_artifacts_" + repoId + ".xml",
"New artifacts found in repository " + "\'" + repoId + "\'" + " during repository scan.", entries );
List<RssFeedEntry> entries = processData( artifacts, true );
return generator.generateFeed( getTitle() + "\'" + repoId + "\'", "New artifacts found in repository " +
"\'" + repoId + "\'" + " during repository scan.", entries, "new_artifacts_" + repoId + ".xml" );
}
catch ( ArchivaDatabaseException ae )
{
log.error( ae.getMessage() );
}
return null;
}
private void processNewVersionsOfArtifact( List<ArchivaArtifact> data )
public String getTitle()
{
String repoId = getRepoId( data );
List<String> artifacts = new ArrayList<String>();
for ( ArchivaArtifact artifact : data )
{
artifacts.add( artifact.toString() );
}
Collections.sort( artifacts );
Map<String, String> artifactsMap = toMap( artifacts );
for ( String key : artifactsMap.keySet() )
{
List<RssFeedEntry> entries = new ArrayList<RssFeedEntry>();
RssFeedEntry entry =
new RssFeedEntry( NEW_VERSIONS_OF_ARTIFACT + "\'" + key + "\'" + " as of " +
Calendar.getInstance().getTime(), getBaseUrl() + "/archiva/rss/new_versions_" + key + ".xml" );
String description =
"These are the new versions of artifact " + "\'" + key + "\'" + " in the repository: \n" +
( (String) artifactsMap.get( key ) );
entry.setDescription( description );
entries.add( entry );
generateFeed( "new_versions_" + key + ".xml", NEW_VERSIONS_OF_ARTIFACT + "\'" + key + "\'",
getBaseUrl() + "/archiva/rss/new_versions_" + key + ".xml",
"New versions of artifact " + "\'" + key + "\' found in repository " + "\'" + repoId + "\'" +
" during repository scan.", entries );
}
return title;
}
private String getRepoId( List<ArchivaArtifact> data )
public String getDescription()
{
String repoId = "";
if ( !data.isEmpty() )
{
repoId = ( (ArchivaArtifact) data.get( 0 ) ).getModel().getRepositoryId();
}
return repoId;
}
private void generateFeed( String filename, String title, String link, String description,
List<RssFeedEntry> dataEntries )
{
generator.generateFeed( title, link, description, dataEntries, filename );
}
private Map<String, String> toMap( List<String> artifacts )
{
Map<String, String> artifactsMap = new HashMap<String, String>();
for ( String id : artifacts )
{
String key = StringUtils.substringBefore( id, ":" );
key = key + ":" + StringUtils.substringBefore( StringUtils.substringAfter( id, ":" ), ":" );
String value = (String) artifactsMap.get( key );
if ( value != null )
{
value = value + " | " + id;
}
else
{
value = id;
}
artifactsMap.put( key, value );
}
return artifactsMap;
return desc;
}
public RssFeedGenerator getGenerator()
@@ -195,17 +118,14 @@ public class NewArtifactsRssFeedProcessor
{
this.generator = generator;
}
private String getBaseUrl()
public ArtifactDAO getArtifactDAO()
{
String baseUrl = "http://" + host;
if( port != null && !"".equals( port ) )
{
baseUrl = baseUrl + ":" + port;
}
return baseUrl;
return artifactDAO;
}
public void setArtifactDAO( ArtifactDAO artifactDAO )
{
this.artifactDAO = artifactDAO;
}
}

File: NewVersionsOfArtifactRssFeedProcessor.java (new file)

@@ -0,0 +1,142 @@
package org.apache.archiva.rss.processor;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.List;
import java.util.Map;
import org.apache.archiva.rss.RssFeedEntry;
import org.apache.archiva.rss.RssFeedGenerator;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.database.constraints.ArtifactVersionsConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.sun.syndication.feed.synd.SyndFeed;
/**
* Retrieve and process new versions of an artifact from the database and
* generate a rss feed. The versions will be grouped by the date when the artifact
* was gathered. Each group will appear as one entry in the feed.
*
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @version
* @plexus.component role="org.apache.archiva.rss.processor.RssFeedProcessor" role-hint="new-versions"
*/
public class NewVersionsOfArtifactRssFeedProcessor
extends AbstractArtifactsRssFeedProcessor
{
private String title = "New Versions of Artifact ";
private String desc = "These are the new artifacts found in the repository ";
/**
* @plexus.requirement
*/
private RssFeedGenerator generator;
private Logger log = LoggerFactory.getLogger( NewArtifactsRssFeedProcessor.class );
/**
* @plexus.requirement role-hint="jdo"
*/
private ArtifactDAO artifactDAO;
/**
* Process all versions of the artifact which had a rss feed request.
*/
public SyndFeed process( Map<String, String> reqParams )
{
String repoId = reqParams.get( RssFeedProcessor.KEY_REPO_ID );
String groupId = reqParams.get( RssFeedProcessor.KEY_GROUP_ID );
String artifactId = reqParams.get( RssFeedProcessor.KEY_ARTIFACT_ID );
if ( repoId != null && groupId != null && artifactId != null )
{
return processNewVersionsOfArtifact( repoId, groupId, artifactId );
}
return null;
}
private SyndFeed processNewVersionsOfArtifact( String repoId, String groupId, String artifactId )
{
try
{
Constraint artifactVersions = new ArtifactVersionsConstraint( repoId, groupId, artifactId, "whenGathered" );
List<ArchivaArtifact> artifacts = artifactDAO.queryArtifacts( artifactVersions );
List<RssFeedEntry> entries = processData( artifacts, false );
String key = groupId + ":" + artifactId;
return generator.generateFeed( getTitle() + "\'" + key + "\'", "New versions of artifact " + "\'" + key +
"\' found in repository " + "\'" + repoId + "\'" + " during repository scan.", entries,
"new_versions_" + key + ".xml" );
}
catch ( ObjectNotFoundException oe )
{
oe.printStackTrace();
log.error( oe.getMessage() );
}
catch ( ArchivaDatabaseException ae )
{
ae.printStackTrace();
log.error( ae.getMessage() );
}
return null;
}
public String getTitle()
{
return title;
}
public String getDescription()
{
return desc;
}
public RssFeedGenerator getGenerator()
{
return generator;
}
public void setGenerator( RssFeedGenerator generator )
{
this.generator = generator;
}
public ArtifactDAO getArtifactDAO()
{
return artifactDAO;
}
public void setArtifactDAO( ArtifactDAO artifactDAO )
{
this.artifactDAO = artifactDAO;
}
}

File: RssFeedProcessor.java

@@ -19,17 +19,23 @@ package org.apache.archiva.rss.processor;
* under the License.
*/
import java.util.List;
import java.util.Map;
import org.apache.maven.archiva.model.ArchivaArtifact;
import com.sun.syndication.feed.synd.SyndFeed;
/**
* Process data that will be fed into the RssFeedGenerator
* Retrieve and process the data that will be fed into the RssFeedGenerator.
*
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @version
*/
public interface RssFeedProcessor
{
void process( List<ArchivaArtifact> data );
public static final String KEY_REPO_ID = "repoId";
public static final String KEY_GROUP_ID = "groupId";
public static final String KEY_ARTIFACT_ID = "artifactId";
SyndFeed process( Map<String, String> reqParams );
}
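
Callers now drive the processors through this Map-based contract instead of handing them artifact lists. A sketch of a hypothetical caller that has a processor injected (for example via the plexus role-hints "new-artifacts" or "new-versions" declared on the implementations above) and builds the request parameters; the class and method names here are assumptions:

import java.util.HashMap;
import java.util.Map;

import org.apache.archiva.rss.processor.RssFeedProcessor;

import com.sun.syndication.feed.synd.SyndFeed;

public class FeedRequestSketch
{
    // Normally injected by plexus using a role-hint such as "new-versions".
    private RssFeedProcessor processor;

    public SyndFeed newVersionsFeed( String repoId, String groupId, String artifactId )
    {
        Map<String, String> reqParams = new HashMap<String, String>();
        reqParams.put( RssFeedProcessor.KEY_REPO_ID, repoId );
        reqParams.put( RssFeedProcessor.KEY_GROUP_ID, groupId );
        reqParams.put( RssFeedProcessor.KEY_ARTIFACT_ID, artifactId );

        // Returns the feed in memory; rendering it to XML is left to the caller.
        return processor.process( reqParams );
    }
}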

File: RssFeedGeneratorTest.java

@@ -19,13 +19,13 @@ package org.apache.archiva.rss;
* under the License.
*/
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.custommonkey.xmlunit.XMLAssert;
import com.sun.syndication.feed.synd.SyndEntry;
import com.sun.syndication.feed.synd.SyndFeed;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
@@ -35,84 +35,74 @@ public class RssFeedGeneratorTest
extends PlexusInSpringTestCase
{
private RssFeedGenerator generator;
public void setUp()
throws Exception
{
super.setUp();
generator = (RssFeedGenerator) lookup( RssFeedGenerator.class );
File outputDir = new File( getBasedir(), "/target/test-classes/rss-feeds" );
outputDir.mkdir();
}
public void testNewFeed()
throws Exception
{
generator.setRssDirectory( getBasedir() + "/target/test-classes/rss-feeds/" );
List<RssFeedEntry> entries = new ArrayList<RssFeedEntry>();
RssFeedEntry entry = new RssFeedEntry( "Item 1", "http://rss-2.0-test-feed.com" );
RssFeedEntry entry = new RssFeedEntry( "Item 1" );
entry.setDescription( "RSS 2.0 feed item 1." );
entry.setGuid( "http://rss-2.0-test-feed.com/item1" );
entries.add( entry );
entry = new RssFeedEntry( "Item 2", "http://rss-2.0-test-feed.com" );
entry = new RssFeedEntry( "Item 2" );
entry.setDescription( "RSS 2.0 feed item 2." );
entry.setGuid( "http://rss-2.0-test-feed.com/item2" );
entries.add( entry );
entry = new RssFeedEntry( "Item 3", "http://rss-2.0-test-feed.com" );
entry = new RssFeedEntry( "Item 3" );
entry.setDescription( "RSS 2.0 feed item 3." );
entry.setGuid( "http://rss-2.0-test-feed.com/item3" );
entries.add( entry );
generator.generateFeed( "Test Feed", "http://localhost:8080/archiva", "The test feed from Archiva.", entries,
"generated-rss2.0-feed.xml" );
SyndFeed feed =
generator.generateFeed( "Test Feed", "The test feed from Archiva.", entries, "generated-rss2.0-feed.xml" );
File outputFile = new File( getBasedir(), "/target/test-classes/rss-feeds/generated-rss2.0-feed.xml" );
String generatedContent = FileUtils.readFileToString( outputFile );
assertEquals( "Test Feed", feed.getTitle() );
assertEquals( "http://localhost:8080/archiva/rss/generated-rss2.0-feed.xml", feed.getLink() );
assertEquals( "The test feed from Archiva.", feed.getDescription() );
assertEquals( "en-us", feed.getLanguage() );
XMLAssert.assertXpathEvaluatesTo( "Test Feed", "//channel/title", generatedContent );
XMLAssert.assertXpathEvaluatesTo( "http://localhost:8080/archiva", "//channel/link", generatedContent );
XMLAssert.assertXpathEvaluatesTo( "The test feed from Archiva.", "//channel/description", generatedContent );
XMLAssert.assertXpathEvaluatesTo( "en-us", "//channel/language", generatedContent );
String expectedItem1 =
"<channel><item><title>Item 1</title></item><item><title>Item 2</title></item>"
+ "<item><title>Item 3</title></item></channel>";
XMLAssert.assertXpathsEqual( "//channel/item/title", expectedItem1, "//channel/item/title", generatedContent );
outputFile.deleteOnExit();
List<SyndEntry> syndEntries = feed.getEntries();
assertEquals( 3, syndEntries.size() );
assertEquals( "Item 1", syndEntries.get( 0 ).getTitle() );
assertEquals( "Item 2", syndEntries.get( 1 ).getTitle() );
assertEquals( "Item 3", syndEntries.get( 2 ).getTitle() );
}
/*
* this test might need to be removed since
* no updates are happening in the feeds anymore since everything's processed from the db.
*
public void testUpdateFeed()
throws Exception
{
generator.setRssDirectory( getBasedir() + "/target/test-classes/rss-feeds/" );
List<RssFeedEntry> entries = new ArrayList<RssFeedEntry>();
RssFeedEntry entry = new RssFeedEntry( "Item 1", "http://rss-2.0-test-feed.com" );
RssFeedEntry entry = new RssFeedEntry( "Item 1" );
entry.setDescription( "RSS 2.0 feed item 1." );
entry.setGuid( "http://rss-2.0-test-feed.com/item1" );
entries.add( entry );
entry = new RssFeedEntry( "Item 2", "http://rss-2.0-test-feed.com" );
entry = new RssFeedEntry( "Item 2" );
entry.setDescription( "RSS 2.0 feed item 2." );
entry.setGuid( "http://rss-2.0-test-feed.com/item2" );
entries.add( entry );
generator.generateFeed( "Test Feed", "http://localhost:8080/archiva", "The test feed from Archiva.", entries,
generator.generateFeed( "Test Feed", "The test feed from Archiva.", entries,
"generated-test-update-rss2.0-feed.xml" );
File outputFile = new File( getBasedir(), "/target/test-classes/rss-feeds/generated-test-update-rss2.0-feed.xml" );
String generatedContent = FileUtils.readFileToString( outputFile );
XMLAssert.assertXpathEvaluatesTo( "Test Feed", "//channel/title", generatedContent );
XMLAssert.assertXpathEvaluatesTo( "http://localhost:8080/archiva", "//channel/link", generatedContent );
XMLAssert.assertXpathEvaluatesTo( "http://localhost:8080/archiva/rss/generated-test-update-rss2.0-feed.xml", "//channel/link", generatedContent );
XMLAssert.assertXpathEvaluatesTo( "The test feed from Archiva.", "//channel/description", generatedContent );
XMLAssert.assertXpathEvaluatesTo( "en-us", "//channel/language", generatedContent );
@@ -123,25 +113,23 @@ public class RssFeedGeneratorTest
//update existing rss feed
entries = new ArrayList<RssFeedEntry>();
entry = new RssFeedEntry( "Item 3", "http://rss-2.0-test-feed.com" );
entry = new RssFeedEntry( "Item 3" );
entry.setDescription( "RSS 2.0 feed item 3." );
entry.setGuid( "http://rss-2.0-test-feed.com/item4" );
entries.add( entry );
entry = new RssFeedEntry( "Item 4", "http://rss-2.0-test-feed.com" );
entry = new RssFeedEntry( "Item 4" );
entry.setDescription( "RSS 2.0 feed item 4." );
entry.setGuid( "http://rss-2.0-test-feed.com/item5" );
entries.add( entry );
generator.generateFeed( "Test Feed", "http://localhost:8080/archiva", "The test feed from Archiva.", entries,
generator.generateFeed( "Test Feed", "The test feed from Archiva.", entries,
"generated-test-update-rss2.0-feed.xml" );
outputFile = new File( getBasedir(), "/target/test-classes/rss-feeds/generated-test-update-rss2.0-feed.xml" );
generatedContent = FileUtils.readFileToString( outputFile );
XMLAssert.assertXpathEvaluatesTo( "Test Feed", "//channel/title", generatedContent );
XMLAssert.assertXpathEvaluatesTo( "http://localhost:8080/archiva", "//channel/link", generatedContent );
XMLAssert.assertXpathEvaluatesTo( "http://localhost:8080/archiva/rss/generated-test-update-rss2.0-feed.xml", "//channel/link", generatedContent );
XMLAssert.assertXpathEvaluatesTo( "The test feed from Archiva.", "//channel/description", generatedContent );
XMLAssert.assertXpathEvaluatesTo( "en-us", "//channel/language", generatedContent );
@@ -152,5 +140,6 @@ public class RssFeedGeneratorTest
outputFile.deleteOnExit();
}
*/
}

File: NewArtifactsRssFeedProcessorTest.java

@@ -19,13 +19,19 @@ package org.apache.archiva.rss.processor;
* under the License.
*/
import java.io.File;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.archiva.rss.RssFeedGenerator;
import org.apache.archiva.rss.stubs.ArtifactDAOStub;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import com.sun.syndication.feed.synd.SyndEntry;
import com.sun.syndication.feed.synd.SyndFeed;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
@@ -34,75 +40,89 @@ import org.codehaus.plexus.spring.PlexusInSpringTestCase;
public class NewArtifactsRssFeedProcessorTest
extends PlexusInSpringTestCase
{
private RssFeedProcessor newArtifactsProcessor;
private String rssDirectory;
private NewArtifactsRssFeedProcessor newArtifactsProcessor;
private ArtifactDAOStub artifactDAOStub;
private RssFeedGenerator rssFeedGenerator;
public void setUp()
throws Exception
{
super.setUp();
newArtifactsProcessor = (RssFeedProcessor) lookup( RssFeedProcessor.class, "new-artifacts" );
rssDirectory = getBasedir() + "/target/test-classes/rss-feeds/";
RssFeedGenerator generator = ( ( NewArtifactsRssFeedProcessor ) newArtifactsProcessor ).getGenerator();
generator.setRssDirectory( rssDirectory );
( (NewArtifactsRssFeedProcessor) newArtifactsProcessor ).setGenerator( generator );
newArtifactsProcessor = new NewArtifactsRssFeedProcessor();
artifactDAOStub = new ArtifactDAOStub();
rssFeedGenerator = new RssFeedGenerator();
newArtifactsProcessor.setGenerator( rssFeedGenerator );
newArtifactsProcessor.setArtifactDAO( artifactDAOStub );
}
public void testProcess()
throws Exception
{
List<ArchivaArtifact> newArtifacts = new ArrayList<ArchivaArtifact>();
Date whenGathered = Calendar.getInstance().getTime();
whenGathered.setTime( 123456789 );
ArchivaArtifact artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-one", "1.0", "", "jar" );
artifact.getModel().setRepositoryId( "test-repo" );
artifact.getModel().setWhenGathered( whenGathered );
newArtifacts.add( artifact );
artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-one", "1.1", "", "jar" );
artifact.getModel().setRepositoryId( "test-repo" );
artifact.getModel().setWhenGathered( whenGathered );
newArtifacts.add( artifact );
artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-one", "2.0", "", "jar" );
artifact.getModel().setRepositoryId( "test-repo" );
artifact.getModel().setWhenGathered( whenGathered );
newArtifacts.add( artifact );
artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-two", "1.0.1", "", "jar" );
artifact.getModel().setRepositoryId( "test-repo" );
artifact.getModel().setWhenGathered( whenGathered );
newArtifacts.add( artifact );
artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-two", "1.0.2", "", "jar" );
artifact.getModel().setRepositoryId( "test-repo" );
artifact.getModel().setWhenGathered( whenGathered );
newArtifacts.add( artifact );
artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-two", "1.0.3-SNAPSHOT", "", "jar" );
artifact.getModel().setRepositoryId( "test-repo" );
artifact.getModel().setWhenGathered( whenGathered );
newArtifacts.add( artifact );
artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-three", "2.0-SNAPSHOT", "", "jar" );
artifact.getModel().setRepositoryId( "test-repo" );
artifact.getModel().setWhenGathered( whenGathered );
newArtifacts.add( artifact );
artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-four", "1.1-beta-2", "", "jar" );
artifact.getModel().setRepositoryId( "test-repo" );
artifact.getModel().setWhenGathered( whenGathered );
newArtifacts.add( artifact );
newArtifactsProcessor.process( newArtifacts );
File outputFile = new File( rssDirectory, "new_artifacts_test-repo.xml" );
assertTrue( outputFile.exists() );
outputFile = new File( rssDirectory, "new_versions_org.apache.archiva:artifact-one.xml" );
assertTrue( outputFile.exists() );
outputFile = new File( rssDirectory, "new_versions_org.apache.archiva:artifact-two.xml" );
assertTrue( outputFile.exists() );
outputFile = new File( rssDirectory, "new_versions_org.apache.archiva:artifact-three.xml" );
assertTrue( outputFile.exists() );
outputFile = new File( rssDirectory, "new_versions_org.apache.archiva:artifact-four.xml" );
assertTrue( outputFile.exists() );
artifactDAOStub.setArtifacts( newArtifacts );
Map<String, String> reqParams = new HashMap<String, String>();
reqParams.put( RssFeedProcessor.KEY_REPO_ID, "test-repo" );
SyndFeed feed = newArtifactsProcessor.process( reqParams );
assertTrue( feed.getTitle().equals( "New Artifacts in Repository 'test-repo'" ) );
assertTrue( feed.getLink().equals( "http://localhost:8080/archiva/rss/new_artifacts_test-repo.xml" ) );
assertTrue( feed.getDescription().equals(
"New artifacts found in repository 'test-repo' during repository scan." ) );
assertTrue( feed.getLanguage().equals( "en-us" ) );
List<SyndEntry> entries = feed.getEntries();
assertEquals( entries.size(), 1 );
assertTrue( entries.get( 0 ).getTitle().equals( "New Artifacts in Repository 'test-repo' as of " + whenGathered ) );
}
}

File: NewVersionsOfArtifactRssFeedProcessorTest.java (new file)

@@ -0,0 +1,112 @@
package org.apache.archiva.rss.processor;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.archiva.rss.RssFeedGenerator;
import org.apache.archiva.rss.stubs.ArtifactDAOStub;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import com.sun.syndication.feed.synd.SyndEntry;
import com.sun.syndication.feed.synd.SyndFeed;
public class NewVersionsOfArtifactRssFeedProcessorTest
extends PlexusInSpringTestCase
{
private NewVersionsOfArtifactRssFeedProcessor newVersionsProcessor;
private ArtifactDAOStub artifactDAOStub;
private RssFeedGenerator rssFeedGenerator;
public void setUp()
throws Exception
{
super.setUp();
newVersionsProcessor = new NewVersionsOfArtifactRssFeedProcessor();
artifactDAOStub = new ArtifactDAOStub();
rssFeedGenerator = new RssFeedGenerator();
newVersionsProcessor.setGenerator( rssFeedGenerator );
newVersionsProcessor.setArtifactDAO( artifactDAOStub );
}
public void testProcess()
throws Exception
{
List<ArchivaArtifact> artifacts = new ArrayList<ArchivaArtifact>();
Date whenGathered = Calendar.getInstance().getTime();
whenGathered.setTime( 123456789 );
ArchivaArtifact artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-two", "1.0.1", "", "jar" );
artifact.getModel().setRepositoryId( "test-repo" );
artifact.getModel().setWhenGathered( whenGathered );
artifacts.add( artifact );
artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-two", "1.0.2", "", "jar" );
artifact.getModel().setRepositoryId( "test-repo" );
artifact.getModel().setWhenGathered( whenGathered );
artifacts.add( artifact );
Date whenGatheredNext = Calendar.getInstance().getTime();
whenGatheredNext.setTime( 345678912 );
artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-two", "1.0.3-SNAPSHOT", "", "jar" );
artifact.getModel().setRepositoryId( "test-repo" );
artifact.getModel().setWhenGathered( whenGatheredNext );
artifacts.add( artifact );
artifactDAOStub.setArtifacts( artifacts );
Map<String, String> reqParams = new HashMap<String, String>();
reqParams.put( RssFeedProcessor.KEY_REPO_ID, "test-repo" );
reqParams.put( RssFeedProcessor.KEY_GROUP_ID, "org.apache.archiva" );
reqParams.put( RssFeedProcessor.KEY_ARTIFACT_ID, "artifact-two" );
SyndFeed feed = newVersionsProcessor.process( reqParams );
assertEquals( "New Versions of Artifact 'org.apache.archiva:artifact-two'", feed.getTitle() );
assertEquals( "http://localhost:8080/archiva/rss/new_versions_org.apache.archiva:artifact-two.xml",
feed.getLink() );
assertEquals(
"New versions of artifact 'org.apache.archiva:artifact-two' found in repository 'test-repo' during repository scan.",
feed.getDescription() );
assertEquals( "en-us", feed.getLanguage() );
List<SyndEntry> entries = feed.getEntries();
assertEquals( 2, entries.size() );
assertEquals( "New Versions of Artifact 'org.apache.archiva:artifact-two' as of " + whenGathered,
entries.get( 0 ).getTitle() );
assertEquals( "New Versions of Artifact 'org.apache.archiva:artifact-two' as of " + whenGatheredNext,
entries.get( 1 ).getTitle() );
}
}

File: ArtifactDAOStub.java (new file)

@@ -0,0 +1,80 @@
package org.apache.archiva.rss.stubs;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.List;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.model.ArchivaArtifact;
/**
* ArtifactDAO stub.
*
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @version *
*/
public class ArtifactDAOStub
implements ArtifactDAO
{
private List<ArchivaArtifact> artifacts;
public ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String classifier,
String type )
{
// TODO Auto-generated method stub
return null;
}
public void deleteArtifact( ArchivaArtifact artifact )
throws ArchivaDatabaseException
{
// TODO Auto-generated method stub
}
public ArchivaArtifact getArtifact( String groupId, String artifactId, String version, String classifier,
String type )
throws ObjectNotFoundException, ArchivaDatabaseException
{
// TODO Auto-generated method stub
return null;
}
public List queryArtifacts( Constraint constraint )
throws ObjectNotFoundException, ArchivaDatabaseException
{
return artifacts;
}
public ArchivaArtifact saveArtifact( ArchivaArtifact artifact )
throws ArchivaDatabaseException
{
// TODO Auto-generated method stub
return null;
}
public void setArtifacts( List<ArchivaArtifact> artifacts )
{
this.artifacts = artifacts;
}
}