Using new artifact util in upload service

Martin Stockhammer 2019-05-04 10:49:34 +02:00
parent a3c149327e
commit 0e09883158
1 changed file with 301 additions and 406 deletions
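
For readers skimming the diff: the upload service previously built artifact locations from the string returned by ManagedRepositoryContent.toPath(artifactReference) and then cut it apart with lastIndexOf('/') and substring calls. This commit swaps that for the new ArtifactUtil, which resolves an ArtifactReference directly to a java.nio.file.Path inside the managed repository, so the target directory and file name come from the Path API instead of substring arithmetic. A minimal sketch of the new call, assuming the Archiva types imported in the diff below; the helper class is illustrative and the checked-exception signature is abbreviated:

    import java.nio.file.Path;

    import org.apache.archiva.model.ArtifactReference;
    import org.apache.archiva.repository.ManagedRepository;
    import org.apache.archiva.repository.content.ArtifactUtil;

    class ArtifactTargetSketch {
        // Illustrative helper, not part of the commit: resolve where an uploaded
        // artifact lands in the repository and derive the target directory from it.
        Path targetDirectory(ArtifactUtil artifactUtil, ManagedRepository repoConfig,
                             ArtifactReference ref) throws Exception {
            Path artifactFile = artifactUtil.getArtifactPath(repoConfig, ref);
            // artifactFile.getFileName() replaces the old substring(lastIndex + 1),
            // and getParent() replaces substring(0, lastIndex).
            return artifactFile.getParent();
        }
    }

The saveFile and savePomFile hunks below follow this pattern. (A short note on the reworked per-session file list follows the diff.)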


@@ -34,10 +34,9 @@ import org.apache.archiva.model.ArchivaRepositoryMetadata;
import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.model.SnapshotVersion;
import org.apache.archiva.redback.components.taskqueue.TaskQueueException;
import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RepositoryContentFactory;
import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.RepositoryNotFoundException;
import org.apache.archiva.repository.content.ArtifactUtil;
import org.apache.archiva.repository.metadata.MetadataTools;
import org.apache.archiva.repository.metadata.RepositoryMetadataException;
import org.apache.archiva.repository.metadata.RepositoryMetadataWriter;
@@ -63,6 +62,7 @@ import org.springframework.stereotype.Service;
import javax.inject.Inject;
import javax.inject.Named;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import java.io.FileOutputStream;
@@ -72,14 +72,7 @@ import java.net.URLDecoder;
import java.nio.file.*;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.TimeZone;
import java.util.*;
import java.util.concurrent.CopyOnWriteArrayList;
/**
@@ -88,9 +81,8 @@ import java.util.concurrent.CopyOnWriteArrayList;
@Service("fileUploadService#rest")
public class DefaultFileUploadService
extends AbstractRestService
implements FileUploadService
{
private Logger log = LoggerFactory.getLogger( getClass() );
implements FileUploadService {
private Logger log = LoggerFactory.getLogger(getClass());
@Context
private HttpServletRequest httpServletRequest;
@@ -99,12 +91,12 @@ public class DefaultFileUploadService
private ManagedRepositoryAdmin managedRepositoryAdmin;
@Inject
private RepositoryContentFactory repositoryFactory;
private ArtifactUtil artifactUtil;
@Inject
private ArchivaAdministration archivaAdministration;
private List<ChecksumAlgorithm> algorithms = Arrays.asList( ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5 );
private List<ChecksumAlgorithm> algorithms = Arrays.asList(ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5);
private final String FS = FileSystems.getDefault().getSeparator();
@@ -112,49 +104,43 @@ public class DefaultFileUploadService
@Named(value = "archivaTaskScheduler#repository")
private ArchivaTaskScheduler<RepositoryTask> scheduler;
private String getStringValue( MultipartBody multipartBody, String attachmentId )
throws IOException
{
Attachment attachment = multipartBody.getAttachment( attachmentId );
private String getStringValue(MultipartBody multipartBody, String attachmentId)
throws IOException {
Attachment attachment = multipartBody.getAttachment(attachmentId);
return attachment == null ? "" :
StringUtils.trim(URLDecoder.decode(IOUtils.toString( attachment.getDataHandler().getInputStream(), "UTF-8" ), "UTF-8"));
StringUtils.trim(URLDecoder.decode(IOUtils.toString(attachment.getDataHandler().getInputStream(), "UTF-8"), "UTF-8"));
}
@Override
public FileMetadata post( MultipartBody multipartBody )
throws ArchivaRestServiceException
{
public FileMetadata post(MultipartBody multipartBody)
throws ArchivaRestServiceException {
try
{
try {
String classifier = getStringValue( multipartBody, "classifier" );
String packaging = getStringValue( multipartBody, "packaging" );
String classifier = getStringValue(multipartBody, "classifier");
String packaging = getStringValue(multipartBody, "packaging");
checkParamChars( "classifier", classifier );
checkParamChars( "packaging", packaging);
checkParamChars("classifier", classifier);
checkParamChars("packaging", packaging);
// skygo: http header form pomFile was once sending 1 for true and void for false
// leading to permanent false value for pomFile if using toBoolean(); use , "1", ""
boolean pomFile = false;
try
{
pomFile = BooleanUtils.toBoolean( getStringValue( multipartBody, "pomFile" ) );
}
catch ( IllegalArgumentException ex )
{
try {
pomFile = BooleanUtils.toBoolean(getStringValue(multipartBody, "pomFile"));
} catch (IllegalArgumentException ex) {
ArchivaRestServiceException e = new ArchivaRestServiceException("Bad value for boolean pomFile field.", null);
e.setHttpErrorCode(422);
e.setFieldName( "pomFile" );
e.setFieldName("pomFile");
e.setErrorKey("fileupload.malformed.pomFile");
throw e;
}
Attachment file = multipartBody.getAttachment( "files[]" );
Attachment file = multipartBody.getAttachment("files[]");
//Content-Disposition: form-data; name="files[]"; filename="org.apache.karaf.features.command-2.2.2.jar"
String fileName = file.getContentDisposition().getParameter( "filename" );
String fileName = file.getContentDisposition().getParameter("filename");
Path fileNamePath = Paths.get(fileName);
if (!fileName.equals(fileNamePath.getFileName().toString())) {
ArchivaRestServiceException e = new ArchivaRestServiceException("Bad filename in upload content: " + fileName + " - File traversal chars (..|/) are not allowed"
@@ -164,60 +150,62 @@ public class DefaultFileUploadService
throw e;
}
Path tmpFile = Files.createTempFile( "upload-artifact", ".tmp" );
Path tmpFile = Files.createTempFile("upload-artifact", ".tmp");
tmpFile.toFile().deleteOnExit();
IOUtils.copy( file.getDataHandler().getInputStream(), new FileOutputStream( tmpFile.toFile() ) );
FileMetadata fileMetadata = new FileMetadata( fileName, Files.size(tmpFile), "theurl" );
fileMetadata.setServerFileName( tmpFile.toString() );
fileMetadata.setClassifier( classifier );
fileMetadata.setDeleteUrl( tmpFile.getFileName().toString() );
fileMetadata.setPomFile( pomFile );
fileMetadata.setPackaging( packaging );
IOUtils.copy(file.getDataHandler().getInputStream(), new FileOutputStream(tmpFile.toFile()));
FileMetadata fileMetadata = new FileMetadata(fileName, Files.size(tmpFile), "theurl");
fileMetadata.setServerFileName(tmpFile.toString());
fileMetadata.setClassifier(classifier);
fileMetadata.setDeleteUrl(tmpFile.getFileName().toString());
fileMetadata.setPomFile(pomFile);
fileMetadata.setPackaging(packaging);
log.info( "uploading file: {}", fileMetadata );
log.info("uploading file: {}", fileMetadata);
List<FileMetadata> fileMetadatas = getSessionFilesList();
fileMetadatas.add( fileMetadata );
fileMetadatas.add(fileMetadata);
return fileMetadata;
}
catch ( IOException e )
{
throw new ArchivaRestServiceException( e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e );
} catch (IOException e) {
throw new ArchivaRestServiceException(e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
}
}
/**
* FIXME must be per session synchronized not globally
*
* @return
* @return The file list from the session.
*/
protected synchronized List<FileMetadata> getSessionFilesList()
{
@SuppressWarnings("unchecked") List<FileMetadata> fileMetadatas = (List<FileMetadata>) httpServletRequest.getSession().getAttribute( FILES_SESSION_KEY );
if ( fileMetadatas == null )
{
fileMetadatas = new CopyOnWriteArrayList<>();
httpServletRequest.getSession().setAttribute( FILES_SESSION_KEY, fileMetadatas );
@SuppressWarnings("unchecked")
protected List<FileMetadata> getSessionFilesList() {
final HttpSession session = httpServletRequest.getSession();
List<FileMetadata> fileMetadata = (List<FileMetadata>) session.getAttribute(FILES_SESSION_KEY);
// Double check with synchronization, we assume, that httpServletRequest is
// fully initialized (no volatile)
if (fileMetadata == null) {
synchronized (session) {
fileMetadata = (List<FileMetadata>) session.getAttribute(FILES_SESSION_KEY);
if (fileMetadata == null) {
fileMetadata = new CopyOnWriteArrayList<>();
session.setAttribute(FILES_SESSION_KEY, fileMetadata);
}
return fileMetadatas;
}
}
return fileMetadata;
}
@Override
public Boolean deleteFile( String fileName )
throws ArchivaRestServiceException
{
public Boolean deleteFile(String fileName)
throws ArchivaRestServiceException {
log.debug("Deleting file {}", fileName);
// we make sure, that there are no other path components in the filename:
String checkedFileName = Paths.get(fileName).getFileName().toString();
Path file = SystemUtils.getJavaIoTmpDir().toPath().resolve( checkedFileName );
log.debug( "delete file:{},exists:{}", file, Files.exists(file) );
boolean removed = getSessionFileMetadatas().remove( new FileMetadata( fileName ) );
Path file = SystemUtils.getJavaIoTmpDir().toPath().resolve(checkedFileName);
log.debug("delete file:{},exists:{}", file, Files.exists(file));
boolean removed = getSessionFileMetadatas().remove(new FileMetadata(fileName));
// try with full name as ui only know the file name
if ( !removed ) {
if (!removed) {
removed = getSessionFileMetadatas().remove(new FileMetadata(file.toString()));
}
if (removed) {
@@ -233,12 +221,10 @@ public class DefaultFileUploadService
@Override
public Boolean clearUploadedFiles()
throws ArchivaRestServiceException
{
List<FileMetadata> fileMetadatas = new ArrayList<>( getSessionFileMetadatas() );
for ( FileMetadata fileMetadata : fileMetadatas )
{
deleteFile( Paths.get( fileMetadata.getServerFileName() ).toString() );
throws ArchivaRestServiceException {
List<FileMetadata> fileMetadatas = new ArrayList<>(getSessionFileMetadatas());
for (FileMetadata fileMetadata : fileMetadatas) {
deleteFile(Paths.get(fileMetadata.getServerFileName()).toString());
}
getSessionFileMetadatas().clear();
return Boolean.TRUE;
@@ -246,10 +232,9 @@ public class DefaultFileUploadService
@Override
public List<FileMetadata> getSessionFileMetadatas()
throws ArchivaRestServiceException
{
throws ArchivaRestServiceException {
@SuppressWarnings("unchecked") List<FileMetadata> fileMetadatas =
(List<FileMetadata>) httpServletRequest.getSession().getAttribute( FILES_SESSION_KEY );
(List<FileMetadata>) httpServletRequest.getSession().getAttribute(FILES_SESSION_KEY);
return fileMetadatas == null ? Collections.<FileMetadata>emptyList() : fileMetadatas;
}
@@ -278,455 +263,369 @@ public class DefaultFileUploadService
}
@Override
public Boolean save( String repositoryId, String groupId, String artifactId, String version, String packaging,
boolean generatePom )
throws ArchivaRestServiceException
{
repositoryId = StringUtils.trim( repositoryId );
groupId = StringUtils.trim( groupId );
artifactId = StringUtils.trim( artifactId );
version = StringUtils.trim( version );
packaging = StringUtils.trim( packaging );
public Boolean save(String repositoryId, String groupId, String artifactId, String version, String packaging,
boolean generatePom)
throws ArchivaRestServiceException {
repositoryId = StringUtils.trim(repositoryId);
groupId = StringUtils.trim(groupId);
artifactId = StringUtils.trim(artifactId);
version = StringUtils.trim(version);
packaging = StringUtils.trim(packaging);
checkParamChars("repositoryId", repositoryId);
checkParamChars("groupId", groupId);
checkParamChars("artifactId", artifactId);
checkParamChars( "version", version);
checkParamChars("version", version);
checkParamChars("packaging", packaging);
List<FileMetadata> fileMetadatas = getSessionFilesList();
if ( fileMetadatas == null || fileMetadatas.isEmpty() )
{
if (fileMetadatas == null || fileMetadatas.isEmpty()) {
return Boolean.FALSE;
}
try
{
ManagedRepository managedRepository = managedRepositoryAdmin.getManagedRepository( repositoryId );
try {
ManagedRepository managedRepository = managedRepositoryAdmin.getManagedRepository(repositoryId);
if ( managedRepository == null )
{
if (managedRepository == null) {
// TODO i18n ?
throw new ArchivaRestServiceException( "Cannot find managed repository with id " + repositoryId,
Response.Status.BAD_REQUEST.getStatusCode(), null );
throw new ArchivaRestServiceException("Cannot find managed repository with id " + repositoryId,
Response.Status.BAD_REQUEST.getStatusCode(), null);
}
if ( VersionUtil.isSnapshot( version ) && !managedRepository.isSnapshots() )
{
if (VersionUtil.isSnapshot(version) && !managedRepository.isSnapshots()) {
// TODO i18n ?
throw new ArchivaRestServiceException(
"Managed repository with id " + repositoryId + " do not accept snapshots",
Response.Status.BAD_REQUEST.getStatusCode(), null );
Response.Status.BAD_REQUEST.getStatusCode(), null);
}
}
catch ( RepositoryAdminException e )
{
throw new ArchivaRestServiceException( e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e );
} catch (RepositoryAdminException e) {
throw new ArchivaRestServiceException(e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
}
// get from the session file with groupId/artifactId
Iterable<FileMetadata> filesToAdd = Iterables.filter( fileMetadatas, new Predicate<FileMetadata>()
{
public boolean apply( FileMetadata fileMetadata )
{
Iterable<FileMetadata> filesToAdd = Iterables.filter(fileMetadatas, new Predicate<FileMetadata>() {
public boolean apply(FileMetadata fileMetadata) {
return fileMetadata != null && !fileMetadata.isPomFile();
}
} );
});
Iterator<FileMetadata> iterator = filesToAdd.iterator();
boolean pomGenerated = false;
while ( iterator.hasNext() )
{
while (iterator.hasNext()) {
FileMetadata fileMetadata = iterator.next();
log.debug( "fileToAdd: {}", fileMetadata );
saveFile( repositoryId, fileMetadata, generatePom && !pomGenerated, groupId, artifactId, version,
packaging );
log.debug("fileToAdd: {}", fileMetadata);
saveFile(repositoryId, fileMetadata, generatePom && !pomGenerated, groupId, artifactId, version,
packaging);
pomGenerated = true;
deleteFile( fileMetadata.getServerFileName() );
deleteFile(fileMetadata.getServerFileName());
}
filesToAdd = Iterables.filter( fileMetadatas, new Predicate<FileMetadata>()
{
filesToAdd = Iterables.filter(fileMetadatas, new Predicate<FileMetadata>() {
@Override
public boolean apply( FileMetadata fileMetadata )
{
public boolean apply(FileMetadata fileMetadata) {
return fileMetadata != null && fileMetadata.isPomFile();
}
} );
});
iterator = filesToAdd.iterator();
while ( iterator.hasNext() )
{
while (iterator.hasNext()) {
FileMetadata fileMetadata = iterator.next();
log.debug( "fileToAdd: {}", fileMetadata );
savePomFile( repositoryId, fileMetadata, groupId, artifactId, version, packaging );
deleteFile( fileMetadata.getServerFileName() );
log.debug("fileToAdd: {}", fileMetadata);
savePomFile(repositoryId, fileMetadata, groupId, artifactId, version, packaging);
deleteFile(fileMetadata.getServerFileName());
}
return Boolean.TRUE;
}
protected void savePomFile( String repositoryId, FileMetadata fileMetadata, String groupId, String artifactId,
String version, String packaging )
throws ArchivaRestServiceException
{
protected void savePomFile(String repositoryId, FileMetadata fileMetadata, String groupId, String artifactId,
String version, String packaging)
throws ArchivaRestServiceException {
log.debug("Saving POM");
try
{
try {
boolean fixChecksums =
!( archivaAdministration.getKnownContentConsumers().contains( "create-missing-checksums" ) );
ManagedRepository repoConfig = managedRepositoryAdmin.getManagedRepository( repositoryId );
ArtifactReference artifactReference = new ArtifactReference();
artifactReference.setArtifactId( artifactId );
artifactReference.setGroupId( groupId );
artifactReference.setVersion( version );
artifactReference.setClassifier( fileMetadata.getClassifier() );
artifactReference.setType( packaging );
ManagedRepositoryContent repository = repositoryFactory.getManagedRepositoryContent( repositoryId );
String artifactPath = repository.toPath( artifactReference );
int lastIndex = artifactPath.lastIndexOf( '/' );
String path = artifactPath.substring( 0, lastIndex );
Path targetPath = Paths.get( repoConfig.getLocation(), path );
String pomFilename = artifactPath.substring( lastIndex + 1 );
if ( StringUtils.isNotEmpty( fileMetadata.getClassifier() ) )
{
pomFilename = StringUtils.remove( pomFilename, "-" + fileMetadata.getClassifier() );
}
pomFilename = FilenameUtils.removeExtension( pomFilename ) + ".pom";
copyFile( Paths.get( fileMetadata.getServerFileName() ), targetPath, pomFilename, fixChecksums );
triggerAuditEvent( repoConfig.getId(), path + "/" + pomFilename, AuditEvent.UPLOAD_FILE );
queueRepositoryTask( repoConfig.getId(), targetPath.resolve(pomFilename ) );
log.debug("Finished Saving POM");
}
catch ( IOException ie )
{
log.error("IOException for POM {}", ie.getMessage());
throw new ArchivaRestServiceException( "Error encountered while uploading pom file: " + ie.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie );
}
catch ( RepositoryException rep )
{
log.error("RepositoryException for POM {}", rep.getMessage());
throw new ArchivaRestServiceException( "Repository exception: " + rep.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), rep );
}
catch ( RepositoryAdminException e )
{
log.error("RepositoryAdminException for POM {}", e.getMessage());
throw new ArchivaRestServiceException( "RepositoryAdmin exception: " + e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e );
}
}
protected void saveFile( String repositoryId, FileMetadata fileMetadata, boolean generatePom, String groupId,
String artifactId, String version, String packaging )
throws ArchivaRestServiceException
{
log.debug("Saving file");
try
{
!(archivaAdministration.getKnownContentConsumers().contains("create-missing-checksums"));
org.apache.archiva.repository.ManagedRepository repoConfig = repositoryRegistry.getManagedRepository(repositoryId);
ArtifactReference artifactReference = new ArtifactReference();
artifactReference.setArtifactId( artifactId );
artifactReference.setGroupId( groupId );
artifactReference.setVersion( version );
artifactReference.setClassifier( fileMetadata.getClassifier() );
ArtifactReference artifactReference = createArtifactRef(fileMetadata, groupId, artifactId, version);
artifactReference.setType(packaging);
Path pomPath = artifactUtil.getArtifactPath(repoConfig, artifactReference);
Path targetPath = pomPath.getParent();
String pomFilename = pomPath.getFileName().toString();
if (StringUtils.isNotEmpty(fileMetadata.getClassifier())) {
pomFilename = StringUtils.remove(pomFilename, "-" + fileMetadata.getClassifier());
}
pomFilename = FilenameUtils.removeExtension(pomFilename) + ".pom";
copyFile(Paths.get(fileMetadata.getServerFileName()), targetPath, pomFilename, fixChecksums);
triggerAuditEvent(repoConfig.getId(), targetPath.resolve(pomFilename).toString(), AuditEvent.UPLOAD_FILE);
queueRepositoryTask(repoConfig.getId(), targetPath.resolve(pomFilename));
log.debug("Finished Saving POM");
} catch (IOException ie) {
log.error("IOException for POM {}", ie.getMessage());
throw new ArchivaRestServiceException("Error encountered while uploading pom file: " + ie.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie);
} catch (RepositoryException rep) {
log.error("RepositoryException for POM {}", rep.getMessage());
throw new ArchivaRestServiceException("Repository exception: " + rep.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), rep);
} catch (RepositoryAdminException e) {
log.error("RepositoryAdminException for POM {}", e.getMessage());
throw new ArchivaRestServiceException("RepositoryAdmin exception: " + e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
}
}
protected void saveFile(String repositoryId, FileMetadata fileMetadata, boolean generatePom, String groupId,
String artifactId, String version, String packaging)
throws ArchivaRestServiceException {
log.debug("Saving file");
try {
org.apache.archiva.repository.ManagedRepository repoConfig = repositoryRegistry.getManagedRepository(repositoryId);
ArtifactReference artifactReference = createArtifactRef(fileMetadata, groupId, artifactId, version);
artifactReference.setType(
StringUtils.isEmpty( fileMetadata.getPackaging() ) ? packaging : fileMetadata.getPackaging() );
StringUtils.isEmpty(fileMetadata.getPackaging()) ? packaging : fileMetadata.getPackaging());
ManagedRepositoryContent repository = repositoryFactory.getManagedRepositoryContent( repoConfig );
Path artifactPath = artifactUtil.getArtifactPath(repoConfig, artifactReference);
Path targetPath = artifactPath.getParent();
String artifactPath = repository.toPath( artifactReference );
int lastIndex = artifactPath.lastIndexOf( '/' );
String path = artifactPath.substring( 0, lastIndex );
Path targetPath = Paths.get(repoConfig.getLocation()).resolve(path);
log.debug( "artifactPath: {} found targetPath: {}", artifactPath, targetPath );
log.debug("artifactPath: {} found targetPath: {}", artifactPath, targetPath);
Date lastUpdatedTimestamp = Calendar.getInstance().getTime();
int newBuildNumber = -1;
String timestamp = null;
Path versionMetadataFile = targetPath.resolve( MetadataTools.MAVEN_METADATA );
ArchivaRepositoryMetadata versionMetadata = getMetadata( versionMetadataFile );
Path versionMetadataFile = targetPath.resolve(MetadataTools.MAVEN_METADATA);
ArchivaRepositoryMetadata versionMetadata = getMetadata(versionMetadataFile);
if ( VersionUtil.isSnapshot( version ) )
{
TimeZone timezone = TimeZone.getTimeZone( "UTC" );
DateFormat fmt = new SimpleDateFormat( "yyyyMMdd.HHmmss" );
fmt.setTimeZone( timezone );
timestamp = fmt.format( lastUpdatedTimestamp );
if ( versionMetadata.getSnapshotVersion() != null )
{
if (VersionUtil.isSnapshot(version)) {
TimeZone timezone = TimeZone.getTimeZone("UTC");
DateFormat fmt = new SimpleDateFormat("yyyyMMdd.HHmmss");
fmt.setTimeZone(timezone);
timestamp = fmt.format(lastUpdatedTimestamp);
if (versionMetadata.getSnapshotVersion() != null) {
newBuildNumber = versionMetadata.getSnapshotVersion().getBuildNumber() + 1;
}
else
{
} else {
newBuildNumber = 1;
}
}
if ( !Files.exists(targetPath) )
{
Files.createDirectories( targetPath );
if (!Files.exists(targetPath)) {
Files.createDirectories(targetPath);
}
String filename = artifactPath.substring( lastIndex + 1 );
if ( VersionUtil.isSnapshot( version ) )
{
filename = filename.replaceAll( VersionUtil.SNAPSHOT, timestamp + "-" + newBuildNumber );
String filename = artifactPath.getFileName().toString();
if (VersionUtil.isSnapshot(version)) {
filename = filename.replaceAll(VersionUtil.SNAPSHOT, timestamp + "-" + newBuildNumber);
}
boolean fixChecksums =
!( archivaAdministration.getKnownContentConsumers().contains( "create-missing-checksums" ) );
!(archivaAdministration.getKnownContentConsumers().contains("create-missing-checksums"));
try
{
Path targetFile = targetPath.resolve( filename );
if ( Files.exists(targetFile) && !VersionUtil.isSnapshot( version ) && repoConfig.blocksRedeployments())
{
try {
Path targetFile = targetPath.resolve(filename);
if (Files.exists(targetFile) && !VersionUtil.isSnapshot(version) && repoConfig.blocksRedeployments()) {
throw new ArchivaRestServiceException(
"Overwriting released artifacts in repository '" + repoConfig.getId() + "' is not allowed.",
Response.Status.BAD_REQUEST.getStatusCode(), null );
Response.Status.BAD_REQUEST.getStatusCode(), null);
} else {
copyFile(Paths.get(fileMetadata.getServerFileName()), targetPath, filename, fixChecksums);
triggerAuditEvent(repoConfig.getId(), artifactPath.toString(), AuditEvent.UPLOAD_FILE);
queueRepositoryTask(repoConfig.getId(), targetFile);
}
else
{
copyFile( Paths.get( fileMetadata.getServerFileName() ), targetPath, filename, fixChecksums );
triggerAuditEvent( repository.getId(), path + "/" + filename, AuditEvent.UPLOAD_FILE );
queueRepositoryTask( repository.getId(), targetFile );
}
}
catch ( IOException ie )
{
log.error( "IOException copying file: {}", ie.getMessage(), ie );
} catch (IOException ie) {
log.error("IOException copying file: {}", ie.getMessage(), ie);
throw new ArchivaRestServiceException(
"Overwriting released artifacts in repository '" + repoConfig.getId() + "' is not allowed.",
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie );
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie);
}
if ( generatePom )
{
if (generatePom) {
String pomFilename = filename;
if ( StringUtils.isNotEmpty( fileMetadata.getClassifier() ) )
{
pomFilename = StringUtils.remove( pomFilename, "-" + fileMetadata.getClassifier() );
if (StringUtils.isNotEmpty(fileMetadata.getClassifier())) {
pomFilename = StringUtils.remove(pomFilename, "-" + fileMetadata.getClassifier());
}
pomFilename = FilenameUtils.removeExtension( pomFilename ) + ".pom";
pomFilename = FilenameUtils.removeExtension(pomFilename) + ".pom";
try
{
try {
Path generatedPomFile =
createPom( targetPath, pomFilename, fileMetadata, groupId, artifactId, version, packaging );
triggerAuditEvent( repoConfig.getId(), path + "/" + pomFilename, AuditEvent.UPLOAD_FILE );
if ( fixChecksums )
{
fixChecksums( generatedPomFile );
createPom(targetPath, pomFilename, fileMetadata, groupId, artifactId, version, packaging);
triggerAuditEvent(repoConfig.getId(), targetPath.resolve(pomFilename).toString(), AuditEvent.UPLOAD_FILE);
if (fixChecksums) {
fixChecksums(generatedPomFile);
}
queueRepositoryTask( repoConfig.getId(), generatedPomFile );
}
catch ( IOException ie )
{
queueRepositoryTask(repoConfig.getId(), generatedPomFile);
} catch (IOException ie) {
throw new ArchivaRestServiceException(
"Error encountered while writing pom file: " + ie.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie );
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie);
}
}
// explicitly update only if metadata-updater consumer is not enabled!
if ( !archivaAdministration.getKnownContentConsumers().contains( "metadata-updater" ) )
{
updateProjectMetadata( targetPath.toAbsolutePath().toString(), lastUpdatedTimestamp, timestamp, newBuildNumber,
fixChecksums, fileMetadata, groupId, artifactId, version, packaging );
if (!archivaAdministration.getKnownContentConsumers().contains("metadata-updater")) {
updateProjectMetadata(targetPath.toAbsolutePath().toString(), lastUpdatedTimestamp, timestamp, newBuildNumber,
fixChecksums, fileMetadata, groupId, artifactId, version, packaging);
if ( VersionUtil.isSnapshot( version ) )
{
updateVersionMetadata( versionMetadata, versionMetadataFile, lastUpdatedTimestamp, timestamp,
if (VersionUtil.isSnapshot(version)) {
updateVersionMetadata(versionMetadata, versionMetadataFile, lastUpdatedTimestamp, timestamp,
newBuildNumber, fixChecksums, fileMetadata, groupId, artifactId, version,
packaging );
packaging);
}
}
}
catch ( RepositoryNotFoundException re )
{
} catch (RepositoryNotFoundException re) {
log.error("RepositoryNotFoundException during save {}", re.getMessage());
re.printStackTrace();
throw new ArchivaRestServiceException( "Target repository cannot be found: " + re.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), re );
}
catch ( RepositoryException rep )
{
throw new ArchivaRestServiceException("Target repository cannot be found: " + re.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), re);
} catch (RepositoryException rep) {
log.error("RepositoryException during save {}", rep.getMessage());
throw new ArchivaRestServiceException( "Repository exception: " + rep.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), rep );
}
catch ( RepositoryAdminException e )
{
throw new ArchivaRestServiceException("Repository exception: " + rep.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), rep);
} catch (RepositoryAdminException e) {
log.error("RepositoryAdminException during save {}", e.getMessage());
throw new ArchivaRestServiceException( "RepositoryAdmin exception: " + e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e );
}
catch ( IOException e )
{
throw new ArchivaRestServiceException("RepositoryAdmin exception: " + e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
} catch (IOException e) {
log.error("IOException during save {}", e.getMessage());
throw new ArchivaRestServiceException("Repository exception "+ e.getMessage(),
throw new ArchivaRestServiceException("Repository exception " + e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
}
}
private ArchivaRepositoryMetadata getMetadata( Path metadataFile )
throws RepositoryMetadataException
{
ArchivaRepositoryMetadata metadata = new ArchivaRepositoryMetadata();
if ( Files.exists(metadataFile) )
{
try
{
metadata = MavenMetadataReader.read( metadataFile );
private ArtifactReference createArtifactRef(FileMetadata fileMetadata, String groupId, String artifactId, String version) {
ArtifactReference artifactReference = new ArtifactReference();
artifactReference.setArtifactId(artifactId);
artifactReference.setGroupId(groupId);
artifactReference.setVersion(version);
artifactReference.setClassifier(fileMetadata.getClassifier());
return artifactReference;
}
catch ( XMLException e )
{
throw new RepositoryMetadataException( e.getMessage(), e );
private ArchivaRepositoryMetadata getMetadata(Path metadataFile)
throws RepositoryMetadataException {
ArchivaRepositoryMetadata metadata = new ArchivaRepositoryMetadata();
if (Files.exists(metadataFile)) {
try {
metadata = MavenMetadataReader.read(metadataFile);
} catch (XMLException e) {
throw new RepositoryMetadataException(e.getMessage(), e);
}
}
return metadata;
}
private Path createPom( Path targetPath, String filename, FileMetadata fileMetadata, String groupId,
String artifactId, String version, String packaging )
throws IOException
{
private Path createPom(Path targetPath, String filename, FileMetadata fileMetadata, String groupId,
String artifactId, String version, String packaging)
throws IOException {
Model projectModel = new Model();
projectModel.setModelVersion( "4.0.0" );
projectModel.setGroupId( groupId );
projectModel.setArtifactId( artifactId );
projectModel.setVersion( version );
projectModel.setPackaging( packaging );
projectModel.setModelVersion("4.0.0");
projectModel.setGroupId(groupId);
projectModel.setArtifactId(artifactId);
projectModel.setVersion(version);
projectModel.setPackaging(packaging);
Path pomFile = targetPath.resolve( filename );
Path pomFile = targetPath.resolve(filename);
MavenXpp3Writer writer = new MavenXpp3Writer();
try (FileWriter w = new FileWriter( pomFile.toFile() ))
{
writer.write( w, projectModel );
try (FileWriter w = new FileWriter(pomFile.toFile())) {
writer.write(w, projectModel);
}
return pomFile;
}
private void fixChecksums( Path file )
{
ChecksummedFile checksum = new ChecksummedFile( file );
checksum.fixChecksums( algorithms );
private void fixChecksums(Path file) {
ChecksummedFile checksum = new ChecksummedFile(file);
checksum.fixChecksums(algorithms);
}
private void queueRepositoryTask( String repositoryId, Path localFile )
{
private void queueRepositoryTask(String repositoryId, Path localFile) {
RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repositoryId );
task.setResourceFile( localFile );
task.setUpdateRelatedArtifacts( true );
task.setScanAll( false );
task.setRepositoryId(repositoryId);
task.setResourceFile(localFile);
task.setUpdateRelatedArtifacts(true);
task.setScanAll(false);
try
{
scheduler.queueTask( task );
}
catch ( TaskQueueException e )
{
log.error( "Unable to queue repository task to execute consumers on resource file ['{}"
+ "'].", localFile.getFileName() );
try {
scheduler.queueTask(task);
} catch (TaskQueueException e) {
log.error("Unable to queue repository task to execute consumers on resource file ['{}"
+ "'].", localFile.getFileName());
}
}
private void copyFile( Path sourceFile, Path targetPath, String targetFilename, boolean fixChecksums )
throws IOException
{
private void copyFile(Path sourceFile, Path targetPath, String targetFilename, boolean fixChecksums)
throws IOException {
Files.copy( sourceFile, targetPath.resolve( targetFilename ), StandardCopyOption.REPLACE_EXISTING,
StandardCopyOption.COPY_ATTRIBUTES );
Files.copy(sourceFile, targetPath.resolve(targetFilename), StandardCopyOption.REPLACE_EXISTING,
StandardCopyOption.COPY_ATTRIBUTES);
if ( fixChecksums )
{
fixChecksums( targetPath.resolve( targetFilename ) );
if (fixChecksums) {
fixChecksums(targetPath.resolve(targetFilename));
}
}
/**
* Update artifact level metadata. If it does not exist, create the metadata and fix checksums if necessary.
*/
private void updateProjectMetadata( String targetPath, Date lastUpdatedTimestamp, String timestamp, int buildNumber,
private void updateProjectMetadata(String targetPath, Date lastUpdatedTimestamp, String timestamp, int buildNumber,
boolean fixChecksums, FileMetadata fileMetadata, String groupId,
String artifactId, String version, String packaging )
throws RepositoryMetadataException
{
String artifactId, String version, String packaging)
throws RepositoryMetadataException {
List<String> availableVersions = new ArrayList<>();
String latestVersion = version;
Path projectDir = Paths.get(targetPath).getParent();
Path projectMetadataFile = projectDir.resolve( MetadataTools.MAVEN_METADATA );
Path projectMetadataFile = projectDir.resolve(MetadataTools.MAVEN_METADATA);
ArchivaRepositoryMetadata projectMetadata = getMetadata( projectMetadataFile );
ArchivaRepositoryMetadata projectMetadata = getMetadata(projectMetadataFile);
if ( Files.exists(projectMetadataFile) )
{
if (Files.exists(projectMetadataFile)) {
availableVersions = projectMetadata.getAvailableVersions();
Collections.sort( availableVersions, VersionComparator.getInstance() );
Collections.sort(availableVersions, VersionComparator.getInstance());
if ( !availableVersions.contains( version ) )
{
availableVersions.add( version );
if (!availableVersions.contains(version)) {
availableVersions.add(version);
}
latestVersion = availableVersions.get( availableVersions.size() - 1 );
}
else
{
availableVersions.add( version );
latestVersion = availableVersions.get(availableVersions.size() - 1);
} else {
availableVersions.add(version);
projectMetadata.setGroupId( groupId );
projectMetadata.setArtifactId( artifactId );
projectMetadata.setGroupId(groupId);
projectMetadata.setArtifactId(artifactId);
}
if ( projectMetadata.getGroupId() == null )
{
projectMetadata.setGroupId( groupId );
if (projectMetadata.getGroupId() == null) {
projectMetadata.setGroupId(groupId);
}
if ( projectMetadata.getArtifactId() == null )
{
projectMetadata.setArtifactId( artifactId );
if (projectMetadata.getArtifactId() == null) {
projectMetadata.setArtifactId(artifactId);
}
projectMetadata.setLatestVersion( latestVersion );
projectMetadata.setLastUpdatedTimestamp( lastUpdatedTimestamp );
projectMetadata.setAvailableVersions( availableVersions );
projectMetadata.setLatestVersion(latestVersion);
projectMetadata.setLastUpdatedTimestamp(lastUpdatedTimestamp);
projectMetadata.setAvailableVersions(availableVersions);
if ( !VersionUtil.isSnapshot( version ) )
{
projectMetadata.setReleasedVersion( latestVersion );
if (!VersionUtil.isSnapshot(version)) {
projectMetadata.setReleasedVersion(latestVersion);
}
RepositoryMetadataWriter.write( projectMetadata, projectMetadataFile );
RepositoryMetadataWriter.write(projectMetadata, projectMetadataFile);
if ( fixChecksums )
{
fixChecksums( projectMetadataFile );
if (fixChecksums) {
fixChecksums(projectMetadataFile);
}
}
@@ -734,33 +633,29 @@ public class DefaultFileUploadService
* Update version level metadata for snapshot artifacts. If it does not exist, create the metadata and fix checksums
* if necessary.
*/
private void updateVersionMetadata( ArchivaRepositoryMetadata metadata, Path metadataFile,
private void updateVersionMetadata(ArchivaRepositoryMetadata metadata, Path metadataFile,
Date lastUpdatedTimestamp, String timestamp, int buildNumber,
boolean fixChecksums, FileMetadata fileMetadata, String groupId,
String artifactId, String version, String packaging )
throws RepositoryMetadataException
{
if ( !Files.exists(metadataFile) )
{
metadata.setGroupId( groupId );
metadata.setArtifactId( artifactId );
metadata.setVersion( version );
String artifactId, String version, String packaging)
throws RepositoryMetadataException {
if (!Files.exists(metadataFile)) {
metadata.setGroupId(groupId);
metadata.setArtifactId(artifactId);
metadata.setVersion(version);
}
if ( metadata.getSnapshotVersion() == null )
{
metadata.setSnapshotVersion( new SnapshotVersion() );
if (metadata.getSnapshotVersion() == null) {
metadata.setSnapshotVersion(new SnapshotVersion());
}
metadata.getSnapshotVersion().setBuildNumber( buildNumber );
metadata.getSnapshotVersion().setTimestamp( timestamp );
metadata.setLastUpdatedTimestamp( lastUpdatedTimestamp );
metadata.getSnapshotVersion().setBuildNumber(buildNumber);
metadata.getSnapshotVersion().setTimestamp(timestamp);
metadata.setLastUpdatedTimestamp(lastUpdatedTimestamp);
RepositoryMetadataWriter.write( metadata, metadataFile );
RepositoryMetadataWriter.write(metadata, metadataFile);
if ( fixChecksums )
{
fixChecksums( metadataFile );
if (fixChecksums) {
fixChecksums(metadataFile);
}
}
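
One smaller change worth noting now that the diff ends: getSessionFilesList() was previously synchronized on the whole service (the old FIXME read "must be per session synchronized not globally"). It now lazily creates the per-session list with a double check while holding the session monitor, relying on the session's attribute handling rather than a volatile field, as the in-code comment acknowledges. A condensed sketch of that pattern, assuming a standard javax.servlet HttpSession; the helper name and generic signature are illustrative, not taken from the commit:

    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;
    import javax.servlet.http.HttpSession;

    class SessionListSketch {
        @SuppressWarnings("unchecked")
        static <T> List<T> sessionList(HttpSession session, String key) {
            List<T> list = (List<T>) session.getAttribute(key);
            if (list == null) {                      // cheap check without locking
                synchronized (session) {             // lock only on first access per session
                    list = (List<T>) session.getAttribute(key);
                    if (list == null) {
                        list = new CopyOnWriteArrayList<>();  // tolerates concurrent add/remove afterwards
                        session.setAttribute(key, list);
                    }
                }
            }
            return list;
        }
    }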