Using new artifact util in upload service

This commit is contained in:
Martin Stockhammer 2019-05-04 10:49:34 +02:00
parent a3c149327e
commit 0e09883158
1 changed file with 301 additions and 406 deletions

View File

@ -34,10 +34,9 @@ import org.apache.archiva.model.ArchivaRepositoryMetadata;
import org.apache.archiva.model.ArtifactReference; import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.model.SnapshotVersion; import org.apache.archiva.model.SnapshotVersion;
import org.apache.archiva.redback.components.taskqueue.TaskQueueException; import org.apache.archiva.redback.components.taskqueue.TaskQueueException;
import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RepositoryContentFactory;
import org.apache.archiva.repository.RepositoryException; import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.RepositoryNotFoundException; import org.apache.archiva.repository.RepositoryNotFoundException;
import org.apache.archiva.repository.content.ArtifactUtil;
import org.apache.archiva.repository.metadata.MetadataTools; import org.apache.archiva.repository.metadata.MetadataTools;
import org.apache.archiva.repository.metadata.RepositoryMetadataException; import org.apache.archiva.repository.metadata.RepositoryMetadataException;
import org.apache.archiva.repository.metadata.RepositoryMetadataWriter; import org.apache.archiva.repository.metadata.RepositoryMetadataWriter;
@ -63,6 +62,7 @@ import org.springframework.stereotype.Service;
import javax.inject.Inject; import javax.inject.Inject;
import javax.inject.Named; import javax.inject.Named;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import javax.ws.rs.core.Context; import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import java.io.FileOutputStream; import java.io.FileOutputStream;
@ -72,14 +72,7 @@ import java.net.URLDecoder;
import java.nio.file.*; import java.nio.file.*;
import java.text.DateFormat; import java.text.DateFormat;
import java.text.SimpleDateFormat; import java.text.SimpleDateFormat;
import java.util.ArrayList; import java.util.*;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.TimeZone;
import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CopyOnWriteArrayList;
/** /**
@ -87,10 +80,9 @@ import java.util.concurrent.CopyOnWriteArrayList;
*/ */
@Service("fileUploadService#rest") @Service("fileUploadService#rest")
public class DefaultFileUploadService public class DefaultFileUploadService
extends AbstractRestService extends AbstractRestService
implements FileUploadService implements FileUploadService {
{ private Logger log = LoggerFactory.getLogger(getClass());
private Logger log = LoggerFactory.getLogger( getClass() );
@Context @Context
private HttpServletRequest httpServletRequest; private HttpServletRequest httpServletRequest;
@ -99,12 +91,12 @@ public class DefaultFileUploadService
private ManagedRepositoryAdmin managedRepositoryAdmin; private ManagedRepositoryAdmin managedRepositoryAdmin;
@Inject @Inject
private RepositoryContentFactory repositoryFactory; private ArtifactUtil artifactUtil;
@Inject @Inject
private ArchivaAdministration archivaAdministration; private ArchivaAdministration archivaAdministration;
private List<ChecksumAlgorithm> algorithms = Arrays.asList( ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5 ); private List<ChecksumAlgorithm> algorithms = Arrays.asList(ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5);
private final String FS = FileSystems.getDefault().getSeparator(); private final String FS = FileSystems.getDefault().getSeparator();
@ -112,49 +104,43 @@ public class DefaultFileUploadService
@Named(value = "archivaTaskScheduler#repository") @Named(value = "archivaTaskScheduler#repository")
private ArchivaTaskScheduler<RepositoryTask> scheduler; private ArchivaTaskScheduler<RepositoryTask> scheduler;
private String getStringValue( MultipartBody multipartBody, String attachmentId ) private String getStringValue(MultipartBody multipartBody, String attachmentId)
throws IOException throws IOException {
{ Attachment attachment = multipartBody.getAttachment(attachmentId);
Attachment attachment = multipartBody.getAttachment( attachmentId );
return attachment == null ? "" : return attachment == null ? "" :
StringUtils.trim(URLDecoder.decode(IOUtils.toString( attachment.getDataHandler().getInputStream(), "UTF-8" ), "UTF-8")); StringUtils.trim(URLDecoder.decode(IOUtils.toString(attachment.getDataHandler().getInputStream(), "UTF-8"), "UTF-8"));
} }
@Override @Override
public FileMetadata post( MultipartBody multipartBody ) public FileMetadata post(MultipartBody multipartBody)
throws ArchivaRestServiceException throws ArchivaRestServiceException {
{
try try {
{
String classifier = getStringValue( multipartBody, "classifier" ); String classifier = getStringValue(multipartBody, "classifier");
String packaging = getStringValue( multipartBody, "packaging" ); String packaging = getStringValue(multipartBody, "packaging");
checkParamChars( "classifier", classifier ); checkParamChars("classifier", classifier);
checkParamChars( "packaging", packaging); checkParamChars("packaging", packaging);
// skygo: http header form pomFile was once sending 1 for true and void for false // skygo: http header form pomFile was once sending 1 for true and void for false
// leading to permanent false value for pomFile if using toBoolean(); use , "1", "" // leading to permanent false value for pomFile if using toBoolean(); use , "1", ""
boolean pomFile = false; boolean pomFile = false;
try try {
{ pomFile = BooleanUtils.toBoolean(getStringValue(multipartBody, "pomFile"));
pomFile = BooleanUtils.toBoolean( getStringValue( multipartBody, "pomFile" ) ); } catch (IllegalArgumentException ex) {
}
catch ( IllegalArgumentException ex )
{
ArchivaRestServiceException e = new ArchivaRestServiceException("Bad value for boolean pomFile field.", null); ArchivaRestServiceException e = new ArchivaRestServiceException("Bad value for boolean pomFile field.", null);
e.setHttpErrorCode(422); e.setHttpErrorCode(422);
e.setFieldName( "pomFile" ); e.setFieldName("pomFile");
e.setErrorKey("fileupload.malformed.pomFile"); e.setErrorKey("fileupload.malformed.pomFile");
throw e; throw e;
} }
Attachment file = multipartBody.getAttachment( "files[]" ); Attachment file = multipartBody.getAttachment("files[]");
//Content-Disposition: form-data; name="files[]"; filename="org.apache.karaf.features.command-2.2.2.jar" //Content-Disposition: form-data; name="files[]"; filename="org.apache.karaf.features.command-2.2.2.jar"
String fileName = file.getContentDisposition().getParameter( "filename" ); String fileName = file.getContentDisposition().getParameter("filename");
Path fileNamePath = Paths.get(fileName); Path fileNamePath = Paths.get(fileName);
if (!fileName.equals(fileNamePath.getFileName().toString())) { if (!fileName.equals(fileNamePath.getFileName().toString())) {
ArchivaRestServiceException e = new ArchivaRestServiceException("Bad filename in upload content: " + fileName + " - File traversal chars (..|/) are not allowed" ArchivaRestServiceException e = new ArchivaRestServiceException("Bad filename in upload content: " + fileName + " - File traversal chars (..|/) are not allowed"
@ -164,60 +150,62 @@ public class DefaultFileUploadService
throw e; throw e;
} }
Path tmpFile = Files.createTempFile( "upload-artifact", ".tmp" ); Path tmpFile = Files.createTempFile("upload-artifact", ".tmp");
tmpFile.toFile().deleteOnExit(); tmpFile.toFile().deleteOnExit();
IOUtils.copy( file.getDataHandler().getInputStream(), new FileOutputStream( tmpFile.toFile() ) ); IOUtils.copy(file.getDataHandler().getInputStream(), new FileOutputStream(tmpFile.toFile()));
FileMetadata fileMetadata = new FileMetadata( fileName, Files.size(tmpFile), "theurl" ); FileMetadata fileMetadata = new FileMetadata(fileName, Files.size(tmpFile), "theurl");
fileMetadata.setServerFileName( tmpFile.toString() ); fileMetadata.setServerFileName(tmpFile.toString());
fileMetadata.setClassifier( classifier ); fileMetadata.setClassifier(classifier);
fileMetadata.setDeleteUrl( tmpFile.getFileName().toString() ); fileMetadata.setDeleteUrl(tmpFile.getFileName().toString());
fileMetadata.setPomFile( pomFile ); fileMetadata.setPomFile(pomFile);
fileMetadata.setPackaging( packaging ); fileMetadata.setPackaging(packaging);
log.info( "uploading file: {}", fileMetadata ); log.info("uploading file: {}", fileMetadata);
List<FileMetadata> fileMetadatas = getSessionFilesList(); List<FileMetadata> fileMetadatas = getSessionFilesList();
fileMetadatas.add( fileMetadata ); fileMetadatas.add(fileMetadata);
return fileMetadata; return fileMetadata;
} } catch (IOException e) {
catch ( IOException e ) throw new ArchivaRestServiceException(e.getMessage(),
{ Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
throw new ArchivaRestServiceException( e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e );
} }
} }
/** /**
* FIXME must be per session synchronized not globally * @return The file list from the session.
*
* @return
*/ */
protected synchronized List<FileMetadata> getSessionFilesList() @SuppressWarnings("unchecked")
{ protected List<FileMetadata> getSessionFilesList() {
@SuppressWarnings("unchecked") List<FileMetadata> fileMetadatas = (List<FileMetadata>) httpServletRequest.getSession().getAttribute( FILES_SESSION_KEY ); final HttpSession session = httpServletRequest.getSession();
if ( fileMetadatas == null ) List<FileMetadata> fileMetadata = (List<FileMetadata>) session.getAttribute(FILES_SESSION_KEY);
{ // Double check with synchronization, we assume, that httpServletRequest is
fileMetadatas = new CopyOnWriteArrayList<>(); // fully initialized (no volatile)
httpServletRequest.getSession().setAttribute( FILES_SESSION_KEY, fileMetadatas ); if (fileMetadata == null) {
synchronized (session) {
fileMetadata = (List<FileMetadata>) session.getAttribute(FILES_SESSION_KEY);
if (fileMetadata == null) {
fileMetadata = new CopyOnWriteArrayList<>();
session.setAttribute(FILES_SESSION_KEY, fileMetadata);
}
}
} }
return fileMetadatas; return fileMetadata;
} }
@Override @Override
public Boolean deleteFile( String fileName ) public Boolean deleteFile(String fileName)
throws ArchivaRestServiceException throws ArchivaRestServiceException {
{
log.debug("Deleting file {}", fileName); log.debug("Deleting file {}", fileName);
// we make sure, that there are no other path components in the filename: // we make sure, that there are no other path components in the filename:
String checkedFileName = Paths.get(fileName).getFileName().toString(); String checkedFileName = Paths.get(fileName).getFileName().toString();
Path file = SystemUtils.getJavaIoTmpDir().toPath().resolve( checkedFileName ); Path file = SystemUtils.getJavaIoTmpDir().toPath().resolve(checkedFileName);
log.debug( "delete file:{},exists:{}", file, Files.exists(file) ); log.debug("delete file:{},exists:{}", file, Files.exists(file));
boolean removed = getSessionFileMetadatas().remove( new FileMetadata( fileName ) ); boolean removed = getSessionFileMetadatas().remove(new FileMetadata(fileName));
// try with full name as ui only know the file name // try with full name as ui only know the file name
if ( !removed ) { if (!removed) {
removed = getSessionFileMetadatas().remove(new FileMetadata(file.toString())); removed = getSessionFileMetadatas().remove(new FileMetadata(file.toString()));
} }
if (removed) { if (removed) {
@ -233,12 +221,10 @@ public class DefaultFileUploadService
@Override @Override
public Boolean clearUploadedFiles() public Boolean clearUploadedFiles()
throws ArchivaRestServiceException throws ArchivaRestServiceException {
{ List<FileMetadata> fileMetadatas = new ArrayList<>(getSessionFileMetadatas());
List<FileMetadata> fileMetadatas = new ArrayList<>( getSessionFileMetadatas() ); for (FileMetadata fileMetadata : fileMetadatas) {
for ( FileMetadata fileMetadata : fileMetadatas ) deleteFile(Paths.get(fileMetadata.getServerFileName()).toString());
{
deleteFile( Paths.get( fileMetadata.getServerFileName() ).toString() );
} }
getSessionFileMetadatas().clear(); getSessionFileMetadatas().clear();
return Boolean.TRUE; return Boolean.TRUE;
@ -246,10 +232,9 @@ public class DefaultFileUploadService
@Override @Override
public List<FileMetadata> getSessionFileMetadatas() public List<FileMetadata> getSessionFileMetadatas()
throws ArchivaRestServiceException throws ArchivaRestServiceException {
{
@SuppressWarnings("unchecked") List<FileMetadata> fileMetadatas = @SuppressWarnings("unchecked") List<FileMetadata> fileMetadatas =
(List<FileMetadata>) httpServletRequest.getSession().getAttribute( FILES_SESSION_KEY ); (List<FileMetadata>) httpServletRequest.getSession().getAttribute(FILES_SESSION_KEY);
return fileMetadatas == null ? Collections.<FileMetadata>emptyList() : fileMetadatas; return fileMetadatas == null ? Collections.<FileMetadata>emptyList() : fileMetadatas;
} }
@ -278,455 +263,369 @@ public class DefaultFileUploadService
} }
@Override @Override
public Boolean save( String repositoryId, String groupId, String artifactId, String version, String packaging, public Boolean save(String repositoryId, String groupId, String artifactId, String version, String packaging,
boolean generatePom ) boolean generatePom)
throws ArchivaRestServiceException throws ArchivaRestServiceException {
{ repositoryId = StringUtils.trim(repositoryId);
repositoryId = StringUtils.trim( repositoryId ); groupId = StringUtils.trim(groupId);
groupId = StringUtils.trim( groupId ); artifactId = StringUtils.trim(artifactId);
artifactId = StringUtils.trim( artifactId ); version = StringUtils.trim(version);
version = StringUtils.trim( version ); packaging = StringUtils.trim(packaging);
packaging = StringUtils.trim( packaging );
checkParamChars("repositoryId", repositoryId); checkParamChars("repositoryId", repositoryId);
checkParamChars("groupId", groupId); checkParamChars("groupId", groupId);
checkParamChars("artifactId", artifactId); checkParamChars("artifactId", artifactId);
checkParamChars( "version", version); checkParamChars("version", version);
checkParamChars("packaging", packaging); checkParamChars("packaging", packaging);
List<FileMetadata> fileMetadatas = getSessionFilesList(); List<FileMetadata> fileMetadatas = getSessionFilesList();
if ( fileMetadatas == null || fileMetadatas.isEmpty() ) if (fileMetadatas == null || fileMetadatas.isEmpty()) {
{
return Boolean.FALSE; return Boolean.FALSE;
} }
try try {
{ ManagedRepository managedRepository = managedRepositoryAdmin.getManagedRepository(repositoryId);
ManagedRepository managedRepository = managedRepositoryAdmin.getManagedRepository( repositoryId );
if ( managedRepository == null ) if (managedRepository == null) {
{
// TODO i18n ? // TODO i18n ?
throw new ArchivaRestServiceException( "Cannot find managed repository with id " + repositoryId, throw new ArchivaRestServiceException("Cannot find managed repository with id " + repositoryId,
Response.Status.BAD_REQUEST.getStatusCode(), null ); Response.Status.BAD_REQUEST.getStatusCode(), null);
} }
if ( VersionUtil.isSnapshot( version ) && !managedRepository.isSnapshots() ) if (VersionUtil.isSnapshot(version) && !managedRepository.isSnapshots()) {
{
// TODO i18n ? // TODO i18n ?
throw new ArchivaRestServiceException( throw new ArchivaRestServiceException(
"Managed repository with id " + repositoryId + " do not accept snapshots", "Managed repository with id " + repositoryId + " do not accept snapshots",
Response.Status.BAD_REQUEST.getStatusCode(), null ); Response.Status.BAD_REQUEST.getStatusCode(), null);
} }
} } catch (RepositoryAdminException e) {
catch ( RepositoryAdminException e ) throw new ArchivaRestServiceException(e.getMessage(),
{ Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
throw new ArchivaRestServiceException( e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e );
} }
// get from the session file with groupId/artifactId // get from the session file with groupId/artifactId
Iterable<FileMetadata> filesToAdd = Iterables.filter( fileMetadatas, new Predicate<FileMetadata>() Iterable<FileMetadata> filesToAdd = Iterables.filter(fileMetadatas, new Predicate<FileMetadata>() {
{ public boolean apply(FileMetadata fileMetadata) {
public boolean apply( FileMetadata fileMetadata )
{
return fileMetadata != null && !fileMetadata.isPomFile(); return fileMetadata != null && !fileMetadata.isPomFile();
} }
} ); });
Iterator<FileMetadata> iterator = filesToAdd.iterator(); Iterator<FileMetadata> iterator = filesToAdd.iterator();
boolean pomGenerated = false; boolean pomGenerated = false;
while ( iterator.hasNext() ) while (iterator.hasNext()) {
{
FileMetadata fileMetadata = iterator.next(); FileMetadata fileMetadata = iterator.next();
log.debug( "fileToAdd: {}", fileMetadata ); log.debug("fileToAdd: {}", fileMetadata);
saveFile( repositoryId, fileMetadata, generatePom && !pomGenerated, groupId, artifactId, version, saveFile(repositoryId, fileMetadata, generatePom && !pomGenerated, groupId, artifactId, version,
packaging ); packaging);
pomGenerated = true; pomGenerated = true;
deleteFile( fileMetadata.getServerFileName() ); deleteFile(fileMetadata.getServerFileName());
} }
filesToAdd = Iterables.filter( fileMetadatas, new Predicate<FileMetadata>() filesToAdd = Iterables.filter(fileMetadatas, new Predicate<FileMetadata>() {
{
@Override @Override
public boolean apply( FileMetadata fileMetadata ) public boolean apply(FileMetadata fileMetadata) {
{
return fileMetadata != null && fileMetadata.isPomFile(); return fileMetadata != null && fileMetadata.isPomFile();
} }
} ); });
iterator = filesToAdd.iterator(); iterator = filesToAdd.iterator();
while ( iterator.hasNext() ) while (iterator.hasNext()) {
{
FileMetadata fileMetadata = iterator.next(); FileMetadata fileMetadata = iterator.next();
log.debug( "fileToAdd: {}", fileMetadata ); log.debug("fileToAdd: {}", fileMetadata);
savePomFile( repositoryId, fileMetadata, groupId, artifactId, version, packaging ); savePomFile(repositoryId, fileMetadata, groupId, artifactId, version, packaging);
deleteFile( fileMetadata.getServerFileName() ); deleteFile(fileMetadata.getServerFileName());
} }
return Boolean.TRUE; return Boolean.TRUE;
} }
protected void savePomFile( String repositoryId, FileMetadata fileMetadata, String groupId, String artifactId, protected void savePomFile(String repositoryId, FileMetadata fileMetadata, String groupId, String artifactId,
String version, String packaging ) String version, String packaging)
throws ArchivaRestServiceException throws ArchivaRestServiceException {
{
log.debug("Saving POM"); log.debug("Saving POM");
try try {
{
boolean fixChecksums = boolean fixChecksums =
!( archivaAdministration.getKnownContentConsumers().contains( "create-missing-checksums" ) ); !(archivaAdministration.getKnownContentConsumers().contains("create-missing-checksums"));
ManagedRepository repoConfig = managedRepositoryAdmin.getManagedRepository( repositoryId );
ArtifactReference artifactReference = new ArtifactReference();
artifactReference.setArtifactId( artifactId );
artifactReference.setGroupId( groupId );
artifactReference.setVersion( version );
artifactReference.setClassifier( fileMetadata.getClassifier() );
artifactReference.setType( packaging );
ManagedRepositoryContent repository = repositoryFactory.getManagedRepositoryContent( repositoryId );
String artifactPath = repository.toPath( artifactReference );
int lastIndex = artifactPath.lastIndexOf( '/' );
String path = artifactPath.substring( 0, lastIndex );
Path targetPath = Paths.get( repoConfig.getLocation(), path );
String pomFilename = artifactPath.substring( lastIndex + 1 );
if ( StringUtils.isNotEmpty( fileMetadata.getClassifier() ) )
{
pomFilename = StringUtils.remove( pomFilename, "-" + fileMetadata.getClassifier() );
}
pomFilename = FilenameUtils.removeExtension( pomFilename ) + ".pom";
copyFile( Paths.get( fileMetadata.getServerFileName() ), targetPath, pomFilename, fixChecksums );
triggerAuditEvent( repoConfig.getId(), path + "/" + pomFilename, AuditEvent.UPLOAD_FILE );
queueRepositoryTask( repoConfig.getId(), targetPath.resolve(pomFilename ) );
log.debug("Finished Saving POM");
}
catch ( IOException ie )
{
log.error("IOException for POM {}", ie.getMessage());
throw new ArchivaRestServiceException( "Error encountered while uploading pom file: " + ie.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie );
}
catch ( RepositoryException rep )
{
log.error("RepositoryException for POM {}", rep.getMessage());
throw new ArchivaRestServiceException( "Repository exception: " + rep.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), rep );
}
catch ( RepositoryAdminException e )
{
log.error("RepositoryAdminException for POM {}", e.getMessage());
throw new ArchivaRestServiceException( "RepositoryAdmin exception: " + e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e );
}
}
protected void saveFile( String repositoryId, FileMetadata fileMetadata, boolean generatePom, String groupId,
String artifactId, String version, String packaging )
throws ArchivaRestServiceException
{
log.debug("Saving file");
try
{
org.apache.archiva.repository.ManagedRepository repoConfig = repositoryRegistry.getManagedRepository(repositoryId); org.apache.archiva.repository.ManagedRepository repoConfig = repositoryRegistry.getManagedRepository(repositoryId);
ArtifactReference artifactReference = new ArtifactReference(); ArtifactReference artifactReference = createArtifactRef(fileMetadata, groupId, artifactId, version);
artifactReference.setArtifactId( artifactId ); artifactReference.setType(packaging);
artifactReference.setGroupId( groupId );
artifactReference.setVersion( version ); Path pomPath = artifactUtil.getArtifactPath(repoConfig, artifactReference);
artifactReference.setClassifier( fileMetadata.getClassifier() ); Path targetPath = pomPath.getParent();
String pomFilename = pomPath.getFileName().toString();
if (StringUtils.isNotEmpty(fileMetadata.getClassifier())) {
pomFilename = StringUtils.remove(pomFilename, "-" + fileMetadata.getClassifier());
}
pomFilename = FilenameUtils.removeExtension(pomFilename) + ".pom";
copyFile(Paths.get(fileMetadata.getServerFileName()), targetPath, pomFilename, fixChecksums);
triggerAuditEvent(repoConfig.getId(), targetPath.resolve(pomFilename).toString(), AuditEvent.UPLOAD_FILE);
queueRepositoryTask(repoConfig.getId(), targetPath.resolve(pomFilename));
log.debug("Finished Saving POM");
} catch (IOException ie) {
log.error("IOException for POM {}", ie.getMessage());
throw new ArchivaRestServiceException("Error encountered while uploading pom file: " + ie.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie);
} catch (RepositoryException rep) {
log.error("RepositoryException for POM {}", rep.getMessage());
throw new ArchivaRestServiceException("Repository exception: " + rep.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), rep);
} catch (RepositoryAdminException e) {
log.error("RepositoryAdminException for POM {}", e.getMessage());
throw new ArchivaRestServiceException("RepositoryAdmin exception: " + e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
}
}
protected void saveFile(String repositoryId, FileMetadata fileMetadata, boolean generatePom, String groupId,
String artifactId, String version, String packaging)
throws ArchivaRestServiceException {
log.debug("Saving file");
try {
org.apache.archiva.repository.ManagedRepository repoConfig = repositoryRegistry.getManagedRepository(repositoryId);
ArtifactReference artifactReference = createArtifactRef(fileMetadata, groupId, artifactId, version);
artifactReference.setType( artifactReference.setType(
StringUtils.isEmpty( fileMetadata.getPackaging() ) ? packaging : fileMetadata.getPackaging() ); StringUtils.isEmpty(fileMetadata.getPackaging()) ? packaging : fileMetadata.getPackaging());
ManagedRepositoryContent repository = repositoryFactory.getManagedRepositoryContent( repoConfig ); Path artifactPath = artifactUtil.getArtifactPath(repoConfig, artifactReference);
Path targetPath = artifactPath.getParent();
String artifactPath = repository.toPath( artifactReference ); log.debug("artifactPath: {} found targetPath: {}", artifactPath, targetPath);
int lastIndex = artifactPath.lastIndexOf( '/' );
String path = artifactPath.substring( 0, lastIndex );
Path targetPath = Paths.get(repoConfig.getLocation()).resolve(path);
log.debug( "artifactPath: {} found targetPath: {}", artifactPath, targetPath );
Date lastUpdatedTimestamp = Calendar.getInstance().getTime(); Date lastUpdatedTimestamp = Calendar.getInstance().getTime();
int newBuildNumber = -1; int newBuildNumber = -1;
String timestamp = null; String timestamp = null;
Path versionMetadataFile = targetPath.resolve( MetadataTools.MAVEN_METADATA ); Path versionMetadataFile = targetPath.resolve(MetadataTools.MAVEN_METADATA);
ArchivaRepositoryMetadata versionMetadata = getMetadata( versionMetadataFile ); ArchivaRepositoryMetadata versionMetadata = getMetadata(versionMetadataFile);
if ( VersionUtil.isSnapshot( version ) ) if (VersionUtil.isSnapshot(version)) {
{ TimeZone timezone = TimeZone.getTimeZone("UTC");
TimeZone timezone = TimeZone.getTimeZone( "UTC" ); DateFormat fmt = new SimpleDateFormat("yyyyMMdd.HHmmss");
DateFormat fmt = new SimpleDateFormat( "yyyyMMdd.HHmmss" ); fmt.setTimeZone(timezone);
fmt.setTimeZone( timezone ); timestamp = fmt.format(lastUpdatedTimestamp);
timestamp = fmt.format( lastUpdatedTimestamp ); if (versionMetadata.getSnapshotVersion() != null) {
if ( versionMetadata.getSnapshotVersion() != null )
{
newBuildNumber = versionMetadata.getSnapshotVersion().getBuildNumber() + 1; newBuildNumber = versionMetadata.getSnapshotVersion().getBuildNumber() + 1;
} } else {
else
{
newBuildNumber = 1; newBuildNumber = 1;
} }
} }
if ( !Files.exists(targetPath) ) if (!Files.exists(targetPath)) {
{ Files.createDirectories(targetPath);
Files.createDirectories( targetPath );
} }
String filename = artifactPath.substring( lastIndex + 1 ); String filename = artifactPath.getFileName().toString();
if ( VersionUtil.isSnapshot( version ) ) if (VersionUtil.isSnapshot(version)) {
{ filename = filename.replaceAll(VersionUtil.SNAPSHOT, timestamp + "-" + newBuildNumber);
filename = filename.replaceAll( VersionUtil.SNAPSHOT, timestamp + "-" + newBuildNumber );
} }
boolean fixChecksums = boolean fixChecksums =
!( archivaAdministration.getKnownContentConsumers().contains( "create-missing-checksums" ) ); !(archivaAdministration.getKnownContentConsumers().contains("create-missing-checksums"));
try try {
{ Path targetFile = targetPath.resolve(filename);
Path targetFile = targetPath.resolve( filename ); if (Files.exists(targetFile) && !VersionUtil.isSnapshot(version) && repoConfig.blocksRedeployments()) {
if ( Files.exists(targetFile) && !VersionUtil.isSnapshot( version ) && repoConfig.blocksRedeployments())
{
throw new ArchivaRestServiceException( throw new ArchivaRestServiceException(
"Overwriting released artifacts in repository '" + repoConfig.getId() + "' is not allowed.", "Overwriting released artifacts in repository '" + repoConfig.getId() + "' is not allowed.",
Response.Status.BAD_REQUEST.getStatusCode(), null ); Response.Status.BAD_REQUEST.getStatusCode(), null);
} else {
copyFile(Paths.get(fileMetadata.getServerFileName()), targetPath, filename, fixChecksums);
triggerAuditEvent(repoConfig.getId(), artifactPath.toString(), AuditEvent.UPLOAD_FILE);
queueRepositoryTask(repoConfig.getId(), targetFile);
} }
else } catch (IOException ie) {
{ log.error("IOException copying file: {}", ie.getMessage(), ie);
copyFile( Paths.get( fileMetadata.getServerFileName() ), targetPath, filename, fixChecksums );
triggerAuditEvent( repository.getId(), path + "/" + filename, AuditEvent.UPLOAD_FILE );
queueRepositoryTask( repository.getId(), targetFile );
}
}
catch ( IOException ie )
{
log.error( "IOException copying file: {}", ie.getMessage(), ie );
throw new ArchivaRestServiceException( throw new ArchivaRestServiceException(
"Overwriting released artifacts in repository '" + repoConfig.getId() + "' is not allowed.", "Overwriting released artifacts in repository '" + repoConfig.getId() + "' is not allowed.",
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie ); Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie);
} }
if ( generatePom ) if (generatePom) {
{
String pomFilename = filename; String pomFilename = filename;
if ( StringUtils.isNotEmpty( fileMetadata.getClassifier() ) ) if (StringUtils.isNotEmpty(fileMetadata.getClassifier())) {
{ pomFilename = StringUtils.remove(pomFilename, "-" + fileMetadata.getClassifier());
pomFilename = StringUtils.remove( pomFilename, "-" + fileMetadata.getClassifier() );
} }
pomFilename = FilenameUtils.removeExtension( pomFilename ) + ".pom"; pomFilename = FilenameUtils.removeExtension(pomFilename) + ".pom";
try try {
{
Path generatedPomFile = Path generatedPomFile =
createPom( targetPath, pomFilename, fileMetadata, groupId, artifactId, version, packaging ); createPom(targetPath, pomFilename, fileMetadata, groupId, artifactId, version, packaging);
triggerAuditEvent( repoConfig.getId(), path + "/" + pomFilename, AuditEvent.UPLOAD_FILE ); triggerAuditEvent(repoConfig.getId(), targetPath.resolve(pomFilename).toString(), AuditEvent.UPLOAD_FILE);
if ( fixChecksums ) if (fixChecksums) {
{ fixChecksums(generatedPomFile);
fixChecksums( generatedPomFile );
} }
queueRepositoryTask( repoConfig.getId(), generatedPomFile ); queueRepositoryTask(repoConfig.getId(), generatedPomFile);
} } catch (IOException ie) {
catch ( IOException ie )
{
throw new ArchivaRestServiceException( throw new ArchivaRestServiceException(
"Error encountered while writing pom file: " + ie.getMessage(), "Error encountered while writing pom file: " + ie.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie ); Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie);
} }
} }
// explicitly update only if metadata-updater consumer is not enabled! // explicitly update only if metadata-updater consumer is not enabled!
if ( !archivaAdministration.getKnownContentConsumers().contains( "metadata-updater" ) ) if (!archivaAdministration.getKnownContentConsumers().contains("metadata-updater")) {
{ updateProjectMetadata(targetPath.toAbsolutePath().toString(), lastUpdatedTimestamp, timestamp, newBuildNumber,
updateProjectMetadata( targetPath.toAbsolutePath().toString(), lastUpdatedTimestamp, timestamp, newBuildNumber, fixChecksums, fileMetadata, groupId, artifactId, version, packaging);
fixChecksums, fileMetadata, groupId, artifactId, version, packaging );
if ( VersionUtil.isSnapshot( version ) ) if (VersionUtil.isSnapshot(version)) {
{ updateVersionMetadata(versionMetadata, versionMetadataFile, lastUpdatedTimestamp, timestamp,
updateVersionMetadata( versionMetadata, versionMetadataFile, lastUpdatedTimestamp, timestamp, newBuildNumber, fixChecksums, fileMetadata, groupId, artifactId, version,
newBuildNumber, fixChecksums, fileMetadata, groupId, artifactId, version, packaging);
packaging );
} }
} }
} } catch (RepositoryNotFoundException re) {
catch ( RepositoryNotFoundException re )
{
log.error("RepositoryNotFoundException during save {}", re.getMessage()); log.error("RepositoryNotFoundException during save {}", re.getMessage());
re.printStackTrace(); re.printStackTrace();
throw new ArchivaRestServiceException( "Target repository cannot be found: " + re.getMessage(), throw new ArchivaRestServiceException("Target repository cannot be found: " + re.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), re ); Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), re);
} } catch (RepositoryException rep) {
catch ( RepositoryException rep )
{
log.error("RepositoryException during save {}", rep.getMessage()); log.error("RepositoryException during save {}", rep.getMessage());
throw new ArchivaRestServiceException( "Repository exception: " + rep.getMessage(), throw new ArchivaRestServiceException("Repository exception: " + rep.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), rep ); Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), rep);
} } catch (RepositoryAdminException e) {
catch ( RepositoryAdminException e )
{
log.error("RepositoryAdminException during save {}", e.getMessage()); log.error("RepositoryAdminException during save {}", e.getMessage());
throw new ArchivaRestServiceException( "RepositoryAdmin exception: " + e.getMessage(), throw new ArchivaRestServiceException("RepositoryAdmin exception: " + e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e ); Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
} } catch (IOException e) {
catch ( IOException e )
{
log.error("IOException during save {}", e.getMessage()); log.error("IOException during save {}", e.getMessage());
throw new ArchivaRestServiceException("Repository exception "+ e.getMessage(), throw new ArchivaRestServiceException("Repository exception " + e.getMessage(),
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e); Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
} }
} }
private ArchivaRepositoryMetadata getMetadata( Path metadataFile ) private ArtifactReference createArtifactRef(FileMetadata fileMetadata, String groupId, String artifactId, String version) {
throws RepositoryMetadataException ArtifactReference artifactReference = new ArtifactReference();
{ artifactReference.setArtifactId(artifactId);
artifactReference.setGroupId(groupId);
artifactReference.setVersion(version);
artifactReference.setClassifier(fileMetadata.getClassifier());
return artifactReference;
}
private ArchivaRepositoryMetadata getMetadata(Path metadataFile)
throws RepositoryMetadataException {
ArchivaRepositoryMetadata metadata = new ArchivaRepositoryMetadata(); ArchivaRepositoryMetadata metadata = new ArchivaRepositoryMetadata();
if ( Files.exists(metadataFile) ) if (Files.exists(metadataFile)) {
{ try {
try metadata = MavenMetadataReader.read(metadataFile);
{ } catch (XMLException e) {
metadata = MavenMetadataReader.read( metadataFile ); throw new RepositoryMetadataException(e.getMessage(), e);
}
catch ( XMLException e )
{
throw new RepositoryMetadataException( e.getMessage(), e );
} }
} }
return metadata; return metadata;
} }
private Path createPom( Path targetPath, String filename, FileMetadata fileMetadata, String groupId, private Path createPom(Path targetPath, String filename, FileMetadata fileMetadata, String groupId,
String artifactId, String version, String packaging ) String artifactId, String version, String packaging)
throws IOException throws IOException {
{
Model projectModel = new Model(); Model projectModel = new Model();
projectModel.setModelVersion( "4.0.0" ); projectModel.setModelVersion("4.0.0");
projectModel.setGroupId( groupId ); projectModel.setGroupId(groupId);
projectModel.setArtifactId( artifactId ); projectModel.setArtifactId(artifactId);
projectModel.setVersion( version ); projectModel.setVersion(version);
projectModel.setPackaging( packaging ); projectModel.setPackaging(packaging);
Path pomFile = targetPath.resolve( filename ); Path pomFile = targetPath.resolve(filename);
MavenXpp3Writer writer = new MavenXpp3Writer(); MavenXpp3Writer writer = new MavenXpp3Writer();
try (FileWriter w = new FileWriter( pomFile.toFile() )) try (FileWriter w = new FileWriter(pomFile.toFile())) {
{ writer.write(w, projectModel);
writer.write( w, projectModel );
} }
return pomFile; return pomFile;
} }
private void fixChecksums( Path file ) private void fixChecksums(Path file) {
{ ChecksummedFile checksum = new ChecksummedFile(file);
ChecksummedFile checksum = new ChecksummedFile( file ); checksum.fixChecksums(algorithms);
checksum.fixChecksums( algorithms );
} }
private void queueRepositoryTask( String repositoryId, Path localFile ) private void queueRepositoryTask(String repositoryId, Path localFile) {
{
RepositoryTask task = new RepositoryTask(); RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repositoryId ); task.setRepositoryId(repositoryId);
task.setResourceFile( localFile ); task.setResourceFile(localFile);
task.setUpdateRelatedArtifacts( true ); task.setUpdateRelatedArtifacts(true);
task.setScanAll( false ); task.setScanAll(false);
try try {
{ scheduler.queueTask(task);
scheduler.queueTask( task ); } catch (TaskQueueException e) {
} log.error("Unable to queue repository task to execute consumers on resource file ['{}"
catch ( TaskQueueException e ) + "'].", localFile.getFileName());
{
log.error( "Unable to queue repository task to execute consumers on resource file ['{}"
+ "'].", localFile.getFileName() );
} }
} }
private void copyFile( Path sourceFile, Path targetPath, String targetFilename, boolean fixChecksums ) private void copyFile(Path sourceFile, Path targetPath, String targetFilename, boolean fixChecksums)
throws IOException throws IOException {
{
Files.copy( sourceFile, targetPath.resolve( targetFilename ), StandardCopyOption.REPLACE_EXISTING, Files.copy(sourceFile, targetPath.resolve(targetFilename), StandardCopyOption.REPLACE_EXISTING,
StandardCopyOption.COPY_ATTRIBUTES ); StandardCopyOption.COPY_ATTRIBUTES);
if ( fixChecksums ) if (fixChecksums) {
{ fixChecksums(targetPath.resolve(targetFilename));
fixChecksums( targetPath.resolve( targetFilename ) );
} }
} }
/** /**
* Update artifact level metadata. If it does not exist, create the metadata and fix checksums if necessary. * Update artifact level metadata. If it does not exist, create the metadata and fix checksums if necessary.
*/ */
private void updateProjectMetadata( String targetPath, Date lastUpdatedTimestamp, String timestamp, int buildNumber, private void updateProjectMetadata(String targetPath, Date lastUpdatedTimestamp, String timestamp, int buildNumber,
boolean fixChecksums, FileMetadata fileMetadata, String groupId, boolean fixChecksums, FileMetadata fileMetadata, String groupId,
String artifactId, String version, String packaging ) String artifactId, String version, String packaging)
throws RepositoryMetadataException throws RepositoryMetadataException {
{
List<String> availableVersions = new ArrayList<>(); List<String> availableVersions = new ArrayList<>();
String latestVersion = version; String latestVersion = version;
Path projectDir = Paths.get(targetPath).getParent(); Path projectDir = Paths.get(targetPath).getParent();
Path projectMetadataFile = projectDir.resolve( MetadataTools.MAVEN_METADATA ); Path projectMetadataFile = projectDir.resolve(MetadataTools.MAVEN_METADATA);
ArchivaRepositoryMetadata projectMetadata = getMetadata( projectMetadataFile ); ArchivaRepositoryMetadata projectMetadata = getMetadata(projectMetadataFile);
if ( Files.exists(projectMetadataFile) ) if (Files.exists(projectMetadataFile)) {
{
availableVersions = projectMetadata.getAvailableVersions(); availableVersions = projectMetadata.getAvailableVersions();
Collections.sort( availableVersions, VersionComparator.getInstance() ); Collections.sort(availableVersions, VersionComparator.getInstance());
if ( !availableVersions.contains( version ) ) if (!availableVersions.contains(version)) {
{ availableVersions.add(version);
availableVersions.add( version );
} }
latestVersion = availableVersions.get( availableVersions.size() - 1 ); latestVersion = availableVersions.get(availableVersions.size() - 1);
} } else {
else availableVersions.add(version);
{
availableVersions.add( version );
projectMetadata.setGroupId( groupId ); projectMetadata.setGroupId(groupId);
projectMetadata.setArtifactId( artifactId ); projectMetadata.setArtifactId(artifactId);
} }
if ( projectMetadata.getGroupId() == null ) if (projectMetadata.getGroupId() == null) {
{ projectMetadata.setGroupId(groupId);
projectMetadata.setGroupId( groupId );
} }
if ( projectMetadata.getArtifactId() == null ) if (projectMetadata.getArtifactId() == null) {
{ projectMetadata.setArtifactId(artifactId);
projectMetadata.setArtifactId( artifactId );
} }
projectMetadata.setLatestVersion( latestVersion ); projectMetadata.setLatestVersion(latestVersion);
projectMetadata.setLastUpdatedTimestamp( lastUpdatedTimestamp ); projectMetadata.setLastUpdatedTimestamp(lastUpdatedTimestamp);
projectMetadata.setAvailableVersions( availableVersions ); projectMetadata.setAvailableVersions(availableVersions);
if ( !VersionUtil.isSnapshot( version ) ) if (!VersionUtil.isSnapshot(version)) {
{ projectMetadata.setReleasedVersion(latestVersion);
projectMetadata.setReleasedVersion( latestVersion );
} }
RepositoryMetadataWriter.write( projectMetadata, projectMetadataFile ); RepositoryMetadataWriter.write(projectMetadata, projectMetadataFile);
if ( fixChecksums ) if (fixChecksums) {
{ fixChecksums(projectMetadataFile);
fixChecksums( projectMetadataFile );
} }
} }
@ -734,33 +633,29 @@ public class DefaultFileUploadService
* Update version level metadata for snapshot artifacts. If it does not exist, create the metadata and fix checksums * Update version level metadata for snapshot artifacts. If it does not exist, create the metadata and fix checksums
* if necessary. * if necessary.
*/ */
private void updateVersionMetadata( ArchivaRepositoryMetadata metadata, Path metadataFile, private void updateVersionMetadata(ArchivaRepositoryMetadata metadata, Path metadataFile,
Date lastUpdatedTimestamp, String timestamp, int buildNumber, Date lastUpdatedTimestamp, String timestamp, int buildNumber,
boolean fixChecksums, FileMetadata fileMetadata, String groupId, boolean fixChecksums, FileMetadata fileMetadata, String groupId,
String artifactId, String version, String packaging ) String artifactId, String version, String packaging)
throws RepositoryMetadataException throws RepositoryMetadataException {
{ if (!Files.exists(metadataFile)) {
if ( !Files.exists(metadataFile) ) metadata.setGroupId(groupId);
{ metadata.setArtifactId(artifactId);
metadata.setGroupId( groupId ); metadata.setVersion(version);
metadata.setArtifactId( artifactId );
metadata.setVersion( version );
} }
if ( metadata.getSnapshotVersion() == null ) if (metadata.getSnapshotVersion() == null) {
{ metadata.setSnapshotVersion(new SnapshotVersion());
metadata.setSnapshotVersion( new SnapshotVersion() );
} }
metadata.getSnapshotVersion().setBuildNumber( buildNumber ); metadata.getSnapshotVersion().setBuildNumber(buildNumber);
metadata.getSnapshotVersion().setTimestamp( timestamp ); metadata.getSnapshotVersion().setTimestamp(timestamp);
metadata.setLastUpdatedTimestamp( lastUpdatedTimestamp ); metadata.setLastUpdatedTimestamp(lastUpdatedTimestamp);
RepositoryMetadataWriter.write( metadata, metadataFile ); RepositoryMetadataWriter.write(metadata, metadataFile);
if ( fixChecksums ) if (fixChecksums) {
{ fixChecksums(metadataFile);
fixChecksums( metadataFile );
} }
} }