FEATURE: Direct S3 multipart uploads for backups (#14736)
This PR introduces a new `enable_experimental_backup_uploader` site setting (default false and hidden) which, when enabled alongside `enable_direct_s3_uploads`, allows direct S3 multipart uploads of backup .tar.gz files.

To make multipart external uploads work with both the S3BackupStore and the S3Store, several methods have moved out of S3Store and into S3Helper:

* presigned_url
* create_multipart
* abort_multipart
* complete_multipart
* presign_multipart_part
* list_multipart_parts

S3Store and S3BackupStore now either delegate directly to S3Helper or wrap these methods with their own store-specific logic. FileStore.temporary_upload_path has also shed its dependence on upload_path, so it can be used interchangeably between the stores.

A similar change was made on the frontend: the multipart-related JS code moved out of ComposerUppyUpload into a mixin of its own (UppyS3Multipart), so it can also be used by UppyUploadMixin. Some changes to ExternalUploadManager were needed here as well: backup direct uploads do not need an Upload record in the database, so they are moved to their final S3 resting place when the multipart upload completes.

This changeset is not perfect; it introduces some special cases in UploadsController to handle backups that were previously in BackupsController, because UploadsController is where the multipart routes are located. A subsequent pull request will pull these routes into a module or some other sharing pattern, along with hooks, so the backups controller and the uploads controller (and any future controllers that need them) can include these routes in a nicer way.
Parent: d4e35f50c2
Commit: e4350bb966
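For orientation before the diffs: the direct multipart flow added here is driven by four JSON endpoints on UploadsController, all visible in the hunks below. A rough sketch of the request sequence, with illustrative payload values (field names are taken from the mixin and controller code in this commit):

    # Rough sketch of the client/server exchange (payloads illustrative).

    # 1. create the multipart upload; the server returns identifiers the
    #    client keeps for the rest of the flow
    create = { file_name: "backup.tar.gz", upload_type: "backup", file_size: 4098 }
    # POST /uploads/create-multipart.json
    #   => { unique_identifier:, external_upload_identifier:, key: }

    # 2. presign a batch of part URLs; the client PUTs each chunk directly
    #    to S3 and records the etag S3 returns for every part
    presign = { part_numbers: [1, 2, 3], unique_identifier: "..." }
    # POST /uploads/batch-presign-multipart-parts.json
    #   => { presigned_urls: { "1" => "https://...", ... } }

    # 3. stitch the parts together server-side
    complete = { parts: [{ part_number: 1, etag: "..." }], unique_identifier: "..." }
    # POST /uploads/complete-multipart.json

    # 4. on cancellation, clean up the incomplete upload
    abort_payload = { external_upload_identifier: "..." }
    # POST /uploads/abort-multipart.json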
@@ -12,6 +12,9 @@ export default Controller.extend({
   uploadLabel: i18n("admin.backups.upload.label"),
   backupLocation: setting("backup_location"),
   localBackupStorage: equal("backupLocation", "local"),
+  enableExperimentalBackupUploader: setting(
+    "enable_experimental_backup_uploader"
+  ),
 
   @discourseComputed("status.allowRestore", "status.isOperationRunning")
   restoreTitle(allowRestore, isOperationRunning) {
@@ -7,9 +7,13 @@
       uploadText=uploadLabel
       title="admin.backups.upload.title"
       class="btn-default"}}
   {{else}}
+    {{#if enableExperimentalBackupUploader}}
+      {{uppy-backup-uploader done=(route-action "remoteUploadSuccess")}}
+    {{else}}
       {{backup-uploader done=(route-action "remoteUploadSuccess")}}
+    {{/if}}
   {{/if}}
 
 {{#if site.isReadOnly}}
   {{d-button
@@ -0,0 +1,25 @@
+import Component from "@ember/component";
+import I18n from "I18n";
+import UppyUploadMixin from "discourse/mixins/uppy-upload";
+import discourseComputed from "discourse-common/utils/decorators";
+
+export default Component.extend(UppyUploadMixin, {
+  tagName: "span",
+  type: "backup",
+  useMultipartUploadsIfAvailable: true,
+
+  @discourseComputed("uploading", "uploadProgress")
+  uploadButtonText(uploading, progress) {
+    return uploading
+      ? I18n.t("admin.backups.upload.uploading_progress", { progress })
+      : I18n.t("admin.backups.upload.label");
+  },
+
+  validateUploadedFilesOptions() {
+    return { skipValidation: true };
+  },
+
+  uploadDone() {
+    this.done();
+  },
+});
@@ -1,13 +1,11 @@
 import Mixin from "@ember/object/mixin";
-import { Promise } from "rsvp";
 import ExtendableUploader from "discourse/mixins/extendable-uploader";
-import { ajax } from "discourse/lib/ajax";
+import UppyS3Multipart from "discourse/mixins/uppy-s3-multipart";
 import { deepMerge } from "discourse-common/lib/object";
 import UppyChecksum from "discourse/lib/uppy-checksum-plugin";
 import Uppy from "@uppy/core";
 import DropTarget from "@uppy/drop-target";
 import XHRUpload from "@uppy/xhr-upload";
-import AwsS3Multipart from "@uppy/aws-s3-multipart";
 import { warn } from "@ember/debug";
 import I18n from "I18n";
 import getURL from "discourse-common/lib/get-url";
@@ -33,7 +31,7 @@ import { cacheShortUploadUrl } from "pretty-text/upload-short-url";
 // and the most important _bindUploadTarget which handles all the main upload
 // functionality and event binding.
 //
-export default Mixin.create(ExtendableUploader, {
+export default Mixin.create(ExtendableUploader, UppyS3Multipart, {
   @observes("composerModel.uploadCancelled")
   _cancelUpload() {
     if (!this.get("composerModel.uploadCancelled")) {
@@ -390,158 +388,6 @@ export default Mixin.create(ExtendableUploader, UppyS3Multipart, {
     });
   },
 
-  _useS3MultipartUploads() {
-    const self = this;
-    const retryDelays = [0, 1000, 3000, 5000];
-
-    this._uppyInstance.use(AwsS3Multipart, {
-      // controls how many simultaneous _chunks_ are uploaded, not files,
-      // which in turn controls the minimum number of chunks presigned
-      // in each batch (limit / 2)
-      //
-      // the default, and minimum, chunk size is 5mb. we can control the
-      // chunk size via getChunkSize(file), so we may want to increase
-      // the chunk size for larger files
-      limit: 10,
-      retryDelays,
-
-      createMultipartUpload(file) {
-        self._uppyInstance.emit("create-multipart", file.id);
-
-        const data = {
-          file_name: file.name,
-          file_size: file.size,
-          upload_type: file.meta.upload_type,
-          metadata: file.meta,
-        };
-
-        // the sha1 checksum is set by the UppyChecksum plugin, except
-        // for in cases where the browser does not support the required
-        // crypto mechanisms or an error occurs. it is an additional layer
-        // of security, and not required.
-        if (file.meta.sha1_checksum) {
-          data.metadata = { "sha1-checksum": file.meta.sha1_checksum };
-        }
-
-        return ajax("/uploads/create-multipart.json", {
-          type: "POST",
-          data,
-          // uppy is inconsistent, an error here fires the upload-error event
-        }).then((responseData) => {
-          self._uppyInstance.emit("create-multipart-success", file.id);
-
-          file.meta.unique_identifier = responseData.unique_identifier;
-          return {
-            uploadId: responseData.external_upload_identifier,
-            key: responseData.key,
-          };
-        });
-      },
-
-      prepareUploadParts(file, partData) {
-        if (file.preparePartsRetryAttempts === undefined) {
-          file.preparePartsRetryAttempts = 0;
-        }
-        return ajax("/uploads/batch-presign-multipart-parts.json", {
-          type: "POST",
-          data: {
-            part_numbers: partData.partNumbers,
-            unique_identifier: file.meta.unique_identifier,
-          },
-        })
-          .then((data) => {
-            if (file.preparePartsRetryAttempts) {
-              delete file.preparePartsRetryAttempts;
-              self._consoleDebug(
-                `[uppy] Retrying batch fetch for ${file.id} was successful, continuing.`
-              );
-            }
-            return { presignedUrls: data.presigned_urls };
-          })
-          .catch((err) => {
-            const status = err.jqXHR.status;
-
-            // it is kind of ugly to have to track the retry attempts for
-            // the file based on the retry delays, but uppy's `retryable`
-            // function expects the rejected Promise data to be structured
-            // _just so_, and provides no interface for us to tell how many
-            // times the upload has been retried (which it tracks internally)
-            //
-            // if we exceed the attempts then there is no way that uppy will
-            // retry the upload once again, so in that case the alert can
-            // be safely shown to the user that their upload has failed.
-            if (file.preparePartsRetryAttempts < retryDelays.length) {
-              file.preparePartsRetryAttempts += 1;
-              const attemptsLeft =
-                retryDelays.length - file.preparePartsRetryAttempts + 1;
-              self._consoleDebug(
-                `[uppy] Fetching a batch of upload part URLs for ${file.id} failed with status ${status}, retrying ${attemptsLeft} more times...`
-              );
-              return Promise.reject({ source: { status } });
-            } else {
-              self._consoleDebug(
-                `[uppy] Fetching a batch of upload part URLs for ${file.id} failed too many times, throwing error.`
-              );
-              // uppy is inconsistent, an error here does not fire the upload-error event
-              self._handleUploadError(file, err);
-            }
-          });
-      },
-
-      completeMultipartUpload(file, data) {
-        self._uppyInstance.emit("complete-multipart", file.id);
-        const parts = data.parts.map((part) => {
-          return { part_number: part.PartNumber, etag: part.ETag };
-        });
-        return ajax("/uploads/complete-multipart.json", {
-          type: "POST",
-          contentType: "application/json",
-          data: JSON.stringify({
-            parts,
-            unique_identifier: file.meta.unique_identifier,
-            pasted: file.meta.pasted,
-            for_private_message: file.meta.for_private_message,
-          }),
-          // uppy is inconsistent, an error here fires the upload-error event
-        }).then((responseData) => {
-          self._uppyInstance.emit("complete-multipart-success", file.id);
-          return responseData;
-        });
-      },
-
-      abortMultipartUpload(file, { key, uploadId }) {
-        // if the user cancels the upload before the key and uploadId
-        // are stored from the createMultipartUpload response then they
-        // will not be set, and we don't have to abort the upload because
-        // it will not exist yet
-        if (!key || !uploadId) {
-          return;
-        }
-
-        // this gives us a chance to inspect the upload stub before
-        // it is deleted from external storage by aborting the multipart
-        // upload; see also ExternalUploadManager
-        if (file.meta.error && self.siteSettings.enable_upload_debug_mode) {
-          return;
-        }
-
-        return ajax("/uploads/abort-multipart.json", {
-          type: "POST",
-          data: {
-            external_upload_identifier: uploadId,
-          },
-          // uppy is inconsistent, an error here does not fire the upload-error event
-        }).catch((err) => {
-          self._handleUploadError(file, err);
-        });
-      },
-
-      // we will need a listParts function at some point when we want to
-      // resume multipart uploads; this is used by uppy to figure out
-      // what parts are uploaded and which still need to be
-    });
-  },
-
   _reset() {
     this._uppyInstance?.reset();
     this.setProperties({
@@ -0,0 +1,159 @@
+import Mixin from "@ember/object/mixin";
+import { Promise } from "rsvp";
+import { ajax } from "discourse/lib/ajax";
+import AwsS3Multipart from "@uppy/aws-s3-multipart";
+
+export default Mixin.create({
+  _useS3MultipartUploads() {
+    this.set("usingS3MultipartUploads", true);
+    const self = this;
+    const retryDelays = [0, 1000, 3000, 5000];
+
+    this._uppyInstance.use(AwsS3Multipart, {
+      // controls how many simultaneous _chunks_ are uploaded, not files,
+      // which in turn controls the minimum number of chunks presigned
+      // in each batch (limit / 2)
+      //
+      // the default, and minimum, chunk size is 5mb. we can control the
+      // chunk size via getChunkSize(file), so we may want to increase
+      // the chunk size for larger files
+      limit: 10,
+      retryDelays,
+
+      createMultipartUpload(file) {
+        self._uppyInstance.emit("create-multipart", file.id);
+
+        const data = {
+          file_name: file.name,
+          file_size: file.size,
+          upload_type: file.meta.upload_type,
+          metadata: file.meta,
+        };
+
+        // the sha1 checksum is set by the UppyChecksum plugin, except
+        // for in cases where the browser does not support the required
+        // crypto mechanisms or an error occurs. it is an additional layer
+        // of security, and not required.
+        if (file.meta.sha1_checksum) {
+          data.metadata = { "sha1-checksum": file.meta.sha1_checksum };
+        }
+
+        return ajax("/uploads/create-multipart.json", {
+          type: "POST",
+          data,
+          // uppy is inconsistent, an error here fires the upload-error event
+        }).then((responseData) => {
+          self._uppyInstance.emit("create-multipart-success", file.id);
+
+          file.meta.unique_identifier = responseData.unique_identifier;
+          return {
+            uploadId: responseData.external_upload_identifier,
+            key: responseData.key,
+          };
+        });
+      },
+
+      prepareUploadParts(file, partData) {
+        if (file.preparePartsRetryAttempts === undefined) {
+          file.preparePartsRetryAttempts = 0;
+        }
+        return ajax("/uploads/batch-presign-multipart-parts.json", {
+          type: "POST",
+          data: {
+            part_numbers: partData.partNumbers,
+            unique_identifier: file.meta.unique_identifier,
+          },
+        })
+          .then((data) => {
+            if (file.preparePartsRetryAttempts) {
+              delete file.preparePartsRetryAttempts;
+              self._consoleDebug(
+                `[uppy] Retrying batch fetch for ${file.id} was successful, continuing.`
+              );
+            }
+            return { presignedUrls: data.presigned_urls };
+          })
+          .catch((err) => {
+            const status = err.jqXHR.status;
+
+            // it is kind of ugly to have to track the retry attempts for
+            // the file based on the retry delays, but uppy's `retryable`
+            // function expects the rejected Promise data to be structured
+            // _just so_, and provides no interface for us to tell how many
+            // times the upload has been retried (which it tracks internally)
+            //
+            // if we exceed the attempts then there is no way that uppy will
+            // retry the upload once again, so in that case the alert can
+            // be safely shown to the user that their upload has failed.
+            if (file.preparePartsRetryAttempts < retryDelays.length) {
+              file.preparePartsRetryAttempts += 1;
+              const attemptsLeft =
+                retryDelays.length - file.preparePartsRetryAttempts + 1;
+              self._consoleDebug(
+                `[uppy] Fetching a batch of upload part URLs for ${file.id} failed with status ${status}, retrying ${attemptsLeft} more times...`
+              );
+              return Promise.reject({ source: { status } });
+            } else {
+              self._consoleDebug(
+                `[uppy] Fetching a batch of upload part URLs for ${file.id} failed too many times, throwing error.`
+              );
+              // uppy is inconsistent, an error here does not fire the upload-error event
+              self._handleUploadError(file, err);
+            }
+          });
+      },
+
+      completeMultipartUpload(file, data) {
+        self._uppyInstance.emit("complete-multipart", file.id);
+        const parts = data.parts.map((part) => {
+          return { part_number: part.PartNumber, etag: part.ETag };
+        });
+        return ajax("/uploads/complete-multipart.json", {
+          type: "POST",
+          contentType: "application/json",
+          data: JSON.stringify({
+            parts,
+            unique_identifier: file.meta.unique_identifier,
+            pasted: file.meta.pasted,
+            for_private_message: file.meta.for_private_message,
+          }),
+          // uppy is inconsistent, an error here fires the upload-error event
+        }).then((responseData) => {
+          self._uppyInstance.emit("complete-multipart-success", file.id);
+          return responseData;
+        });
+      },
+
+      abortMultipartUpload(file, { key, uploadId }) {
+        // if the user cancels the upload before the key and uploadId
+        // are stored from the createMultipartUpload response then they
+        // will not be set, and we don't have to abort the upload because
+        // it will not exist yet
+        if (!key || !uploadId) {
+          return;
+        }
+
+        // this gives us a chance to inspect the upload stub before
+        // it is deleted from external storage by aborting the multipart
+        // upload; see also ExternalUploadManager
+        if (file.meta.error && self.siteSettings.enable_upload_debug_mode) {
+          return;
+        }
+
+        return ajax("/uploads/abort-multipart.json", {
+          type: "POST",
+          data: {
+            external_upload_identifier: uploadId,
+          },
+          // uppy is inconsistent, an error here does not fire the upload-error event
+        }).catch((err) => {
+          self._handleUploadError(file, err);
+        });
+      },
+
+      // we will need a listParts function at some point when we want to
+      // resume multipart uploads; this is used by uppy to figure out
+      // what parts are uploaded and which still need to be
+    });
+  },
+});
@@ -13,12 +13,13 @@ import DropTarget from "@uppy/drop-target";
 import XHRUpload from "@uppy/xhr-upload";
 import AwsS3 from "@uppy/aws-s3";
 import UppyChecksum from "discourse/lib/uppy-checksum-plugin";
+import UppyS3Multipart from "discourse/mixins/uppy-s3-multipart";
 import { on } from "discourse-common/utils/decorators";
 import { warn } from "@ember/debug";
 
 export const HUGE_FILE_THRESHOLD_BYTES = 104_857_600; // 100MB
 
-export default Mixin.create({
+export default Mixin.create(UppyS3Multipart, {
   uploading: false,
   uploadProgress: 0,
   _uppyInstance: null,
@@ -26,13 +27,6 @@ export default Mixin.create(UppyS3Multipart, {
   _inProgressUploads: 0,
   id: null,
 
-  // TODO (martin): currently used for backups to turn on auto upload and PUT/XML requests
-  // and for emojis to do sequential uploads, when we get to replacing those
-  // with uppy make sure this is used when initializing uppy
-  uploadOptions() {
-    return {};
-  },
-
   uploadDone() {
     warn("You should implement `uploadDone`", {
       id: "discourse.upload.missing-upload-done",
@@ -170,7 +164,7 @@ export default Mixin.create(UppyS3Multipart, {
     });
 
     this._uppyInstance.on("upload-error", (file, error, response) => {
-      displayErrorForUpload(response, this.siteSettings, file.name);
+      displayErrorForUpload(response || error, this.siteSettings, file.name);
       this._reset();
     });
 
@@ -184,7 +178,11 @@ export default Mixin.create(UppyS3Multipart, {
       this.siteSettings.enable_direct_s3_uploads &&
       !this.preventDirectS3Uploads
     ) {
+      if (this.useMultipartUploadsIfAvailable) {
+        this._useS3MultipartUploads();
+      } else {
        this._useS3Uploads();
+      }
     } else {
       this._useXHRUploads();
     }
@@ -1,4 +1,4 @@
 <label class="btn" disabled={{uploading}} title={{i18n "admin.backups.upload.title"}}>
-  {{d-icon "upload"}} {{uploadButtonText}}
+  {{d-icon "upload"}}{{uploadButtonText}}
   <input class="hidden-upload-field" disabled={{uploading}} type="file" accept=".gz">
 </label>
@@ -0,0 +1,4 @@
+<label class="btn" disabled={{uploading}} title={{i18n "admin.backups.upload.title"}}>
+  {{d-icon "upload"}}{{uploadButtonText}}
+  <input class="hidden-upload-field" disabled={{uploading}} type="file" accept=".gz">
+</label>
@@ -255,10 +255,12 @@ class UploadsController < ApplicationController
     external_upload_manager = ExternalUploadManager.new(external_upload_stub, opts)
     hijack do
       begin
-        upload = external_upload_manager.promote_to_upload!
+        upload = external_upload_manager.transform!
+
         if upload.errors.empty?
+          response_serialized = external_upload_stub.upload_type != "backup" ? UploadsController.serialize_upload(upload) : {}
           external_upload_stub.destroy!
-          render json: UploadsController.serialize_upload(upload), status: 200
+          render json: response_serialized, status: 200
         else
           render_json_error(upload.errors.to_hash.values.flatten, status: 422)
         end
@@ -301,12 +303,18 @@ class UploadsController < ApplicationController
     file_size = params.require(:file_size).to_i
     upload_type = params.require(:upload_type)
 
+    if upload_type == "backup"
+      ensure_staff
+      return render_json_error(I18n.t("backup.backup_file_should_be_tar_gz")) unless valid_backup_extension?(file_name)
+      return render_json_error(I18n.t("backup.invalid_filename")) unless valid_backup_filename?(file_name)
+    else
      if file_size_too_big?(file_name, file_size)
        return render_json_error(
          I18n.t("upload.attachments.too_large_humanized", max_size: ActiveSupport::NumberHelper.number_to_human_size(SiteSetting.max_attachment_size_kb.kilobytes)),
          status: 422
        )
      end
+    end
 
     begin
       external_upload_data = ExternalUploadManager.create_direct_multipart_upload(
@@ -321,6 +329,13 @@ class UploadsController < ApplicationController
         debug_upload_error(err, "upload.create_mutlipart_failure", additional_detail: err.message),
         status: 422
       )
+    rescue BackupRestore::BackupStore::BackupFileExists
+      return render_json_error(I18n.t("backup.file_exists"), status: 422)
+    rescue BackupRestore::BackupStore::StorageError => err
+      return render_json_error(
+        debug_upload_error(err, "upload.create_mutlipart_failure", additional_detail: err.message),
+        status: 422
+      )
     end
 
     render json: external_upload_data
@@ -347,9 +362,11 @@ class UploadsController < ApplicationController
       return render_404
     end
 
+    store = multipart_store(external_upload_stub.upload_type)
+
     presigned_urls = {}
     part_numbers.each do |part_number|
-      presigned_urls[part_number] = Discourse.store.presign_multipart_part(
+      presigned_urls[part_number] = store.presign_multipart_part(
        upload_id: external_upload_stub.external_upload_identifier,
        key: external_upload_stub.key,
        part_number: part_number
@@ -370,8 +387,9 @@ class UploadsController < ApplicationController
   end
 
   def multipart_upload_exists?(external_upload_stub)
+    store = multipart_store(external_upload_stub.upload_type)
     begin
-      Discourse.store.list_multipart_parts(
+      store.list_multipart_parts(
        upload_id: external_upload_stub.external_upload_identifier,
        key: external_upload_stub.key,
        max_parts: 1
@@ -395,9 +413,10 @@ class UploadsController < ApplicationController
     return render json: success_json if external_upload_stub.blank?
 
     return render_404 if external_upload_stub.created_by_id != current_user.id
+    store = multipart_store(external_upload_stub.upload_type)
 
     begin
-      Discourse.store.abort_multipart(
+      store.abort_multipart(
        upload_id: external_upload_stub.external_upload_identifier,
        key: external_upload_stub.key
      )
@@ -430,6 +449,7 @@ class UploadsController < ApplicationController
       return render_404
     end
 
+    store = multipart_store(external_upload_stub.upload_type)
     parts = parts.map do |part|
       part_number = part[:part_number]
       etag = part[:etag]
@@ -447,7 +467,7 @@ class UploadsController < ApplicationController
     end
 
     begin
-      complete_response = Discourse.store.complete_multipart(
+      complete_response = store.complete_multipart(
        upload_id: external_upload_stub.external_upload_identifier,
        key: external_upload_stub.key,
        parts: parts
@@ -464,6 +484,11 @@ class UploadsController < ApplicationController
 
   protected
 
+  def multipart_store(upload_type)
+    ensure_staff if upload_type == "backup"
+    ExternalUploadManager.store_for_upload_type(upload_type)
+  end
+
   def force_download?
     params[:dl] == "1"
   end
@@ -585,4 +610,12 @@ class UploadsController < ApplicationController
     return if metadata.blank?
     metadata.permit("sha1-checksum").to_h
   end
+
+  def valid_backup_extension?(filename)
+    /\.(tar\.gz|t?gz)$/i =~ filename
+  end
+
+  def valid_backup_filename?(filename)
+    !!(/^[a-zA-Z0-9\._\-]+$/ =~ filename)
+  end
 end
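As a quick sanity check of the two validators just added above (results per Ruby's `=~`, which returns a match index or nil):

    /\.(tar\.gz|t?gz)$/i =~ "site-2021-11-05.tar.gz"  # truthy: .tar.gz accepted
    /\.(tar\.gz|t?gz)$/i =~ "backup.tgz"              # truthy: .tgz accepted too
    /\.(tar\.gz|t?gz)$/i =~ "backup.zip"              # nil: wrong extension

    !!(/^[a-zA-Z0-9\._\-]+$/ =~ "site-2021-11-05.tar.gz") # => true
    !!(/^[a-zA-Z0-9\._\-]+$/ =~ "bad name!.tar.gz")       # => false (space and "!" rejected)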
@@ -5,6 +5,8 @@ class ExternalUploadManager
   SIZE_MISMATCH_BAN_MINUTES = 5
   BAN_USER_REDIS_PREFIX = "ban_user_from_external_uploads_"
 
+  UPLOAD_TYPES_EXCLUDED_FROM_UPLOAD_PROMOTION = ["backup"].freeze
+
   class ChecksumMismatchError < StandardError; end
   class DownloadFailedError < StandardError; end
   class CannotPromoteError < StandardError; end
@@ -21,10 +23,11 @@ class ExternalUploadManager
   end
 
   def self.create_direct_upload(current_user:, file_name:, file_size:, upload_type:, metadata: {})
-    url = Discourse.store.signed_url_for_temporary_upload(
+    store = store_for_upload_type(upload_type)
+    url = store.signed_url_for_temporary_upload(
       file_name, metadata: metadata
     )
-    key = Discourse.store.path_from_url(url)
+    key = store.s3_helper.path_from_url(url)
 
     upload_stub = ExternalUploadStub.create!(
       key: key,
@@ -41,7 +44,8 @@ class ExternalUploadManager
     current_user:, file_name:, file_size:, upload_type:, metadata: {}
   )
     content_type = MiniMime.lookup_by_filename(file_name)&.content_type
-    multipart_upload = Discourse.store.create_multipart(
+    store = store_for_upload_type(upload_type)
+    multipart_upload = store.create_multipart(
       file_name, content_type, metadata: metadata
     )
@@ -62,29 +66,30 @@ class ExternalUploadManager
     }
   end
 
+  def self.store_for_upload_type(upload_type)
+    if upload_type == "backup"
+      if !SiteSetting.enable_backups? || SiteSetting.backup_location != BackupLocationSiteSetting::S3
+        raise Discourse::InvalidAccess.new
+      end
+      BackupRestore::BackupStore.create
+    else
+      Discourse.store
+    end
+  end
+
   def initialize(external_upload_stub, upload_create_opts = {})
     @external_upload_stub = external_upload_stub
     @upload_create_opts = upload_create_opts
+    @store = ExternalUploadManager.store_for_upload_type(external_upload_stub.upload_type)
   end
 
   def can_promote?
     external_upload_stub.status == ExternalUploadStub.statuses[:created]
   end
 
-  def promote_to_upload!
+  def transform!
     raise CannotPromoteError if !can_promote?
 
     external_upload_stub.update!(status: ExternalUploadStub.statuses[:uploaded])
-    external_stub_object = Discourse.store.object_from_path(external_upload_stub.key)
-    external_etag = external_stub_object.etag
-    external_size = external_stub_object.size
-    external_sha1 = external_stub_object.metadata["sha1-checksum"]
-
-    # This could be legitimately nil, if it's too big to download on the
-    # server, or it could have failed. To this end we set a should_download
-    # variable as well to check.
-    tempfile = nil
-    should_download = external_size < DOWNLOAD_LIMIT
-
    # We require that the file size is specified ahead of time, and compare
    # it here to make sure that people are not uploading excessively large
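A minimal sketch of how the pieces above fit together at the call site (assuming an S3-backed backup location; this mirrors the hunk rather than adding new behaviour):

    # sketch: what a controller does with the refactored manager
    stub = ExternalUploadStub.find_by(unique_identifier: params[:unique_identifier])
    manager = ExternalUploadManager.new(stub)

    # store_for_upload_type picks BackupRestore::BackupStore.create for
    # "backup" (raising Discourse::InvalidAccess unless backups are enabled
    # and stored on S3) and Discourse.store for everything else.
    upload = manager.transform!
    # For "backup" stubs, transform! moves the file straight to its final S3
    # destination and never creates an Upload record; for other types it
    # downloads, verifies, and promotes to an Upload as before.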
@@ -98,6 +103,34 @@ class ExternalUploadManager
       raise SizeMismatchError.new("expected: #{external_upload_stub.filesize}, actual: #{external_size}")
     end
 
+    if UPLOAD_TYPES_EXCLUDED_FROM_UPLOAD_PROMOTION.include?(external_upload_stub.upload_type)
+      move_to_final_destination
+    else
+      promote_to_upload
+    end
+  rescue
+    if !SiteSetting.enable_upload_debug_mode
+      # We don't need to do anything special to abort multipart uploads here,
+      # because at this point (calling promote_to_upload!), the multipart
+      # upload would already be complete.
+      @store.delete_file(external_upload_stub.key)
+      external_upload_stub.destroy!
+    else
+      external_upload_stub.update(status: ExternalUploadStub.statuses[:failed])
+    end
+
+    raise
+  end
+
+  private
+
+  def promote_to_upload
+    # This could be legitimately nil, if it's too big to download on the
+    # server, or it could have failed. To this end we set a should_download
+    # variable as well to check.
+    tempfile = nil
+    should_download = external_size < DOWNLOAD_LIMIT
+
     if should_download
       tempfile = download(external_upload_stub.key, external_upload_stub.upload_type)
 
@@ -121,26 +154,38 @@ class ExternalUploadManager
       UploadCreator.new(tempfile, external_upload_stub.original_filename, opts).create_for(
         external_upload_stub.created_by_id
       )
-    rescue
-      if !SiteSetting.enable_upload_debug_mode
-        # We don't need to do anything special to abort multipart uploads here,
-        # because at this point (calling promote_to_upload!), the multipart
-        # upload would already be complete.
-        Discourse.store.delete_file(external_upload_stub.key)
-        external_upload_stub.destroy!
-      else
-        external_upload_stub.update(status: ExternalUploadStub.statuses[:failed])
-      end
-
-      raise
     ensure
       tempfile&.close!
     end
 
-  private
+  def move_to_final_destination
+    content_type = MiniMime.lookup_by_filename(external_upload_stub.original_filename).content_type
+    @store.move_existing_stored_upload(
+      existing_external_upload_key: external_upload_stub.key,
+      original_filename: external_upload_stub.original_filename,
+      content_type: content_type
+    )
+    Struct.new(:errors).new([])
+  end
+
+  def external_stub_object
+    @external_stub_object ||= @store.object_from_path(external_upload_stub.key)
+  end
+
+  def external_etag
+    @external_etag ||= external_stub_object.etag
+  end
+
+  def external_size
+    @external_size ||= external_stub_object.size
+  end
+
+  def external_sha1
+    @external_sha1 ||= external_stub_object.metadata["sha1-checksum"]
+  end
 
   def download(key, type)
-    url = Discourse.store.signed_url_for_path(external_upload_stub.key)
+    url = @store.signed_url_for_path(external_upload_stub.key)
     FileHelper.download(
       url,
       max_file_size: DOWNLOAD_LIMIT,
@@ -271,6 +271,10 @@ basic:
     client: true
     default: false
     hidden: true
+  enable_experimental_backup_uploader:
+    client: true
+    default: false
+    hidden: true
   enable_direct_s3_uploads:
     client: true
     default: false
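Note that the new flag only has an effect together with the pre-existing ones; to try it out, something like this in a Rails console should do (illustrative, mirroring what the specs further down set up):

    SiteSetting.backup_location = BackupLocationSiteSetting::S3   # backups must live on S3
    SiteSetting.s3_backup_bucket = "my-backup-bucket"             # bucket name illustrative
    SiteSetting.enable_direct_s3_uploads = true
    SiteSetting.enable_experimental_backup_uploader = true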
@@ -2,12 +2,18 @@
 
 module BackupRestore
   class S3BackupStore < BackupStore
-    UPLOAD_URL_EXPIRES_AFTER_SECONDS ||= 21_600 # 6 hours
+    UPLOAD_URL_EXPIRES_AFTER_SECONDS ||= 6.hours.to_i
 
+    delegate :abort_multipart, :presign_multipart_part, :list_multipart_parts,
+      :complete_multipart, to: :s3_helper
+
     def initialize(opts = {})
       @s3_options = S3Helper.s3_options(SiteSetting)
       @s3_options.merge!(opts[:s3_options]) if opts[:s3_options]
-      @s3_helper = S3Helper.new(s3_bucket_name_with_prefix, '', @s3_options.clone)
     end
 
+    def s3_helper
+      @s3_helper ||= S3Helper.new(s3_bucket_name_with_prefix, '', @s3_options.clone)
+    end
+
     def remote?
@@ -15,12 +21,12 @@ module BackupRestore
     end
 
     def file(filename, include_download_source: false)
-      obj = @s3_helper.object(filename)
+      obj = s3_helper.object(filename)
       create_file_from_object(obj, include_download_source) if obj.exists?
     end
 
     def delete_file(filename)
-      obj = @s3_helper.object(filename)
+      obj = s3_helper.object(filename)
 
       if obj.exists?
         obj.delete
@@ -29,11 +35,11 @@ module BackupRestore
     end
 
     def download_file(filename, destination_path, failure_message = nil)
-      @s3_helper.download_file(filename, destination_path, failure_message)
+      s3_helper.download_file(filename, destination_path, failure_message)
     end
 
     def upload_file(filename, source_path, content_type)
-      obj = @s3_helper.object(filename)
+      obj = s3_helper.object(filename)
       raise BackupFileExists.new if obj.exists?
 
       obj.upload_file(source_path, content_type: content_type)
@@ -41,7 +47,7 @@ module BackupRestore
     end
 
     def generate_upload_url(filename)
-      obj = @s3_helper.object(filename)
+      obj = s3_helper.object(filename)
       raise BackupFileExists.new if obj.exists?
 
       # TODO (martin) We can remove this at a later date when we move this
@@ -55,12 +61,65 @@ module BackupRestore
       raise StorageError.new(e.message.presence || e.class.name)
     end
 
+    def signed_url_for_temporary_upload(file_name, expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS, metadata: {})
+      obj = object_from_path(file_name)
+      raise BackupFileExists.new if obj.exists?
+      key = temporary_upload_path(file_name)
+      s3_helper.presigned_url(
+        key,
+        method: :put_object,
+        expires_in: expires_in,
+        opts: {
+          metadata: metadata,
+          acl: "private"
+        }
+      )
+    end
+
+    def temporary_upload_path(file_name)
+      FileStore::BaseStore.temporary_upload_path(file_name, folder_prefix: temporary_folder_prefix)
+    end
+
+    def temporary_folder_prefix
+      folder_prefix = s3_helper.s3_bucket_folder_path.nil? ? "" : s3_helper.s3_bucket_folder_path
+
+      if Rails.env.test?
+        folder_prefix = File.join(folder_prefix, "test_#{ENV['TEST_ENV_NUMBER'].presence || '0'}")
+      end
+
+      folder_prefix
+    end
+
+    def create_multipart(file_name, content_type, metadata: {})
+      obj = object_from_path(file_name)
+      raise BackupFileExists.new if obj.exists?
+      key = temporary_upload_path(file_name)
+      s3_helper.create_multipart(key, content_type, metadata: metadata)
+    end
+
+    def move_existing_stored_upload(
+      existing_external_upload_key:,
+      original_filename: nil,
+      content_type: nil
+    )
+      s3_helper.copy(
+        existing_external_upload_key,
+        File.join(s3_helper.s3_bucket_folder_path, original_filename),
+        options: { acl: "private", apply_metadata_to_destination: true }
+      )
+      s3_helper.delete_object(existing_external_upload_key)
+    end
+
+    def object_from_path(path)
+      s3_helper.object(path)
+    end
+
     private
 
     def unsorted_files
       objects = []
 
-      @s3_helper.list.each do |obj|
+      s3_helper.list.each do |obj|
         if obj.key.match?(file_regex)
           objects << create_file_from_object(obj)
         end
@@ -96,7 +155,7 @@ module BackupRestore
 
     def file_regex
       @file_regex ||= begin
-        path = @s3_helper.s3_bucket_folder_path || ""
+        path = s3_helper.s3_bucket_folder_path || ""
 
         if path.present?
           path = "#{path}/" unless path.end_with?("/")
@@ -41,7 +41,7 @@ module FileStore
       File.join(path, "test_#{ENV['TEST_ENV_NUMBER'].presence || '0'}")
     end
 
-    def temporary_upload_path(file_name, folder_prefix: "")
+    def self.temporary_upload_path(file_name, folder_prefix: "")
       # We don't want to use the original file name as it can contain special
       # characters, which can interfere with external providers operations and
       # introduce other unexpected behaviour.
@@ -49,7 +49,6 @@ module FileStore
       File.join(
         TEMPORARY_UPLOAD_PREFIX,
         folder_prefix,
-        upload_path,
        SecureRandom.hex,
        file_name_random
      )
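With `upload_path` gone from the join, the key shape is driven entirely by the caller-supplied `folder_prefix`. Judging by the spec expectations further down, `TEMPORARY_UPLOAD_PREFIX` resolves to `temp/`, so the generated keys look roughly like this (hex segments are random):

    FileStore::BaseStore.temporary_upload_path("test.png", folder_prefix: "uploads/default")
    # => "temp/uploads/default/<hex>/<hex>.png"

    FileStore::BaseStore.temporary_upload_path("b.tar.gz")  # no prefix
    # => "temp/<hex>/<hex>.tar.gz"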
@@ -39,6 +39,10 @@ module FileStore
       File.join(Discourse.base_path, upload_path)
     end
 
+    def temporary_upload_path(filename)
+      FileStore::BaseStore.temporary_upload_path(filename, folder_prefix: relative_base_url)
+    end
+
     def external?
       false
     end
@@ -11,6 +11,9 @@ module FileStore
   class S3Store < BaseStore
     TOMBSTONE_PREFIX ||= "tombstone/"
 
+    delegate :abort_multipart, :presign_multipart_part, :list_multipart_parts,
+      :complete_multipart, to: :s3_helper
+
     def initialize(s3_helper = nil)
       @s3_helper = s3_helper
     end
@@ -35,7 +38,11 @@ module FileStore
       url
     end
 
-    def move_existing_stored_upload(existing_external_upload_key, upload, content_type = nil)
+    def move_existing_stored_upload(
+      existing_external_upload_key:,
+      upload: nil,
+      content_type: nil
+    )
       upload.url = nil
       path = get_path_for_upload(upload)
       url, upload.etag = store_file(
@@ -210,10 +217,6 @@ module FileStore
       upload.url
     end
 
-    def path_from_url(url)
-      URI.parse(url).path.delete_prefix("/")
-    end
-
     def cdn_url(url)
       return url if SiteSetting.Upload.s3_cdn_url.blank?
       schema = url[/^(https?:)?\/\//, 1]
@@ -228,7 +231,7 @@ module FileStore
 
     def signed_url_for_temporary_upload(file_name, expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS, metadata: {})
       key = temporary_upload_path(file_name)
-      presigned_url(
+      s3_helper.presigned_url(
        key,
        method: :put_object,
        expires_in: expires_in,
@@ -240,7 +243,10 @@ module FileStore
     end
 
     def temporary_upload_path(file_name)
-      s3_bucket_folder_path.nil? ? super(file_name) : super(file_name, folder_prefix: s3_bucket_folder_path)
+      folder_prefix = s3_bucket_folder_path.nil? ? upload_path : File.join(s3_bucket_folder_path, upload_path)
+      FileStore::BaseStore.temporary_upload_path(
+        file_name, folder_prefix: folder_prefix
+      )
     end
 
     def object_from_path(path)
@@ -315,96 +321,13 @@ module FileStore
       FileUtils.mv(old_upload_path, public_upload_path) if old_upload_path
     end
 
-    def abort_multipart(key:, upload_id:)
-      s3_helper.s3_client.abort_multipart_upload(
-        bucket: s3_bucket_name,
-        key: key,
-        upload_id: upload_id
-      )
-    end
-
     def create_multipart(file_name, content_type, metadata: {})
       key = temporary_upload_path(file_name)
-      response = s3_helper.s3_client.create_multipart_upload(
-        acl: "private",
-        bucket: s3_bucket_name,
-        key: key,
-        content_type: content_type,
-        metadata: metadata
-      )
-      { upload_id: response.upload_id, key: key }
-    end
-
-    def presign_multipart_part(upload_id:, key:, part_number:)
-      presigned_url(
-        key,
-        method: :upload_part,
-        expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS,
-        opts: {
-          part_number: part_number,
-          upload_id: upload_id
-        }
-      )
-    end
-
-    # Important note from the S3 documentation:
-    #
-    # This request returns a default and maximum of 1000 parts.
-    # You can restrict the number of parts returned by specifying the
-    # max_parts argument. If your multipart upload consists of more than 1,000
-    # parts, the response returns an IsTruncated field with the value of true,
-    # and a NextPartNumberMarker element.
-    #
-    # In subsequent ListParts requests you can include the part_number_marker arg
-    # using the NextPartNumberMarker the field value from the previous response to
-    # get more parts.
-    #
-    # See https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/S3/Client.html#list_parts-instance_method
-    def list_multipart_parts(upload_id:, key:, max_parts: 1000, start_from_part_number: nil)
-      options = {
-        bucket: s3_bucket_name,
-        key: key,
-        upload_id: upload_id,
-        max_parts: max_parts
-      }
-
-      if start_from_part_number.present?
-        options[:part_number_marker] = start_from_part_number
-      end
-
-      s3_helper.s3_client.list_parts(options)
-    end
-
-    def complete_multipart(upload_id:, key:, parts:)
-      s3_helper.s3_client.complete_multipart_upload(
-        bucket: s3_bucket_name,
-        key: key,
-        upload_id: upload_id,
-        multipart_upload: {
-          parts: parts
-        }
-      )
+      s3_helper.create_multipart(key, content_type, metadata: metadata)
     end
 
     private
 
-    def presigned_url(
-      key,
-      method:,
-      expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS,
-      opts: {}
-    )
-      signer = Aws::S3::Presigner.new(client: s3_helper.s3_client)
-      signer.presigned_url(
-        method,
-        {
-          bucket: s3_bucket_name,
-          key: key,
-          expires_in: expires_in,
-        }.merge(opts)
-      )
-    end
-
     def presigned_get_url(
       url,
       force_download: false,
@@ -78,6 +78,10 @@ class S3Helper
     [path, etag.gsub('"', '')]
   end
 
+  def path_from_url(url)
+    URI.parse(url).path.delete_prefix("/")
+  end
+
   def remove(s3_filename, copy_to_tombstone = false)
     s3_filename = s3_filename.dup
 
@@ -282,6 +286,92 @@ class S3Helper
     get_path_for_s3_upload(path)
   end
 
+  def abort_multipart(key:, upload_id:)
+    s3_client.abort_multipart_upload(
+      bucket: s3_bucket_name,
+      key: key,
+      upload_id: upload_id
+    )
+  end
+
+  def create_multipart(key, content_type, metadata: {})
+    response = s3_client.create_multipart_upload(
+      acl: "private",
+      bucket: s3_bucket_name,
+      key: key,
+      content_type: content_type,
+      metadata: metadata
+    )
+    { upload_id: response.upload_id, key: key }
+  end
+
+  def presign_multipart_part(upload_id:, key:, part_number:)
+    presigned_url(
+      key,
+      method: :upload_part,
+      expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS,
+      opts: {
+        part_number: part_number,
+        upload_id: upload_id
+      }
+    )
+  end
+
+  # Important note from the S3 documentation:
+  #
+  # This request returns a default and maximum of 1000 parts.
+  # You can restrict the number of parts returned by specifying the
+  # max_parts argument. If your multipart upload consists of more than 1,000
+  # parts, the response returns an IsTruncated field with the value of true,
+  # and a NextPartNumberMarker element.
+  #
+  # In subsequent ListParts requests you can include the part_number_marker arg
+  # using the NextPartNumberMarker the field value from the previous response to
+  # get more parts.
+  #
+  # See https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/S3/Client.html#list_parts-instance_method
+  def list_multipart_parts(upload_id:, key:, max_parts: 1000, start_from_part_number: nil)
+    options = {
+      bucket: s3_bucket_name,
+      key: key,
+      upload_id: upload_id,
+      max_parts: max_parts
+    }
+
+    if start_from_part_number.present?
+      options[:part_number_marker] = start_from_part_number
+    end
+
+    s3_client.list_parts(options)
+  end
+
+  def complete_multipart(upload_id:, key:, parts:)
+    s3_client.complete_multipart_upload(
+      bucket: s3_bucket_name,
+      key: key,
+      upload_id: upload_id,
+      multipart_upload: {
+        parts: parts
+      }
+    )
+  end
+
+  def presigned_url(
+    key,
+    method:,
+    expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS,
+    opts: {}
+  )
+    Aws::S3::Presigner.new(client: s3_client).presigned_url(
+      method,
+      {
+        bucket: s3_bucket_name,
+        key: key,
+        expires_in: expires_in,
+      }.merge(opts)
+    )
+  end
+
   private
 
   def fetch_bucket_cors_rules
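Taken together, a whole server-side multipart lifecycle now runs through the helper. A sketch under the assumption of a plain bucket (names and etags illustrative; the method calls are exactly the ones added above):

    helper = S3Helper.new("my-bucket")
    mpu = helper.create_multipart("temp/some/key.tar.gz", "application/gzip")

    # presign a URL for each part; the client PUTs chunk data to it directly
    url = helper.presign_multipart_part(
      upload_id: mpu[:upload_id], key: mpu[:key], part_number: 1
    )

    # once every part is uploaded, stitch them together with the collected etags
    helper.complete_multipart(
      upload_id: mpu[:upload_id],
      key: mpu[:key],
      parts: [{ part_number: 1, etag: "etag-from-part-put" }]
    )

    # or give up and let S3 discard the already-uploaded parts
    # helper.abort_multipart(key: mpu[:key], upload_id: mpu[:upload_id])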
@@ -200,7 +200,10 @@ class UploadCreator
 
     if should_move
       # move the file in the store instead of reuploading
-      url = Discourse.store.move_existing_stored_upload(@opts[:existing_external_upload_key], @upload)
+      url = Discourse.store.move_existing_stored_upload(
+        existing_external_upload_key: @opts[:existing_external_upload_key],
+        upload: @upload
+      )
     else
       # store the file and update its url
       File.open(@file.path) do |f|
@@ -163,7 +163,11 @@ describe FileStore::S3Store do
       s3_helper.expects(:copy).with(external_upload_stub.key, kind_of(String), options: upload_opts).returns(["path", "etag"])
       s3_helper.expects(:delete_object).with(external_upload_stub.key)
       upload = Fabricate(:upload, extension: "png", sha1: upload_sha1, original_filename: original_filename)
-      store.move_existing_stored_upload(external_upload_stub.key, upload, "image/png")
+      store.move_existing_stored_upload(
+        existing_external_upload_key: external_upload_stub.key,
+        upload: upload,
+        content_type: "image/png"
+      )
     end
 
     context "when the file is a PDF" do
@@ -175,7 +179,11 @@ describe FileStore::S3Store do
         disp_opts = { content_disposition: "attachment; filename=\"#{original_filename}\"; filename*=UTF-8''#{original_filename}", content_type: "application/pdf" }
         s3_helper.expects(:copy).with(external_upload_stub.key, kind_of(String), options: upload_opts.merge(disp_opts)).returns(["path", "etag"])
         upload = Fabricate(:upload, extension: "png", sha1: upload_sha1, original_filename: original_filename)
-        store.move_existing_stored_upload(external_upload_stub.key, upload, "application/pdf")
+        store.move_existing_stored_upload(
+          existing_external_upload_key: external_upload_stub.key,
+          upload: upload,
+          content_type: "application/pdf"
+        )
       end
     end
   end
@@ -1,9 +1,11 @@
 # frozen_string_literal: true
 
 Fabricator(:external_upload_stub) do
+  transient :folder_prefix
+
   created_by { Fabricate(:user) }
   original_filename "test.txt"
-  key { Discourse.store.temporary_upload_path("test.txt") }
+  key { |attrs| FileStore::BaseStore.temporary_upload_path("test.txt", folder_prefix: attrs[:folder_prefix] || "") }
   upload_type "card_background"
   filesize 1024
   status 1
@@ -12,11 +14,11 @@ end
 Fabricator(:image_external_upload_stub, from: :external_upload_stub) do
   original_filename "logo.png"
   filesize 1024
-  key { Discourse.store.temporary_upload_path("logo.png") }
+  key { |attrs| FileStore::BaseStore.temporary_upload_path("logo.png", folder_prefix: attrs[:folder_prefix] || "") }
 end
 
 Fabricator(:attachment_external_upload_stub, from: :external_upload_stub) do
   original_filename "file.pdf"
   filesize 1024
-  key { Discourse.store.temporary_upload_path("file.pdf") }
+  key { |attrs| FileStore::BaseStore.temporary_upload_path("file.pdf", folder_prefix: attrs[:folder_prefix] || "") }
 end
@@ -183,7 +183,6 @@ RSpec.describe 'Multisite s3 uploads', type: :multisite do
     it "returns signed URL with correct path" do
       test_multisite_connection('default') do
         upload = Fabricate(:upload, original_filename: "small.pdf", extension: "pdf", secure: true)
-
         path = Discourse.store.get_path_for_upload(upload)
 
         s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
@@ -312,7 +311,7 @@ RSpec.describe 'Multisite s3 uploads', type: :multisite do
 
     it "returns a presigned url with the correct params and the key for the temporary file" do
       url = store.signed_url_for_temporary_upload("test.png")
-      key = store.path_from_url(url)
+      key = store.s3_helper.path_from_url(url)
       expect(url).to match(/Amz-Expires/)
       expect(key).to match(/temp\/uploads\/default\/test_[0-9]\/[a-zA-z0-9]{0,32}\/[a-zA-z0-9]{0,32}.png/)
     end
@@ -328,7 +327,7 @@ RSpec.describe 'Multisite s3 uploads', type: :multisite do
 
     it "returns a presigned url with the correct params and the key for the temporary file" do
       url = store.signed_url_for_temporary_upload("test.png")
-      key = store.path_from_url(url)
+      key = store.s3_helper.path_from_url(url)
       expect(url).to match(/Amz-Expires/)
       expect(key).to match(/temp\/site\/uploads\/default\/test_[0-9]\/[a-zA-z0-9]{0,32}\/[a-zA-z0-9]{0,32}.png/)
     end
@@ -340,7 +339,7 @@ RSpec.describe 'Multisite s3 uploads', type: :multisite do
     it "returns a presigned url with the correct params and the key for the temporary file" do
       test_multisite_connection('second') do
         url = store.signed_url_for_temporary_upload("test.png")
-        key = store.path_from_url(url)
+        key = store.s3_helper.path_from_url(url)
         expect(url).to match(/Amz-Expires/)
         expect(key).to match(/temp\/standard99\/uploads\/second\/test_[0-9]\/[a-zA-z0-9]{0,32}\/[a-zA-z0-9]{0,32}.png/)
       end
@@ -787,14 +787,14 @@ describe UploadsController do
   describe "#create_multipart" do
     context "when the store is external" do
       let(:mock_multipart_upload_id) { "ibZBv_75gd9r8lH_gqXatLdxMVpAlj6CFTR.OwyF3953YdwbcQnMA2BLGn8Lx12fQNICtMw5KyteFeHw.Sjng--" }
+      let(:test_bucket_prefix) { "test_#{ENV['TEST_ENV_NUMBER'].presence || '0'}" }
 
       before do
         sign_in(user)
         SiteSetting.enable_direct_s3_uploads = true
         setup_s3
-        FileStore::S3Store.any_instance.stubs(:temporary_upload_path).returns(
-          "uploads/default/test_0/temp/28fccf8259bbe75b873a2bd2564b778c/test.png"
-        )
+        SiteSetting.s3_backup_bucket = "s3-backup-bucket"
+        SiteSetting.backup_location = BackupLocationSiteSetting::S3
       end
 
       it "errors if the correct params are not provided" do
@@ -830,17 +830,20 @@ describe UploadsController do
       end
 
       def stub_create_multipart_request
+        FileStore::S3Store.any_instance.stubs(:temporary_upload_path).returns(
+          "uploads/default/#{test_bucket_prefix}/temp/28fccf8259bbe75b873a2bd2564b778c/test.png"
+        )
         create_multipart_result = <<~BODY
         <?xml version=\"1.0\" encoding=\"UTF-8\"?>\n
         <InitiateMultipartUploadResult>
            <Bucket>s3-upload-bucket</Bucket>
-           <Key>uploads/default/test_0/temp/28fccf8259bbe75b873a2bd2564b778c/test.png</Key>
+           <Key>uploads/default/#{test_bucket_prefix}/temp/28fccf8259bbe75b873a2bd2564b778c/test.png</Key>
            <UploadId>#{mock_multipart_upload_id}</UploadId>
         </InitiateMultipartUploadResult>
         BODY
        stub_request(
          :post,
-          "https://s3-upload-bucket.s3.us-west-1.amazonaws.com/uploads/default/test_0/temp/28fccf8259bbe75b873a2bd2564b778c/test.png?uploads"
+          "https://s3-upload-bucket.s3.us-west-1.amazonaws.com/uploads/default/#{test_bucket_prefix}/temp/28fccf8259bbe75b873a2bd2564b778c/test.png?uploads"
        ).to_return({ status: 200, body: create_multipart_result })
      end
@@ -915,6 +918,88 @@ describe UploadsController do
           expect(response.status).to eq(429)
         end
       end
+
+      context "when the upload_type is backup" do
+        let(:upload_type) { "backup" }
+        let(:backup_file_exists_response) { { status: 404 } }
+
+        before do
+          stub_request(:head, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/").to_return(status: 200, body: "", headers: {})
+          stub_request(:head, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/default/test.tar.gz").to_return(
+            backup_file_exists_response
+          )
+        end
+
+        context "when the user is not admin" do
+          it "errors with invalid access error" do
+            post "/uploads/create-multipart.json", params: {
+              file_name: "test.tar.gz",
+              upload_type: upload_type,
+              file_size: 4098
+            }
+            expect(response.status).to eq(403)
+          end
+        end
+
+        context "when the user is admin" do
+          before do
+            user.update(admin: true)
+          end
+
+          def stub_create_multipart_backup_request
+            BackupRestore::S3BackupStore.any_instance.stubs(:temporary_upload_path).returns(
+              "temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz"
+            )
+            create_multipart_result = <<~BODY
+            <?xml version=\"1.0\" encoding=\"UTF-8\"?>\n
+            <InitiateMultipartUploadResult>
+               <Bucket>s3-backup-bucket</Bucket>
+               <Key>temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz</Key>
+               <UploadId>#{mock_multipart_upload_id}</UploadId>
+            </InitiateMultipartUploadResult>
+            BODY
+            stub_request(:post, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz?uploads").
+              to_return(status: 200, body: create_multipart_result)
+          end
+
+          it "creates the multipart upload" do
+            stub_create_multipart_backup_request
+            post "/uploads/create-multipart.json", params: {
+              file_name: "test.tar.gz",
+              upload_type: upload_type,
+              file_size: 4098
+            }
+            expect(response.status).to eq(200)
+            result = response.parsed_body
+
+            external_upload_stub = ExternalUploadStub.where(
+              unique_identifier: result["unique_identifier"],
+              original_filename: "test.tar.gz",
+              created_by: user,
+              upload_type: upload_type,
+              key: result["key"],
+              multipart: true
+            )
+            expect(external_upload_stub.exists?).to eq(true)
+          end
+
+          context "when backup of same filename already exists" do
+            let(:backup_file_exists_response) { { status: 200, body: "" } }
+
+            it "throws an error" do
+              post "/uploads/create-multipart.json", params: {
+                file_name: "test.tar.gz",
+                upload_type: upload_type,
+                file_size: 4098
+              }
+              expect(response.status).to eq(422)
+              expect(response.parsed_body["errors"]).to include(
+                I18n.t("backup.file_exists")
+              )
+            end
+          end
+        end
+      end
     end
 
     context "when the store is not external" do
@@ -1215,7 +1300,7 @@ describe UploadsController do
        # all the functionality for ExternalUploadManager is already tested along
        # with stubs to S3 in its own test, we can just stub the response here
        upload = Fabricate(:upload)
-        ExternalUploadManager.any_instance.stubs(:promote_to_upload!).returns(upload)
+        ExternalUploadManager.any_instance.stubs(:transform!).returns(upload)
 
        post "/uploads/complete-multipart.json", params: {
          unique_identifier: external_upload_stub.unique_identifier,
@@ -1383,39 +1468,39 @@ describe UploadsController do
end

it "handles ChecksumMismatchError" do
  ExternalUploadManager.any_instance.stubs(:promote_to_upload!).raises(ExternalUploadManager::ChecksumMismatchError)
  ExternalUploadManager.any_instance.stubs(:transform!).raises(ExternalUploadManager::ChecksumMismatchError)
  post "/uploads/complete-external-upload.json", params: { unique_identifier: external_upload_stub.unique_identifier }
  expect(response.status).to eq(422)
  expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.failed"))
end

it "handles SizeMismatchError" do
  ExternalUploadManager.any_instance.stubs(:promote_to_upload!).raises(ExternalUploadManager::SizeMismatchError.new("expected: 10, actual: 1000"))
  ExternalUploadManager.any_instance.stubs(:transform!).raises(ExternalUploadManager::SizeMismatchError.new("expected: 10, actual: 1000"))
  post "/uploads/complete-external-upload.json", params: { unique_identifier: external_upload_stub.unique_identifier }
  expect(response.status).to eq(422)
  expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.failed"))
end

it "handles CannotPromoteError" do
  ExternalUploadManager.any_instance.stubs(:promote_to_upload!).raises(ExternalUploadManager::CannotPromoteError)
  ExternalUploadManager.any_instance.stubs(:transform!).raises(ExternalUploadManager::CannotPromoteError)
  post "/uploads/complete-external-upload.json", params: { unique_identifier: external_upload_stub.unique_identifier }
  expect(response.status).to eq(422)
  expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.failed"))
end

it "handles DownloadFailedError and Aws::S3::Errors::NotFound" do
  ExternalUploadManager.any_instance.stubs(:promote_to_upload!).raises(ExternalUploadManager::DownloadFailedError)
  ExternalUploadManager.any_instance.stubs(:transform!).raises(ExternalUploadManager::DownloadFailedError)
  post "/uploads/complete-external-upload.json", params: { unique_identifier: external_upload_stub.unique_identifier }
  expect(response.status).to eq(422)
  expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.failed"))
  ExternalUploadManager.any_instance.stubs(:promote_to_upload!).raises(Aws::S3::Errors::NotFound.new("error", "not found"))
  ExternalUploadManager.any_instance.stubs(:transform!).raises(Aws::S3::Errors::NotFound.new("error", "not found"))
  post "/uploads/complete-external-upload.json", params: { unique_identifier: external_upload_stub.unique_identifier }
  expect(response.status).to eq(422)
  expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.failed"))
end
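# All of these specs drive the same 422 + I18n.t("upload.failed") response;
# a hedged sketch of the rescue they exercise (the real action body lives in
# UploadsController and is not part of this diff, so treat this as a sketch,
# not the implementation):
def complete_external_upload
  # ... promote the stub via ExternalUploadManager ...
rescue ExternalUploadManager::ChecksumMismatchError,
       ExternalUploadManager::SizeMismatchError,
       ExternalUploadManager::CannotPromoteError,
       ExternalUploadManager::DownloadFailedError,
       Aws::S3::Errors::NotFound,
       StandardError # the generic-failure spec below shows everything else funnels here too
  render_json_error(I18n.t("upload.failed"), status: 422)
end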

it "handles a generic upload failure" do
  ExternalUploadManager.any_instance.stubs(:promote_to_upload!).raises(StandardError)
  ExternalUploadManager.any_instance.stubs(:transform!).raises(StandardError)
  post "/uploads/complete-external-upload.json", params: { unique_identifier: external_upload_stub.unique_identifier }
  expect(response.status).to eq(422)
  expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.failed"))

@@ -1423,14 +1508,14 @@ describe UploadsController do

it "handles validation errors on the upload" do
  upload.errors.add(:base, "test error")
  ExternalUploadManager.any_instance.stubs(:promote_to_upload!).returns(upload)
  ExternalUploadManager.any_instance.stubs(:transform!).returns(upload)
  post "/uploads/complete-external-upload.json", params: { unique_identifier: external_upload_stub.unique_identifier }
  expect(response.status).to eq(422)
  expect(response.parsed_body["errors"]).to eq(["test error"])
end

it "deletes the stub and returns the serialized upload when complete" do
  ExternalUploadManager.any_instance.stubs(:promote_to_upload!).returns(upload)
  ExternalUploadManager.any_instance.stubs(:transform!).returns(upload)
  post "/uploads/complete-external-upload.json", params: { unique_identifier: external_upload_stub.unique_identifier }
  expect(ExternalUploadStub.exists?(id: external_upload_stub.id)).to eq(false)
  expect(response.status).to eq(200)

@@ -25,6 +25,10 @@ RSpec.describe ExternalUploadManager do
  SiteSetting.max_attachment_size_kb = 210.megabytes / 1000

  setup_s3

  SiteSetting.s3_backup_bucket = "s3-backup-bucket"
  SiteSetting.backup_location = BackupLocationSiteSetting::S3

  stub_head_object
  stub_download_object_filehelper
  stub_copy_object
@@ -47,7 +51,7 @@ RSpec.describe ExternalUploadManager do
  end
end

describe "#promote_to_upload!" do
describe "#transform!" do
  context "when stubbed upload is < DOWNLOAD_LIMIT (small enough to download + generate sha)" do
    let!(:external_upload_stub) { Fabricate(:image_external_upload_stub, created_by: user, filesize: object_size) }
    let(:object_size) { 1.megabyte }

@@ -59,7 +63,7 @@ RSpec.describe ExternalUploadManager do
  end

  it "raises an error" do
    expect { subject.promote_to_upload! }.to raise_error(ExternalUploadManager::DownloadFailedError)
    expect { subject.transform! }.to raise_error(ExternalUploadManager::DownloadFailedError)
  end
end

@@ -68,16 +72,16 @@ RSpec.describe ExternalUploadManager do
    external_upload_stub.update!(status: ExternalUploadStub.statuses[:uploaded])
  end
  it "raises an error" do
    expect { subject.promote_to_upload! }.to raise_error(ExternalUploadManager::CannotPromoteError)
    expect { subject.transform! }.to raise_error(ExternalUploadManager::CannotPromoteError)
  end
end

context "when the upload does not get changed in UploadCreator (resized etc.)" do
  it "copies the stubbed upload on S3 to its new destination and deletes it" do
    upload = subject.promote_to_upload!
    upload = subject.transform!
    expect(WebMock).to have_requested(
      :put,
      "#{upload_base_url}/original/1X/#{upload.sha1}.png",
      "#{upload_base_url}/#{Discourse.store.get_path_for_upload(upload)}",
    ).with(headers: { 'X-Amz-Copy-Source' => "#{SiteSetting.s3_upload_bucket}/#{external_upload_stub.key}" })
    expect(WebMock).to have_requested(
      :delete,
@@ -87,7 +91,7 @@ RSpec.describe ExternalUploadManager do

it "errors if the image upload is too big" do
  SiteSetting.max_image_size_kb = 1
  upload = subject.promote_to_upload!
  upload = subject.transform!
  expect(upload.errors.full_messages).to include(
    "Filesize " + I18n.t("upload.images.too_large_humanized", max_size: ActiveSupport::NumberHelper.number_to_human_size(SiteSetting.max_image_size_kb.kilobytes))
  )
@@ -95,7 +99,7 @@ RSpec.describe ExternalUploadManager do

it "errors if the extension is not supported" do
  SiteSetting.authorized_extensions = ""
  upload = subject.promote_to_upload!
  upload = subject.transform!
  expect(upload.errors.full_messages).to include(
    "Original filename " + I18n.t("upload.unauthorized", authorized_extensions: "")
  )
@@ -106,10 +110,10 @@ RSpec.describe ExternalUploadManager do
  let(:file) { file_from_fixtures("should_be_jpeg.heic", "images") }

  it "creates a new upload in s3 (not copy) and deletes the original stubbed upload" do
    upload = subject.promote_to_upload!
    upload = subject.transform!
    expect(WebMock).to have_requested(
      :put,
      "#{upload_base_url}/original/1X/#{upload.sha1}.png"
      "#{upload_base_url}/#{Discourse.store.get_path_for_upload(upload)}",
    )
    expect(WebMock).to have_requested(
      :delete, "#{upload_base_url}/#{external_upload_stub.key}"
@@ -124,13 +128,13 @@ RSpec.describe ExternalUploadManager do
  let(:client_sha1) { "blahblah" }

  it "raises an error, deletes the stub" do
    expect { subject.promote_to_upload! }.to raise_error(ExternalUploadManager::ChecksumMismatchError)
    expect { subject.transform! }.to raise_error(ExternalUploadManager::ChecksumMismatchError)
    expect(ExternalUploadStub.exists?(id: external_upload_stub.id)).to eq(false)
  end

  it "does not delete the stub if enable_upload_debug_mode" do
    SiteSetting.enable_upload_debug_mode = true
    expect { subject.promote_to_upload! }.to raise_error(ExternalUploadManager::ChecksumMismatchError)
    expect { subject.transform! }.to raise_error(ExternalUploadManager::ChecksumMismatchError)
    external_stub = ExternalUploadStub.find(external_upload_stub.id)
    expect(external_stub.status).to eq(ExternalUploadStub.statuses[:failed])
  end
@@ -145,7 +149,7 @@ RSpec.describe ExternalUploadManager do
  after { Discourse.redis.flushdb }

  it "raises an error, deletes the file immediately, and prevents the user from uploading external files for a few minutes" do
    expect { subject.promote_to_upload! }.to raise_error(ExternalUploadManager::SizeMismatchError)
    expect { subject.transform! }.to raise_error(ExternalUploadManager::SizeMismatchError)
    expect(ExternalUploadStub.exists?(id: external_upload_stub.id)).to eq(false)
    expect(Discourse.redis.get("#{ExternalUploadManager::BAN_USER_REDIS_PREFIX}#{external_upload_stub.created_by_id}")).to eq("1")
    expect(WebMock).to have_requested(
@@ -156,7 +160,7 @@ RSpec.describe ExternalUploadManager do

it "does not delete the stub if enable_upload_debug_mode" do
  SiteSetting.enable_upload_debug_mode = true
  expect { subject.promote_to_upload! }.to raise_error(ExternalUploadManager::SizeMismatchError)
  expect { subject.transform! }.to raise_error(ExternalUploadManager::SizeMismatchError)
  external_stub = ExternalUploadStub.find(external_upload_stub.id)
  expect(external_stub.status).to eq(ExternalUploadStub.statuses[:failed])
end
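# The SizeMismatchError spec above checks that the offending user is banned
# from external uploads "for a few minutes" via a Redis key. A hedged sketch
# of how that flag is plausibly set — the key prefix and "1" value come from
# the spec itself, but the TTL is an assumption, not shown in this diff:
Discourse.redis.setex(
  "#{ExternalUploadManager::BAN_USER_REDIS_PREFIX}#{external_upload_stub.created_by_id}",
  5.minutes.to_i, # assumed ban window; the spec only says "a few minutes"
  "1"
)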

@@ -174,32 +178,93 @@ RSpec.describe ExternalUploadManager do

it "does not try and download the file" do
  FileHelper.expects(:download).never
  subject.promote_to_upload!
  subject.transform!
end

it "generates a fake sha for the upload record" do
  upload = subject.promote_to_upload!
  upload = subject.transform!
  expect(upload.sha1).not_to eq(sha1)
  expect(upload.original_sha1).to eq(nil)
  expect(upload.filesize).to eq(object_size)
end

it "marks the stub as uploaded" do
  subject.promote_to_upload!
  subject.transform!
  expect(external_upload_stub.reload.status).to eq(ExternalUploadStub.statuses[:uploaded])
end

it "copies the stubbed upload on S3 to its new destination and deletes it" do
  upload = subject.promote_to_upload!
  upload = subject.transform!
  expect(WebMock).to have_requested(
    :put,
    "#{upload_base_url}/original/1X/#{upload.sha1}.pdf"
    "#{upload_base_url}/#{Discourse.store.get_path_for_upload(upload)}",
  ).with(headers: { 'X-Amz-Copy-Source' => "#{SiteSetting.s3_upload_bucket}/#{external_upload_stub.key}" })
  expect(WebMock).to have_requested(
    :delete, "#{upload_base_url}/#{external_upload_stub.key}"
  )
end
end
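# The PUT-with-X-Amz-Copy-Source requests asserted above are what
# aws-sdk-s3's copy_object issues under the hood; a hedged sketch of the
# equivalent client calls (the destination key value is invented for
# illustration):
client = Aws::S3::Client.new(region: SiteSetting.s3_region)
client.copy_object(
  bucket: SiteSetting.s3_upload_bucket,  # destination bucket
  key: "original/1X/somesha1.pdf",       # destination key (example value)
  copy_source: "#{SiteSetting.s3_upload_bucket}/#{external_upload_stub.key}" # appears as X-Amz-Copy-Source
)
# followed by removal of the temporary object
client.delete_object(bucket: SiteSetting.s3_upload_bucket, key: external_upload_stub.key)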

context "when the upload type is backup" do
  let(:upload_base_url) { "https://#{SiteSetting.s3_backup_bucket}.s3.#{SiteSetting.s3_region}.amazonaws.com" }
  let(:object_size) { 200.megabytes }
  let(:object_file) { file_from_fixtures("backup_since_v1.6.tar.gz", "backups") }
  let!(:external_upload_stub) do
    Fabricate(
      :attachment_external_upload_stub,
      created_by: user,
      filesize: object_size,
      upload_type: "backup",
      original_filename: "backup_since_v1.6.tar.gz",
      folder_prefix: RailsMultisite::ConnectionManagement.current_db
    )
  end

  before do
    stub_request(:head, "https://#{SiteSetting.s3_backup_bucket}.s3.#{SiteSetting.s3_region}.amazonaws.com/")

    # stub copy and delete object for backup, which copies the original filename to
    # the root of a folder that is always named after current_db
    stub_request(
      :put,
      "#{upload_base_url}/#{RailsMultisite::ConnectionManagement.current_db}/backup_since_v1.6.tar.gz"
    ).to_return(
      status: 200,
      headers: { "ETag" => etag },
      body: copy_object_result
    )
  end

  it "does not try and download the file" do
    FileHelper.expects(:download).never
    subject.transform!
  end

  it "raises an error when backups are disabled" do
    SiteSetting.enable_backups = false
    expect { subject.transform! }.to raise_error(Discourse::InvalidAccess)
  end

  it "raises an error when backups are local, not s3" do
    SiteSetting.backup_location = BackupLocationSiteSetting::LOCAL
    expect { subject.transform! }.to raise_error(Discourse::InvalidAccess)
  end

  it "does not create an upload record" do
    expect { subject.transform! }.not_to change { Upload.count }
  end

  it "copies the stubbed upload on S3 to its new destination and deletes it" do
    upload = subject.transform!
    expect(WebMock).to have_requested(
      :put,
      "#{upload_base_url}/#{RailsMultisite::ConnectionManagement.current_db}/backup_since_v1.6.tar.gz",
    ).with(headers: { 'X-Amz-Copy-Source' => "#{SiteSetting.s3_backup_bucket}/#{external_upload_stub.key}" })
    expect(WebMock).to have_requested(
      :delete, "#{upload_base_url}/#{external_upload_stub.key}"
    )
  end
end
end
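# Taken together, the backup specs above pin down a distinct branch in
# ExternalUploadManager#transform!. A minimal sketch of that branch — the
# method name and helper calls below are assumptions for illustration; only
# the observable behavior (guards, key layout, no Upload record) is
# confirmed by this diff:
def transform_backup!(external_upload_stub)
  # backups must be enabled and stored on S3 for this path to be allowed
  if !SiteSetting.enable_backups || SiteSetting.backup_location != BackupLocationSiteSetting::S3
    raise Discourse::InvalidAccess
  end

  # copy the temp multipart object to "<current_db>/<original_filename>" in
  # the backup bucket, then remove the temp object; no Upload record is
  # created for backups
  destination = File.join(
    RailsMultisite::ConnectionManagement.current_db,
    external_upload_stub.original_filename
  )
  s3_helper.copy(external_upload_stub.key, destination) # hypothetical call
  s3_helper.delete_object(external_upload_stub.key)     # hypothetical call
end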

def stub_head_object
@@ -226,8 +291,8 @@ RSpec.describe ExternalUploadManager do
  )
end

def stub_copy_object
  copy_object_result = <<~BODY
def copy_object_result
  <<~BODY
    <?xml version=\"1.0\" encoding=\"UTF-8\"?>\n
    <CopyObjectResult
      xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">
@@ -235,17 +300,28 @@ RSpec.describe ExternalUploadManager do
      <ETag>"#{etag}"</ETag>
    </CopyObjectResult>
  BODY
end

def stub_copy_object
  upload_pdf = Fabricate(:upload, sha1: "testbc60eb18e8f974cbfae8bb0f069c3a311024", original_filename: "test.pdf", extension: "pdf")
  upload_path = Discourse.store.get_path_for_upload(upload_pdf)
  upload_pdf.destroy!

  stub_request(
    :put,
    "#{upload_base_url}/original/1X/testbc60eb18e8f974cbfae8bb0f069c3a311024.pdf"
    "#{upload_base_url}/#{upload_path}"
  ).to_return(
    status: 200,
    headers: { "ETag" => etag },
    body: copy_object_result
  )

  upload_png = Fabricate(:upload, sha1: "bc975735dfc6409c1c2aa5ebf2239949bcbdbd65", original_filename: "test.png", extension: "png")
  upload_path = Discourse.store.get_path_for_upload(upload_png)
  upload_png.destroy!
  stub_request(
    :put,
    "#{upload_base_url}/original/1X/bc975735dfc6409c1c2aa5ebf2239949bcbdbd65.png"
    "#{upload_base_url}/#{upload_path}"
  ).to_return(
    status: 200,
    headers: { "ETag" => etag },