# frozen_string_literal: true

require "file_store/s3_store"
require "file_store/local_store"
# Specs for the S3-backed file store. External S3 calls are isolated in two
# ways: mocha expectations against a stubbed `Aws::S3` client (no network),
# and the FakeS3 in-memory bucket helper for the copy/removal examples.
RSpec.describe FileStore::S3Store do
  let(:store) { FileStore::S3Store.new }
  let(:s3_helper) { store.s3_helper }
  # `stub_responses: true` makes the AWS SDK return canned responses instead
  # of performing HTTP requests.
  let(:client) { Aws::S3::Client.new(stub_responses: true) }
  let(:resource) { Aws::S3::Resource.new(client: client) }
  let(:s3_bucket) { resource.bucket("s3-upload-bucket") }
  let(:s3_object) { stub }
  let(:upload_path) { Discourse.store.upload_path }

  fab!(:optimized_image) { Fabricate(:optimized_image) }
  let(:optimized_image_file) { file_from_fixtures("logo.png") }
  let(:uploaded_file) { file_from_fixtures("logo.png") }
  fab!(:upload) { Fabricate(:upload, sha1: Digest::SHA1.hexdigest("secret image string")) }

  before do
    setup_s3
    SiteSetting.s3_region = "us-west-1"
  end

  describe "uploading to s3" do
    let(:etag) { "etag" }

    describe "#store_upload" do
      it "returns an absolute schemaless url" do
        s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
        s3_bucket
          .expects(:object)
          .with(regexp_matches(%r{original/\d+X.*/#{upload.sha1}\.png}))
          .returns(s3_object)
        s3_object
          .expects(:put)
          .with(
            {
              acl: "public-read",
              cache_control: "max-age=31556952, public, immutable",
              content_type: "image/png",
              body: uploaded_file,
            },
          )
          .returns(Aws::S3::Types::PutObjectOutput.new(etag: "\"#{etag}\""))

        expect(store.store_upload(uploaded_file, upload)).to match(
          %r{//s3-upload-bucket\.s3\.dualstack\.us-west-1\.amazonaws\.com/original/\d+X.*/#{upload.sha1}\.png},
        )
        expect(upload.etag).to eq(etag)
      end

      describe "when s3_upload_bucket includes folders path" do
        before do
          s3_object.stubs(:put).returns(Aws::S3::Types::PutObjectOutput.new(etag: "\"#{etag}\""))
          SiteSetting.s3_upload_bucket = "s3-upload-bucket/discourse-uploads"
        end

        it "returns an absolute schemaless url" do
          s3_helper.expects(:s3_bucket).returns(s3_bucket)

          # The folder path from the bucket setting must be prefixed onto the key.
          s3_bucket
            .expects(:object)
            .with(regexp_matches(%r{discourse-uploads/original/\d+X.*/#{upload.sha1}\.png}))
            .returns(s3_object)

          expect(store.store_upload(uploaded_file, upload)).to match(
            %r{//s3-upload-bucket\.s3\.dualstack\.us-west-1\.amazonaws\.com/discourse-uploads/original/\d+X.*/#{upload.sha1}\.png},
          )
          expect(upload.etag).to eq(etag)
        end
      end

      describe "when secure uploads are enabled" do
        it "saves secure attachment using private ACL" do
          SiteSetting.prevent_anons_from_downloading_files = true
          SiteSetting.authorized_extensions = "pdf|png|jpg|gif"
          upload =
            Fabricate(:upload, original_filename: "small.pdf", extension: "pdf", secure: true)

          s3_helper.expects(:s3_bucket).returns(s3_bucket)
          s3_bucket
            .expects(:object)
            .with(regexp_matches(%r{original/\d+X.*/#{upload.sha1}\.pdf}))
            .returns(s3_object)
          s3_object
            .expects(:put)
            .with(
              {
                acl: "private",
                cache_control: "max-age=31556952, public, immutable",
                content_type: "application/pdf",
                content_disposition:
                  "attachment; filename=\"#{upload.original_filename}\"; filename*=UTF-8''#{upload.original_filename}",
                body: uploaded_file,
              },
            )
            .returns(Aws::S3::Types::PutObjectOutput.new(etag: "\"#{etag}\""))

          expect(store.store_upload(uploaded_file, upload)).to match(
            %r{//s3-upload-bucket\.s3\.dualstack\.us-west-1\.amazonaws\.com/original/\d+X.*/#{upload.sha1}\.pdf},
          )
        end

        it "saves image upload using public ACL" do
          SiteSetting.prevent_anons_from_downloading_files = true

          s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
          s3_bucket
            .expects(:object)
            .with(regexp_matches(%r{original/\d+X.*/#{upload.sha1}\.png}))
            .returns(s3_object)
            .at_least_once
          s3_object
            .expects(:put)
            .with(
              {
                acl: "public-read",
                cache_control: "max-age=31556952, public, immutable",
                content_type: "image/png",
                body: uploaded_file,
              },
            )
            .returns(Aws::S3::Types::PutObjectOutput.new(etag: "\"#{etag}\""))

          expect(store.store_upload(uploaded_file, upload)).to match(
            %r{//s3-upload-bucket\.s3\.dualstack\.us-west-1\.amazonaws\.com/original/\d+X.*/#{upload.sha1}\.png},
          )

          expect(store.url_for(upload)).to eq(upload.url)
        end
      end
    end

    describe "#store_optimized_image" do
      before do
        s3_object.stubs(:put).returns(Aws::S3::Types::PutObjectOutput.new(etag: "\"#{etag}\""))
      end

      it "returns an absolute schemaless url" do
        s3_helper.expects(:s3_bucket).returns(s3_bucket)
        path =
          %r{optimized/\d+X.*/#{optimized_image.upload.sha1}_#{OptimizedImage::VERSION}_100x200\.png}

        s3_bucket.expects(:object).with(regexp_matches(path)).returns(s3_object)

        expect(store.store_optimized_image(optimized_image_file, optimized_image)).to match(
          %r{//s3-upload-bucket\.s3\.dualstack\.us-west-1\.amazonaws\.com/#{path}},
        )
        expect(optimized_image.etag).to eq(etag)
      end

      describe "when s3_upload_bucket includes folders path" do
        before { SiteSetting.s3_upload_bucket = "s3-upload-bucket/discourse-uploads" }

        it "returns an absolute schemaless url" do
          s3_helper.expects(:s3_bucket).returns(s3_bucket)
          path =
            %r{discourse-uploads/optimized/\d+X.*/#{optimized_image.upload.sha1}_#{OptimizedImage::VERSION}_100x200\.png}

          s3_bucket.expects(:object).with(regexp_matches(path)).returns(s3_object)

          expect(store.store_optimized_image(optimized_image_file, optimized_image)).to match(
            %r{//s3-upload-bucket\.s3\.dualstack\.us-west-1\.amazonaws\.com/#{path}},
          )
          expect(optimized_image.etag).to eq(etag)
        end
      end
    end

    describe "#move_existing_stored_upload" do
      # These overrides build the store around an explicit S3Helper with a
      # stubbed client so `copy`/`delete_object` can be mocked directly.
      let(:uploaded_file) { file_from_fixtures(original_filename) }
      let(:upload_sha1) { Digest::SHA1.hexdigest(File.read(uploaded_file)) }
      let(:original_filename) { "smallest.png" }
      let(:s3_client) { Aws::S3::Client.new(stub_responses: true) }
      let(:s3_helper) { S3Helper.new(SiteSetting.s3_upload_bucket, "", client: s3_client) }
      let(:store) { FileStore::S3Store.new(s3_helper) }
      let(:upload_opts) do
        {
          acl: "public-read",
          cache_control: "max-age=31556952, public, immutable",
          content_type: "image/png",
          apply_metadata_to_destination: true,
        }
      end
      let(:external_upload_stub) { Fabricate(:image_external_upload_stub) }
      let(:existing_external_upload_key) { external_upload_stub.key }

      before { SiteSetting.authorized_extensions = "pdf|png" }

      it "does not provide a content_disposition for images" do
        s3_helper
          .expects(:copy)
          .with(external_upload_stub.key, kind_of(String), options: upload_opts)
          .returns(%w[path etag])
        s3_helper.expects(:delete_object).with(external_upload_stub.key)
        upload =
          Fabricate(
            :upload,
            extension: "png",
            sha1: upload_sha1,
            original_filename: original_filename,
          )
        store.move_existing_stored_upload(
          existing_external_upload_key: external_upload_stub.key,
          upload: upload,
          content_type: "image/png",
        )
      end

      context "when the file is a PDF" do
        let(:external_upload_stub) do
          Fabricate(:attachment_external_upload_stub, original_filename: original_filename)
        end
        let(:original_filename) { "small.pdf" }
        let(:uploaded_file) { file_from_fixtures("small.pdf", "pdf") }

        it "adds an attachment content-disposition with the original filename" do
          disp_opts = {
            content_disposition:
              "attachment; filename=\"#{original_filename}\"; filename*=UTF-8''#{original_filename}",
            content_type: "application/pdf",
          }
          s3_helper
            .expects(:copy)
            .with(external_upload_stub.key, kind_of(String), options: upload_opts.merge(disp_opts))
            .returns(%w[path etag])
          upload =
            Fabricate(
              :upload,
              extension: "png",
              sha1: upload_sha1,
              original_filename: original_filename,
            )
          store.move_existing_stored_upload(
            existing_external_upload_key: external_upload_stub.key,
            upload: upload,
            content_type: "application/pdf",
          )
        end
      end
    end
  end

  describe "copying files in S3" do
    describe "#copy_file" do
      it "copies the file in S3 with the right paths" do
        upload.update!(
          url:
            "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/original/1X/#{upload.sha1}.png",
        )

        source = "#{upload_path}/#{Discourse.store.get_path_for_upload(upload)}"
        destination = source.sub(".png", ".jpg")
        bucket = prepare_fake_s3(source, upload)

        expect(bucket.find_object(source)).to be_present
        expect(bucket.find_object(destination)).to be_nil

        store.copy_file(upload.url, source, destination)

        # Copy must leave the source in place and create the destination.
        expect(bucket.find_object(source)).to be_present
        expect(bucket.find_object(destination)).to be_present
      end
    end
  end

  describe "removal from s3" do
    describe "#remove_upload" do
      it "removes the file from s3 with the right paths" do
        upload_key = Discourse.store.get_path_for_upload(upload)
        tombstone_key = "tombstone/#{upload_key}"
        bucket = prepare_fake_s3(upload_key, upload)

        upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_key}")

        expect(bucket.find_object(upload_key)).to be_present
        expect(bucket.find_object(tombstone_key)).to be_nil

        store.remove_upload(upload)

        # Removal moves the object under the tombstone/ prefix rather than
        # deleting it outright.
        expect(bucket.find_object(upload_key)).to be_nil
        expect(bucket.find_object(tombstone_key)).to be_present
      end

      describe "when s3_upload_bucket includes folders path" do
        before { SiteSetting.s3_upload_bucket = "s3-upload-bucket/discourse-uploads" }

        it "removes the file from s3 with the right paths" do
          upload_key = "discourse-uploads/#{Discourse.store.get_path_for_upload(upload)}"
          tombstone_key =
            "discourse-uploads/tombstone/#{Discourse.store.get_path_for_upload(upload)}"
          bucket = prepare_fake_s3(upload_key, upload)

          upload.update!(
            url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_key}",
          )

          expect(bucket.find_object(upload_key)).to be_present
          expect(bucket.find_object(tombstone_key)).to be_nil

          store.remove_upload(upload)

          expect(bucket.find_object(upload_key)).to be_nil
          expect(bucket.find_object(tombstone_key)).to be_present
        end
      end
    end

    describe "#remove_optimized_image" do
      let(:optimized_key) { FileStore::BaseStore.new.get_path_for_optimized_image(optimized_image) }
      let(:tombstone_key) { "tombstone/#{optimized_key}" }
      let(:upload) { optimized_image.upload }
      let(:upload_key) { Discourse.store.get_path_for_upload(upload) }

      before do
        optimized_image.update!(
          url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{optimized_key}",
        )
        upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_key}")
      end

      it "removes the optimized image from s3 with the right paths" do
        bucket = prepare_fake_s3(upload_key, upload)
        store_fake_s3_object(optimized_key, optimized_image)

        expect(bucket.find_object(upload_key)).to be_present
        expect(bucket.find_object(optimized_key)).to be_present
        expect(bucket.find_object(tombstone_key)).to be_nil

        store.remove_optimized_image(optimized_image)

        # Only the optimized image is tombstoned; the original upload stays.
        expect(bucket.find_object(upload_key)).to be_present
        expect(bucket.find_object(optimized_key)).to be_nil
        expect(bucket.find_object(tombstone_key)).to be_present
      end

      describe "when s3_upload_bucket includes folders path" do
        before { SiteSetting.s3_upload_bucket = "s3-upload-bucket/discourse-uploads" }

        let(:image_path) { FileStore::BaseStore.new.get_path_for_optimized_image(optimized_image) }
        let(:optimized_key) { "discourse-uploads/#{image_path}" }
        let(:tombstone_key) { "discourse-uploads/tombstone/#{image_path}" }
        let(:upload_key) { "discourse-uploads/#{Discourse.store.get_path_for_upload(upload)}" }

        it "removes the file from s3 with the right paths" do
          bucket = prepare_fake_s3(upload_key, upload)
          store_fake_s3_object(optimized_key, optimized_image)

          expect(bucket.find_object(upload_key)).to be_present
          expect(bucket.find_object(optimized_key)).to be_present
          expect(bucket.find_object(tombstone_key)).to be_nil

          store.remove_optimized_image(optimized_image)

          expect(bucket.find_object(upload_key)).to be_present
          expect(bucket.find_object(optimized_key)).to be_nil
          expect(bucket.find_object(tombstone_key)).to be_present
        end
      end
    end
  end

  describe ".has_been_uploaded?" do
    it "doesn't crash for invalid URLs" do
      expect(store.has_been_uploaded?("https://site.discourse.com/#bad#6")).to eq(false)
    end

    it "doesn't crash if URL contains non-ascii characters" do
      expect(
        store.has_been_uploaded?(
          "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/漢1337.png",
        ),
      ).to eq(true)
      expect(store.has_been_uploaded?("//s3-upload-bucket.s3.amazonaws.com/漢1337.png")).to eq(false)
    end

    it "identifies S3 uploads" do
      expect(
        store.has_been_uploaded?(
          "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/1337.png",
        ),
      ).to eq(true)
    end

    it "does not match other s3 urls" do
      expect(store.has_been_uploaded?("//s3-upload-bucket.s3.amazonaws.com/1337.png")).to eq(false)
      expect(
        store.has_been_uploaded?("//s3-upload-bucket.s3-us-west-1.amazonaws.com/1337.png"),
      ).to eq(false)
      expect(store.has_been_uploaded?("//s3.amazonaws.com/s3-upload-bucket/1337.png")).to eq(false)
      expect(store.has_been_uploaded?("//s4_upload_bucket.s3.amazonaws.com/1337.png")).to eq(false)
    end
  end

  describe ".absolute_base_url" do
    it "returns a lowercase schemaless absolute url" do
      expect(store.absolute_base_url).to eq(
        "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com",
      )
    end

    it "uses the proper endpoint" do
      SiteSetting.s3_region = "us-east-1"
      expect(FileStore::S3Store.new(s3_helper).absolute_base_url).to eq(
        "//s3-upload-bucket.s3.dualstack.us-east-1.amazonaws.com",
      )

      SiteSetting.s3_region = "us-west-2"
      expect(FileStore::S3Store.new(s3_helper).absolute_base_url).to eq(
        "//s3-upload-bucket.s3.dualstack.us-west-2.amazonaws.com",
      )

      # Chinese regions use the .com.cn suffix and no dualstack host.
      SiteSetting.s3_region = "cn-north-1"
      expect(FileStore::S3Store.new(s3_helper).absolute_base_url).to eq(
        "//s3-upload-bucket.s3.cn-north-1.amazonaws.com.cn",
      )

      SiteSetting.s3_region = "cn-northwest-1"
      expect(FileStore::S3Store.new(s3_helper).absolute_base_url).to eq(
        "//s3-upload-bucket.s3.cn-northwest-1.amazonaws.com.cn",
      )
    end
  end

  it "is external" do
    expect(store.external?).to eq(true)
    expect(store.internal?).to eq(false)
  end

  describe ".purge_tombstone" do
    it "updates tombstone lifecycle" do
      s3_helper.expects(:update_tombstone_lifecycle)
      store.purge_tombstone(1.day)
    end
  end

  describe ".path_for" do
    # Asserts store.path_for against the local-store path when `expected` is
    # truthy, or nil when the URL is not locally hosted.
    def assert_path(path, expected)
      upload = Upload.new(url: path)

      path = store.path_for(upload)
      expected = FileStore::LocalStore.new.path_for(upload) if expected

      expect(path).to eq(expected)
    end

    it "correctly falls back to local" do
      assert_path("/hello", "/hello")
      assert_path("//hello", nil)
      assert_path("http://hello", nil)
      assert_path("https://hello", nil)
    end
  end

  describe "update ACL" do
    before { SiteSetting.authorized_extensions = "pdf|png" }

    describe ".update_upload_ACL" do
      let(:upload) { Fabricate(:upload, original_filename: "small.pdf", extension: "pdf") }

      it "sets acl to public by default" do
        s3_helper.expects(:s3_bucket).returns(s3_bucket)
        s3_bucket
          .expects(:object)
          .with(regexp_matches(%r{original/\d+X.*/#{upload.sha1}\.pdf}))
          .returns(s3_object)
        s3_object.expects(:acl).returns(s3_object)
        s3_object.expects(:put).with(acl: "public-read").returns(s3_object)

        expect(store.update_upload_ACL(upload)).to be_truthy
      end

      it "sets acl to private when upload is marked secure" do
        upload.update!(secure: true)
        s3_helper.expects(:s3_bucket).returns(s3_bucket)
        s3_bucket
          .expects(:object)
          .with(regexp_matches(%r{original/\d+X.*/#{upload.sha1}\.pdf}))
          .returns(s3_object)
        s3_object.expects(:acl).returns(s3_object)
        s3_object.expects(:put).with(acl: "private").returns(s3_object)

        expect(store.update_upload_ACL(upload)).to be_truthy
      end
    end
  end

  describe ".cdn_url" do
    it "supports subfolder" do
      SiteSetting.s3_upload_bucket = "s3-upload-bucket/livechat"
      SiteSetting.s3_cdn_url = "https://rainbow.com"

      # none of this should matter at all
      # subfolder should not leak into uploads
      set_subfolder "/community"

      url = "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/livechat/original/gif.png"

      expect(store.cdn_url(url)).to eq("https://rainbow.com/original/gif.png")
    end
  end

  describe ".download_url" do
    it "returns correct short URL with dl=1 param" do
      expect(store.download_url(upload)).to eq("#{upload.short_path}?dl=1")
    end
  end

  describe ".url_for" do
    it "returns signed URL with content disposition when requesting to download image" do
      s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
      s3_bucket
        .expects(:object)
        .with(regexp_matches(%r{original/\d+X.*/#{upload.sha1}\.png}))
        .returns(s3_object)
      opts = {
        expires_in: SiteSetting.s3_presigned_get_url_expires_after_seconds,
        response_content_disposition:
          %Q|attachment; filename="#{upload.original_filename}"; filename*=UTF-8''#{upload.original_filename}|,
      }

      s3_object.expects(:presigned_url).with(:get, opts)

      expect(store.url_for(upload, force_download: true)).not_to eq(upload.url)
    end
  end

  describe ".signed_url_for_path" do
    it "returns signed URL for a given path" do
      s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
      s3_bucket.expects(:object).with("special/optimized/file.png").returns(s3_object)
      opts = { expires_in: SiteSetting.s3_presigned_get_url_expires_after_seconds }

      s3_object.expects(:presigned_url).with(:get, opts)

      expect(store.signed_url_for_path("special/optimized/file.png")).not_to eq(upload.url)
    end

    it "does not prefix the s3_bucket_folder_path onto temporary upload prefixed keys" do
      SiteSetting.s3_upload_bucket = "s3-upload-bucket/folder_path"
      uri =
        URI.parse(
          store.signed_url_for_path(
            "#{FileStore::BaseStore::TEMPORARY_UPLOAD_PREFIX}folder_path/uploads/default/blah/def.xyz",
          ),
        )
      expect(uri.path).to eq(
        "/#{FileStore::BaseStore::TEMPORARY_UPLOAD_PREFIX}folder_path/uploads/default/blah/def.xyz",
      )
      uri = URI.parse(store.signed_url_for_path("uploads/default/blah/def.xyz"))
      expect(uri.path).to eq("/folder_path/uploads/default/blah/def.xyz")
    end
  end

  # Creates an in-memory FakeS3 bucket (named after the current
  # s3_upload_bucket setting), seeds it with one object for `upload` under
  # `upload_key`, and returns the bucket.
  def prepare_fake_s3(upload_key, upload)
    @fake_s3 = FakeS3.create
    @fake_s3_bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
    store_fake_s3_object(upload_key, upload)
    @fake_s3_bucket
  end

  # Puts a single object into the FakeS3 bucket prepared by prepare_fake_s3;
  # must be called after prepare_fake_s3 has set @fake_s3_bucket.
  def store_fake_s3_object(upload_key, upload)
    @fake_s3_bucket.put_object(
      key: upload_key,
      size: upload.filesize,
      last_modified: upload.created_at,
    )
  end
end
|