2019-04-29 20:27:42 -04:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2018-11-28 23:11:48 -05:00
|
|
|
require "file_store/s3_store"
|
|
|
|
|
|
|
|
RSpec.describe "Multisite s3 uploads", type: :multisite do
|
2020-06-16 21:16:37 -04:00
|
|
|
# Shared fixtures: a tiny PNG upload plus the multisite-aware base upload path.
let(:original_filename) { "smallest.png" }
let(:uploaded_file) { file_from_fixtures(original_filename) }
# SHA1 of the fixture's bytes — used to derive deterministic S3 object keys.
let(:upload_sha1) { Digest::SHA1.file(uploaded_file).hexdigest }
# Depends on the current multisite connection (differs per database).
let(:upload_path) { Discourse.store.upload_path }
|
2018-11-28 23:11:48 -05:00
|
|
|
|
2019-01-25 15:24:44 -05:00
|
|
|
# Builds (without persisting) an upload fixture with a fixed id so the
# generated S3 key is deterministic across examples.
def build_upload
  Fabricate.build(
    :upload,
    id: 1,
    sha1: upload_sha1,
    original_filename: original_filename,
  )
end
|
|
|
|
|
2022-07-27 12:14:14 -04:00
|
|
|
describe "uploading to s3" do
  before(:each) { setup_s3 }

  describe "#store_upload" do
    let(:s3_client) { Aws::S3::Client.new(stub_responses: true) }
    let(:s3_helper) { S3Helper.new(SiteSetting.s3_upload_bucket, "", client: s3_client) }
    let(:store) { FileStore::S3Store.new(s3_helper) }

    # Baseline options every stored image is expected to be uploaded with.
    let(:upload_opts) do
      {
        acl: "public-read",
        cache_control: "max-age=31556952, public, immutable",
        content_type: "image/png",
      }
    end

    it "does not provide a content_disposition for images" do
      s3_helper
        .expects(:upload)
        .with(uploaded_file, kind_of(String), upload_opts)
        .returns(%w[path etag])

      store.store_upload(uploaded_file, build_upload)
    end

    context "when the file is a PDF" do
      let(:original_filename) { "small.pdf" }
      let(:uploaded_file) { file_from_fixtures("small.pdf", "pdf") }

      it "adds an attachment content-disposition with the original filename" do
        disposition_opts = {
          content_disposition:
            "attachment; filename=\"#{original_filename}\"; filename*=UTF-8''#{original_filename}",
          content_type: "application/pdf",
        }
        s3_helper
          .expects(:upload)
          .with(uploaded_file, kind_of(String), upload_opts.merge(disposition_opts))
          .returns(%w[path etag])

        store.store_upload(uploaded_file, build_upload)
      end
    end

    context "when the file is a video" do
      let(:original_filename) { "small.mp4" }
      let(:uploaded_file) { file_from_fixtures("small.mp4", "media") }

      it "adds an attachment content-disposition with the original filename" do
        disposition_opts = {
          content_disposition:
            "attachment; filename=\"#{original_filename}\"; filename*=UTF-8''#{original_filename}",
          content_type: "application/mp4",
        }
        s3_helper
          .expects(:upload)
          .with(uploaded_file, kind_of(String), upload_opts.merge(disposition_opts))
          .returns(%w[path etag])

        store.store_upload(uploaded_file, build_upload)
      end
    end

    context "when the file is audio" do
      let(:original_filename) { "small.mp3" }
      let(:uploaded_file) { file_from_fixtures("small.mp3", "media") }

      it "adds an attachment content-disposition with the original filename" do
        disposition_opts = {
          content_disposition:
            "attachment; filename=\"#{original_filename}\"; filename*=UTF-8''#{original_filename}",
          content_type: "audio/mpeg",
        }
        s3_helper
          .expects(:upload)
          .with(uploaded_file, kind_of(String), upload_opts.merge(disposition_opts))
          .returns(%w[path etag])

        store.store_upload(uploaded_file, build_upload)
      end
    end

    it "returns the correct url for default and second multisite db" do
      test_multisite_connection("default") do
        upload = build_upload
        expect(store.store_upload(uploaded_file, upload)).to eq(
          "//#{SiteSetting.s3_upload_bucket}.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/c530c06cf89c410c0355d7852644a73fc3ec8c04.png",
        )
        expect(upload.etag).to eq("ETag")
      end

      test_multisite_connection("second") do
        # The upload path is database-specific; re-read it on this connection.
        upload_path = Discourse.store.upload_path
        upload = build_upload
        expect(store.store_upload(uploaded_file, upload)).to eq(
          "//#{SiteSetting.s3_upload_bucket}.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/c530c06cf89c410c0355d7852644a73fc3ec8c04.png",
        )
        expect(upload.etag).to eq("ETag")
      end
    end
  end
end
|
2018-12-19 00:32:32 -05:00
|
|
|
|
2022-07-27 12:14:14 -04:00
|
|
|
describe "removal from s3" do
  before { setup_s3 }

  describe "#remove_upload" do
    let(:store) { FileStore::S3Store.new }
    let(:upload) { build_upload }
    let(:upload_key) { "#{upload_path}/original/1X/#{upload.sha1}.png" }

    # Seeds the fake S3 bucket with the upload's object and returns the bucket.
    def prepare_fake_s3
      @fake_s3 = FakeS3.create
      bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
      bucket.put_object(key: upload_key, size: upload.filesize, last_modified: upload.created_at)
      bucket
    end

    # Asserts that removing the upload moves its object to the tombstone key.
    def expect_removal_to_tombstone(bucket, tombstone_key)
      expect(bucket.find_object(upload_key)).to be_present
      expect(bucket.find_object(tombstone_key)).to be_nil

      store.remove_upload(upload)

      expect(bucket.find_object(upload_key)).to be_nil
      expect(bucket.find_object(tombstone_key)).to be_present
    end

    it "removes the file from s3 on multisite" do
      test_multisite_connection("default") do
        upload.update!(
          url:
            "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.png",
        )
        bucket = prepare_fake_s3

        expect_removal_to_tombstone(
          bucket,
          "uploads/tombstone/default/original/1X/#{upload.sha1}.png",
        )
      end
    end

    it "removes the file from s3 on another multisite db" do
      test_multisite_connection("second") do
        upload.update!(
          url:
            "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.png",
        )
        bucket = prepare_fake_s3

        expect_removal_to_tombstone(
          bucket,
          "uploads/tombstone/second/original/1X/#{upload.sha1}.png",
        )
      end
    end

    describe "when s3_upload_bucket includes folders path" do
      let(:upload_key) { "discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png" }

      before { SiteSetting.s3_upload_bucket = "s3-upload-bucket/discourse-uploads" }

      it "removes the file from s3 on multisite" do
        test_multisite_connection("default") do
          upload.update!(
            url:
              "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png",
          )
          bucket = prepare_fake_s3

          expect_removal_to_tombstone(
            bucket,
            "discourse-uploads/uploads/tombstone/default/original/1X/#{upload.sha1}.png",
          )
        end
      end
    end
  end
end
|
2019-06-05 23:27:24 -04:00
|
|
|
|
2022-09-28 19:24:33 -04:00
|
|
|
# NOTE(review): fixed typo in the describe label ("uploadss" -> "uploads").
describe "secure uploads" do
  let(:store) { FileStore::S3Store.new }
  let(:client) { Aws::S3::Client.new(stub_responses: true) }
  let(:resource) { Aws::S3::Resource.new(client: client) }
  let(:s3_bucket) { resource.bucket("some-really-cool-bucket") }
  let(:s3_helper) { store.s3_helper }
  let(:s3_object) { stub }

  before(:each) do
    setup_s3
    SiteSetting.s3_upload_bucket = "some-really-cool-bucket"
    SiteSetting.authorized_extensions = "pdf|png|jpg|gif"
  end

  before { s3_object.stubs(:put).returns(Aws::S3::Types::PutObjectOutput.new(etag: "etag")) }

  describe "when secure attachments are enabled" do
    it "returns signed URL with correct path" do
      test_multisite_connection("default") do
        upload =
          Fabricate(:upload, original_filename: "small.pdf", extension: "pdf", secure: true)
        path = Discourse.store.get_path_for_upload(upload)

        s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
        s3_bucket.expects(:object).with("#{upload_path}/#{path}").returns(s3_object).at_least_once
        s3_object.expects(:presigned_url).with(
          :get,
          { expires_in: SiteSetting.s3_presigned_get_url_expires_after_seconds },
        )

        upload.url = store.store_upload(uploaded_file, upload)
        expect(upload.url).to eq(
          "//some-really-cool-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/#{path}",
        )

        # Secure uploads are served via signed URLs, never the raw stored URL.
        expect(store.url_for(upload)).not_to eq(upload.url)
      end
    end
  end

  describe "when secure uploads are enabled" do
    before do
      SiteSetting.login_required = true
      SiteSetting.secure_uploads = true
      s3_helper.stubs(:s3_client).returns(client)
      Discourse.stubs(:store).returns(store)
    end

    it "returns signed URL with correct path" do
      test_multisite_connection("default") do
        upload = Fabricate.build(:upload_s3, sha1: upload_sha1, id: 1)

        signed_url = Discourse.store.signed_url_for_path(upload.url)
        expect(signed_url).to match(/Amz-Expires/)
        expect(signed_url).to match("#{upload_path}")
      end

      test_multisite_connection("second") do
        # The upload path is database-specific; re-read it on this connection.
        upload_path = Discourse.store.upload_path
        upload = Fabricate.build(:upload_s3, sha1: upload_sha1, id: 1)

        signed_url = Discourse.store.signed_url_for_path(upload.url)
        expect(signed_url).to match(/Amz-Expires/)
        expect(signed_url).to match("#{upload_path}")
      end
    end
  end

  describe "#update_upload_ACL" do
    it "updates correct file for default and second multisite db" do
      test_multisite_connection("default") do
        upload = build_upload
        upload.update!(original_filename: "small.pdf", extension: "pdf", secure: true)

        s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
        s3_bucket
          .expects(:object)
          .with("#{upload_path}/original/1X/#{upload.sha1}.pdf")
          .returns(s3_object)
        s3_object.expects(:acl).returns(s3_object)
        s3_object.expects(:put).with(acl: "private").returns(s3_object)

        expect(store.update_upload_ACL(upload)).to be_truthy
      end

      test_multisite_connection("second") do
        # The upload path is database-specific; re-read it on this connection.
        upload_path = Discourse.store.upload_path
        upload = build_upload
        upload.update!(original_filename: "small.pdf", extension: "pdf", secure: true)

        s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
        s3_bucket
          .expects(:object)
          .with("#{upload_path}/original/1X/#{upload.sha1}.pdf")
          .returns(s3_object)
        s3_object.expects(:acl).returns(s3_object)
        s3_object.expects(:put).with(acl: "private").returns(s3_object)

        expect(store.update_upload_ACL(upload)).to be_truthy
      end
    end
  end
end
|
2020-05-23 00:56:13 -04:00
|
|
|
|
|
|
|
describe "#has_been_uploaded?" do
  before do
    setup_s3
    SiteSetting.s3_upload_bucket = "s3-upload-bucket/test"
  end

  let(:store) { FileStore::S3Store.new }

  it "returns false for blank urls and bad urls" do
    [
      "",
      "http://test@test.com:test/test.git",
      "http:///+test@test.com/test.git",
    ].each { |bad_url| expect(store.has_been_uploaded?(bad_url)).to eq(false) }
  end

  it "returns true if the base hostname is the same for both urls" do
    url =
      "https://s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/test/original/2X/d/dd7964f5fd13e1103c5244ca30abe1936c0a4b88.png"
    expect(store.has_been_uploaded?(url)).to eq(true)
  end

  it "returns false if the base hostname is the same for both urls BUT the bucket name is different in the path" do
    bucket = "someotherbucket"
    url =
      "https://s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{bucket}/original/2X/d/dd7964f5fd13e1103c5244ca30abe1936c0a4b88.png"
    expect(store.has_been_uploaded?(url)).to eq(false)
  end

  it "returns false if the hostnames do not match and the s3_cdn_url is blank" do
    url =
      "https://www.someotherhostname.com/test/original/2X/d/dd7964f5fd13e1103c5244ca30abe1936c0a4b88.png"
    expect(store.has_been_uploaded?(url)).to eq(false)
  end

  it "returns true if the s3_cdn_url is present and matches the url hostname" do
    SiteSetting.s3_cdn_url = "https://www.someotherhostname.com"
    url =
      "https://www.someotherhostname.com/test/original/2X/d/dd7964f5fd13e1103c5244ca30abe1936c0a4b88.png"
    expect(store.has_been_uploaded?(url)).to eq(true)
  end

  it "returns false if the URI is an invalid mailto link" do
    link = "mailto: roman;@test.com"

    expect(store.has_been_uploaded?(link)).to eq(false)
  end
end
|
2021-07-27 18:42:25 -04:00
|
|
|
|
|
|
|
describe "#signed_url_for_temporary_upload" do
  before { setup_s3 }

  let(:store) { FileStore::S3Store.new }

  context "for a bucket with no folder path" do
    before { SiteSetting.s3_upload_bucket = "s3-upload-bucket" }

    it "returns a presigned url with the correct params and the key for the temporary file" do
      url = store.signed_url_for_temporary_upload("test.png")
      key = store.s3_helper.path_from_url(url)
      expect(url).to match(/Amz-Expires/)
      # NOTE(review): fixed char class — [A-z] also matched "[\]^_`"; the key
      # segments are alphanumeric. Also escaped the "." before the extension.
      expect(key).to match(
        /temp\/uploads\/default\/test_[0-9]\/[a-zA-Z0-9]{0,32}\/[a-zA-Z0-9]{0,32}\.png/,
      )
    end

    it "presigned url contains the metadata when provided" do
      url =
        store.signed_url_for_temporary_upload("test.png", metadata: { "test-meta": "testing" })
      expect(url).to include("&x-amz-meta-test-meta=testing")
    end
  end

  context "for a bucket with a folder path" do
    before { SiteSetting.s3_upload_bucket = "s3-upload-bucket/site" }

    it "returns a presigned url with the correct params and the key for the temporary file" do
      url = store.signed_url_for_temporary_upload("test.png")
      key = store.s3_helper.path_from_url(url)
      expect(url).to match(/Amz-Expires/)
      expect(key).to match(
        /temp\/site\/uploads\/default\/test_[0-9]\/[a-zA-Z0-9]{0,32}\/[a-zA-Z0-9]{0,32}\.png/,
      )
    end
  end

  context "for a multisite site" do
    before { SiteSetting.s3_upload_bucket = "s3-upload-bucket/standard99" }

    it "returns a presigned url with the correct params and the key for the temporary file" do
      test_multisite_connection("second") do
        url = store.signed_url_for_temporary_upload("test.png")
        key = store.s3_helper.path_from_url(url)
        expect(url).to match(/Amz-Expires/)
        expect(key).to match(
          /temp\/standard99\/uploads\/second\/test_[0-9]\/[a-zA-Z0-9]{0,32}\/[a-zA-Z0-9]{0,32}\.png/,
        )
      end
    end
  end
end
|
2018-11-28 23:11:48 -05:00
|
|
|
end
|