# frozen_string_literal: true

require 'rails_helper'
require 'file_store/s3_store'

RSpec.describe 'Multisite s3 uploads', type: :multisite do
  let(:original_filename) { "smallest.png" }
  let(:uploaded_file) { file_from_fixtures(original_filename) }
  let(:upload_sha1) { Digest::SHA1.hexdigest(File.read(uploaded_file)) }
  let(:upload_path) { Discourse.store.upload_path }
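
  # Builds an upload with a fixed id and the fixture file's SHA1, so the S3 keys
  # generated in the examples below are deterministic.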
  def build_upload
    Fabricate.build(:upload, sha1: upload_sha1, id: 1, original_filename: original_filename)
  end

  context 'uploading to s3' do
    before(:each) do
      SiteSetting.s3_upload_bucket = "some-really-cool-bucket"
      SiteSetting.s3_access_key_id = "s3-access-key-id"
      SiteSetting.s3_secret_access_key = "s3-secret-access-key"
      SiteSetting.enable_s3_uploads = true
    end
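
    # The AWS client below is constructed with stub_responses: true, so these examples
    # never talk to real S3; upload_opts is what the store is expected to hand to
    # S3Helper#upload for a plain public image.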
    describe "#store_upload" do
      let(:s3_client) { Aws::S3::Client.new(stub_responses: true) }
      let(:s3_helper) { S3Helper.new(SiteSetting.s3_upload_bucket, '', client: s3_client) }
      let(:store) { FileStore::S3Store.new(s3_helper) }
      let(:upload_opts) do
        {
          acl: "public-read",
          cache_control: "max-age=31556952, public, immutable",
          content_type: "image/png"
        }
      end

      it "does not provide a content_disposition for images" do
        s3_helper.expects(:upload).with(uploaded_file, kind_of(String), upload_opts).returns(["path", "etag"])
        upload = build_upload
        store.store_upload(uploaded_file, upload)
      end
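
      # Non-image uploads are expected to carry an attachment content-disposition with the
      # original filename, presumably so browsers download them instead of rendering inline.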
      context "when the file is a PDF" do
        let(:original_filename) { "small.pdf" }
        let(:uploaded_file) { file_from_fixtures("small.pdf", "pdf") }

        it "adds an attachment content-disposition with the original filename" do
          disp_opts = { content_disposition: "attachment; filename=\"#{original_filename}\"; filename*=UTF-8''#{original_filename}", content_type: "application/pdf" }
          s3_helper.expects(:upload).with(uploaded_file, kind_of(String), upload_opts.merge(disp_opts)).returns(["path", "etag"])
          upload = build_upload
          store.store_upload(uploaded_file, upload)
        end
      end

      context "when the file is a video" do
        let(:original_filename) { "small.mp4" }
        let(:uploaded_file) { file_from_fixtures("small.mp4", "media") }

        it "adds an attachment content-disposition with the original filename" do
          disp_opts = { content_disposition: "attachment; filename=\"#{original_filename}\"; filename*=UTF-8''#{original_filename}", content_type: "application/mp4" }
          s3_helper.expects(:upload).with(uploaded_file, kind_of(String), upload_opts.merge(disp_opts)).returns(["path", "etag"])
          upload = build_upload
          store.store_upload(uploaded_file, upload)
        end
      end

      context "when the file is audio" do
        let(:original_filename) { "small.mp3" }
        let(:uploaded_file) { file_from_fixtures("small.mp3", "media") }

        it "adds an attachment content-disposition with the original filename" do
          disp_opts = { content_disposition: "attachment; filename=\"#{original_filename}\"; filename*=UTF-8''#{original_filename}", content_type: "audio/mpeg" }
          s3_helper.expects(:upload).with(uploaded_file, kind_of(String), upload_opts.merge(disp_opts)).returns(["path", "etag"])
          upload = build_upload
          store.store_upload(uploaded_file, upload)
        end
      end

      it "returns the correct url for default and second multisite db" do
        test_multisite_connection('default') do
          upload = build_upload
          expect(store.store_upload(uploaded_file, upload)).to eq(
            "//#{SiteSetting.s3_upload_bucket}.s3.dualstack.us-east-1.amazonaws.com/#{upload_path}/original/1X/c530c06cf89c410c0355d7852644a73fc3ec8c04.png"
          )
          expect(upload.etag).to eq("ETag")
        end

        test_multisite_connection('second') do
          upload_path = Discourse.store.upload_path
          upload = build_upload
          expect(store.store_upload(uploaded_file, upload)).to eq(
            "//#{SiteSetting.s3_upload_bucket}.s3.dualstack.us-east-1.amazonaws.com/#{upload_path}/original/1X/c530c06cf89c410c0355d7852644a73fc3ec8c04.png"
          )
          expect(upload.etag).to eq("ETag")
        end
      end
    end
  end
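
  # Removal is expected to copy the object to the per-site tombstone prefix and delete
  # the original key, which is what the expectations below spell out.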
  context 'removal from s3' do
    before do
      SiteSetting.s3_region = 'us-west-1'
      SiteSetting.s3_upload_bucket = "s3-upload-bucket"
      SiteSetting.s3_access_key_id = "s3-access-key-id"
      SiteSetting.s3_secret_access_key = "s3-secret-access-key"
      SiteSetting.enable_s3_uploads = true
    end

    describe "#remove_upload" do
      let(:store) { FileStore::S3Store.new }
      let(:client) { Aws::S3::Client.new(stub_responses: true) }
      let(:resource) { Aws::S3::Resource.new(client: client) }
      let(:s3_bucket) { resource.bucket(SiteSetting.s3_upload_bucket) }
      let(:s3_helper) { store.s3_helper }

      it "removes the file from s3 on multisite" do
        test_multisite_connection('default') do
          upload = build_upload
          store.expects(:get_depth_for).with(upload.id).returns(0)
          s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
          upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.png")
          s3_object = stub

          s3_bucket.expects(:object).with("uploads/tombstone/default/original/1X/#{upload.sha1}.png").returns(s3_object)
          s3_object.expects(:copy_from).with(copy_source: "s3-upload-bucket/#{upload_path}/original/1X/#{upload.sha1}.png")
          s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.png").returns(s3_object)
          s3_object.expects(:delete)

          store.remove_upload(upload)
        end
      end

      it "removes the file from s3 on another multisite db" do
        test_multisite_connection('second') do
          upload = build_upload
          store.expects(:get_depth_for).with(upload.id).returns(0)
          s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
          upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.png")
          s3_object = stub

          s3_bucket.expects(:object).with("uploads/tombstone/second/original/1X/#{upload.sha1}.png").returns(s3_object)
          s3_object.expects(:copy_from).with(copy_source: "s3-upload-bucket/#{upload_path}/original/1X/#{upload.sha1}.png")
          s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.png").returns(s3_object)
          s3_object.expects(:delete)

          store.remove_upload(upload)
        end
      end

      describe "when s3_upload_bucket includes folders path" do
        before do
          SiteSetting.s3_upload_bucket = "s3-upload-bucket/discourse-uploads"
        end

        it "removes the file from s3 on multisite" do
          test_multisite_connection('default') do
            upload = build_upload
            store.expects(:get_depth_for).with(upload.id).returns(0)
            s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
            upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png")
            s3_object = stub

            s3_bucket.expects(:object).with("discourse-uploads/uploads/tombstone/default/original/1X/#{upload.sha1}.png").returns(s3_object)
            s3_object.expects(:copy_from).with(copy_source: "s3-upload-bucket/discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png")
            s3_bucket.expects(:object).with("discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png").returns(s3_object)
            s3_object.expects(:delete)

            store.remove_upload(upload)
          end
        end
      end
    end
  end
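
  # For secure uploads the store keeps objects under a private ACL and hands out
  # presigned URLs instead of the plain object URL, as the examples below assert.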
  context 'secure uploads' do
    let(:store) { FileStore::S3Store.new }
    let(:client) { Aws::S3::Client.new(stub_responses: true) }
    let(:resource) { Aws::S3::Resource.new(client: client) }
    let(:s3_bucket) { resource.bucket("some-really-cool-bucket") }
    let(:s3_helper) { store.instance_variable_get(:@s3_helper) }
    let(:s3_object) { stub }

    before(:each) do
      SiteSetting.s3_upload_bucket = "some-really-cool-bucket"
      SiteSetting.s3_access_key_id = "s3-access-key-id"
      SiteSetting.s3_secret_access_key = "s3-secret-access-key"
      SiteSetting.enable_s3_uploads = true
      SiteSetting.authorized_extensions = "pdf|png|jpg|gif"
    end

    before do
      s3_object.stubs(:put).returns(Aws::S3::Types::PutObjectOutput.new(etag: "etag"))
    end

    describe "when secure attachments are enabled" do
      it "returns signed URL with correct path" do
        test_multisite_connection('default') do
          upload = build_upload
          upload.update!(original_filename: "small.pdf", extension: "pdf", secure: true)

          s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
          s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.pdf").returns(s3_object).at_least_once
          s3_object.expects(:presigned_url).with(:get, expires_in: S3Helper::DOWNLOAD_URL_EXPIRES_AFTER_SECONDS)

          expect(store.store_upload(uploaded_file, upload)).to eq(
            "//some-really-cool-bucket.s3.dualstack.us-east-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.pdf"
          )

          expect(store.url_for(upload)).not_to eq(upload.url)
        end
      end
    end

    describe "when secure media are enabled" do
      before do
        SiteSetting.login_required = true
        SiteSetting.secure_media = true
        s3_helper.stubs(:s3_client).returns(client)
        Discourse.stubs(:store).returns(store)
      end

      it "returns signed URL with correct path" do
        test_multisite_connection('default') do
          upload = Fabricate.build(:upload_s3, sha1: upload_sha1, id: 1)

          signed_url = Discourse.store.signed_url_for_path(upload.url)
          expect(signed_url).to match(/Amz-Expires/)
          expect(signed_url).to match("#{upload_path}")
        end

        test_multisite_connection('second') do
          upload_path = Discourse.store.upload_path
          upload = Fabricate.build(:upload_s3, sha1: upload_sha1, id: 1)

          signed_url = Discourse.store.signed_url_for_path(upload.url)
          expect(signed_url).to match(/Amz-Expires/)
          expect(signed_url).to match("#{upload_path}")
        end
      end
    end

    describe "#update_upload_ACL" do
      it "updates correct file for default and second multisite db" do
        test_multisite_connection('default') do
          upload = build_upload
          upload.update!(original_filename: "small.pdf", extension: "pdf", secure: true)

          s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
          s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.pdf").returns(s3_object)
          s3_object.expects(:acl).returns(s3_object)
          s3_object.expects(:put).with(acl: "private").returns(s3_object)

          expect(store.update_upload_ACL(upload)).to be_truthy
        end

        test_multisite_connection('second') do
          upload_path = Discourse.store.upload_path
          upload = build_upload
          upload.update!(original_filename: "small.pdf", extension: "pdf", secure: true)

          s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
          s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.pdf").returns(s3_object)
          s3_object.expects(:acl).returns(s3_object)
          s3_object.expects(:put).with(acl: "private").returns(s3_object)

          expect(store.update_upload_ACL(upload)).to be_truthy
        end
      end
    end
  end
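
  # has_been_uploaded? should only claim URLs that point at this store's bucket
  # (or the configured s3_cdn_url), which the cases below exercise.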
  describe "#has_been_uploaded?" do
    before do
      SiteSetting.s3_region = 'us-west-1'
      SiteSetting.s3_upload_bucket = "s3-upload-bucket/test"
      SiteSetting.s3_access_key_id = "s3-access-key-id"
      SiteSetting.s3_secret_access_key = "s3-secret-access-key"
      SiteSetting.enable_s3_uploads = true
    end

    let(:store) { FileStore::S3Store.new }
    let(:client) { Aws::S3::Client.new(stub_responses: true) }
    let(:resource) { Aws::S3::Resource.new(client: client) }
    let(:s3_bucket) { resource.bucket(SiteSetting.s3_upload_bucket) }
    let(:s3_helper) { store.s3_helper }

    it "returns false for blank urls and bad urls" do
      expect(store.has_been_uploaded?("")).to eq(false)
      expect(store.has_been_uploaded?("http://test@test.com:test/test.git")).to eq(false)
      expect(store.has_been_uploaded?("http:///+test@test.com/test.git")).to eq(false)
    end

    it "returns true if the base hostname is the same for both urls" do
      url = "https://s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/test/original/2X/d/dd7964f5fd13e1103c5244ca30abe1936c0a4b88.png"
      expect(store.has_been_uploaded?(url)).to eq(true)
    end

    it "returns false if the base hostname is the same for both urls BUT the bucket name is different in the path" do
      bucket = "someotherbucket"
      url = "https://s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{bucket}/original/2X/d/dd7964f5fd13e1103c5244ca30abe1936c0a4b88.png"
      expect(store.has_been_uploaded?(url)).to eq(false)
    end

    it "returns false if the hostnames do not match and the s3_cdn_url is blank" do
      url = "https://www.someotherhostname.com/test/original/2X/d/dd7964f5fd13e1103c5244ca30abe1936c0a4b88.png"
      expect(store.has_been_uploaded?(url)).to eq(false)
    end

    it "returns true if the s3_cdn_url is present and matches the url hostname" do
      SiteSetting.s3_cdn_url = "https://www.someotherhostname.com"
      url = "https://www.someotherhostname.com/test/original/2X/d/dd7964f5fd13e1103c5244ca30abe1936c0a4b88.png"
      expect(store.has_been_uploaded?(url)).to eq(true)
    end

    it "returns false if the URI is an invalid mailto link" do
      link = 'mailto: roman;@test.com'

      expect(store.has_been_uploaded?(link)).to eq(false)
    end
  end
end