DEV: Add S3 upload system specs using minio (#22975)

This commit adds system specs that test direct-to-S3 single-part and
multipart uploads via uppy, using minio as a local S3 replacement.
We are doing this to catch regressions when uppy dependencies need to
be upgraded or when we change the uppy upload code; before this, there
was no way outside of manual testing to know whether such changes
would cause regressions.

Minio's server lifecycle and the installed binaries are managed
by the https://github.com/discourse/minio_runner gem, though the
binaries are already installed on the discourse_test image we run
GitHub CI from.

These tests are skipped outside of CI by default; to run them locally,
set the CI=1 or RUN_S3_SYSTEM_SPECS=1 env vars.
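
For reference, the new specs follow roughly this pattern (a condensed sketch
based on the spec/system helpers and the S3 upload spec added further down in
this diff; the Capybara/page-object details are elided):

# Condensed sketch of the new spec pattern; see the full spec and helpers below.
describe "Uploading files to S3", type: :system do
  fab!(:current_user) { Fabricate(:admin) }

  it "uploads a file in the post composer" do
    skip_unless_s3_system_specs_enabled! # skipped unless CI=1 or RUN_S3_SYSTEM_SPECS=1 is set
    setup_s3_system_test # points the S3 site settings at minio and starts the minio server
    sign_in(current_user)
    # ...drive the uploader with Capybara, as in the full spec at the end of this diff...
  end
end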

For a history of experimentation here see https://github.com/discourse/discourse/pull/22381

Related PRs:

* https://github.com/discourse/minio_runner/pull/1
* https://github.com/discourse/minio_runner/pull/2
* https://github.com/discourse/minio_runner/pull/3
Martin Brennan 2023-08-23 11:18:33 +10:00 committed by GitHub
parent 9b63ac473b
commit cf42466dea
22 changed files with 360 additions and 29 deletions


@ -30,6 +30,7 @@ jobs:
PGPASSWORD: discourse
USES_PARALLEL_DATABASES: ${{ matrix.build_type == 'backend' || matrix.build_type == 'system' }}
CAPYBARA_DEFAULT_MAX_WAIT_TIME: 10
MINIO_RUNNER_LOG_LEVEL: DEBUG
strategy:
fail-fast: false
@ -106,6 +107,11 @@ jobs:
if: matrix.target == 'plugins'
run: bin/rake plugin:pull_compatible_all
- name: Add hosts to /etc/hosts, otherwise Chrome cannot reach minio
run: |
echo "127.0.0.1 minio.local" | sudo tee -a /etc/hosts
echo "127.0.0.1 discoursetest.minio.local" | sudo tee -a /etc/hosts
- name: Fetch app state cache
uses: actions/cache@v3
id: app-cache


@ -144,6 +144,7 @@ group :test do
gem "selenium-webdriver", "~> 4.11", require: false
gem "test-prof"
gem "rails-dom-testing", require: false
gem "minio_runner", require: false
end
group :test, :development do


@ -243,6 +243,7 @@ GEM
mini_sql (1.5.0)
mini_suffix (0.3.3)
ffi (~> 1.9)
minio_runner (0.1.1)
minitest (5.19.0)
mocha (2.1.0)
ruby2_keywords (>= 0.0.5)
@ -605,6 +606,7 @@ DEPENDENCIES
mini_scheduler
mini_sql
mini_suffix
minio_runner
minitest
mocha
multi_json


@ -2,6 +2,8 @@
class="btn btn-default btn-icon-text"
disabled={{this.uploading}}
title={{i18n "user.change_avatar.upload_title"}}
data-uploaded={{this.customAvatarUploaded}}
data-avatar-upload-id={{this.uploadedAvatarId}}
>
{{d-icon "far-image"}}
{{#if this.uploading}}


@ -1,4 +1,5 @@
import Component from "@ember/component";
import { isBlank } from "@ember/utils";
import UppyUploadMixin from "discourse/mixins/uppy-upload";
import discourseComputed from "discourse-common/utils/decorators";
@ -7,6 +8,11 @@ export default Component.extend(UppyUploadMixin, {
tagName: "span",
imageIsNotASquare: false,
@discourseComputed("uploading", "uploadedAvatarId")
customAvatarUploaded() {
return !this.uploading && !isBlank(this.uploadedAvatarId);
},
validateUploadedFilesOptions() {
return { imagesOnly: true };
},


@ -397,6 +397,7 @@ export default Mixin.create(UppyS3Multipart, ExtendableUploader, {
method: "put",
url: response.url,
headers: {
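// These headers come back as signed_headers from the generate-presigned-put
// response; they were part of the presigned signature, so the PUT must send
// them for the signature to match.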
...response.signed_headers,
"Content-Type": file.type,
},
};


@ -8,6 +8,12 @@
<label class="control-label" id="profile-picture">{{i18n
"user.avatar.title"
}}</label>
<input
type="hidden"
id="user-avatar-uploads"
data-custom-avatar-upload-id={{this.model.custom_avatar_upload_id}}
data-system-avatar-upload-id={{this.model.system_avatar_upload_id}}
/>
<div class="controls">
{{! we want the "huge" version even though we're downsizing it in CSS }}
{{bound-avatar this.model "huge"}}
@ -16,6 +22,7 @@
@actionParam={{this.model}}
@class="btn-default pad-left"
@icon="pencil-alt"
id="edit-avatar"
/>
</div>
</div>


@ -28,7 +28,7 @@ class ExternalUploadManager
def self.create_direct_upload(current_user:, file_name:, file_size:, upload_type:, metadata: {})
store = store_for_upload_type(upload_type)
url = store.signed_url_for_temporary_upload(file_name, metadata: metadata)
url, signed_headers = store.signed_request_for_temporary_upload(file_name, metadata: metadata)
key = store.s3_helper.path_from_url(url)
upload_stub =
@ -40,7 +40,12 @@ class ExternalUploadManager
filesize: file_size,
)
{ url: url, key: key, unique_identifier: upload_stub.unique_identifier }
{
url: url,
key: key,
unique_identifier: upload_stub.unique_identifier,
signed_headers: signed_headers,
}
end
def self.create_direct_multipart_upload(
@ -191,11 +196,18 @@ class ExternalUploadManager
def download(key, type)
url = @store.signed_url_for_path(external_upload_stub.key)
uri = URI(url)
FileHelper.download(
url,
max_file_size: DOWNLOAD_LIMIT,
tmp_file_name: "discourse-upload-#{type}",
follow_redirect: true,
# Local S3 servers (like minio) do not use port 80, and the Aws::Sigv4::Signer
# includes the port number in the Host header when presigning URLs if the
# port is not 80, so we have to make sure the Host header sent by
# FinalDestination includes the port, otherwise we will get a
# `SignatureDoesNotMatch` error.
include_port_in_host_header: uri.scheme == "http" && uri.port != 80,
)
end
end

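For context, a hedged sketch of how a caller would consume the new
ExternalUploadManager.create_direct_upload response shape; the real client is
the uppy code above, and Net::HTTP, the upload_type value, and the user/file
variables here are purely illustrative:

# Sketch only: the signed_headers returned by create_direct_upload were baked
# into the presigned signature, so they have to be replayed on the PUT request.
require "net/http"

result =
  ExternalUploadManager.create_direct_upload(
    current_user: user, # illustrative
    file_name: "logo.png",
    file_size: file.size, # illustrative
    upload_type: "avatar", # illustrative value
  )

uri = URI(result[:url])
put = Net::HTTP::Put.new(uri)
result[:signed_headers].each { |name, value| put[name] = value } # e.g. "x-amz-acl" => "private"
put["Content-Type"] = "image/png"
put.body = file.read
Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == "https") { |http| http.request(put) }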

@ -355,7 +355,7 @@ module ExternalUploadHelpers
def debug_upload_error(err, friendly_message)
return if !SiteSetting.enable_upload_debug_mode
Discourse.warn_exception(err, message: friendly_message)
Rails.env.development? ? friendly_message : I18n.t("upload.failed")
(Rails.env.development? || Rails.env.test?) ? friendly_message : I18n.t("upload.failed")
end
def multipart_store(upload_type)


@ -49,7 +49,8 @@ class FileHelper
skip_rate_limit: false,
verbose: false,
validate_uri: true,
retain_on_max_file_size_exceeded: false
retain_on_max_file_size_exceeded: false,
include_port_in_host_header: false
)
url = "https:" + url if url.start_with?("//")
raise Discourse::InvalidParameters.new(:url) unless url =~ %r{\Ahttps?://}
@ -64,6 +65,7 @@ class FileHelper
verbose: verbose,
validate_uri: validate_uri,
timeout: read_timeout,
include_port_in_host_header: include_port_in_host_header,
)
fd.get do |response, chunk, uri|


@ -252,13 +252,13 @@ module FileStore
presigned_get_url(key, expires_in: expires_in, force_download: force_download)
end
def signed_url_for_temporary_upload(
def signed_request_for_temporary_upload(
file_name,
expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS,
metadata: {}
)
key = temporary_upload_path(file_name)
s3_helper.presigned_url(
s3_helper.presigned_request(
key,
method: :put_object,
expires_in: expires_in,


@ -49,6 +49,7 @@ class FinalDestination
@default_user_agent = @opts[:default_user_agent] || DEFAULT_USER_AGENT
@opts[:max_redirects] ||= 5
@https_redirect_ignore_limit = @opts[:initial_https_redirect_ignore_limit]
@include_port_in_host_header = @opts[:include_port_in_host_header] || false
@max_redirects = @opts[:max_redirects]
@limit = @max_redirects
@ -114,7 +115,7 @@ class FinalDestination
"User-Agent" => @user_agent,
"Accept" => "*/*",
"Accept-Language" => "*",
"Host" => @uri.hostname,
"Host" => @uri.hostname + (@include_port_in_host_header ? ":#{@uri.port}" : ""),
}
result["Cookie"] = @cookie if @cookie
@ -397,7 +398,15 @@ class FinalDestination
def validate_uri_format
return false unless @uri && @uri.host
return false unless %w[https http].include?(@uri.scheme)
return false if @uri.scheme == "http" && @uri.port != 80
# In some cases (like local/test environments) we may want to allow http URLs
# to be used for internal hosts, but only if it's the case that the host is
# explicitly used for SiteSetting.s3_endpoint. This is to allow for local
# S3 providers like minio.
#
# In all other cases, we should not be allowing http calls to anything except
# port 80.
return false if @uri.scheme == "http" && !http_port_ok?
return false if @uri.scheme == "https" && @uri.port != 443
# Disallow IP based crawling
@ -410,6 +419,23 @@ class FinalDestination
).nil?
end
def http_port_ok?
return true if @uri.port == 80
allowed_internal_hosts =
SiteSetting.allowed_internal_hosts&.split(/[|\n]/).filter_map { |aih| aih.strip.presence }
return false if allowed_internal_hosts.empty? || SiteSetting.s3_endpoint.blank?
return false if allowed_internal_hosts.none? { |aih| hostname_matches_s3_endpoint?(aih) }
true
end
def hostname_matches_s3_endpoint?(allowed_internal_host)
s3_endpoint_uri = URI(SiteSetting.s3_endpoint)
hostname_matches?("http://#{allowed_internal_host}") && @uri.port == s3_endpoint_uri.port &&
@uri.hostname.end_with?(s3_endpoint_uri.hostname)
end
def hostname
@uri.hostname
end
@ -451,7 +477,11 @@ class FinalDestination
headers_subset = Struct.new(:location, :set_cookie).new
safe_session(uri) do |http|
headers = request_headers.merge("Accept-Encoding" => "gzip", "Host" => uri.host)
headers =
request_headers.merge(
"Accept-Encoding" => "gzip",
"Host" => uri.hostname + (@include_port_in_host_header ? ":#{uri.port}" : ""),
)
req = FinalDestination::HTTP::Get.new(uri.request_uri, headers)

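To make the new FinalDestination http port rule concrete, a small illustration
whose settings and expected results mirror the final_destination spec further
down (FinalDestination.new(url) stands in for the spec's fd helper):

# With a local S3 endpoint configured, as in the spec below:
SiteSetting.s3_endpoint = "http://minio.local:9000"

# A non-standard http port is still rejected while the host is not allowed:
FinalDestination.new("http://discoursetest.minio.local:9000").validate_uri_format # => false

# Once the host is listed in allowed_internal_hosts and the port matches
# s3_endpoint, the http URL is accepted:
SiteSetting.allowed_internal_hosts = "minio.local|discoursetest.minio.local"
FinalDestination.new("http://discoursetest.minio.local:9000").validate_uri_format # => true

# Standard ports behave as before:
FinalDestination.new("http://eviltrout.com:80").validate_uri_format   # => true
FinalDestination.new("https://eviltrout.com:443").validate_uri_format # => true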

@ -353,6 +353,19 @@ class S3Helper
)
end
# Returns url, headers in a tuple which is needed in some cases.
def presigned_request(
key,
method:,
expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS,
opts: {}
)
Aws::S3::Presigner.new(client: s3_client).presigned_request(
method,
{ bucket: s3_bucket_name, key: key, expires_in: expires_in }.merge(opts),
)
end
private
def fetch_bucket_cors_rules

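A usage sketch for the new S3Helper#presigned_request above; the key and the
exact opts shape are assumptions (opts is merged straight into the presigner
params), while the returned url/headers tuple matches the multisite spec
further down:

# Sketch only: presign a temporary PUT and capture the headers that were signed.
store = FileStore::S3Store.new
url, signed_headers = store.s3_helper.presigned_request(
  "temp/uploads/default/test/logo.png", # illustrative key
  method: :put_object,
  opts: { acl: "private", metadata: { "test-meta" => "testing" } }, # assumed opts shape
)
# signed_headers => { "x-amz-acl" => "private", "x-amz-meta-test-meta" => "testing" }
# The caller has to send these headers with the PUT to url, otherwise the
# signature check fails with SignatureDoesNotMatch.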

@ -0,0 +1,7 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
require "minio_runner"
ENV["MINIO_RUNNER_LOG_LEVEL"] = "DEBUG"
MinioRunner.install_binaries

script/local_minio_s3.rb (new executable file, 92 lines)

@ -0,0 +1,92 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
require "minio_runner"
class ExecuteLocalMinioS3
def run
begin
start_minio
save_old_config
change_to_minio_settings
puts "Press any key when done..."
gets
restore_old
rescue SystemExit, Interrupt
restore_old
raise
end
end
def start_minio
MinioRunner.config do |minio_runner_config|
minio_runner_config.minio_domain = ENV["MINIO_RUNNER_MINIO_DOMAIN"] || "minio.local"
minio_runner_config.buckets =
(
if ENV["MINIO_RUNNER_BUCKETS"]
ENV["MINIO_RUNNER_BUCKETS"].split(",")
else
["discoursetest"]
end
)
minio_runner_config.public_buckets =
(
if ENV["MINIO_RUNNER_PUBLIC_BUCKETS"]
ENV["MINIO_RUNNER_PUBLIC_BUCKETS"].split(",")
else
["discoursetest"]
end
)
end
puts "Starting minio..."
MinioRunner.start
end
def save_old_config
puts "Temporarily using minio config for S3. Current settings:"
@current_s3_endpoint = puts_current(:s3_endpoint)
@current_s3_upload_bucket = puts_current(:s3_upload_bucket)
@current_s3_access_key_id = puts_current(:s3_access_key_id)
@current_s3_secret_access_key = puts_current(:s3_secret_access_key)
@current_allowed_internal_hosts = puts_current(:allowed_internal_hosts)
end
def change_to_minio_settings
puts "Changing to minio settings..."
SiteSetting.s3_upload_bucket = "discoursetest"
SiteSetting.s3_access_key_id = MinioRunner.config.minio_root_user
SiteSetting.s3_secret_access_key = MinioRunner.config.minio_root_password
SiteSetting.s3_endpoint = MinioRunner.config.minio_server_url
SiteSetting.allowed_internal_hosts =
MinioRunner.config.minio_urls.map { |url| URI.parse(url).host }.join("|")
puts_current(:s3_endpoint)
puts_current(:s3_upload_bucket)
puts_current(:s3_access_key_id)
puts_current(:s3_secret_access_key)
puts_current(:allowed_internal_hosts)
end
def restore_old
puts "Restoring old S3 settings..."
SiteSetting.s3_upload_bucket = @current_s3_upload_bucket
SiteSetting.s3_access_key_id = @current_s3_access_key_id
SiteSetting.s3_secret_access_key = @current_s3_secret_access_key
SiteSetting.s3_endpoint = @current_s3_endpoint
SiteSetting.allowed_internal_hosts = @current_allowed_internal_hosts
puts_current(:s3_endpoint)
puts_current(:s3_upload_bucket)
puts_current(:s3_access_key_id)
puts_current(:s3_secret_access_key)
puts_current(:allowed_internal_hosts)
puts "Done!"
end
def puts_current(setting)
printf "%-40s %s\n", " > Current #{setting}:", SiteSetting.send(setting)
SiteSetting.send(setting)
end
end
ExecuteLocalMinioS3.new.run


@ -546,15 +546,31 @@ RSpec.describe FinalDestination do
expect(fd(nil).validate_uri_format).to eq(false)
end
it "returns false for invalid ports" do
expect(fd("http://eviltrout.com:21").validate_uri_format).to eq(false)
it "returns false for invalid https ports" do
expect(fd("https://eviltrout.com:8000").validate_uri_format).to eq(false)
end
it "returns true for valid ports" do
it "returns true for valid http and https ports" do
expect(fd("http://eviltrout.com:80").validate_uri_format).to eq(true)
expect(fd("https://eviltrout.com:443").validate_uri_format).to eq(true)
end
it "returns false for invalid http port" do
expect(fd("http://eviltrout.com:21").validate_uri_format).to eq(false)
end
context "when s3_endpoint defined" do
before { SiteSetting.s3_endpoint = "http://minio.local:9000" }
it "returns false if the host is not in allowed_internal_hosts" do
expect(fd("http://discoursetest.minio.local:9000").validate_uri_format).to eq(false)
end
it "returns true if the host is in allowed_internal_hosts" do
SiteSetting.allowed_internal_hosts = %w[minio.local discoursetest.minio.local].join("|")
expect(fd("http://discoursetest.minio.local:9000").validate_uri_format).to eq(true)
end
end
end
describe "https cache" do


@ -384,7 +384,7 @@ RSpec.describe "Multisite s3 uploads", type: :multisite do
end
end
describe "#signed_url_for_temporary_upload" do
describe "#signed_request_for_temporary_upload" do
before { setup_s3 }
let(:store) { FileStore::S3Store.new }
@ -392,19 +392,26 @@ RSpec.describe "Multisite s3 uploads", type: :multisite do
context "for a bucket with no folder path" do
before { SiteSetting.s3_upload_bucket = "s3-upload-bucket" }
it "returns a presigned url with the correct params and the key for the temporary file" do
url = store.signed_url_for_temporary_upload("test.png")
it "returns a presigned url and headers with the correct params and the key for the temporary file" do
url, signed_headers = store.signed_request_for_temporary_upload("test.png")
key = store.s3_helper.path_from_url(url)
expect(signed_headers).to eq("x-amz-acl" => "private")
expect(url).to match(/Amz-Expires/)
expect(key).to match(
/temp\/uploads\/default\/test_[0-9]\/[a-zA-z0-9]{0,32}\/[a-zA-z0-9]{0,32}.png/,
)
end
it "presigned url contains the metadata when provided" do
url =
store.signed_url_for_temporary_upload("test.png", metadata: { "test-meta": "testing" })
expect(url).to include("&x-amz-meta-test-meta=testing")
it "presigned url headers contains the metadata when provided" do
url, signed_headers =
store.signed_request_for_temporary_upload(
"test.png",
metadata: {
"test-meta": "testing",
},
)
expect(signed_headers).to eq("x-amz-acl" => "private", "x-amz-meta-test-meta" => "testing")
expect(url).not_to include("&x-amz-meta-test-meta=testing")
end
end
@ -412,7 +419,7 @@ RSpec.describe "Multisite s3 uploads", type: :multisite do
before { SiteSetting.s3_upload_bucket = "s3-upload-bucket/site" }
it "returns a presigned url with the correct params and the key for the temporary file" do
url = store.signed_url_for_temporary_upload("test.png")
url, _signed_headers = store.signed_request_for_temporary_upload("test.png")
key = store.s3_helper.path_from_url(url)
expect(url).to match(/Amz-Expires/)
expect(key).to match(
@ -426,7 +433,7 @@ RSpec.describe "Multisite s3 uploads", type: :multisite do
it "returns a presigned url with the correct params and the key for the temporary file" do
test_multisite_connection("second") do
url = store.signed_url_for_temporary_upload("test.png")
url, _signed_headers = store.signed_request_for_temporary_upload("test.png")
key = store.s3_helper.path_from_url(url)
expect(url).to match(/Amz-Expires/)
expect(key).to match(


@ -23,6 +23,7 @@ require "fabrication"
require "mocha/api"
require "certified"
require "webmock/rspec"
require "minio_runner"
class RspecErrorTracker
def self.last_exception=(ex)
@ -267,7 +268,34 @@ RSpec.configure do |config|
SiteSetting.provider = TestLocalProcessProvider.new
WebMock.disable_net_connect!(allow_localhost: true)
# Used for S3 system specs, see also setup_s3_system_test.
MinioRunner.config do |minio_runner_config|
minio_runner_config.minio_domain = ENV["MINIO_RUNNER_MINIO_DOMAIN"] || "minio.local"
minio_runner_config.buckets =
(
if ENV["MINIO_RUNNER_BUCKETS"]
ENV["MINIO_RUNNER_BUCKETS"].split(",")
else
["discoursetest"]
end
)
minio_runner_config.public_buckets =
(
if ENV["MINIO_RUNNER_PUBLIC_BUCKETS"]
ENV["MINIO_RUNNER_PUBLIC_BUCKETS"].split(",")
else
["discoursetest"]
end
)
end
WebMock.disable_net_connect!(
allow_localhost: true,
allow: [
*MinioRunner.config.minio_urls,
URI(MinioRunner::MinioBinary.platform_binary_url).host,
],
)
if ENV["CAPYBARA_DEFAULT_MAX_WAIT_TIME"].present?
Capybara.default_max_wait_time = ENV["CAPYBARA_DEFAULT_MAX_WAIT_TIME"].to_i
@ -420,6 +448,7 @@ RSpec.configure do |config|
config.after(:suite) do
FileUtils.remove_dir(concurrency_safe_tmp_dir, true) if SpecSecureRandom.value
Downloads.clear
MinioRunner.stop
end
config.around :each do |example|


@ -11,6 +11,14 @@
"description": "A presigned PUT URL which must be used to upload the file binary blob to.",
"example": "https://file-uploads.s3.us-west-2.amazonaws.com/temp/site/uploads/default/123/456.jpg?x-amz-acl=private&x-amz-meta-sha1-checksum=sha1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AAAAus-west-2%2Fs3%2Faws4_request&X-Amz-Date=20211221T011246Z&X-Amz-Expires=600&X-Amz-SignedHeaders=host&X-Amz-Signature=12345678"
},
"signed_headers": {
"type": "object",
"description": "A map of headers that must be sent with the PUT request.",
"example": {
"x-amz-acl": "private",
"x-amz-meta-sha1-checksum": "sha1"
}
},
"unique_identifier": {
"type": "string",
"description": "A unique string that identifies the external upload. This must be stored and then sent in the /complete-external-upload endpoint to complete the direct upload.",


@ -756,7 +756,7 @@ RSpec.describe UploadsController do
expect(result["url"]).to include("Amz-Expires")
end
it "includes accepted metadata in the presigned url when provided" do
it "includes accepted metadata in the response when provided" do
post "/uploads/generate-presigned-put.json",
**{
params: {
@ -772,8 +772,12 @@ RSpec.describe UploadsController do
expect(response.status).to eq(200)
result = response.parsed_body
expect(result["url"]).to include("&x-amz-meta-sha1-checksum=testing")
expect(result["url"]).not_to include("&x-amz-meta-sha1-checksum=testing")
expect(result["url"]).not_to include("&x-amz-meta-blah=wontbeincluded")
expect(result["signed_headers"]).to eq(
"x-amz-acl" => "private",
"x-amz-meta-sha1-checksum" => "testing",
)
end
describe "rate limiting" do
@ -1517,7 +1521,9 @@ RSpec.describe UploadsController do
unique_identifier: external_upload_stub.unique_identifier,
}
expect(response.status).to eq(422)
expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.failed"))
expect(response.parsed_body["errors"].first).to eq(
I18n.t("upload.checksum_mismatch_failure"),
)
end
it "handles SizeMismatchError" do
@ -1530,7 +1536,9 @@ RSpec.describe UploadsController do
unique_identifier: external_upload_stub.unique_identifier,
}
expect(response.status).to eq(422)
expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.failed"))
expect(response.parsed_body["errors"].first).to eq(
I18n.t("upload.size_mismatch_failure", additional_detail: "expected: 10, actual: 1000"),
)
end
it "handles CannotPromoteError" do
@ -1543,7 +1551,7 @@ RSpec.describe UploadsController do
unique_identifier: external_upload_stub.unique_identifier,
}
expect(response.status).to eq(422)
expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.failed"))
expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.cannot_promote_failure"))
end
it "handles DownloadFailedError and Aws::S3::Errors::NotFound" do
@ -1556,7 +1564,7 @@ RSpec.describe UploadsController do
unique_identifier: external_upload_stub.unique_identifier,
}
expect(response.status).to eq(422)
expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.failed"))
expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.download_failure"))
ExternalUploadManager
.any_instance
.stubs(:transform!)
@ -1566,7 +1574,7 @@ RSpec.describe UploadsController do
unique_identifier: external_upload_stub.unique_identifier,
}
expect(response.status).to eq(422)
expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.failed"))
expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.download_failure"))
end
it "handles a generic upload failure" do


@ -35,6 +35,11 @@ module SystemHelpers
SiteSetting.disable_avatar_education_message = true
SiteSetting.enable_user_tips = false
SiteSetting.splash_screen = false
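# Allow the minio hosts as internal hosts so FinalDestination can fetch uploads
# from the local minio server over http on a non-standard port.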
SiteSetting.allowed_internal_hosts =
(
SiteSetting.allowed_internal_hosts.to_s.split("|") +
MinioRunner.config.minio_urls.map { |url| URI.parse(url).host }
).join("|")
end
def try_until_success(timeout: Capybara.default_max_wait_time, frequency: 0.01)
@ -138,4 +143,25 @@ module SystemHelpers
page.execute_script(js, selector, start, offset)
end
def setup_s3_system_test
SiteSetting.enable_s3_uploads = true
SiteSetting.s3_upload_bucket = "discoursetest"
SiteSetting.enable_upload_debug_mode = true
SiteSetting.s3_access_key_id = MinioRunner.config.minio_root_user
SiteSetting.s3_secret_access_key = MinioRunner.config.minio_root_password
SiteSetting.s3_endpoint = MinioRunner.config.minio_server_url
MinioRunner.start
end
def skip_unless_s3_system_specs_enabled!
if !ENV["CI"] && !ENV["RUN_S3_SYSTEM_SPECS"]
skip(
"S3 system specs are disabled in this environment, set CI=1 or RUN_S3_SYSTEM_SPECS=1 to enable them.",
)
end
end
end


@ -0,0 +1,56 @@
# frozen_string_literal: true
describe "Uploading files to S3", type: :system do
fab!(:current_user) { Fabricate(:admin) }
let(:modal) { PageObjects::Modals::Base.new }
let(:composer) { PageObjects::Components::Composer.new }
describe "direct S3 uploads" do
before { SiteSetting.enable_direct_s3_uploads = true }
describe "single part uploads" do
it "uploads custom avatars to S3" do
skip_unless_s3_system_specs_enabled!
setup_s3_system_test
sign_in(current_user)
visit "/my/preferences/account"
find("#edit-avatar").click
find("#uploaded-avatar").click
attach_file(File.absolute_path(file_from_fixtures("logo.jpg"))) do
find("#avatar-uploader").click
end
expect(page).to have_css(".avatar-uploader label[data-uploaded]")
modal.click_primary_button
expect(page).to have_css(
"#user-avatar-uploads[data-custom-avatar-upload-id]",
visible: false,
)
puts page.driver.browser.logs.get(:browser).map(&:message)
expect(current_user.reload.uploaded_avatar_id).to eq(
find("#user-avatar-uploads", visible: false)["data-custom-avatar-upload-id"].to_i,
)
end
end
describe "multipart uploads" do
it "uploads a file in the post composer" do
skip_unless_s3_system_specs_enabled!
setup_s3_system_test
sign_in(current_user)
visit "/new-topic"
file_path = file_from_fixtures("logo.png", "images").path
attach_file(file_path) { composer.click_toolbar_button("upload") }
expect(page).to have_no_css("#file-uploading")
expect(composer.preview).to have_css(".image-wrapper")
end
end
end
end