# frozen_string_literal: true

module SiteSettings
end

module SiteSettings::Validations
  # Tokens found in popular browser User Agent strings. Values added to
  # `slow_down_crawler_user_agents` may not be a substring of any of these;
  # rate limiting on such a value could throttle most or all anonymous
  # traffic instead of a single crawler.
  PROHIBITED_USER_AGENT_STRINGS = %w[
    apple
    windows
    linux
    ubuntu
    gecko
    firefox
    chrome
    safari
    applewebkit
    webkit
    mozilla
    macintosh
    khtml
    intel
    osx
    os\ x
    iphone
    ipad
    mac
  ]

  def validate_error(key, opts = {})
    raise Discourse::InvalidParameters.new(I18n.t("errors.site_settings.#{key}", opts))
  end
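
  # For example, validate_error(:invalid_category_id) raises
  # Discourse::InvalidParameters with the localized message stored at
  # "errors.site_settings.invalid_category_id".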

  def validate_category_ids(category_ids)
    category_ids = category_ids.split("|").map(&:to_i).to_set
    if Category.where(id: category_ids).count != category_ids.size
      validate_error :invalid_category_id
    end
    category_ids
  end
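
  # Example (hypothetical input): "11|12|12" parses to Set[11, 12]; the set is
  # returned if both categories exist, otherwise :invalid_category_id is
  # raised.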

  def validate_default_categories(category_ids, default_categories_selected)
    if (category_ids & default_categories_selected).size > 0
      validate_error :default_categories_already_selected
    end
  end

  def validate_default_categories_watching(new_val)
    category_ids = validate_category_ids(new_val)

    default_categories_selected = [
      SiteSetting.default_categories_tracking.split("|"),
      SiteSetting.default_categories_muted.split("|"),
      SiteSetting.default_categories_watching_first_post.split("|"),
      SiteSetting.default_categories_normal.split("|"),
    ].flatten.map(&:to_i).to_set

    validate_default_categories(category_ids, default_categories_selected)
  end
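
  # Example (hypothetical ids): if default_categories_tracking is "3|4",
  # setting default_categories_watching to "4|5" raises
  # :default_categories_already_selected, since a category may appear in only
  # one of the default category lists. The tracking/muted/first-post/normal
  # validators below apply the same rule against the other lists.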

  def validate_default_categories_tracking(new_val)
    category_ids = validate_category_ids(new_val)

    default_categories_selected = [
      SiteSetting.default_categories_watching.split("|"),
      SiteSetting.default_categories_muted.split("|"),
      SiteSetting.default_categories_watching_first_post.split("|"),
      SiteSetting.default_categories_normal.split("|"),
    ].flatten.map(&:to_i).to_set

    validate_default_categories(category_ids, default_categories_selected)
  end

  def validate_default_categories_muted(new_val)
    category_ids = validate_category_ids(new_val)

    default_categories_selected = [
      SiteSetting.default_categories_watching.split("|"),
      SiteSetting.default_categories_tracking.split("|"),
      SiteSetting.default_categories_watching_first_post.split("|"),
      SiteSetting.default_categories_normal.split("|"),
    ].flatten.map(&:to_i).to_set

    validate_default_categories(category_ids, default_categories_selected)
  end

  def validate_default_categories_watching_first_post(new_val)
    category_ids = validate_category_ids(new_val)

    default_categories_selected = [
      SiteSetting.default_categories_watching.split("|"),
      SiteSetting.default_categories_tracking.split("|"),
      SiteSetting.default_categories_muted.split("|"),
      SiteSetting.default_categories_normal.split("|"),
    ].flatten.map(&:to_i).to_set

    validate_default_categories(category_ids, default_categories_selected)
  end

  def validate_default_categories_normal(new_val)
    category_ids = validate_category_ids(new_val)

    default_categories_selected = [
      SiteSetting.default_categories_watching.split("|"),
      SiteSetting.default_categories_tracking.split("|"),
      SiteSetting.default_categories_muted.split("|"),
      SiteSetting.default_categories_watching_first_post.split("|"),
    ].flatten.map(&:to_i).to_set

    validate_default_categories(category_ids, default_categories_selected)
  end

  def validate_default_tags(tag_names, default_tags_selected)
    validate_error :default_tags_already_selected if (tag_names & default_tags_selected).size > 0
  end

  def validate_default_tags_watching(new_val)
    tag_names = new_val.split("|").to_set

    default_tags_selected = [
      SiteSetting.default_tags_tracking.split("|"),
      SiteSetting.default_tags_muted.split("|"),
      SiteSetting.default_tags_watching_first_post.split("|"),
    ].flatten.to_set

    validate_default_tags(tag_names, default_tags_selected)
  end
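
  # Example (hypothetical tags): with default_tags_muted set to "spam",
  # setting default_tags_watching to "spam|news" raises
  # :default_tags_already_selected. Unlike categories, tags are compared as
  # strings rather than ids.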

  def validate_default_tags_tracking(new_val)
    tag_names = new_val.split("|").to_set

    default_tags_selected = [
      SiteSetting.default_tags_watching.split("|"),
      SiteSetting.default_tags_muted.split("|"),
      SiteSetting.default_tags_watching_first_post.split("|"),
    ].flatten.to_set

    validate_default_tags(tag_names, default_tags_selected)
  end

  def validate_default_tags_muted(new_val)
    tag_names = new_val.split("|").to_set

    default_tags_selected = [
      SiteSetting.default_tags_watching.split("|"),
      SiteSetting.default_tags_tracking.split("|"),
      SiteSetting.default_tags_watching_first_post.split("|"),
    ].flatten.to_set

    validate_default_tags(tag_names, default_tags_selected)
  end

  def validate_default_tags_watching_first_post(new_val)
    tag_names = new_val.split("|").to_set

    default_tags_selected = [
      SiteSetting.default_tags_watching.split("|"),
      SiteSetting.default_tags_tracking.split("|"),
      SiteSetting.default_tags_muted.split("|"),
    ].flatten.to_set

    validate_default_tags(tag_names, default_tags_selected)
  end

  def validate_enable_s3_uploads(new_val)
    return if new_val == "f"
    validate_error :cannot_enable_s3_uploads_when_s3_enabled_globally if GlobalSetting.use_s3?
    validate_error :s3_upload_bucket_is_required if SiteSetting.s3_upload_bucket.blank?
  end
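
  # Example: turning the setting on ("t") is rejected when S3 is already
  # configured globally through GlobalSetting or when s3_upload_bucket is
  # blank; turning it off ("f") always passes.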

  def validate_secure_uploads(new_val)
    if new_val == "t" && (!SiteSetting.Upload.enable_s3_uploads || !SiteSetting.s3_use_acls)
      validate_error :secure_uploads_requirements
    end
  end

  def validate_s3_use_acls(new_val)
    validate_error :s3_use_acls_requirements if new_val == "f" && SiteSetting.secure_uploads
  end

  def validate_enable_page_publishing(new_val)
    validate_error :page_publishing_requirements if new_val == "t" && SiteSetting.secure_uploads?
  end

  def validate_share_quote_buttons(new_val)
    if new_val.include?("facebook") && SiteSetting.facebook_app_id.blank?
      validate_error :share_quote_facebook_requirements
    end
  end

  def validate_enable_s3_inventory(new_val)
    if new_val == "t" && !SiteSetting.Upload.enable_s3_uploads
      validate_error :enable_s3_uploads_is_required
    end
  end

  def validate_backup_location(new_val)
    return unless new_val == BackupLocationSiteSetting::S3
    if SiteSetting.s3_backup_bucket.blank?
      validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_backup_bucket")
    end

    unless SiteSetting.s3_use_iam_profile
      if SiteSetting.s3_access_key_id.blank?
        validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_access_key_id")
      end
      if SiteSetting.s3_secret_access_key.blank?
        validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_secret_access_key")
      end
    end
  end
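
  # Example: choosing the S3 backup location requires s3_backup_bucket and,
  # unless s3_use_iam_profile is enabled, s3_access_key_id and
  # s3_secret_access_key; any other backup location passes without checks.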

  def validate_s3_upload_bucket(new_val)
    validate_bucket_setting("s3_upload_bucket", new_val, SiteSetting.s3_backup_bucket)

    if new_val.blank? && SiteSetting.enable_s3_uploads?
      validate_error(:s3_upload_bucket_is_required, setting_name: "s3_upload_bucket")
    end
  end

  def validate_s3_backup_bucket(new_val)
    validate_bucket_setting("s3_backup_bucket", SiteSetting.s3_upload_bucket, new_val)
  end

  def validate_enforce_second_factor(new_val)
    if new_val != "no" && SiteSetting.enable_discourse_connect?
      return validate_error :second_factor_cannot_be_enforced_with_discourse_connect_enabled
    end
    if new_val == "all" && Discourse.enabled_auth_providers.count > 0
      auth_provider_names = Discourse.enabled_auth_providers.map(&:name).join(", ")
      return(
        validate_error(
          :second_factor_cannot_enforce_with_socials,
          auth_provider_names: auth_provider_names,
        )
      )
    end
    return if SiteSetting.enable_local_logins
    return if new_val == "no"
    validate_error :second_factor_cannot_be_enforced_with_disabled_local_login
  end
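
  # Example: with DiscourseConnect enabled, any value other than "no" is
  # rejected; "all" is also rejected while social auth providers are enabled,
  # and enforcement requires local logins to be enabled.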

  def validate_enable_local_logins(new_val)
    return if new_val == "t"
    return if SiteSetting.enforce_second_factor == "no"
    validate_error :local_login_cannot_be_disabled_if_second_factor_enforced
  end

  def validate_cors_origins(new_val)
    return if new_val.blank?
    return if new_val.split("|").none?(%r{/\z})
    validate_error :cors_origins_should_not_have_trailing_slash
  end
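
  # Example (hypothetical origins): "https://example.com|https://cdn.example.com"
  # passes, while "https://example.com/" is rejected because the origin ends
  # with a trailing slash.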

  # Each value must be at least 3 characters long and must not be a substring
  # of any entry in PROHIBITED_USER_AGENT_STRINGS, since matching crawlers are
  # rate limited to one request every `slow_down_crawler_rate` seconds and an
  # overly broad value could rate limit regular browser traffic.
  def validate_slow_down_crawler_user_agents(new_val)
    return if new_val.blank?

    new_val
      .downcase
      .split("|")
      .each do |crawler|
        if crawler.size < 3
          validate_error(:slow_down_crawler_user_agent_must_be_at_least_3_characters)
        end

        if PROHIBITED_USER_AGENT_STRINGS.any? { |c| c.include?(crawler) }
          validate_error(
            :slow_down_crawler_user_agent_cannot_be_popular_browsers,
            values: PROHIBITED_USER_AGENT_STRINGS.join(I18n.t("word_connector.comma")),
          )
        end
      end
  end
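
  # Example (hypothetical values): "bingbot|yandex" passes; "go" fails the
  # minimum-length check, and "hrome" fails the substring check because
  # "chrome".include?("hrome") is true.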

  def validate_strip_image_metadata(new_val)
    return if new_val == "t"
    return if SiteSetting.composer_media_optimization_image_enabled == false
    validate_error :strip_image_metadata_cannot_be_disabled_if_composer_media_optimization_image_enabled
  end

  def validate_twitter_summary_large_image(new_val)
    return if new_val.blank?
    return if !Upload.exists?(id: new_val, extension: "svg")
    validate_error :twitter_summary_large_image_no_svg
  end

  private

  def validate_bucket_setting(setting_name, upload_bucket, backup_bucket)
    return if upload_bucket.blank? || backup_bucket.blank?

    backup_bucket_name, backup_prefix = split_s3_bucket(backup_bucket)
    upload_bucket_name, upload_prefix = split_s3_bucket(upload_bucket)

    return if backup_bucket_name != upload_bucket_name

    if backup_prefix == upload_prefix || backup_prefix.blank? ||
         upload_prefix&.start_with?(backup_prefix)
      validate_error(:s3_bucket_reused, setting_name: setting_name)
    end
  end
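
  # Example (hypothetical buckets): "bucket/media" for uploads with
  # "bucket/media" for backups raises :s3_bucket_reused (identical prefixes),
  # as does "bucket/backups/media" with "bucket/backups" (uploads nested under
  # the backup prefix); "bucket/media" with "bucket/backups" is allowed.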

  def split_s3_bucket(s3_bucket)
    bucket_name, prefix = s3_bucket.downcase.split("/", 2)
    prefix&.chomp!("/")
    [bucket_name, prefix]
  end
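
  # Example: split_s3_bucket("My-Bucket/backups/") #=> ["my-bucket", "backups"]
  # and split_s3_bucket("my-bucket") #=> ["my-bucket", nil]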
end