FIX: ensure we can download maxmind without redis or db config

This also corrects FileHelper.download so it supports "follow_redirect"
correctly (it used to always follow 1 redirect) and adds a `validate_uri`
param that will bypass all URI validation if set to false (default is true).
This commit is contained in:
Sam Saffron 2019-05-28 10:28:57 +10:00
parent e4e2acf148
commit 7429700389
7 changed files with 55 additions and 15 deletions

View File

@ -134,6 +134,7 @@ class UploadsController < ApplicationController
maximum_upload_size = [SiteSetting.max_image_size_kb, SiteSetting.max_attachment_size_kb].max.kilobytes
tempfile = FileHelper.download(
url,
follow_redirect: true,
max_file_size: maximum_upload_size,
tmp_file_name: "discourse-upload-#{type}"
) rescue nil

View File

@ -30,7 +30,9 @@ class DiscourseIpInfo
gz_file = FileHelper.download(
"https://geolite.maxmind.com/geoip/databases/#{name}/update",
max_file_size: 100.megabytes,
tmp_file_name: "#{name}.gz"
tmp_file_name: "#{name}.gz",
validate_uri: false,
follow_redirect: false
)
Discourse::Utils.execute_command("gunzip", gz_file.path)

View File

@ -28,6 +28,7 @@ class FileHelper
read_timeout: 5,
skip_rate_limit: false,
verbose: false,
validate_uri: true,
retain_on_max_file_size_exceeded: false)
url = "https:" + url if url.start_with?("//")
@ -37,9 +38,10 @@ class FileHelper
fd = FinalDestination.new(
url,
max_redirects: follow_redirect ? 5 : 1,
max_redirects: follow_redirect ? 5 : 0,
skip_rate_limit: skip_rate_limit,
verbose: verbose
verbose: verbose,
validate_uri: validate_uri
)
fd.get do |response, chunk, uri|

View File

@ -41,7 +41,9 @@ class FinalDestination
@opts[:lookup_ip] ||= lambda { |host| FinalDestination.lookup_ip(host) }
@ignored = @opts[:ignore_hostnames] || []
@limit = @opts[:max_redirects]
if @limit > 0
ignore_redirects = [Discourse.base_url_no_prefix]
if @opts[:ignore_redirects]
@ -54,8 +56,8 @@ class FinalDestination
@ignored << ignore_redirect.hostname
end
end
end
@limit = @opts[:max_redirects]
@status = :ready
@http_verb = @force_get_hosts.any? { |host| hostname_matches?(host) } ? :get : :head
@cookie = nil
@ -63,6 +65,7 @@ class FinalDestination
@verbose = @opts[:verbose] || false
@timeout = @opts[:timeout] || nil
@preserve_fragment_url = @preserve_fragment_url_hosts.any? { |host| hostname_matches?(host) }
@validate_uri = @opts.fetch(:validate_uri) { true }
end
def self.connection_timeout
@ -252,7 +255,7 @@ class FinalDestination
end
def validate_uri
validate_uri_format && is_dest_valid?
!@validate_uri || (validate_uri_format && is_dest_valid?)
end
def validate_uri_format

View File

@ -3,7 +3,7 @@
require_dependency 'discourse_ip_info'
desc "downloads MaxMind's GeoLite2-City database"
task "maxminddb:get" => :environment do
task "maxminddb:get" do
puts "Downloading MaxMindDb's GeoLite2-City..."
DiscourseIpInfo.mmdb_download('GeoLite2-City')

View File

@ -70,7 +70,8 @@ module ImportScripts::PhpBB3
avatar_file = FileHelper.download(
url,
max_file_size: max_image_size_kb,
tmp_file_name: 'discourse-avatar'
tmp_file_name: 'discourse-avatar',
follow_redirect: true
)
rescue StandardError => err
warn "Error downloading avatar: #{err.message}. Skipping..."

View File

@ -33,6 +33,37 @@ describe FileHelper do
end.to raise_error(OpenURI::HTTPError, "404 Error: 404")
end
it "does not follow redirects if instructed not to" do
url2 = "https://test.com/image.png"
stub_request(:get, url).to_return(status: 302, body: "", headers: { location: url2 })
missing = FileHelper.download(
url,
max_file_size: 10000,
tmp_file_name: 'trouttmp',
follow_redirect: false
)
expect(missing).to eq(nil)
end
it "does follow redirects if instructed to" do
url2 = "https://test.com/image.png"
stub_request(:get, url).to_return(status: 302, body: "", headers: { location: url2 })
stub_request(:get, url2).to_return(status: 200, body: "i am the body")
found = FileHelper.download(
url,
max_file_size: 10000,
tmp_file_name: 'trouttmp',
follow_redirect: true
)
expect(found.read).to eq("i am the body")
found.close
found.unlink
end
it "correctly raises an OpenURI HTTP error if it gets a 404" do
url = "http://fourohfour.com/404"