strengthen warning around crawler whitelist
parent 0edf012dd9
commit 24346e4612
@@ -1422,7 +1422,7 @@ en:
     blacklist_ip_blocks: "A list of private IP blocks that should never be crawled by Discourse"
     whitelist_internal_hosts: "A list of internal hosts that discourse can safely crawl for oneboxing and other purposes"
     allowed_iframes: "A list of iframe src domain prefixes that discourse can safely allow in posts"
-    whitelisted_crawler_user_agents: "User agents of web crawlers that should be allowed to access the site."
+    whitelisted_crawler_user_agents: "User agents of web crawlers that should be allowed to access the site. WARNING! SETTING THIS WILL DISALLOW ALL CRAWLERS NOT LISTED HERE!"
     blacklisted_crawler_user_agents: "Unique case insensitive word in the user agent string identifying web crawlers that should not be allowed to access the site. Does not apply if whitelist is defined."
     slow_down_crawler_user_agents: "User agents of web crawlers that should be rate limited in robots.txt using the Crawl-delay directive"
     slow_down_crawler_rate: "If slow_down_crawler_user_agents is specified this rate will apply to all the crawlers (number of seconds delay between requests)"
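
The strengthened warning reflects how the whitelist behaves: once whitelisted_crawler_user_agents is non-empty, every crawler whose user agent does not match an entry is disallowed, and blacklisted_crawler_user_agents no longer applies. A minimal sketch of the kind of robots.txt this implies, assuming hypothetical entries "Googlebot" and "bingbot" (the exact layout Discourse generates may differ):

    # Hypothetical example; agent names and layout are illustrative,
    # not Discourse's exact output.
    User-agent: Googlebot
    User-agent: bingbot
    Disallow:

    # Every crawler not whitelisted above is shut out entirely.
    User-agent: *
    Disallow: /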
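
The slow-down pair maps onto robots.txt's Crawl-delay directive: slow_down_crawler_user_agents names the agents and slow_down_crawler_rate supplies the delay applied to all of them. A sketch, assuming a hypothetical agent "SemrushBot" and a rate of 10 seconds:

    # Hypothetical example; the agent name and rate are illustrative values.
    User-agent: SemrushBot
    Crawl-delay: 10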