FEATURE: when under extreme load disable search
This adds the global setting disable_search_queue_threshold (DISCOURSE_DISABLE_SEARCH_QUEUE_THRESHOLD), which defaults to 1 second. This protection ensures that when the application cannot keep up with incoming requests it simply turns off search until it is no longer backed up. To disable this protection, set the value to 0.
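
For reference, the request-handling change below boils down to one check: if the request spent longer in the app server's queue than the configured threshold, search is skipped. A minimal standalone sketch of that check, mirroring the site_overloaded? method added to SearchController in this diff (the env key and setting name are taken from the diff itself, the helper name is hypothetical):

    # Sketch only: mirrors SearchController#site_overloaded? from the diff below.
    # REQUEST_QUEUE_SECONDS is expected to already be present in the Rack env.
    def search_disabled_by_load?(env)
      threshold = GlobalSetting.disable_search_queue_threshold
      return false if threshold <= 0              # 0 switches the protection off
      queue_time = env['REQUEST_QUEUE_SECONDS']   # seconds the request waited before being served
      !queue_time.nil? && queue_time > threshold
    end

When the check trips, /search/query responds with a 409, while the full-page /search endpoint returns a 200 with an empty result set carrying the search.extreme_load_error message in grouped_search_result.error.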
parent dc5eb76551
commit 4dcc5f16f1
@@ -125,6 +125,12 @@
   {{#if searchActive}}
     <h3>{{i18n "search.no_results"}}</h3>

+    {{#if model.grouped_search_result.error}}
+      <div class="warning">
+        {{model.grouped_search_result.error}}
+      </div>
+    {{/if}}
+
     {{#if showSuggestion}}
       <div class="no-results-suggestion">
         {{i18n "search.cant_find"}}

@@ -1,3 +1,4 @@
+import { popupAjaxError } from "discourse/lib/ajax-error";
 import { searchForTerm, isValidSearchTerm } from "discourse/lib/search";
 import { createWidget } from "discourse/widgets/widget";
 import { h } from "virtual-dom";

@@ -78,6 +79,7 @@ const SearchHelper = {
           searchData.topicId = null;
         }
       })
+      .catch(popupAjaxError)
       .finally(() => {
         searchData.loading = false;
         widget.scheduleRerender();

@@ -2,6 +2,12 @@
   display: flex;
   justify-content: space-between;

+  .warning {
+    background-color: $danger-medium;
+    padding: 5px 8px;
+    color: $secondary;
+  }
+
   .search-bar {
     display: flex;
     justify-content: space-between;

@@ -6,6 +6,8 @@ class SearchController < ApplicationController

   skip_before_action :check_xhr, only: :show

+  before_action :cancel_overloaded_search, only: [:query]
+
   def self.valid_context_types
     %w{user topic category private_messages}
   end

@@ -44,10 +46,14 @@
     search_args[:ip_address] = request.remote_ip
     search_args[:user_id] = current_user.id if current_user.present?

-    search = Search.new(@search_term, search_args)
-    result = search.execute
-
-    result.find_user_data(guardian) if result
+    if site_overloaded?
+      result = Search::GroupedSearchResults.new(search_args[:type_filter], @search_term, context, false, 0)
+      result.error = I18n.t("search.extreme_load_error")
+    else
+      search = Search.new(@search_term, search_args)
+      result = search.execute
+      result.find_user_data(guardian) if result
+    end

     serializer = serialize_data(result, GroupedSearchResultSerializer, result: result)

@@ -82,8 +88,12 @@ class SearchController < ApplicationController
     search_args[:user_id] = current_user.id if current_user.present?
     search_args[:restrict_to_archetype] = params[:restrict_to_archetype] if params[:restrict_to_archetype].present?

-    search = Search.new(params[:term], search_args)
-    result = search.execute
+    if site_overloaded?
+      result = GroupedSearchResults.new(search_args["type_filter"], params[:term], context, false, 0)
+    else
+      search = Search.new(params[:term], search_args)
+      result = search.execute
+    end
     render_serialized(result, GroupedSearchResultSerializer, result: result)
   end

@@ -118,6 +128,18 @@ class SearchController < ApplicationController

   protected

+  def site_overloaded?
+    (queue_time = request.env['REQUEST_QUEUE_SECONDS']) &&
+      (GlobalSetting.disable_search_queue_threshold > 0) &&
+      (queue_time > GlobalSetting.disable_search_queue_threshold)
+  end
+
+  def cancel_overloaded_search
+    if site_overloaded?
+      render_json_error I18n.t("search.extreme_load_error"), status: 409
+    end
+  end
+
   def lookup_search_context

     return if params[:skip_context] == "true"

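site_overloaded? relies on REQUEST_QUEUE_SECONDS already being present in the Rack env; this commit does not add that plumbing. The specs further down drive it through an X-Request-Start header of the form t=<epoch seconds>, which suggests the request-tracking middleware derives the queue time roughly like the following sketch (an illustration of the idea only, not the actual middleware code; the helper name is hypothetical):

    # Hypothetical helper: derive queue time from the proxy-supplied header.
    # The real computation lives in Discourse's request middleware and may differ.
    def request_queue_seconds(env)
      header = env['HTTP_X_REQUEST_START']   # e.g. "t=1554181931.42"
      return nil unless header
      started_at = header.sub(/\At=/, '').to_f
      Time.now.to_f - started_at             # seconds spent queued before Rails saw the request
    end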
@@ -6,7 +6,7 @@ class GroupedSearchResultSerializer < ApplicationSerializer
   has_many :categories, serializer: BasicCategorySerializer
   has_many :tags, serializer: TagSerializer
   has_many :groups, serializer: BasicGroupSerializer
-  attributes :more_posts, :more_users, :more_categories, :term, :search_log_id, :more_full_page_results, :can_create_topic
+  attributes :more_posts, :more_users, :more_categories, :term, :search_log_id, :more_full_page_results, :can_create_topic, :error

   def search_log_id
     object.search_log_id

@@ -224,6 +224,9 @@ force_anonymous_min_queue_seconds = 1
 # only trigger anon if we see more than N requests for this path in last 10 seconds
 force_anonymous_min_per_10_seconds = 3

+# disable search if app server is queueing for longer than this (in seconds)
+disable_search_queue_threshold = 1
+
 # maximum number of posts rebaked across the cluster in the periodical job
 # rebake process is very expensive, on multisite we have to make sure we never
 # flood the queue

@@ -2096,6 +2096,7 @@ en:
       value: "SSO secret"

     search:
+      extreme_load_error: "Site is under extreme load, search is disabled, try again later"
       within_post: "#%{post_number} by %{username}"
       types:
         category: "Categories"

@@ -24,7 +24,8 @@ class Search
     :term,
     :search_context,
     :include_blurbs,
-    :more_full_page_results
+    :more_full_page_results,
+    :error
   )

   attr_accessor :search_log_id

@@ -40,6 +41,11 @@ class Search
       @users = []
       @tags = []
      @groups = []
+      @error = nil
     end

+    def error=(error)
+      @error = error
+    end
+
     def find_user_data(guardian)

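With :error added to the GroupedSearchResults struct and exposed by the serializer, the controller can hand the translated message straight to the client template. A small usage sketch using only names from this diff (the constructor arguments here are placeholders):

    # Build an empty result set and attach the overload message, as the
    # controller's show action does when site_overloaded? is true.
    result = Search::GroupedSearchResults.new(nil, "some term", nil, false, 0)
    result.error = I18n.t("search.extreme_load_error")
    # GroupedSearchResultSerializer serializes :error, and the template renders
    # {{model.grouped_search_result.error}} inside the .warning div.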
@@ -3,6 +3,21 @@
 require 'rails_helper'

 describe SearchController do
+
+  fab!(:awesome_post) do
+    SearchIndexer.enable
+    Fabricate(:post, raw: 'this is my really awesome post')
+  end
+
+  fab!(:user) do
+    Fabricate(:user)
+  end
+
+  fab!(:user_post) do
+    SearchIndexer.enable
+    Fabricate(:post, raw: "#{user.username} is a cool person")
+  end

   context "integration" do
     before do
       SearchIndexer.enable

@@ -18,6 +33,49 @@ describe SearchController do
       $redis.flushall
     end

+    context "when overloaded" do
+
+      before do
+        global_setting :disable_search_queue_threshold, 0.2
+      end
+
+      let! :start_time do
+        freeze_time
+        Time.now
+      end
+
+      let! :current_time do
+        freeze_time 0.3.seconds.from_now
+      end
+
+      it "errors on #query" do
+
+        get "/search/query.json", headers: {
+          "HTTP_X_REQUEST_START" => "t=#{start_time.to_f}"
+        }, params: {
+          term: "hi there", include_blurb: true
+        }
+
+        expect(response.status).to eq(409)
+      end
+
+      it "no results and error on #index" do
+        get "/search.json", headers: {
+          "HTTP_X_REQUEST_START" => "t=#{start_time.to_f}"
+        }, params: {
+          q: "awesome"
+        }
+
+        expect(response.status).to eq(200)
+
+        data = JSON.parse(response.body)
+
+        expect(data["posts"]).to be_empty
+        expect(data["grouped_search_result"]["error"]).not_to be_empty
+      end
+
+    end
+
     it "returns a 400 error if you search for null bytes" do
       term = "hello\0hello"

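Note that these specs never generate real load: the threshold is dropped to 0.2 s, time is frozen, and the request carries an X-Request-Start stamp 0.3 s in the past, so the derived queue time exceeds the threshold and site_overloaded? returns true.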
@@ -29,22 +87,21 @@ describe SearchController do
     end

     it "can search correctly" do
-      my_post = Fabricate(:post, raw: 'this is my really awesome post')
-
       get "/search/query.json", params: {
         term: 'awesome', include_blurb: true
       }

       expect(response.status).to eq(200)

       data = JSON.parse(response.body)
-      expect(data['posts'][0]['id']).to eq(my_post.id)
-      expect(data['posts'][0]['blurb']).to eq('this is my really awesome post')
-      expect(data['topics'][0]['id']).to eq(my_post.topic_id)
+
+      expect(data['posts'].length).to eq(1)
+      expect(data['posts'][0]['id']).to eq(awesome_post.id)
+      expect(data['posts'][0]['blurb']).to eq(awesome_post.raw)
+      expect(data['topics'][0]['id']).to eq(awesome_post.topic_id)
     end

     it 'performs the query with a type filter' do
-      user = Fabricate(:user)
-      my_post = Fabricate(:post, raw: "#{user.username} is a cool person")
-
       get "/search/query.json", params: {
         term: user.username, type_filter: 'topic'

@@ -53,7 +110,7 @@ describe SearchController do
       expect(response.status).to eq(200)
       data = JSON.parse(response.body)

-      expect(data['posts'][0]['id']).to eq(my_post.id)
+      expect(data['posts'][0]['id']).to eq(user_post.id)
       expect(data['users']).to be_blank

       get "/search/query.json", params: {

@@ -71,10 +128,8 @@ describe SearchController do
     it 'should not be restricted by minimum search term length' do
       SiteSetting.min_search_term_length = 20000

-      post = Fabricate(:post)
-
       get "/search/query.json", params: {
-        term: post.topic_id,
+        term: awesome_post.topic_id,
         type_filter: 'topic',
         search_for_id: true
       }

@@ -82,15 +137,13 @@ describe SearchController do
       expect(response.status).to eq(200)
       data = JSON.parse(response.body)

-      expect(data['topics'][0]['id']).to eq(post.topic_id)
+      expect(data['topics'][0]['id']).to eq(awesome_post.topic_id)
     end

     it "should return the right result" do
-      user = Fabricate(:user)
-      my_post = Fabricate(:post, raw: "#{user.username} is a cool person")
-
       get "/search/query.json", params: {
-        term: my_post.topic_id,
+        term: user_post.topic_id,
         type_filter: 'topic',
         search_for_id: true
       }

@@ -98,7 +151,7 @@ describe SearchController do
       expect(response.status).to eq(200)
       data = JSON.parse(response.body)

-      expect(data['topics'][0]['id']).to eq(my_post.topic_id)
+      expect(data['topics'][0]['id']).to eq(user_post.topic_id)
     end
   end
 end

@@ -174,7 +227,6 @@ describe SearchController do
     end

     context "with a user" do
-      fab!(:user) { Fabricate(:user) }

       it "raises an error if the user can't see the context" do
         get "/search/query.json", params: {

@@ -205,7 +257,7 @@ describe SearchController do
     end

     it "doesn't record the click for a different user" do
-      sign_in(Fabricate(:user))
+      sign_in(user)

       _, search_log_id = SearchLog.log(
         term: SecureRandom.hex,

@@ -225,7 +277,7 @@ describe SearchController do
     end

     it "records the click for a logged in user" do
-      user = sign_in(Fabricate(:user))
+      sign_in(user)

       _, search_log_id = SearchLog.log(
         term: SecureRandom.hex,