FEATURE: when under extreme load disable search

The global setting disable_search_queue_threshold
(DISCOURSE_DISABLE_SEARCH_QUEUE_THRESHOLD), which defaults to 1 second, was
added.

This protection ensures that when the application is unable to keep up with
requests it will simply turn off search until it is no longer backed up.

To disable this protection, set this setting to 0.
This commit is contained in:
Sam Saffron 2019-07-02 11:21:52 +10:00
parent dc5eb76551
commit 4dcc5f16f1
9 changed files with 125 additions and 27 deletions

View File

@ -125,6 +125,12 @@
{{#if searchActive}} {{#if searchActive}}
<h3>{{i18n "search.no_results"}}</h3> <h3>{{i18n "search.no_results"}}</h3>
{{#if model.grouped_search_result.error}}
<div class="warning">
{{model.grouped_search_result.error}}
</div>
{{/if}}
{{#if showSuggestion}} {{#if showSuggestion}}
<div class="no-results-suggestion"> <div class="no-results-suggestion">
{{i18n "search.cant_find"}} {{i18n "search.cant_find"}}

View File

@ -1,3 +1,4 @@
import { popupAjaxError } from "discourse/lib/ajax-error";
import { searchForTerm, isValidSearchTerm } from "discourse/lib/search"; import { searchForTerm, isValidSearchTerm } from "discourse/lib/search";
import { createWidget } from "discourse/widgets/widget"; import { createWidget } from "discourse/widgets/widget";
import { h } from "virtual-dom"; import { h } from "virtual-dom";
@ -78,6 +79,7 @@ const SearchHelper = {
searchData.topicId = null; searchData.topicId = null;
} }
}) })
.catch(popupAjaxError)
.finally(() => { .finally(() => {
searchData.loading = false; searchData.loading = false;
widget.scheduleRerender(); widget.scheduleRerender();

View File

@ -2,6 +2,12 @@
display: flex; display: flex;
justify-content: space-between; justify-content: space-between;
.warning {
background-color: $danger-medium;
padding: 5px 8px;
color: $secondary;
}
.search-bar { .search-bar {
display: flex; display: flex;
justify-content: space-between; justify-content: space-between;

View File

@ -6,6 +6,8 @@ class SearchController < ApplicationController
skip_before_action :check_xhr, only: :show skip_before_action :check_xhr, only: :show
before_action :cancel_overloaded_search, only: [:query]
def self.valid_context_types def self.valid_context_types
%w{user topic category private_messages} %w{user topic category private_messages}
end end
@ -44,10 +46,14 @@ class SearchController < ApplicationController
search_args[:ip_address] = request.remote_ip search_args[:ip_address] = request.remote_ip
search_args[:user_id] = current_user.id if current_user.present? search_args[:user_id] = current_user.id if current_user.present?
search = Search.new(@search_term, search_args) if site_overloaded?
result = search.execute result = Search::GroupedSearchResults.new(search_args[:type_filter], @search_term, context, false, 0)
result.error = I18n.t("search.extreme_load_error")
result.find_user_data(guardian) if result else
search = Search.new(@search_term, search_args)
result = search.execute
result.find_user_data(guardian) if result
end
serializer = serialize_data(result, GroupedSearchResultSerializer, result: result) serializer = serialize_data(result, GroupedSearchResultSerializer, result: result)
@ -82,8 +88,12 @@ class SearchController < ApplicationController
search_args[:user_id] = current_user.id if current_user.present? search_args[:user_id] = current_user.id if current_user.present?
search_args[:restrict_to_archetype] = params[:restrict_to_archetype] if params[:restrict_to_archetype].present? search_args[:restrict_to_archetype] = params[:restrict_to_archetype] if params[:restrict_to_archetype].present?
search = Search.new(params[:term], search_args) if site_overloaded?
result = search.execute result = GroupedSearchResults.new(search_args["type_filter"], params[:term], context, false, 0)
else
search = Search.new(params[:term], search_args)
result = search.execute
end
render_serialized(result, GroupedSearchResultSerializer, result: result) render_serialized(result, GroupedSearchResultSerializer, result: result)
end end
@ -118,6 +128,18 @@ class SearchController < ApplicationController
protected protected
def site_overloaded?
(queue_time = request.env['REQUEST_QUEUE_SECONDS']) &&
(GlobalSetting.disable_search_queue_threshold > 0) &&
(queue_time > GlobalSetting.disable_search_queue_threshold)
end
def cancel_overloaded_search
if site_overloaded?
render_json_error I18n.t("search.extreme_load_error"), status: 409
end
end
def lookup_search_context def lookup_search_context
return if params[:skip_context] == "true" return if params[:skip_context] == "true"

View File

@ -6,7 +6,7 @@ class GroupedSearchResultSerializer < ApplicationSerializer
has_many :categories, serializer: BasicCategorySerializer has_many :categories, serializer: BasicCategorySerializer
has_many :tags, serializer: TagSerializer has_many :tags, serializer: TagSerializer
has_many :groups, serializer: BasicGroupSerializer has_many :groups, serializer: BasicGroupSerializer
attributes :more_posts, :more_users, :more_categories, :term, :search_log_id, :more_full_page_results, :can_create_topic attributes :more_posts, :more_users, :more_categories, :term, :search_log_id, :more_full_page_results, :can_create_topic, :error
def search_log_id def search_log_id
object.search_log_id object.search_log_id

View File

@ -224,6 +224,9 @@ force_anonymous_min_queue_seconds = 1
# only trigger anon if we see more than N requests for this path in last 10 seconds # only trigger anon if we see more than N requests for this path in last 10 seconds
force_anonymous_min_per_10_seconds = 3 force_anonymous_min_per_10_seconds = 3
# disable search if app server is queueing for longer than this (in seconds)
disable_search_queue_threshold = 1
# maximum number of posts rebaked across the cluster in the periodical job # maximum number of posts rebaked across the cluster in the periodical job
# rebake process is very expensive, on multisite we have to make sure we never # rebake process is very expensive, on multisite we have to make sure we never
# flood the queue # flood the queue

View File

@ -2096,6 +2096,7 @@ en:
value: "SSO secret" value: "SSO secret"
search: search:
extreme_load_error: "Site is under extreme load, search is disabled, try again later"
within_post: "#%{post_number} by %{username}" within_post: "#%{post_number} by %{username}"
types: types:
category: "Categories" category: "Categories"

View File

@ -24,7 +24,8 @@ class Search
:term, :term,
:search_context, :search_context,
:include_blurbs, :include_blurbs,
:more_full_page_results :more_full_page_results,
:error
) )
attr_accessor :search_log_id attr_accessor :search_log_id
@ -40,6 +41,11 @@ class Search
@users = [] @users = []
@tags = [] @tags = []
@groups = [] @groups = []
@error = nil
end
def error=(error)
@error = error
end end
def find_user_data(guardian) def find_user_data(guardian)

View File

@ -3,6 +3,21 @@
require 'rails_helper' require 'rails_helper'
describe SearchController do describe SearchController do
fab!(:awesome_post) do
SearchIndexer.enable
Fabricate(:post, raw: 'this is my really awesome post')
end
fab!(:user) do
Fabricate(:user)
end
fab!(:user_post) do
SearchIndexer.enable
Fabricate(:post, raw: "#{user.username} is a cool person")
end
context "integration" do context "integration" do
before do before do
SearchIndexer.enable SearchIndexer.enable
@ -18,6 +33,49 @@ describe SearchController do
$redis.flushall $redis.flushall
end end
context "when overloaded" do
before do
global_setting :disable_search_queue_threshold, 0.2
end
let! :start_time do
freeze_time
Time.now
end
let! :current_time do
freeze_time 0.3.seconds.from_now
end
it "errors on #query" do
get "/search/query.json", headers: {
"HTTP_X_REQUEST_START" => "t=#{start_time.to_f}"
}, params: {
term: "hi there", include_blurb: true
}
expect(response.status).to eq(409)
end
it "no results and error on #index" do
get "/search.json", headers: {
"HTTP_X_REQUEST_START" => "t=#{start_time.to_f}"
}, params: {
q: "awesome"
}
expect(response.status).to eq(200)
data = JSON.parse(response.body)
expect(data["posts"]).to be_empty
expect(data["grouped_search_result"]["error"]).not_to be_empty
end
end
it "returns a 400 error if you search for null bytes" do it "returns a 400 error if you search for null bytes" do
term = "hello\0hello" term = "hello\0hello"
@ -29,22 +87,21 @@ describe SearchController do
end end
it "can search correctly" do it "can search correctly" do
my_post = Fabricate(:post, raw: 'this is my really awesome post')
get "/search/query.json", params: { get "/search/query.json", params: {
term: 'awesome', include_blurb: true term: 'awesome', include_blurb: true
} }
expect(response.status).to eq(200) expect(response.status).to eq(200)
data = JSON.parse(response.body) data = JSON.parse(response.body)
expect(data['posts'][0]['id']).to eq(my_post.id)
expect(data['posts'][0]['blurb']).to eq('this is my really awesome post') expect(data['posts'].length).to eq(1)
expect(data['topics'][0]['id']).to eq(my_post.topic_id) expect(data['posts'][0]['id']).to eq(awesome_post.id)
expect(data['posts'][0]['blurb']).to eq(awesome_post.raw)
expect(data['topics'][0]['id']).to eq(awesome_post.topic_id)
end end
it 'performs the query with a type filter' do it 'performs the query with a type filter' do
user = Fabricate(:user)
my_post = Fabricate(:post, raw: "#{user.username} is a cool person")
get "/search/query.json", params: { get "/search/query.json", params: {
term: user.username, type_filter: 'topic' term: user.username, type_filter: 'topic'
@ -53,7 +110,7 @@ describe SearchController do
expect(response.status).to eq(200) expect(response.status).to eq(200)
data = JSON.parse(response.body) data = JSON.parse(response.body)
expect(data['posts'][0]['id']).to eq(my_post.id) expect(data['posts'][0]['id']).to eq(user_post.id)
expect(data['users']).to be_blank expect(data['users']).to be_blank
get "/search/query.json", params: { get "/search/query.json", params: {
@ -71,10 +128,8 @@ describe SearchController do
it 'should not be restricted by minimum search term length' do it 'should not be restricted by minimum search term length' do
SiteSetting.min_search_term_length = 20000 SiteSetting.min_search_term_length = 20000
post = Fabricate(:post)
get "/search/query.json", params: { get "/search/query.json", params: {
term: post.topic_id, term: awesome_post.topic_id,
type_filter: 'topic', type_filter: 'topic',
search_for_id: true search_for_id: true
} }
@ -82,15 +137,13 @@ describe SearchController do
expect(response.status).to eq(200) expect(response.status).to eq(200)
data = JSON.parse(response.body) data = JSON.parse(response.body)
expect(data['topics'][0]['id']).to eq(post.topic_id) expect(data['topics'][0]['id']).to eq(awesome_post.topic_id)
end end
it "should return the right result" do it "should return the right result" do
user = Fabricate(:user)
my_post = Fabricate(:post, raw: "#{user.username} is a cool person")
get "/search/query.json", params: { get "/search/query.json", params: {
term: my_post.topic_id, term: user_post.topic_id,
type_filter: 'topic', type_filter: 'topic',
search_for_id: true search_for_id: true
} }
@ -98,7 +151,7 @@ describe SearchController do
expect(response.status).to eq(200) expect(response.status).to eq(200)
data = JSON.parse(response.body) data = JSON.parse(response.body)
expect(data['topics'][0]['id']).to eq(my_post.topic_id) expect(data['topics'][0]['id']).to eq(user_post.topic_id)
end end
end end
end end
@ -174,7 +227,6 @@ describe SearchController do
end end
context "with a user" do context "with a user" do
fab!(:user) { Fabricate(:user) }
it "raises an error if the user can't see the context" do it "raises an error if the user can't see the context" do
get "/search/query.json", params: { get "/search/query.json", params: {
@ -205,7 +257,7 @@ describe SearchController do
end end
it "doesn't record the click for a different user" do it "doesn't record the click for a different user" do
sign_in(Fabricate(:user)) sign_in(user)
_, search_log_id = SearchLog.log( _, search_log_id = SearchLog.log(
term: SecureRandom.hex, term: SecureRandom.hex,
@ -225,7 +277,7 @@ describe SearchController do
end end
it "records the click for a logged in user" do it "records the click for a logged in user" do
user = sign_in(Fabricate(:user)) sign_in(user)
_, search_log_id = SearchLog.log( _, search_log_id = SearchLog.log(
term: SecureRandom.hex, term: SecureRandom.hex,