From 6493ddce175c2af6c977ff35f932b5ef2495d3bc Mon Sep 17 00:00:00 2001
From: Joffrey JAFFEUX
Date: Mon, 14 Nov 2022 13:38:50 +0100
Subject: [PATCH] PERF: limits use of redis cache while building emojis list
 (#19013)

We were doing two Redis GETs for each emoji while building the
custom/standard/all lists, which was resulting in ~3710 Redis calls.

Given that the emoji DB file is loaded in memory while we build/cache the
emojis list, this is unnecessary and slow.

As a simplification, here is pseudo code showing what we were doing:

```ruby
emojis.each do |emoji_name|
  aliases = get_aliases_from_redis_cache(emoji_name)
  is_tonable = get_is_tonable_from_redis_cache(emoji_name)
  build_emoji(emoji_name, aliases, is_tonable)
end
```

The two Redis calls are now replaced by a simple hash access: `@db[emoji_name]`.
---
 app/models/emoji.rb | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/app/models/emoji.rb b/app/models/emoji.rb
index 2525b30948f..750be812834 100644
--- a/app/models/emoji.rb
+++ b/app/models/emoji.rb
@@ -29,11 +29,11 @@ class Emoji
   end

   def self.aliases
-    Discourse.cache.fetch(cache_key("aliases_emojis")) { db['aliases'] }
+    db['aliases']
   end

   def self.search_aliases
-    Discourse.cache.fetch(cache_key("search_aliases_emojis")) { db['searchAliases'] }
+    db['searchAliases']
   end

   def self.translations
@@ -45,7 +45,7 @@ class Emoji
   end

   def self.tonable_emojis
-    Discourse.cache.fetch(cache_key("tonable_emojis")) { db['tonableEmojis'] }
+    db['tonableEmojis']
   end

   def self.custom?(name)
@@ -118,7 +118,7 @@ class Emoji
   end

   def self.clear_cache
-    %w{custom standard aliases search_aliases translations all tonable}.each do |key|
+    %w{custom standard translations all}.each do |key|
       Discourse.cache.delete(cache_key("#{key}_emojis"))
     end
     global_emoji_cache.clear
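
For comparison, a rough sketch of the same pseudo code after this change; `emojis`, `build_emoji`, and the shape of the per-emoji record are illustrative assumptions carried over from the example above, and only the `@db[emoji_name]` hash access reflects the actual change:

```ruby
# pseudo code: the per-emoji Redis round-trips are gone,
# everything comes from the in-memory emoji DB hash
emojis.each do |emoji_name|
  record = @db[emoji_name]   # single hash access (assumed record shape)
  build_emoji(emoji_name, record[:aliases], record[:tonable])
end
```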