DEV: Make DistributedMemoizer use DistributedMutex (#16229)

Its implementation was already distributed-mutex-like, with slight differences that did not seem necessary.
Jarek Radosz 2022-04-05 19:29:58 +02:00 committed by GitHub
parent 14778ba52e
commit 3c44bed545
3 changed files with 18 additions and 73 deletions
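
For reference, the memoizer's calling contract is unchanged by this commit: callers pass a Redis cache key, a duration in seconds (default one day), and a block whose string result is stored with setex and shared across processes and machines. A minimal usage sketch follows; the key name and block body are illustrative, not taken from the diff below.

```ruby
# Compute the value at most once per hour across all processes and machines;
# other callers inside that window read the cached string back from Redis.
# "expensive_report" and the block body are made-up examples.
report = DistributedMemoizer.memoize("expensive_report", 60 * 60) do
  (1..1_000_000).sum.to_s
end
```

Because the value is persisted with redis.setex, the block should return a string (or something that round-trips cleanly through one).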

@@ -1,72 +1,31 @@
 # frozen_string_literal: true

 class DistributedMemoizer
   # never wait for longer that 1 second for a cross process lock
-  MAX_WAIT = 2
-  LOCK = Mutex.new
+  MAX_WAIT = 1

   # memoize a key across processes and machines
-  def self.memoize(key, duration = 60 * 60 * 24, redis = nil)
-    redis ||= Discourse.redis
-
+  def self.memoize(key, duration = 60 * 60 * 24, redis = Discourse.redis)
+    redis_lock_key = self.redis_lock_key(key)
     redis_key = self.redis_key(key)

-    unless result = redis.get(redis_key)
-      redis_lock_key = self.redis_lock_key(key)
+    DistributedMutex.synchronize(redis_lock_key, redis: redis, validity: MAX_WAIT) do
+      result = redis.get(redis_key)

-      start = Time.now
-      got_lock = false
-      begin
-        while Time.now < start + MAX_WAIT && !got_lock
-          LOCK.synchronize do
-            got_lock = get_lock(redis, redis_lock_key)
-          end
-          sleep 0.001
-        end
-
-        unless result = redis.get(redis_key)
-          result = yield
-          redis.setex(redis_key, duration, result)
-        end
-      ensure
-        # NOTE: delete regardless so next one in does not need to wait MAX_WAIT again
-        redis.del(redis_lock_key)
+      unless result
+        result = yield
+        redis.setex(redis_key, duration, result)
       end
-    end

-    result
+      result
+    end
   end

   def self.redis_lock_key(key)
-    +"memoize_lock_" << key
+    "memoize_lock_#{key}"
   end

   def self.redis_key(key)
-    +"memoize_" << key
-  end
-
-  # Used for testing
-  def self.flush!
-    Discourse.redis.scan_each(match: "memoize_*").each { |key| Discourse.redis.del(key) }
-  end
-
-  protected
-
-  def self.get_lock(redis, redis_lock_key)
-    redis.watch(redis_lock_key)
-    current = redis.get(redis_lock_key)
-    return false if current
-
-    unique = SecureRandom.hex
-
-    result = redis.multi do
-      redis.setex(redis_lock_key, MAX_WAIT, unique)
-    end
-
-    redis.unwatch
-
-    result == ["OK"]
+    "memoize_#{key}"
   end
 end
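
DistributedMutex.synchronize takes over the cross-process locking that the deleted get_lock / WATCH / MULTI code did by hand: it holds a Redis-backed lock for at most validity seconds while the block runs, and the memoizer re-checks the cache key inside the critical section so only the first caller pays for the block. As a rough mental model (an illustrative stand-in, not Discourse's actual DistributedMutex implementation), the lock behaves something like this:

```ruby
require "redis"
require "securerandom"

# Illustrative sketch of a Redis-backed lock with a validity (TTL) window.
# Hypothetical helper for explanation only; not the real DistributedMutex.
def with_redis_lock(redis, lock_key, validity_seconds)
  token = SecureRandom.hex
  # SET NX EX is atomic and auto-expires, so a crashed holder cannot block
  # other processes for longer than the validity window.
  sleep 0.001 until redis.set(lock_key, token, nx: true, ex: validity_seconds)
  begin
    yield
  ensure
    # Best-effort release: only delete the key if we still appear to own it.
    redis.del(lock_key) if redis.get(lock_key) == token
  end
end
```

An auto-expiring lock is what lets the old MAX_WAIT spin loop and the "delete regardless" cleanup in the ensure block go away.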

@@ -1,35 +1,25 @@
 # frozen_string_literal: true

 describe DistributedMemoizer do
-
-  before do
+  after do
     Discourse.redis.del(DistributedMemoizer.redis_key("hello"))
     Discourse.redis.del(DistributedMemoizer.redis_lock_key("hello"))
-    Discourse.redis.unwatch
   end

-  # NOTE we could use a mock redis here, but I think it makes sense to test the real thing
-  # let(:mock_redis) { MockRedis.new }
-
   def memoize(&block)
     DistributedMemoizer.memoize("hello", duration = 120, &block)
   end

   it "returns the value of a block" do
-    expect(memoize do
-      "abc"
-    end).to eq("abc")
+    expect(memoize { "abc" }).to eq("abc")
   end

   it "return the old value once memoized" do
-
     memoize do
       "abc"
     end

-    expect(memoize do
-      "world"
-    end).to eq("abc")
+    expect(memoize { "world" }).to eq("abc")
   end

   it "memoizes correctly when used concurrently" do
@@ -48,7 +38,5 @@ describe DistributedMemoizer do
     threads.each(&:join)
-
     expect(results.uniq.length).to eq(1)
     expect(results.count).to eq(5)
   end
-
 end
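
The body of the "memoizes correctly when used concurrently" test sits outside this hunk; based on the assertions that do appear (five collected results, all identical, checked after joining the threads), its shape is roughly the following. This is a sketch only, and the random payload is a made-up stand-in for whatever the spec actually memoizes.

```ruby
it "memoizes correctly when used concurrently" do
  queue = Queue.new

  # Five threads race to memoize the same key; the distributed lock must
  # ensure the block runs once, so every thread sees the same value.
  threads = 5.times.map do
    Thread.new { queue << memoize { SecureRandom.hex } }
  end

  threads.each(&:join)

  results = Array.new(5) { queue.pop }
  expect(results.uniq.length).to eq(1)
  expect(results.count).to eq(5)
end
```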

@@ -11,12 +11,10 @@ describe StaticController do
     UploadCreator.new(file, filename).create_for(Discourse.system_user.id)
   end

-  before_all do
-    DistributedMemoizer.flush!
-  end
-
   after do
-    DistributedMemoizer.flush!
+    Discourse.redis.scan_each(match: "memoize_*").each do |key|
+      Discourse.redis.del(key)
+    end
   end

   describe 'local store' do