[Gems] Update to sidekiq 7

Moves the redis handling into the cache class because of a naming
conflict with redis-client, which is now being pulled in by sidekiq.
This commit is contained in:
Earlopain 2023-04-15 20:43:37 +02:00
parent 46eca3cb97
commit eb8e402152
No known key found for this signature in database
GPG Key ID: 6CFB948E15246897
10 changed files with 29 additions and 26 deletions

View File

@@ -22,7 +22,7 @@ gem 'httparty'
gem 'recaptcha', require: "recaptcha/rails" gem 'recaptcha', require: "recaptcha/rails"
gem 'webpacker', '>= 4.0.x' gem 'webpacker', '>= 4.0.x'
gem 'retriable' gem 'retriable'
gem 'sidekiq', '~> 6.0' gem 'sidekiq', '~> 7.0'
gem 'marcel' gem 'marcel'
# bookmarks for later, if they are needed # bookmarks for later, if they are needed
# gem 'sidekiq-worker-killer' # gem 'sidekiq-worker-killer'

View File

@@ -105,7 +105,7 @@ GEM
activesupport activesupport
cityhash (0.9.0) cityhash (0.9.0)
concurrent-ruby (1.2.2) concurrent-ruby (1.2.2)
connection_pool (2.3.0) connection_pool (2.4.0)
crack (0.4.5) crack (0.4.5)
rexml rexml
crass (1.0.6) crass (1.0.6)
@@ -282,7 +282,10 @@ GEM
rbs (2.8.4) rbs (2.8.4)
recaptcha (5.12.3) recaptcha (5.12.3)
json json
redis (4.8.1) redis (5.0.6)
redis-client (>= 0.9.0)
redis-client (0.14.1)
connection_pool
regexp_parser (2.7.0) regexp_parser (2.7.0)
request_store (1.5.1) request_store (1.5.1)
rack (>= 1.4) rack (>= 1.4)
@@ -326,16 +329,16 @@ GEM
shoulda-context (2.0.0) shoulda-context (2.0.0)
shoulda-matchers (5.3.0) shoulda-matchers (5.3.0)
activesupport (>= 5.2.0) activesupport (>= 5.2.0)
sidekiq (6.5.8) sidekiq (7.0.8)
connection_pool (>= 2.2.5, < 3) concurrent-ruby (< 2)
rack (~> 2.0) connection_pool (>= 2.3.0)
redis (>= 4.5.0, < 5) rack (>= 2.2.4)
sidekiq-unique-jobs (7.1.29) redis-client (>= 0.11.0)
sidekiq-unique-jobs (8.0.2)
brpoplpush-redis_script (> 0.1.1, <= 2.0.0) brpoplpush-redis_script (> 0.1.1, <= 2.0.0)
concurrent-ruby (~> 1.0, >= 1.0.5) concurrent-ruby (~> 1.0, >= 1.0.5)
redis (< 5.0) sidekiq (>= 7.0.0, < 8.0.0)
sidekiq (>= 5.0, < 7.0) thor (>= 1.0, < 3.0)
thor (>= 0.20, < 3.0)
simple_form (5.2.0) simple_form (5.2.0)
actionpack (>= 5.2) actionpack (>= 5.2)
activemodel (>= 5.2) activemodel (>= 5.2)
@@ -427,7 +430,7 @@ DEPENDENCIES
ruby-vips ruby-vips
shoulda-context shoulda-context
shoulda-matchers shoulda-matchers
sidekiq (~> 6.0) sidekiq (~> 7.0)
sidekiq-unique-jobs sidekiq-unique-jobs
simple_form simple_form
solargraph solargraph

View File

@@ -2,7 +2,6 @@ class StatsController < ApplicationController
respond_to :html respond_to :html
def index def index
client = RedisClient.client @stats = JSON.parse(Cache.redis.get("e6stats") || "{}")
@stats = JSON.parse(client.get('e6stats') || '{}')
end end
end end

View File

@@ -27,4 +27,10 @@ class Cache
def self.hash(string) def self.hash(string)
CityHash.hash64(string).to_s(36) CityHash.hash64(string).to_s(36)
end end
def self.redis
# Using a shared variable like this here is OK
# since unicorn spawns a new process for each worker
@redis ||= Redis.new(url: Danbooru.config.redis_url)
end
end end

View File

@@ -4,10 +4,10 @@ module DangerZone
end end
def self.min_upload_level def self.min_upload_level
(RedisClient.client.get("min_upload_level") || User::Levels::MEMBER).to_i (Cache.redis.get("min_upload_level") || User::Levels::MEMBER).to_i
end end
def self.min_upload_level=(min_upload_level) def self.min_upload_level=(min_upload_level)
RedisClient.client.set("min_upload_level", min_upload_level) Cache.redis.set("min_upload_level", min_upload_level)
end end
end end

View File

@@ -1,5 +0,0 @@
class RedisClient
def self.client
@@_client ||= ::Redis.new(url: Danbooru.config.redis_url)
end
end

View File

@@ -77,6 +77,6 @@ class StatsUpdater
stats["#{cat}_tags".to_sym] = Tag.where(category: TagCategory.mapping[cat]).count stats["#{cat}_tags".to_sym] = Tag.where(category: TagCategory.mapping[cat]).count
end end
RedisClient.client.set("e6stats", stats.to_json) Cache.redis.set("e6stats", stats.to_json)
end end
end end

View File

@@ -40,14 +40,14 @@ class UserThrottle
ckey = current_key(t) ckey = current_key(t)
pkey = previous_key(t) pkey = previous_key(t)
tdiff = t.to_i - ctime(t)*@duration.to_i tdiff = t.to_i - ctime(t)*@duration.to_i
hits = RedisClient.client.mget(ckey, pkey) hits = Cache.redis.mget(ckey, pkey)
@cached_rate = (hits[1].to_f * ((@duration.to_i-tdiff)/@duration.to_f) + hits[0].to_f).to_i @cached_rate = (hits[1].to_f * ((@duration.to_i-tdiff)/@duration.to_f) + hits[0].to_f).to_i
end end
def hit! def hit!
t = Time.now t = Time.now
ckey = current_key(t) ckey = current_key(t)
RedisClient.client.multi do |transaction| Cache.redis.multi do |transaction|
transaction.incr(ckey) transaction.incr(ckey)
transaction.expire(ckey, cache_duration.minutes) transaction.expire(ckey, cache_duration.minutes)
end end

View File

@@ -361,7 +361,7 @@ class Ticket < ApplicationRecord
end end
def push_pubsub(action) def push_pubsub(action)
RedisClient.client.publish("ticket_updates", pubsub_hash(action).to_json) Cache.redis.publish("ticket_updates", pubsub_hash(action).to_json)
end end
end end

View File

@@ -1,4 +1,4 @@
require 'sidekiq' require "sidekiq-unique-jobs"
Sidekiq.configure_server do |config| Sidekiq.configure_server do |config|
config.redis = { url: Danbooru.config.redis_url } config.redis = { url: Danbooru.config.redis_url }