Commit 4ab1011d authored by Felipe Artur

Fix refresh cache for open issues count service

parent e13aeab5
@@ -17,7 +17,7 @@ class BaseCountService
   end
 
   def refresh_cache(&block)
-    Rails.cache.write(cache_key, block_given? ? yield : uncached_count, raw: raw?)
+    update_cache_for_key(cache_key, &block)
   end
 
   def uncached_count
@@ -41,4 +41,8 @@ class BaseCountService
   def cache_options
     { raw: raw? }
   end
+
+  def update_cache_for_key(key, &block)
+    Rails.cache.write(key, block_given? ? yield : uncached_count, raw: raw?)
+  end
 end
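For context, a minimal sketch (not part of the commit) of how a subclass would exercise the extracted helper. The StarCountService class and its values are hypothetical and exist only to illustrate the behaviour:

    # Hypothetical subclass, for illustration only.
    class StarCountService < BaseCountService
      def cache_key
        'illustrative_stars_count'
      end

      def uncached_count
        42 # stand-in for a real COUNT query
      end
    end

    service = StarCountService.new

    # Without a block, refresh_cache still falls back to uncached_count,
    # exactly as the previous inline Rails.cache.write call did.
    service.refresh_cache

    # With a block, the block's value is written instead; subclasses can also
    # call update_cache_for_key directly to write other keys the same way.
    service.refresh_cache { 100 }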
@@ -3,10 +3,11 @@
 # because the service use maps to retrieve the project ids
 module Projects
   class BatchOpenIssuesCountService < Projects::BatchCountService
+    # Method not needed. The cache here is updated by the
+    # overridden OpenIssuesCountService#refresh_cache method.
     def global_count
-      @global_count ||= begin
-        count_service.query(project_ids).group(:project_id).count
-      end
+      nil
     end
 
     def count_service
...
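The batch service can drop its grouped query because each per-project service now recomputes and writes its own counts. The sketch below is illustrative only and merely approximates how the parent BatchCountService delegates per project; it is not the literal implementation:

    # Illustrative only: an approximation of the batch refresh once
    # global_count is no longer needed at the batch level.
    class IllustrativeBatchRefresh
      def initialize(projects)
        @projects = projects
      end

      def refresh_cache
        @projects.each do |project|
          # Each per-project service computes and writes both of its own
          # cache keys, so no batch-level grouped count is required.
          Projects::OpenIssuesCountService.new(project).refresh_cache
        end
      end
    end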
@@ -22,8 +22,10 @@ module Projects
       )
     end
 
-    def cache_key
-      ['projects', 'count_service', VERSION, @project.id, cache_key_name]
+    def cache_key(key = nil)
+      cache_key = key || cache_key_name
+
+      ['projects', 'count_service', VERSION, @project.id, cache_key]
     end
 
     def self.query(project_ids)
...
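With the new optional argument, the same method builds either the caller-supplied key or the default one. The values below are illustrative only, assuming VERSION is 1.0, a project with id 1, and the issue-count service constructed without a user (which defaults to the public key):

    # Illustrative values only.
    service = Projects::OpenIssuesCountService.new(project)

    service.cache_key
    # => ['projects', 'count_service', 1.0, 1, 'public_open_issues_count']

    service.cache_key('total_open_issues_count')
    # => ['projects', 'count_service', 1.0, 1, 'total_open_issues_count']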
@@ -4,6 +4,10 @@ module Projects
   class OpenIssuesCountService < Projects::CountService
     include Gitlab::Utils::StrongMemoize
 
+    # Cache keys used to store issues count
+    PUBLIC_COUNT_KEY = 'public_open_issues_count'.freeze
+    TOTAL_COUNT_KEY = 'total_open_issues_count'.freeze
+
     def initialize(project, user = nil)
       @user = user
@@ -11,7 +15,7 @@ module Projects
     end
 
     def cache_key_name
-      public_only? ? 'public_open_issues_count' : 'total_open_issues_count'
+      public_only? ? PUBLIC_COUNT_KEY : TOTAL_COUNT_KEY
     end
 
     def public_only?
@@ -28,6 +32,30 @@ module Projects
       end
     end
 
+    def public_count_cache_key
+      cache_key(PUBLIC_COUNT_KEY)
+    end
+
+    def total_count_cache_key
+      cache_key(TOTAL_COUNT_KEY)
+    end
+
+    # The block passed as a parameter is ignored because we need to refresh
+    # both cache keys in every case.
+    def refresh_cache(&block)
+      count_grouped_by_confidential = self.class.query(@project, public_only: false).group(:confidential).count
+      public_count = count_grouped_by_confidential[false] || 0
+      total_count = public_count + (count_grouped_by_confidential[true] || 0)
+
+      update_cache_for_key(public_count_cache_key) do
+        public_count
+      end
+
+      update_cache_for_key(total_count_cache_key) do
+        total_count
+      end
+    end
+
     # We only show total issues count for reporters
     # which are allowed to view confidential issues
     # This will still show a discrepancy on issues number but should be less than before.
...
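The single query grouped by confidential yields both counts at once, so both cache keys are always refreshed together. A worked example with illustrative numbers (3 non-confidential and 2 confidential open issues):

    # Illustrative grouped result for 3 public and 2 confidential open issues.
    count_grouped_by_confidential = { false => 3, true => 2 }

    public_count = count_grouped_by_confidential[false] || 0                  # => 3
    total_count  = public_count + (count_grouped_by_confidential[true] || 0)  # => 5

    # Both keys are then written in the same refresh, so the public and total
    # counters cannot drift apart between refreshes.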
---
title: Fix refreshing cache keys for open issues count
merge_request:
author:
type: fixed
require 'spec_helper'

describe Projects::BatchOpenIssuesCountService do
  let!(:project_1) { create(:project) }
  let!(:project_2) { create(:project) }

  let(:subject) { described_class.new([project_1, project_2]) }

  context '#refresh_cache', :use_clean_rails_memory_store_caching do
    before do
      create(:issue, project: project_1)
      create(:issue, project: project_1, confidential: true)

      create(:issue, project: project_2)
      create(:issue, project: project_2, confidential: true)
    end

    context 'when cache is clean' do
      it 'refreshes cache keys correctly' do
        subject.refresh_cache

        expect(Rails.cache.read(get_cache_key(subject, project_1))).to eq(2)
        expect(Rails.cache.read(get_cache_key(subject, project_2))).to eq(2)

        expect(Rails.cache.read(get_cache_key(subject, project_1, true))).to eq(1)
        expect(Rails.cache.read(get_cache_key(subject, project_2, true))).to eq(1)
      end
    end

    context 'when issues count is already cached' do
      before do
        create(:issue, project: project_2)
        subject.refresh_cache
      end

      it 'does not update the cache again' do
        expect(Rails.cache).not_to receive(:write)

        subject.refresh_cache
      end
    end
  end

  def get_cache_key(subject, project, public_key = false)
    service = subject.count_service.new(project)

    if public_key
      service.cache_key(service.class::PUBLIC_COUNT_KEY)
    else
      service.cache_key(service.class::TOTAL_COUNT_KEY)
    end
  end
end
@@ -50,5 +50,40 @@ describe Projects::OpenIssuesCountService do
         end
       end
     end
+
+    context '#refresh_cache', :use_clean_rails_memory_store_caching do
+      let(:subject) { described_class.new(project) }
+
+      before do
+        create(:issue, :opened, project: project)
+        create(:issue, :opened, project: project)
+        create(:issue, :opened, confidential: true, project: project)
+      end
+
+      context 'when cache is empty' do
+        it 'refreshes cache keys correctly' do
+          subject.refresh_cache
+
+          expect(Rails.cache.read(subject.cache_key(described_class::PUBLIC_COUNT_KEY))).to eq(2)
+          expect(Rails.cache.read(subject.cache_key(described_class::TOTAL_COUNT_KEY))).to eq(3)
+        end
+      end
+
+      context 'when cache is outdated' do
+        before do
+          subject.refresh_cache
+        end
+
+        it 'refreshes cache keys correctly' do
+          create(:issue, :opened, project: project)
+          create(:issue, :opened, confidential: true, project: project)
+
+          subject.refresh_cache
+
+          expect(Rails.cache.read(subject.cache_key(described_class::PUBLIC_COUNT_KEY))).to eq(3)
+          expect(Rails.cache.read(subject.cache_key(described_class::TOTAL_COUNT_KEY))).to eq(5)
+        end
+      end
+    end
   end
 end