Commit 489520a9 authored by James Lopez

Merge branch 'rpereira2/scope-metrics-dashboard-cache-deletion-by-project' into 'master'

Delete cache of dashboards in same project only

See merge request gitlab-org/gitlab!38563
parents 207fbf70 7cb0cd54
......@@ -34,7 +34,7 @@ module Metrics
# Returns an un-processed dashboard from the cache.
#
# The cache is scoped to the dashboard's project via Cache.for(project),
# so the entry is registered in that project's key catalog and can be
# purged by delete_all! without touching other projects' dashboards.
def raw_dashboard
  Gitlab::Metrics::Dashboard::Cache.for(project).fetch(cache_key) { get_raw_dashboard }
end
# Should return true if this dashboard service is for an out-of-the-box
......
......@@ -30,6 +30,11 @@ module Metrics
end
end
# Returns an un-processed dashboard from the cache.
#
# Deliberately uses the class-level Cache.fetch (a plain delegate to
# Rails.cache), which does not register the key for per-project bulk
# deletion — these cached dashboards are never purged by delete_all!.
def raw_dashboard
  Gitlab::Metrics::Dashboard::Cache.fetch(cache_key) do
    get_raw_dashboard
  end
end
private
def dashboard_version
......
......@@ -9,8 +9,20 @@ module Gitlab
# Base name for the key catalogs; per-project catalogs append the
# project id to this prefix (see #catalog_key).
CACHE_KEYS = 'all_cached_metric_dashboards'
class << self
# This class method (Gitlab::Metrics::Dashboard::Cache.fetch) can be used
# when the key does not need to be deleted by `delete_all!`.
# For example, out of the box dashboard caches do not need to be deleted.
delegate :fetch, to: :"Rails.cache"
# `Cache.for(project)` reads better at call sites than `Cache.new(project)`.
alias_method :for, :new
end
# Builds a cache handle scoped to a single project; keys stored through
# the instance-level #fetch are recorded in this project's catalog so
# #delete_all! can purge them together.
#
# @param project [Project] the project whose id scopes the catalog key
def initialize(project)
@project = project
end
# Stores a dashboard in the cache, documenting the key
# so the cache can be cleared in bulk at another time.
def fetch(key)
register_key(key)
......@@ -23,7 +35,7 @@ module Gitlab
# Deletes every cached dashboard registered for this project, then
# drops the project's key catalog itself. Entries cached through the
# class-level fetch (never registered) are left untouched.
def delete_all!
  all_keys.each { |key| Rails.cache.delete(key) }

  Rails.cache.delete(catalog_key)
end
private
......@@ -31,12 +43,19 @@ module Gitlab
# Records `key` in this project's catalog (a '|'-joined list stored
# under catalog_key) so delete_all! can later purge it in bulk.
def register_key(key)
  new_keys = all_keys.add(key).to_a.join('|')

  Rails.cache.write(catalog_key, new_keys)
end
# Returns a Set of the dashboard cache keys registered for this
# project; empty when the catalog entry is missing (Set.new(nil)).
def all_keys
  keys = Rails.cache.read(catalog_key)&.split('|')

  Set.new(keys)
end
# This key is used to store the names of all the keys that contain this
# project's dashboards.
def catalog_key
  "#{CACHE_KEYS}_#{@project.id}"
end
end
end
......
......@@ -14,7 +14,7 @@ module Gitlab
# Also deletes all dashboard cache entries for the given project
# (project-scoped: other projects' cached dashboards are untouched).
# @return [Array] ex) ['.gitlab/dashboards/dashboard1.yml']
def list_dashboards(project)
  Gitlab::Metrics::Dashboard::Cache.for(project).delete_all!

  file_finder(project).list_files_for(DASHBOARD_ROOT)
end
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Metrics::Dashboard::Cache, :use_clean_rails_memory_store_caching do
# Two stubbed projects so the specs can prove per-project isolation:
# operations on one project's cache never touch the other's entries.
let_it_be(:project1) { build_stubbed(:project) }
let_it_be(:project2) { build_stubbed(:project) }
# Keys embed the project id so the two projects' entries are distinct
# in the shared Rails.cache store.
let(:project1_key1) { "#{project1.id}_key1" }
let(:project1_key2) { "#{project1.id}_key2" }
let(:project2_key1) { "#{project2.id}_key1" }
# Project-scoped cache instances under test (Cache.for aliases .new).
let(:cache1) { described_class.for(project1) }
let(:cache2) { described_class.for(project2) }
before do
cache1.fetch(project1_key1) { 'data1' }
cache1.fetch(project1_key2) { 'data2' }
cache2.fetch(project2_key1) { 'data3' }
end
# Class-level fetch: plain delegate to Rails.cache, no key registration.
describe '.fetch' do
it 'stores data correctly' do
described_class.fetch('key1') { 'data1' }
described_class.fetch('key2') { 'data2' }
expect(described_class.fetch('key1')).to eq('data1')
expect(described_class.fetch('key2')).to eq('data2')
end
end
describe '.for' do
it 'returns a new instance' do
expect(described_class.for(project1)).to be_instance_of(described_class)
end
end
# Instance-level fetch: stores and returns the cached value.
describe '#fetch' do
it 'stores data correctly' do
expect(cache1.fetch(project1_key1)).to eq('data1')
expect(cache1.fetch(project1_key2)).to eq('data2')
expect(cache2.fetch(project2_key1)).to eq('data3')
end
end
describe '#delete_all!' do
# Only project1's registered keys are removed; project2's survive.
it 'deletes keys of the given project', :aggregate_failures do
cache1.delete_all!
expect(Rails.cache.exist?(project1_key1)).to be(false)
expect(Rails.cache.exist?(project1_key2)).to be(false)
expect(cache2.fetch(project2_key1)).to eq('data3')
cache2.delete_all!
expect(Rails.cache.exist?(project2_key1)).to be(false)
end
# A project with no catalog entry yet must delete cleanly (empty Set).
it 'does not fail when nothing to delete' do
project3 = build_stubbed(:project)
cache3 = described_class.for(project3)
expect { cache3.delete_all! }.not_to raise_error
end
end
# The key catalog must be rebuilt correctly after a bulk deletion.
context 'multiple fetches and deletes' do
specify :aggregate_failures do
cache1.delete_all!
expect(Rails.cache.exist?(project1_key1)).to be(false)
expect(Rails.cache.exist?(project1_key2)).to be(false)
cache1.fetch("#{project1.id}_key3") { 'data1' }
cache1.fetch("#{project1.id}_key4") { 'data2' }
expect(cache1.fetch("#{project1.id}_key3")).to eq('data1')
expect(cache1.fetch("#{project1.id}_key4")).to eq('data2')
cache1.delete_all!
expect(Rails.cache.exist?("#{project1.id}_key3")).to be(false)
expect(Rails.cache.exist?("#{project1.id}_key4")).to be(false)
end
end
end
......@@ -9,7 +9,10 @@ RSpec.describe Gitlab::Metrics::Dashboard::RepoDashboardFinder do
describe '.list_dashboards' do
it 'deletes dashboard cache entries' do
  # Stub the project-scoped cache so the spec asserts on the instance
  # method; with .for stubbed, the class-level path is never exercised.
  cache = instance_double(Gitlab::Metrics::Dashboard::Cache)
  allow(Gitlab::Metrics::Dashboard::Cache).to receive(:for).and_return(cache)
  expect(cache).to receive(:delete_all!)

  described_class.list_dashboards(project)
end
......
......@@ -111,7 +111,8 @@ RSpec.describe Metrics::Dashboard::CustomMetricEmbedService do
it_behaves_like 'valid embedded dashboard service response'
it 'does not cache the unprocessed dashboard' do
  # Fail spec if any method of Cache class is called: the bare double
  # raises on every message, so any cache access aborts the example.
  stub_const('Gitlab::Metrics::Dashboard::Cache', double)

  described_class.new(*service_params).get_dashboard
end
......
......@@ -87,7 +87,8 @@ RSpec.describe Metrics::Dashboard::GitlabAlertEmbedService do
end
it 'does not cache the unprocessed dashboard' do
  # Fail spec if any method of Cache class is called: the bare double
  # raises on every message, so any cache access aborts the example.
  stub_const('Gitlab::Metrics::Dashboard::Cache', double)

  described_class.new(*service_params).get_dashboard
end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.