Commit 5be0daca authored by Robert May

Use Rails' serialisation instead of JSON

parent 6bfecb09
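In short: ActiveSupport cache stores serialise and deserialise Ruby objects themselves (via Marshal by default), so the cached hash no longer needs to be JSON-encoded on write and JSON-parsed on read, and merge state can be stored as real booleans instead of "true"/"false" strings. A minimal, self-contained sketch of the before/after round-trip, using ActiveSupport::Cache::MemoryStore as a stand-in for GitLab's Redis-backed repository cache (not code from this commit):

```ruby
# Illustration only: MemoryStore stands in for the Redis-backed repository cache.
require "json"
require "active_support"
require "active_support/cache"
require "active_support/core_ext/integer/time"

cache = ActiveSupport::Cache::MemoryStore.new

# Before: the value was serialised by hand, with merge state kept as strings.
cache.write(:merged_branch_names, { "feature" => "true" }.to_json, expires_in: 10.minutes)
JSON.parse(cache.read(:merged_branch_names)) #=> {"feature"=>"true"}

# After: the hash is written and read back as an object, with real booleans,
# because the cache store handles serialisation itself.
cache.write(:merged_branch_names, { "feature" => true }, expires_in: 10.minutes)
cache.read(:merged_branch_names) #=> {"feature"=>true}
```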
@@ -65,6 +65,8 @@ class Repository
     xcode_config: :xcode_project?
   }.freeze
 
+  MERGED_BRANCH_NAMES_CACHE_DURATION = 10.minutes
+
   def initialize(full_path, project, disk_path: nil, repo_type: Gitlab::GlRepository::PROJECT)
     @full_path = full_path
     @disk_path = disk_path || full_path
@@ -923,7 +925,7 @@ class Repository
     return raw_repository.merged_branch_names(branch_names) if skip_cache
 
     cached_branch_names = cache.read(:merged_branch_names)
-    merged_branch_names_hash = cached_branch_names.present? ? JSON.parse(cached_branch_names) : {}
+    merged_branch_names_hash = cached_branch_names || {}
     missing_branch_names = branch_names.select { |bn| !merged_branch_names_hash.key?(bn) }
 
     # Track some metrics here whilst feature flag is enabled
@@ -935,17 +937,17 @@ class Repository
       counter.increment(full_hit: missing_branch_names.empty?)
     end
 
-    unless missing_branch_names.empty?
+    if missing_branch_names.any?
       merged = raw_repository.merged_branch_names(missing_branch_names)
 
       missing_branch_names.each do |bn|
-        merged_branch_names_hash[bn] = merged.include?(bn).to_s
+        merged_branch_names_hash[bn] = merged.include?(bn)
       end
 
-      cache.write(:merged_branch_names, merged_branch_names_hash.to_json, expires_in: 10.minutes)
+      cache.write(:merged_branch_names, merged_branch_names_hash, expires_in: MERGED_BRANCH_NAMES_CACHE_DURATION)
     end
 
-    Set.new(merged_branch_names_hash.select { |k, v| v.to_s == "true" }.keys)
+    Set.new(merged_branch_names_hash.select { |_, v| v }.keys)
   end
 
   def merge_base(*commits_or_ids)
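Pieced together from the hunks above, the flow after this change is: read the cached hash as an object, compute merge state only for the branch names missing from it (as booleans), write the hash back as an object with an expiry, and return the set of merged names. A hypothetical, trimmed-down model of that pattern (a plain Hash stands in for the cache, a lambda for raw_repository.merged_branch_names, and the MERGED_BRANCH_NAMES_CACHE_DURATION expiry is omitted since a Hash has no TTL):

```ruby
# Hypothetical model of the caching pattern above; not GitLab's Repository class.
require "set"

class MergedBranchNamesLookup
  def initialize(store, merged_lookup)
    @store = store                 # stand-in for the Redis-backed repository cache
    @merged_lookup = merged_lookup # stand-in for raw_repository.merged_branch_names
  end

  def merged_branch_names(branch_names)
    hash = @store[:merged_branch_names] || {}
    missing = branch_names.reject { |bn| hash.key?(bn) }

    if missing.any?
      merged = @merged_lookup.call(missing)
      missing.each { |bn| hash[bn] = merged.include?(bn) } # booleans, not "true"/"false"
      @store[:merged_branch_names] = hash                  # stored as an object, no to_json
    end

    Set.new(hash.select { |_, v| v }.keys)
  end
end

store  = {}
lookup = ->(names) { names & ["definitely_merged"] }
repo   = MergedBranchNamesLookup.new(store, lookup)

repo.merged_branch_names(%w[test definitely_merged]) #=> #<Set: {"definitely_merged"}>
store                                                #=> {"test"=>false, "definitely_merged"=>true}
```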
@@ -502,10 +502,10 @@ describe Repository do
 
     let(:merge_state_hash) do
       {
-        "test" => "false",
-        "beep" => "false",
-        "boop" => "false",
-        "definitely_merged" => "true"
+        "test" => false,
+        "beep" => false,
+        "boop" => false,
+        "definitely_merged" => true
       }
     end
 
@@ -538,7 +538,7 @@ describe Repository do
 
       describe "cache values" do
         it "writes the values to redis" do
-          expect(cache).to receive(:write).with(cache_key, merge_state_hash.to_json, expires_in: 10.minutes)
+          expect(cache).to receive(:write).with(cache_key, merge_state_hash, expires_in: Repository::MERGED_BRANCH_NAMES_CACHE_DURATION)
 
           subject
         end
@@ -546,20 +546,21 @@ describe Repository do
         it "matches the supplied hash" do
           subject
 
-          expect(JSON.parse(cache.read(cache_key))).to eq(merge_state_hash)
+          expect(cache.read(cache_key)).to eq(merge_state_hash)
         end
       end
     end
 
     context "cache is not empty" do
       before do
-        cache.write(cache_key, merge_state_hash.to_json)
+        cache.write(cache_key, merge_state_hash)
       end
 
       it { is_expected.to eq(already_merged) }
 
       it "doesn't fetch from the disk" do
         expect(repository.raw_repository).not_to receive(:merged_branch_names)
+
         subject
       end
     end
@@ -568,13 +569,14 @@ describe Repository do
       before do
         allow(repository.raw_repository).to receive(:merged_branch_names).with(["boop"]).and_return([])
         hash = merge_state_hash.except("boop")
-        cache.write(cache_key, hash.to_json)
+        cache.write(cache_key, hash)
      end
 
       it { is_expected.to eq(already_merged) }
 
       it "does fetch from the disk" do
         expect(repository.raw_repository).to receive(:merged_branch_names).with(["boop"])
+
         subject
       end
     end