Commit 1de5b8db authored by Shinya Maeda

Fix Live trace

parent 3a99a6b9
@@ -202,17 +202,6 @@ module Gitlab
        written_size
      end

-     def truncate_chunk(offset)
-       chunk_store.open(job_id, chunk_index, params_for_store) do |store|
-         with_callbacks(:truncate_chunk, store) do
-           removed_size = store.size - offset
-           store.truncate!(offset)
-           removed_size
-         end
-       end
-     end
-
      def params_for_store(c_index = chunk_index)
        {
          buffer_size: buffer_size,
@@ -241,12 +230,8 @@ module Gitlab
        (size / buffer_size.to_f).ceil
      end

-     def first_chunk?
-       chunk_index == 0
-     end
-
-     def last_chunk?
-       (chunk_start...chunk_end).include?(tell)
+     def last_range
+       ((size / buffer_size) * buffer_size..size)
      end

      def chunk_store
......
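For context, `last_range` (added above) replaces the removed `first_chunk?`/`last_chunk?` predicates: it is the byte range that starts after the last complete buffer-sized chunk and runs to EOF. A minimal sketch of the arithmetic with illustrative values (`size` and `buffer_size` stand in for the ChunkedIO attributes of the same names):

```ruby
# Illustrative values: a 300-byte trace stored in 128-byte chunks.
size = 300
buffer_size = 128

# Bytes 0..255 fill two complete chunks; the trailing partial chunk
# spans bytes 256..300, and that is exactly what last_range covers.
last_range = ((size / buffer_size) * buffer_size..size)

last_range.include?(0)   # => false -- inside a complete chunk
last_range.include?(256) # => true  -- inside the trailing partial chunk
last_range.include?(300) # => true  -- EOF is part of the last range
```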
@@ -5,8 +5,7 @@ module Gitlab
    class LiveTrace < ChunkedIO
      class << self
        def exist?(job_id)
-         ChunkStores::Redis.chunks_count(job_id) > 0 ||
-           ChunkStores::Database.chunks_count(job_id) > 0
+         ChunkStore::Redis.chunks_count(job_id) > 0 || ChunkStore::Database.chunks_count(job_id) > 0
        end
      end
@@ -22,7 +21,7 @@ module Gitlab
        end
      end

-     # Efficient process than iterating each
+     # This is more efficient than iterating each chunk store and deleting
      def truncate(offset)
        if offset == 0
          delete
@@ -33,13 +32,9 @@ module Gitlab
        end
      end

-     def present?
-       self.exist?(job_id)
-     end
-
      def delete
-       ChunkStores::Redis.delete_all(job_id)
-       ChunkStores::Database.delete_all(job_id)
+       ChunkStore::Redis.delete_all(job_id)
+       ChunkStore::Database.delete_all(job_id)
      end

      private
@@ -50,7 +45,7 @@ module Gitlab
      end

      def chunk_store
-       if last_chunk? || eof?
+       if last_range.include?(tell)
          ChunkStore::Redis
        else
          ChunkStore::Database
......
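The `chunk_store` change above is where `last_range` pays off: a position inside the trailing partial chunk is routed to Redis, while positions inside completed chunks are routed to the database. A hedged, standalone sketch of that routing decision (the method name and symbols here are illustrative, not the real API):

```ruby
# Hypothetical standalone version of LiveTrace#chunk_store's routing.
def store_for(tell, size, buffer_size)
  last_range = ((size / buffer_size) * buffer_size..size)

  if last_range.include?(tell)
    :redis    # stands in for ChunkStore::Redis -- the growing tail chunk
  else
    :database # stands in for ChunkStore::Database -- completed chunks
  end
end

store_for(256, 300, 128) # => :redis
store_for(10, 300, 128)  # => :database
```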
@@ -9,10 +9,10 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do
  let(:mode) { 'rb' }

  describe 'ChunkStore is Redis', :partial_support do
-   let(:chunk_store) { Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis }
+   let(:chunk_stores) { [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis] }

    before do
-     allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store)
+     allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_stores.first)
      allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes)
    end

@@ -20,10 +20,10 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do
  end

  describe 'ChunkStore is Database' do
-   let(:chunk_store) { Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database }
+   let(:chunk_stores) { [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database] }

    before do
-     allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store)
+     allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_stores.first)
      allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes)
    end
......
require 'spec_helper'

describe Gitlab::Ci::Trace::ChunkedFile::LiveTrace, :clean_gitlab_redis_cache do
- include LiveTraceHelpers
+ include ChunkedIOHelpers

- let(:chunked_io) { described_class.new(job_id, mode) }
+ let(:chunked_io) { described_class.new(job_id, nil, mode) }
  let(:job) { create(:ci_build) }
  let(:job_id) { job.id }
  let(:size) { sample_trace_size }
  let(:mode) { 'rb' }

- describe '#write' do
-   subject { chunked_io.write(data) }
-
-   let(:data) { sample_trace_raw }
-
-   context 'when write mode' do
-     let(:mode) { 'wb' }
+ let(:chunk_stores) do
+   [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis,
+    Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database]
+ end

-     context 'when buffer size is smaller than file size' do
-       before do
-         set_smaller_buffer_size_than(size)
-       end
+ describe 'ChunkStores are Redis and Database', :partial_support do
+   it_behaves_like 'ChunkedIO shared tests'
+ end

-       it 'writes a trace' do
-         is_expected.to eq(data.length)
+ describe '.exist?' do
+   subject { described_class.exist?(job_id) }

-         described_class.open(job_id, 'rb') do |stream|
-           expect(stream.read).to eq(data)
-           expect(total_chunks_count).to eq(stream.send(:chunks_count))
-           expect(total_chunks_size).to eq(data.length)
-         end
-       end
+   context 'when a chunk exists in a store' do
+     before do
+       fill_trace_to_chunks(sample_trace_raw)
+     end

-     context 'when buffer size is larger than file size' do
-       before do
-         set_larger_buffer_size_than(size)
-       end
+     it { is_expected.to be_truthy }
    end

-       it 'writes a trace' do
-         is_expected.to eq(data.length)
+   context 'when chunks do not exist in any store' do
+     it { is_expected.to be_falsey }
    end
  end

-         described_class.open(job_id, 'rb') do |stream|
-           expect(stream.read).to eq(data)
-           expect(total_chunks_count).to eq(stream.send(:chunks_count))
-           expect(total_chunks_size).to eq(data.length)
-         end
-       end
-     end

+ describe '#truncate' do
+   subject { chunked_io.truncate(offset) }

-     context 'when data is nil' do
-       let(:data) { nil }
-       let(:mode) { 'a+b' }
-
-       it 'writes a trace' do
-         expect { subject } .to raise_error('Could not write empty data')
-       end
-     end

+   before do
+     fill_trace_to_chunks(sample_trace_raw)
+   end

-   context 'when append mode' do
-     let(:original_data) { 'original data' }
-     let(:total_size) { original_data.length + data.length }
-
-     context 'when buffer size is smaller than file size' do
-       before do
-         set_smaller_buffer_size_than(size)
-         fill_trace_to_chunks(original_data)
-       end
-
-       it 'appends a trace' do
-         described_class.open(job_id, 'a+b') do |stream|
-           expect(stream.write(data)).to eq(data.length)
-         end
-
-         described_class.open(job_id, 'rb') do |stream|
-           expect(stream.read).to eq(original_data + data)
-           expect(total_chunks_count).to eq(stream.send(:chunks_count))
-           expect(total_chunks_size).to eq(total_size)
-         end
-       end
-     end

+   context 'when offset is 0' do
+     let(:offset) { 0 }

-     context 'when buffer size is larger than file size' do
-       before do
-         set_larger_buffer_size_than(size)
-         fill_trace_to_chunks(original_data)
-       end
-
-       it 'appends a trace' do
-         described_class.open(job_id, 'a+b') do |stream|
-           expect(stream.write(data)).to eq(data.length)
-         end
-
-         described_class.open(job_id, 'rb') do |stream|
-           expect(stream.read).to eq(original_data + data)
-           expect(total_chunks_count).to eq(stream.send(:chunks_count))
-           expect(total_chunks_size).to eq(total_size)
-         end
-       end

+     it 'deletes all chunks' do
+       expect { subject }.to change { described_class.exist?(job_id) }.from(true).to(false)
+     end
    end
  end

- describe '#truncate' do
-   context 'when data exists' do
-     context 'when buffer size is smaller than file size' do
-       before do
-         puts "#{self.class.name} - #{__callee__}: ===== 1"
-         set_smaller_buffer_size_than(size)
-         fill_trace_to_chunks(sample_trace_raw)
-       end
-
-       it 'truncates a trace' do
-         puts "#{self.class.name} - #{__callee__}: ===== 2"
-         described_class.open(job_id, 'rb') do |stream|
-           expect(stream.read).to eq(sample_trace_raw)
-         end
-
-         puts "#{self.class.name} - #{__callee__}: ===== 3"
-         described_class.open(job_id, 'wb') do |stream|
-           stream.truncate(0)
-         end
-
-         puts "#{self.class.name} - #{__callee__}: ===== 4"
-         expect(total_chunks_count).to eq(0)
-         expect(total_chunks_size).to eq(0)
-
-         puts "#{self.class.name} - #{__callee__}: ===== 5"
-         described_class.open(job_id, 'rb') do |stream|
-           expect(stream.read).to be_empty
-         end
-       end
-
-       context 'when offset is negative' do
-         it 'raises an error' do
-           described_class.open(job_id, 'wb') do |stream|
-             expect { stream.truncate(-1) }.to raise_error('Offset is out of bound')
-           end
-         end
-       end
-
-       context 'when offset is larger than file size' do
-         it 'raises an error' do
-           described_class.open(job_id, 'wb') do |stream|
-             expect { stream.truncate(size + 1) }.to raise_error('Offset is out of bound')
-           end
-         end
-       end
-     end
-
-     context 'when buffer size is larger than file size' do
-       before do
-         set_larger_buffer_size_than(size)
-         fill_trace_to_chunks(sample_trace_raw)
-       end
-
-       it 'truncates a trace' do
-         described_class.open(job_id, 'rb') do |stream|
-           expect(stream.read).to eq(sample_trace_raw)
-         end

+   context 'when offset is size' do
+     let(:offset) { sample_trace_raw.length }

-         described_class.open(job_id, 'wb') do |stream|
-           stream.truncate(0)
-         end

+     it 'does nothing' do
+       expect { subject }.not_to change { described_class.exist?(job_id) }
+     end
+   end

-         described_class.open(job_id, 'rb') do |stream|
-           expect(stream.read).to be_empty
-         end

+   context 'when offset is else' do
+     let(:offset) { 10 }

-         expect(total_chunks_count).to eq(0)
-         expect(total_chunks_size).to eq(0)
-       end

+     it 'raises an error' do
+       expect { subject }.to raise_error('Unexpected operation')
+     end
+   end
+ end

-     context 'when data does not exist' do
+ describe '#delete' do
+   subject { chunked_io.delete }

+   context 'when a chunk exists in a store' do
      before do
-       set_smaller_buffer_size_than(size)
+       fill_trace_to_chunks(sample_trace_raw)
      end

-     it 'truncates a trace' do
-       described_class.open(job_id, 'wb') do |stream|
-         stream.truncate(0)
-         expect(stream.send(:tell)).to eq(0)
-         expect(stream.send(:size)).to eq(0)
-       end

+     it 'deletes' do
+       expect { subject }.to change { described_class.exist?(job_id) }.from(true).to(false)
+     end
+   end
  end

- def total_chunks_count
-   Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis.chunks_count(job_id) +
-     Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database.chunks_count(job_id)
- end
-
- def total_chunks_size
-   Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis.chunks_size(job_id) +
-     Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database.chunks_size(job_id)
- end

+   context 'when chunks do not exist in any store' do
+     it 'deletes' do
+       expect { subject }.not_to change { described_class.exist?(job_id) }
+     end
+   end
+ end
end
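Taken together, the new `#truncate` and `#delete` examples pin down a deliberately narrow truncation contract for live traces. A sketch of the behavior the spec asserts (illustrative calls, not the implementation):

```ruby
# live_trace stands in for a LiveTrace opened for writing.
live_trace.truncate(0)               # fast path: deletes every chunk from both stores
live_trace.truncate(live_trace.size) # no-op: nothing changes
live_trace.truncate(10)              # anything else raises 'Unexpected operation'
```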
......
@@ -6,31 +6,14 @@ module ChunkedIOHelpers
  end

  def sample_trace_raw
-   if chunk_store == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis
-     File.read(expand_fixture_path('trace/sample_trace'))
+   # ChunkStore::Database doesn't support appending, so the test data size has to be a least common multiple
+   if chunk_stores.first == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database
+     '01234567' * 32 # 256 bytes
    else
-     '01234567' * 32
+     File.read(expand_fixture_path('trace/sample_trace'))
    end
  end
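The fixed `'01234567' * 32` payload exists because a Database-backed chunk cannot be appended to: test writes must always fill whole chunks. A quick check of the arithmetic (the 128-byte block size comes from the buffer-size helpers below):

```ruby
data = '01234567' * 32 # => 256 bytes
buffer_size = 128      # block size assumed by the helpers below

data.length % buffer_size # => 0, so the data splits into exactly two full
                          # chunks and no Database chunk ever needs appending
```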
- # def sample_trace_raw_for_live_trace
- #   File.read(expand_fixture_path('trace/sample_trace'))
- # end
-
- # def sample_trace_size_for_live_trace
- #   sample_trace_raw_for_live_trace.length
- # end
-
- # def fill_trace_to_chunks_for_live_trace(data)
- #   stream = described_class.new(job_id, 'a+b')
- #   stream.write(data)
- #   stream.close
- # end
-
- # def stub_chunk_store_get_failed
- #   allow_any_instance_of(chunk_store).to receive(:get).and_return(nil)
- # end

  def set_smaller_buffer_size_than(file_size)
    blocks = (file_size / 128)
    new_size = (blocks / 2) * 128
@@ -42,4 +25,8 @@ module ChunkedIOHelpers
    new_size = (blocks * 2) * 128
    allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
  end

+ def set_half_buffer_size_of(file_size)
+   allow_any_instance_of(described_class).to receive(:buffer_size).and_return(file_size / 2)
+ end
end
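A worked example of the three buffer-size helpers, assuming a hypothetical 1,000-byte sample trace: the first rounds down to half the number of 128-byte blocks, the second doubles it, and the new `set_half_buffer_size_of` simply halves the file size:

```ruby
file_size = 1_000        # hypothetical sample trace size
blocks = file_size / 128 # => 7 (integer division)

(blocks / 2) * 128       # => 384  -- smaller than the file size
(blocks * 2) * 128       # => 1792 -- larger than the file size
file_size / 2            # => 500  -- exactly half of the file size
```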
......
- module LiveTraceHelpers
-   def fill_trace_to_chunks(data)
-     stream = described_class.new(job_id, 'wb')
-     stream.write(data)
-     stream.close
-   end
-
-   def sample_trace_raw
-     File.read(expand_fixture_path('trace/sample_trace'))
-   end
-
-   def sample_trace_size
-     sample_trace_raw.length
-   end
-
-   def stub_chunk_store_get_failed
-     allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis).to receive(:get).and_return(nil)
-     allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database).to receive(:get).and_return(nil)
-   end
-
-   def set_smaller_buffer_size_than(file_size)
-     blocks = (file_size / 128)
-     new_size = (blocks / 2) * 128
-     allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
-   end
-
-   def set_larger_buffer_size_than(file_size)
-     blocks = (file_size / 128)
-     new_size = (blocks * 2) * 128
-     allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
-   end
- end
......
shared_examples "ChunkedIO shared tests" do
  around(:each, :partial_support) do |example|
-   example.run if chunk_store == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis
+   example.run if chunk_stores.first == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis
  end

  describe '#new' do
@@ -165,7 +165,7 @@ shared_examples "ChunkedIO shared tests" do
    end

    it 'calls get_chunk only once' do
-     expect(chunk_store).to receive(:open).once.and_call_original
+     expect(chunk_stores.first).to receive(:open).once.and_call_original

      described_class.new(job_id, nil, 'rb').each_line { |line| }
    end
@@ -178,15 +178,19 @@ shared_examples "ChunkedIO shared tests" do
    context 'when read the whole size' do
      let(:length) { nil }

+     shared_examples 'reads a trace' do
+       it do
+         is_expected.to eq(sample_trace_raw)
+       end
+     end

      context 'when buffer size is smaller than file size' do
        before do
          set_smaller_buffer_size_than(sample_trace_raw.length)
          fill_trace_to_chunks(sample_trace_raw)
        end

-       it 'reads a trace' do
-         is_expected.to eq(sample_trace_raw)
-       end
+       it_behaves_like 'reads a trace'
      end

      context 'when buffer size is larger than file size', :partial_support do
@@ -195,9 +199,16 @@ shared_examples "ChunkedIO shared tests" do
        fill_trace_to_chunks(sample_trace_raw)
      end

-     it 'reads a trace' do
-       is_expected.to eq(sample_trace_raw)
+     it_behaves_like 'reads a trace'
    end

+   context 'when buffer size is half of file size' do
+     before do
+       set_half_buffer_size_of(sample_trace_raw.length)
+       fill_trace_to_chunks(sample_trace_raw)
+     end
+
+     it_behaves_like 'reads a trace'
+   end
  end
@@ -286,7 +297,7 @@ shared_examples "ChunkedIO shared tests" do
    let(:string_io) { StringIO.new(sample_trace_raw) }

    shared_examples 'all line matching' do
-     it 'reads a line' do
+     it do
        (0...sample_trace_raw.lines.count).each do
          expect(chunked_io.readline).to eq(string_io.readline)
        end
@@ -311,6 +322,15 @@ shared_examples "ChunkedIO shared tests" do
      it_behaves_like 'all line matching'
    end

+   context 'when buffer size is half of file size' do
+     before do
+       set_half_buffer_size_of(sample_trace_raw.length)
+       fill_trace_to_chunks(sample_trace_raw)
+     end
+
+     it_behaves_like 'all line matching'
+   end

    context 'when pos is at middle of the file' do
      before do
        set_smaller_buffer_size_than(sample_trace_raw.length)
@@ -331,40 +351,46 @@ shared_examples "ChunkedIO shared tests" do
    let(:data) { sample_trace_raw }

-   context 'when append mode', :partial_support do
+   context 'when append mode' do
      let(:mode) { 'a+b' }

      context 'when data does not exist' do
-       context 'when buffer size is smaller than file size' do
-         before do
-           set_smaller_buffer_size_than(sample_trace_raw.length)
-         end
-
-         it 'writes a trace' do
+       shared_examples 'writes a trace' do
+         it do
            is_expected.to eq(data.length)

            described_class.new(job_id, nil, 'rb') do |stream|
              expect(stream.read).to eq(data)
-             expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
-             expect(chunk_store.chunks_size(job_id)).to eq(data.length)
+             expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_count(job_id) })
+               .to eq(stream.send(:chunks_count))
+             expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_size(job_id) })
+               .to eq(data.length)
            end
          end
        end

+       context 'when buffer size is smaller than file size' do
+         before do
+           set_smaller_buffer_size_than(data.length)
+         end
+
+         it_behaves_like 'writes a trace'
+       end

        context 'when buffer size is larger than file size', :partial_support do
          before do
            set_larger_buffer_size_than(data.length)
          end

-         it 'writes a trace' do
-           is_expected.to eq(data.length)
+         it_behaves_like 'writes a trace'
        end

-         described_class.new(job_id, nil, 'rb') do |stream|
-           expect(stream.read).to eq(data)
-           expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
-           expect(chunk_store.chunks_size(job_id)).to eq(data.length)
-         end
-       end

+       context 'when buffer size is half of file size' do
+         before do
+           set_half_buffer_size_of(data.length)
+         end
+
+         it_behaves_like 'writes a trace'
+       end

      context 'when data is nil' do
@@ -376,46 +402,51 @@ shared_examples "ChunkedIO shared tests" do
      end
    end

-   context 'when data already exists' do
+   context 'when data already exists', :partial_support do
      let(:exist_data) { 'exist data' }
      let(:total_size) { exist_data.length + data.length }

-     context 'when buffer size is smaller than file size' do
-       before do
-         set_smaller_buffer_size_than(data.length)
-         fill_trace_to_chunks(exist_data)
-       end
-
-       it 'appends a trace' do
+     shared_examples 'appends a trace' do
+       it do
          described_class.new(job_id, nil, 'a+b') do |stream|
            expect(stream.write(data)).to eq(data.length)
          end

          described_class.new(job_id, nil, 'rb') do |stream|
            expect(stream.read).to eq(exist_data + data)
-           expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
-           expect(chunk_store.chunks_size(job_id)).to eq(total_size)
+           expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_count(job_id) })
+             .to eq(stream.send(:chunks_count))
+           expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_size(job_id) })
+             .to eq(total_size)
          end
        end
      end

-     context 'when buffer size is larger than file size' do
+     context 'when buffer size is smaller than file size' do
+       before do
+         set_smaller_buffer_size_than(data.length)
+         fill_trace_to_chunks(exist_data)
+       end
+
+       it_behaves_like 'appends a trace'
+     end

+     context 'when buffer size is larger than file size', :partial_support do
        before do
          set_larger_buffer_size_than(data.length)
          fill_trace_to_chunks(exist_data)
        end

-       it 'appends a trace' do
-         described_class.new(job_id, nil, 'a+b') do |stream|
-           expect(stream.write(data)).to eq(data.length)
-         end
+       it_behaves_like 'appends a trace'
      end

-       described_class.new(job_id, nil, 'rb') do |stream|
-         expect(stream.read).to eq(exist_data + data)
-         expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
-         expect(chunk_store.chunks_size(job_id)).to eq(total_size)
-       end

+     context 'when buffer size is half of file size' do
+       before do
+         set_half_buffer_size_of(data.length)
+         fill_trace_to_chunks(exist_data)
+       end
+
+       it_behaves_like 'appends a trace'
+     end
    end
  end
......