Commit d1632da8 authored by Shinya Maeda

Implement basic live trace feature

parent 8a1c2bc4
@@ -61,8 +61,8 @@ module Gitlab
       stream = Gitlab::Ci::Trace::Stream.new do
         if trace_artifact
           trace_artifact.open
-        elsif Feature.enabled?('ci_enable_live_trace') && LiveTraceFile.exists?(job.id)
-          LiveTraceFile.new(job.id, "rb")
+        elsif Feature.enabled?('ci_enable_live_trace') && ChunkedFile::LiveTrace.exists?(job.id)
+          ChunkedFile::LiveTrace.new(job.id, "rb")
         elsif current_path
           File.open(current_path, "rb")
         elsif old_trace
@@ -81,7 +81,7 @@ module Gitlab
           if current_path
             current_path
           else
-            LiveTraceFile.new(job.id, "a+b")
+            ChunkedFile::LiveTrace.new(job.id, "a+b")
           end
         else
           File.open(ensure_path, "a+b")
@@ -109,8 +109,8 @@ module Gitlab
         raise ArchiveError, 'Already archived' if trace_artifact
         raise ArchiveError, 'Job is not finished yet' unless job.complete?

-        if Feature.enabled?('ci_enable_live_trace') && LiveTraceFile.exists?(job.id)
-          LiveTraceFile.open(job.id, "wb") do |stream|
+        if Feature.enabled?('ci_enable_live_trace') && ChunkedFile::LiveTrace.exists?(job.id)
+          ChunkedFile::LiveTrace.open(job.id, "wb") do |stream|
             archive_stream!(stream)
             stream.truncate(0)
           end
......
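For context, the archive path above relies on the block form of ChunkedFile::LiveTrace.open. A minimal usage sketch, illustrative only, assuming the 'ci_enable_live_trace' flag is enabled and a live trace exists for `job`:

    Gitlab::Ci::Trace::ChunkedFile::LiveTrace.open(job.id, "wb") do |stream|
      archive_stream!(stream)  # persist the live trace (method from the surrounding Trace class)
      stream.truncate(0)       # then drop all live chunks
    end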
@@ -4,14 +4,10 @@ module Gitlab
     module ChunkedFile
       module ChunkStore
         class Base
-          attr_reader :buffer_size
-          attr_reader :chunk_start
-          attr_reader :url
+          attr_reader :params

           def initialize(*identifiers, **params)
-            @buffer_size = params[:buffer_size]
-            @chunk_start = params[:chunk_start]
-            @url = params[:url]
+            @params = params
           end

           def close
@@ -43,7 +39,7 @@ module Gitlab
           end

           def filled?
-            size == buffer_size
+            size == params[:buffer_size]
           end
         end
       end
......
@@ -52,7 +52,8 @@ module Gitlab
         end

         def write!(data)
-          raise NotImplementedError, 'Partial write is not supported' unless buffer_size == data&.length
+          puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}"
+          raise NotImplementedError, 'Partial write is not supported' unless params[:buffer_size] == data&.length
           raise NotImplementedError, 'UPDATE is not supported' if job_trace_chunk.data

           job_trace_chunk.data = data
@@ -66,10 +67,13 @@ module Gitlab
         end

         def truncate!(offset)
-          raise NotImplementedError
+          raise NotImplementedError, 'Partial truncate is not supported' unless offset == 0
+
+          delete!
         end

         def delete!
+          puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}"
           job_trace_chunk.destroy!
         end
       end
......
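The database-backed store above supports whole-chunk operations only. A self-contained toy sketch of that contract (ToyDatabaseChunkStore is invented here for illustration; it is not GitLab code):

    class ToyDatabaseChunkStore
      def initialize(buffer_size)
        @buffer_size = buffer_size
        @data = nil
      end

      # Whole chunks only: the data must exactly fill the buffer, and a chunk
      # row is written once, never updated in place.
      def write!(data)
        raise NotImplementedError, 'Partial write is not supported' unless data&.length == @buffer_size
        raise NotImplementedError, 'UPDATE is not supported' if @data

        @data = data
        data.length
      end

      # Truncation is all-or-nothing: offset 0 deletes the chunk.
      def truncate!(offset)
        raise NotImplementedError, 'Partial truncate is not supported' unless offset == 0

        delete!
      end

      def delete!
        @data = nil
      end
    end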
@@ -68,6 +68,7 @@ module Gitlab
         end

         def write!(data)
+          puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}"
           Gitlab::Redis::Cache.with do |redis|
             redis.set(buffer_key, data)
             redis.strlen(buffer_key)
@@ -75,6 +76,7 @@ module Gitlab
         end

         def append!(data)
+          puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}"
           Gitlab::Redis::Cache.with do |redis|
             redis.append(buffer_key, data)
             data.length
@@ -82,8 +84,10 @@ module Gitlab
         end

         def truncate!(offset)
+          puts "#{self.class.name} - #{__callee__}: offset: #{offset.inspect} params[:chunk_index]: #{params[:chunk_index]}"
           Gitlab::Redis::Cache.with do |redis|
-            return unless redis.exists(buffer_key)
+            return 0 unless redis.exists(buffer_key)
+            return delete! if offset == 0

             truncated_data = redis.getrange(buffer_key, 0, offset)
             redis.set(buffer_key, truncated_data)
@@ -91,6 +95,7 @@ module Gitlab
         end

         def delete!
+          puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}"
           Gitlab::Redis::Cache.with do |redis|
             redis.del(buffer_key)
           end
......
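The Redis store maps each chunk operation onto a single Redis command. A standalone sketch using the redis-rb gem (the key name is invented for illustration, not GitLab's actual key scheme):

    require 'redis'

    redis = Redis.new
    key = 'trace:42:chunk:0'       # illustrative key

    redis.set(key, 'hello')        # write!: overwrite the chunk
    redis.append(key, ' world')    # append!: extend the chunk in place
    redis.strlen(key)              # => 11, the store's current size
    redis.getrange(key, 0, 4)      # => "hello"; note GETRANGE's end index is inclusive
    redis.del(key)                 # delete!: drop the chunk entirely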
 ##
-# This class is designed as it's compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html)
+# ChunkedIO Engine
+#
+# Choose a chunk_store to suit your purpose
+# This class is designed to be compatible with the IO class (https://ruby-doc.org/core-2.3.1/IO.html)
 module Gitlab
   module Ci
     class Trace
       module ChunkedFile
         class ChunkedIO
           class << self
-            def open(job_id, size, mode)
-              stream = self.new(job_id, size, mode)
+            def open(*args)
+              stream = self.new(*args)

               yield stream
             ensure
-              stream.close
+              stream&.close
             end
           end

-          BUFFER_SIZE = 128.kilobytes
-
           WriteError = Class.new(StandardError)
           FailedToGetChunkError = Class.new(StandardError)

@@ -124,32 +126,16 @@ module Gitlab
             raise WriteError, 'Could not write without lock' unless write_lock_uuid
             raise WriteError, 'Could not write empty data' unless data.present?

-            data = data.dup
-
-            chunk_index_start = chunk_index
-            chunk_index_end = (tell + data.length) / BUFFER_SIZE
+            _data = data.dup
             prev_tell = tell

-            (chunk_index_start..chunk_index_end).each do |c_index|
-              chunk_store.open(job_id, c_index, params_for_store) do |store|
-                writable_space = BUFFER_SIZE - chunk_offset
-                writing_size = [writable_space, data.length].min
-
-                break unless writing_size > 0
-
-                if store.size > 0
-                  written_size = store.append!(data.slice!(0...writing_size))
-                else
-                  written_size = store.write!(data.slice!(0...writing_size))
-                end
-
-                raise WriteError, 'Written size mismatch' unless writing_size == written_size
-
-                @tell += written_size
-                @size = [tell, size].max
-
-                block.call(store, c_index) if block_given?
-              end
-            end
+            until _data.empty?
+              writable_space = buffer_size - chunk_offset
+              writing_size = [writable_space, _data.length].min
+              written_size = write_chunk!(_data.slice!(0...writing_size), &block)
+
+              @tell += written_size
+              @size = [tell, size].max
+            end

             tell - prev_tell
@@ -159,24 +145,19 @@ module Gitlab
             raise WriteError, 'Could not write without lock' unless write_lock_uuid
             raise WriteError, 'Offset is out of bound' if offset > size || offset < 0

-            chunk_index_start = (offset / BUFFER_SIZE)
-            chunk_index_end = chunks_count - 1
-
-            (chunk_index_start..chunk_index_end).reverse_each do |c_index|
-              chunk_store.open(job_id, c_index, params_for_store) do |store|
-                c_index_start = c_index * BUFFER_SIZE
-
-                if offset <= c_index_start
-                  store.delete!
-                else
-                  store.truncate!(offset - c_index_start) if store.size > 0
-                end
-
-                block.call(store, c_index) if block_given?
-              end
-            end
+            @tell = size - 1
+
+            until size == offset
+              truncatable_space = size - chunk_start
+              _chunk_offset = (offset <= chunk_start) ? 0 : offset % buffer_size
+              removed_size = truncate_chunk!(_chunk_offset, &block)
+
+              @tell -= removed_size
+              @size -= removed_size
+            end

-            @tell = @size = offset
+            @tell = [tell, 0].max
+            @size = [size, 0].max
           end

           def flush
@@ -198,48 +179,76 @@ module Gitlab
             chunk_store.open(job_id, chunk_index, params_for_store) do |store|
               @chunk = store.get

-              raise FailedToGetChunkError unless chunk
+              raise FailedToGetChunkError unless chunk && chunk.length > 0

               @chunk_range = (chunk_start...(chunk_start + chunk.length))
             end
           end

-          @chunk[chunk_offset..BUFFER_SIZE]
+          @chunk[chunk_offset..buffer_size]
         end

+          def write_chunk!(data, &block)
+            chunk_store.open(job_id, chunk_index, params_for_store) do |store|
+              written_size = if buffer_size == data.length
+                               store.write!(data)
+                             else
+                               store.append!(data)
+                             end
+
+              raise WriteError, 'Written size mismatch' unless data.length == written_size
+
+              block.call(store) if block_given?
+
+              written_size
+            end
+          end
+
+          def truncate_chunk!(offset, &block)
+            chunk_store.open(job_id, chunk_index, params_for_store) do |store|
+              removed_size = store.size - offset
+
+              store.truncate!(offset)
+
+              block.call(store) if block_given?
+
+              removed_size
+            end
+          end

-          def params_for_store
+          def params_for_store(c_index = chunk_index)
             {
-              buffer_size: BUFFER_SIZE,
-              chunk_start: chunk_start
+              buffer_size: buffer_size,
+              chunk_start: c_index * buffer_size,
+              chunk_index: c_index
             }
           end

           def chunk_offset
-            tell % BUFFER_SIZE
+            tell % buffer_size
           end

           def chunk_start
-            (tell / BUFFER_SIZE) * BUFFER_SIZE
+            chunk_index * buffer_size
           end

           def chunk_end
-            [chunk_start + BUFFER_SIZE, size].min
+            [chunk_start + buffer_size, size].min
           end

           def chunk_index
-            (tell / BUFFER_SIZE)
+            (tell / buffer_size)
           end

           def chunks_count
-            (size / BUFFER_SIZE) + (has_extra? ? 1 : 0)
+            (size / buffer_size) + (has_extra? ? 1 : 0)
           end

           def has_extra?
-            (size % BUFFER_SIZE) > 0
+            (size % buffer_size) > 0
           end

           def last_chunk?
-            chunk_index == (chunks_count - 1)
+            chunks_count == 0 || chunk_index == (chunks_count - 1) || chunk_index == chunks_count
           end

           def write_lock_key
@@ -249,6 +258,10 @@ module Gitlab
           def chunk_store
             raise NotImplementedError
           end
+
+          def buffer_size
+            raise NotImplementedError
+          end
         end
       end
     end
......
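The rewritten #write above drains its input with destructive slices, so each pass writes at most the space left in the current chunk. A dependency-free model of that loop (names invented for illustration; the real code delegates to write_chunk! and a chunk store):

    def chunked_write(data, tell, buffer_size, chunks)
      _data = data.dup
      prev_tell = tell

      until _data.empty?
        chunk_offset = tell % buffer_size
        writable_space = buffer_size - chunk_offset
        writing_size = [writable_space, _data.length].min

        slice = _data.slice!(0...writing_size)
        index = tell / buffer_size
        chunks[index] = (chunks[index] || '') + slice  # stand-in for write_chunk!

        tell += slice.length
      end

      tell - prev_tell
    end

    chunks = {}
    chunked_write('a' * 300, 0, 128, chunks)
    # => 300; chunks[0] and chunks[1] hold 128 bytes each, chunks[2] holds 44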
@@ -2,19 +2,9 @@ module Gitlab
   module Ci
     class Trace
       module ChunkedFile
-        class Remote < ChunkedIO
+        class HttpIO < ChunkedIO
           BUFFER_SIZE = 128.kilobytes

-          class << self
-            def open(job_id, mode)
-              stream = self.new(job_id, mode)
-
-              yield stream
-            ensure
-              stream.close
-            end
-          end
-
           InvalidURLError = Class.new(StandardError)

           attr_reader :uri
......
@@ -3,17 +3,7 @@ module Gitlab
     class Trace
       module ChunkedFile
         class LiveTrace < ChunkedIO
-          BUFFER_SIZE = 128.kilobytes
-
           class << self
-            def open(job_id, mode)
-              stream = self.new(job_id, mode)
-
-              yield stream
-            ensure
-              stream.close
-            end
-
             def exist?(job_id)
               ChunkStores::Redis.chunks_count(job_id) > 0 ||
                 ChunkStores::Database.chunks_count(job_id) > 0
@@ -21,7 +11,7 @@ module Gitlab
           end

           def initialize(job_id, mode)
-            super(job_id, calculate_size, mode)
+            super(job_id, calculate_size(job_id), mode)
           end

           def write(data)
@@ -29,30 +19,51 @@ module Gitlab
             super(data) do |store|
               if store.filled?
-                # Rotate data from redis to database
-                ChunkStores::Database.open(job_id, chunk_index, params_for_store) do |to_store|
+                # Once a chunk in Redis is filled, move its data to the database
+                ChunkStore::Database.open(job_id, chunk_index, params_for_store) do |to_store|
                   to_store.write!(store.get)
+                  store.delete!
                 end
               end
             end
           end

+          def truncate(offset)
+            super(offset) do |store|
+              next if chunk_index == 0
+
+              prev_chunk_index = chunk_index - 1
+
+              store.delete! if ChunkStore::Database.exist?(job_id, prev_chunk_index)
+
+              # Swap data from the database back to Redis so sizes below buffer_size can be truncated
+              ChunkStore::Database.open(job_id, prev_chunk_index, params_for_store(prev_chunk_index)) do |from_store|
+                ChunkStore::Redis.open(job_id, prev_chunk_index, params_for_store(prev_chunk_index)) do |to_store|
+                  to_store.write!(from_store.get)
+                  from_store.delete!
+                end
+              end
+            end
+          end

           private

-          def calculate_size
-            ChunkStores::Redis.chunks_size(job_id) +
-              ChunkStores::Database.chunks_size(job_id)
+          def calculate_size(job_id)
+            ChunkStore::Redis.chunks_size(job_id) +
+              ChunkStore::Database.chunks_size(job_id)
           end

           def chunk_store
             if last_chunk?
-              ChunkStores::Redis
+              ChunkStore::Redis
             else
-              ChunkStores::Database
+              ChunkStore::Database
             end
           end
+
+          def buffer_size
+            128.kilobytes
+          end
         end
       end
     end
......
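LiveTrace#write above rotates a chunk out of Redis once it is exactly buffer_size long. A toy model of that rotation (the two hashes stand in for ChunkStore::Redis and ChunkStore::Database; everything here is invented for illustration):

    BUFFER_SIZE = 8

    fast = {}     # chunk_index => data, plays the Redis role
    durable = {}  # chunk_index => data, plays the database role

    def rotate_if_filled(fast, durable, index, buffer_size)
      data = fast[index]
      return unless data && data.length == buffer_size  # store.filled?

      durable[index] = data  # to_store.write!(store.get)
      fast.delete(index)     # store.delete!
    end

    fast[0] = 'x' * BUFFER_SIZE
    rotate_if_filled(fast, durable, 0, BUFFER_SIZE)
    # durable == { 0 => "xxxxxxxx" }, fast == {}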
@@ -14,6 +14,7 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do
     before do
       allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store)
+      allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes)
     end

     it_behaves_like 'ChunkedIO shared tests'
@@ -24,6 +25,7 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do
     before do
       allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store)
+      allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes)
     end

     it_behaves_like 'ChunkedIO shared tests'
......
require 'spec_helper'

describe Gitlab::Ci::Trace::ChunkedFile::LiveTrace, :clean_gitlab_redis_cache do
  include LiveTraceHelpers

  let(:chunked_io) { described_class.new(job_id, mode) }
  let(:job) { create(:ci_build) }
  let(:job_id) { job.id }
  let(:size) { sample_trace_size }
  let(:mode) { 'rb' }

  describe '#write' do
    subject { chunked_io.write(data) }

    let(:data) { sample_trace_raw }

    context 'when write mode' do
      let(:mode) { 'wb' }

      context 'when buffer size is smaller than file size' do
        before do
          set_smaller_buffer_size_than(size)
        end

        it 'writes a trace' do
          is_expected.to eq(data.length)

          described_class.open(job_id, 'rb') do |stream|
            expect(stream.read).to eq(data)
            expect(total_chunks_count).to eq(stream.send(:chunks_count))
            expect(total_chunks_size).to eq(data.length)
          end
        end
      end

      context 'when buffer size is larger than file size' do
        before do
          set_larger_buffer_size_than(size)
        end

        it 'writes a trace' do
          is_expected.to eq(data.length)

          described_class.open(job_id, 'rb') do |stream|
            expect(stream.read).to eq(data)
            expect(total_chunks_count).to eq(stream.send(:chunks_count))
            expect(total_chunks_size).to eq(data.length)
          end
        end
      end

      context 'when data is nil' do
        let(:data) { nil }

        it 'raises an error' do
          expect { subject }.to raise_error('Could not write empty data')
        end
      end
    end

    context 'when append mode' do
      let(:original_data) { 'original data' }
      let(:total_size) { original_data.length + data.length }

      context 'when buffer size is smaller than file size' do
        before do
          set_smaller_buffer_size_than(size)
          fill_trace_to_chunks(original_data)
        end

        it 'appends a trace' do
          described_class.open(job_id, 'a+b') do |stream|
            expect(stream.write(data)).to eq(data.length)
          end

          described_class.open(job_id, 'rb') do |stream|
            expect(stream.read).to eq(original_data + data)
            expect(total_chunks_count).to eq(stream.send(:chunks_count))
            expect(total_chunks_size).to eq(total_size)
          end
        end
      end

      context 'when buffer size is larger than file size' do
        before do
          set_larger_buffer_size_than(size)
          fill_trace_to_chunks(original_data)
        end

        it 'appends a trace' do
          described_class.open(job_id, 'a+b') do |stream|
            expect(stream.write(data)).to eq(data.length)
          end

          described_class.open(job_id, 'rb') do |stream|
            expect(stream.read).to eq(original_data + data)
            expect(total_chunks_count).to eq(stream.send(:chunks_count))
            expect(total_chunks_size).to eq(total_size)
          end
        end
      end
    end
  end

  describe '#truncate' do
    context 'when data exists' do
      context 'when buffer size is smaller than file size' do
        before do
          puts "#{self.class.name} - #{__callee__}: ===== 1"
          set_smaller_buffer_size_than(size)
          fill_trace_to_chunks(sample_trace_raw)
        end

        it 'truncates a trace' do
          puts "#{self.class.name} - #{__callee__}: ===== 2"
          described_class.open(job_id, 'rb') do |stream|
            expect(stream.read).to eq(sample_trace_raw)
          end

          puts "#{self.class.name} - #{__callee__}: ===== 3"
          described_class.open(job_id, 'wb') do |stream|
            stream.truncate(0)
          end

          puts "#{self.class.name} - #{__callee__}: ===== 4"
          expect(total_chunks_count).to eq(0)
          expect(total_chunks_size).to eq(0)

          puts "#{self.class.name} - #{__callee__}: ===== 5"
          described_class.open(job_id, 'rb') do |stream|
            expect(stream.read).to be_empty
          end
        end

        context 'when offset is negative' do
          it 'raises an error' do
            described_class.open(job_id, 'wb') do |stream|
              expect { stream.truncate(-1) }.to raise_error('Offset is out of bound')
            end
          end
        end

        context 'when offset is larger than file size' do
          it 'raises an error' do
            described_class.open(job_id, 'wb') do |stream|
              expect { stream.truncate(size + 1) }.to raise_error('Offset is out of bound')
            end
          end
        end
      end

      context 'when buffer size is larger than file size' do
        before do
          set_larger_buffer_size_than(size)
          fill_trace_to_chunks(sample_trace_raw)
        end

        it 'truncates a trace' do
          described_class.open(job_id, 'rb') do |stream|
            expect(stream.read).to eq(sample_trace_raw)
          end

          described_class.open(job_id, 'wb') do |stream|
            stream.truncate(0)
          end

          described_class.open(job_id, 'rb') do |stream|
            expect(stream.read).to be_empty
          end

          expect(total_chunks_count).to eq(0)
          expect(total_chunks_size).to eq(0)
        end
      end
    end

    context 'when data does not exist' do
      before do
        set_smaller_buffer_size_than(size)
      end

      it 'truncates a trace' do
        described_class.open(job_id, 'wb') do |stream|
          stream.truncate(0)
          expect(stream.send(:tell)).to eq(0)
          expect(stream.send(:size)).to eq(0)
        end
      end
    end
  end

  def total_chunks_count
    Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis.chunks_count(job_id) +
      Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database.chunks_count(job_id)
  end

  def total_chunks_size
    Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis.chunks_size(job_id) +
      Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database.chunks_size(job_id)
  end
end
 module ChunkedIOHelpers
   def fill_trace_to_chunks(data)
-    stream = Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.new(job_id, data.length, 'wb')
+    stream = described_class.new(job_id, data.length, 'wb')
     stream.write(data)
     stream.close
   end
@@ -17,6 +17,20 @@ module ChunkedIOHelpers
     sample_trace_raw.length
   end

+  def sample_trace_raw_for_live_trace
+    File.read(expand_fixture_path('trace/sample_trace'))
+  end
+
+  def sample_trace_size_for_live_trace
+    sample_trace_raw_for_live_trace.length
+  end
+
+  def fill_trace_to_chunks_for_live_trace(data)
+    stream = described_class.new(job_id, 'wb')
+    stream.write(data)
+    stream.close
+  end
+
   def stub_chunk_store_get_failed
     allow_any_instance_of(chunk_store).to receive(:get).and_return(nil)
   end
@@ -24,12 +38,12 @@ module ChunkedIOHelpers
   def set_smaller_buffer_size_than(file_size)
     blocks = (file_size / 128)
     new_size = (blocks / 2) * 128
-    stub_const("Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::BUFFER_SIZE", new_size)
+    allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
   end

   def set_larger_buffer_size_than(file_size)
     blocks = (file_size / 128)
     new_size = (blocks * 2) * 128
-    stub_const("Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::BUFFER_SIZE", new_size)
+    allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
   end
 end
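Worked example of the buffer-size arithmetic in the two helpers above, assuming a 1000-byte trace:

    file_size = 1000
    blocks = file_size / 128   # => 7 (integer division)

    (blocks / 2) * 128         # => 384: smaller than the trace, so it spans several chunks
    (blocks * 2) * 128         # => 1792: larger than the trace, so it fits in one chunk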
module LiveTraceHelpers
  def fill_trace_to_chunks(data)
    stream = described_class.new(job_id, 'wb')
    stream.write(data)
    stream.close
  end

  def sample_trace_raw
    File.read(expand_fixture_path('trace/sample_trace'))
  end

  def sample_trace_size
    sample_trace_raw.length
  end

  def stub_chunk_store_get_failed
    allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis).to receive(:get).and_return(nil)
    allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database).to receive(:get).and_return(nil)
  end

  def set_smaller_buffer_size_than(file_size)
    blocks = (file_size / 128)
    new_size = (blocks / 2) * 128
    allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
  end

  def set_larger_buffer_size_than(file_size)
    blocks = (file_size / 128)
    new_size = (blocks * 2) * 128
    allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
  end
end
@@ -82,7 +82,7 @@ shared_examples "ChunkedIO shared tests" do
   describe '#each_line' do
     let(:string_io) { StringIO.new(sample_trace_raw) }

-    context 'when BUFFER_SIZE is smaller than file size' do
+    context 'when buffer size is smaller than file size' do
       before do
         set_smaller_buffer_size_than(size)
         fill_trace_to_chunks(sample_trace_raw)
@@ -94,7 +94,7 @@ shared_examples "ChunkedIO shared tests" do
       end
     end

-    context 'when BUFFER_SIZE is larger than file size', :partial_support do
+    context 'when buffer size is larger than file size', :partial_support do
       before do
         set_larger_buffer_size_than(size)
         fill_trace_to_chunks(sample_trace_raw)
@@ -114,7 +114,7 @@ shared_examples "ChunkedIO shared tests" do
     context 'when read whole size' do
       let(:length) { nil }

-      context 'when BUFFER_SIZE is smaller than file size' do
+      context 'when buffer size is smaller than file size' do
         before do
           set_smaller_buffer_size_than(size)
           fill_trace_to_chunks(sample_trace_raw)
@@ -125,7 +125,7 @@ shared_examples "ChunkedIO shared tests" do
         end
       end

-      context 'when BUFFER_SIZE is larger than file size', :partial_support do
+      context 'when buffer size is larger than file size', :partial_support do
         before do
           set_larger_buffer_size_than(size)
           fill_trace_to_chunks(sample_trace_raw)
@@ -140,7 +140,7 @@ shared_examples "ChunkedIO shared tests" do
     context 'when read only first 100 bytes' do
       let(:length) { 100 }

-      context 'when BUFFER_SIZE is smaller than file size' do
+      context 'when buffer size is smaller than file size' do
         before do
           set_smaller_buffer_size_than(size)
           fill_trace_to_chunks(sample_trace_raw)
@@ -151,7 +151,7 @@ shared_examples "ChunkedIO shared tests" do
         end
       end

-      context 'when BUFFER_SIZE is larger than file size', :partial_support do
+      context 'when buffer size is larger than file size', :partial_support do
         before do
           set_larger_buffer_size_than(size)
           fill_trace_to_chunks(sample_trace_raw)
@@ -166,7 +166,7 @@ shared_examples "ChunkedIO shared tests" do
     context 'when tries to read oversize' do
       let(:length) { size + 1000 }

-      context 'when BUFFER_SIZE is smaller than file size' do
+      context 'when buffer size is smaller than file size' do
         before do
           set_smaller_buffer_size_than(size)
           fill_trace_to_chunks(sample_trace_raw)
@@ -177,7 +177,7 @@ shared_examples "ChunkedIO shared tests" do
         end
       end

-      context 'when BUFFER_SIZE is larger than file size', :partial_support do
+      context 'when buffer size is larger than file size', :partial_support do
         before do
           set_larger_buffer_size_than(size)
           fill_trace_to_chunks(sample_trace_raw)
@@ -192,7 +192,7 @@ shared_examples "ChunkedIO shared tests" do
     context 'when tries to read 0 bytes' do
       let(:length) { 0 }

-      context 'when BUFFER_SIZE is smaller than file size' do
+      context 'when buffer size is smaller than file size' do
         before do
           set_smaller_buffer_size_than(size)
           fill_trace_to_chunks(sample_trace_raw)
@@ -203,7 +203,7 @@ shared_examples "ChunkedIO shared tests" do
         end
       end

-      context 'when BUFFER_SIZE is larger than file size', :partial_support do
+      context 'when buffer size is larger than file size', :partial_support do
         before do
           set_larger_buffer_size_than(size)
           fill_trace_to_chunks(sample_trace_raw)
@@ -226,7 +226,7 @@ shared_examples "ChunkedIO shared tests" do
       end

       it 'reads a trace' do
-        expect { subject }.to raise_error(Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::FailedToGetChunkError)
+        expect { subject }.to raise_error(described_class::FailedToGetChunkError)
       end
     end
   end
@@ -254,11 +254,11 @@ shared_examples "ChunkedIO shared tests" do
       end

       it 'reads a trace' do
-        expect { subject }.to raise_error(Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::FailedToGetChunkError)
+        expect { subject }.to raise_error(described_class::FailedToGetChunkError)
       end
     end

-    context 'when BUFFER_SIZE is smaller than file size' do
+    context 'when buffer size is smaller than file size' do
       before do
         set_smaller_buffer_size_than(size)
         fill_trace_to_chunks(sample_trace_raw)
@@ -267,7 +267,7 @@ shared_examples "ChunkedIO shared tests" do
       it_behaves_like 'all line matching'
     end

-    context 'when BUFFER_SIZE is larger than file size', :partial_support do
+    context 'when buffer size is larger than file size', :partial_support do
       before do
         set_larger_buffer_size_than(size)
         fill_trace_to_chunks(sample_trace_raw)
@@ -296,10 +296,10 @@ shared_examples "ChunkedIO shared tests" do
     let(:data) { sample_trace_raw }

-    context 'when write mdoe' do
+    context 'when write mode' do
       let(:mode) { 'wb' }

-      context 'when BUFFER_SIZE is smaller than file size' do
+      context 'when buffer size is smaller than file size' do
         before do
           set_smaller_buffer_size_than(size)
         end
@@ -307,7 +307,7 @@ shared_examples "ChunkedIO shared tests" do
         it 'writes a trace' do
           is_expected.to eq(data.length)

-          Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.open(job_id, size, 'rb') do |stream|
+          described_class.open(job_id, size, 'rb') do |stream|
             expect(stream.read).to eq(data)
             expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
             expect(chunk_store.chunks_size(job_id)).to eq(data.length)
@@ -315,7 +315,7 @@ shared_examples "ChunkedIO shared tests" do
         end
       end

-      context 'when BUFFER_SIZE is larger than file size', :partial_support do
+      context 'when buffer size is larger than file size', :partial_support do
         before do
           set_larger_buffer_size_than(size)
         end
@@ -323,7 +323,7 @@ shared_examples "ChunkedIO shared tests" do
         it 'writes a trace' do
           is_expected.to eq(data.length)

-          Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.open(job_id, size, 'rb') do |stream|
+          described_class.open(job_id, size, 'rb') do |stream|
             expect(stream.read).to eq(data)
             expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
             expect(chunk_store.chunks_size(job_id)).to eq(data.length)
@@ -340,11 +340,11 @@ shared_examples "ChunkedIO shared tests" do
       end
     end

-    context 'when append mdoe', :partial_support do
+    context 'when append mode', :partial_support do
       let(:original_data) { 'original data' }
       let(:total_size) { original_data.length + data.length }

-      context 'when BUFFER_SIZE is smaller than file size' do
+      context 'when buffer size is smaller than file size' do
        before do
          set_smaller_buffer_size_than(size)
          fill_trace_to_chunks(original_data)
@@ -363,7 +363,7 @@ shared_examples "ChunkedIO shared tests" do
        end
      end

-      context 'when BUFFER_SIZE is larger than file size' do
+      context 'when buffer size is larger than file size' do
        before do
          set_larger_buffer_size_than(size)
          fill_trace_to_chunks(original_data)
@@ -386,7 +386,7 @@ shared_examples "ChunkedIO shared tests" do
   describe '#truncate' do
     context 'when data exists' do

-      context 'when BUFFER_SIZE is smaller than file size' do
+      context 'when buffer size is smaller than file size' do
        before do
          set_smaller_buffer_size_than(size)
          fill_trace_to_chunks(sample_trace_raw)
@@ -426,7 +426,7 @@ shared_examples "ChunkedIO shared tests" do
        end
      end

-      context 'when BUFFER_SIZE is larger than file size', :partial_support do
+      context 'when buffer size is larger than file size', :partial_support do
        before do
          set_larger_buffer_size_than(size)
          fill_trace_to_chunks(sample_trace_raw)
......