Commit d1632da8 authored by Shinya Maeda

Implement basic live trace feature

parent 8a1c2bc4
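This commit wires the new chunk-based live trace into Gitlab::Ci::Trace, replacing the old LiveTraceFile with ChunkedFile::LiveTrace and keeping the whole path behind the ci_enable_live_trace feature flag. A minimal sketch of the intended read path, assuming only the class names visible in this diff (job is a Ci::Build):

    # Read a job's live trace when the flag is on and chunks exist.
    # exists? checks both the Redis and the database chunk stores.
    if Feature.enabled?('ci_enable_live_trace') &&
        Gitlab::Ci::Trace::ChunkedFile::LiveTrace.exists?(job.id)
      Gitlab::Ci::Trace::ChunkedFile::LiveTrace.open(job.id, 'rb') do |stream|
        puts stream.read
      end
    end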
@@ -61,8 +61,8 @@ module Gitlab
stream = Gitlab::Ci::Trace::Stream.new do
if trace_artifact
trace_artifact.open
elsif Feature.enabled?('ci_enable_live_trace') && LiveTraceFile.exists?(job.id)
LiveTraceFile.new(job.id, "rb")
elsif Feature.enabled?('ci_enable_live_trace') && ChunkedFile::LiveTrace.exists?(job.id)
ChunkedFile::LiveTrace.new(job.id, "rb")
elsif current_path
File.open(current_path, "rb")
elsif old_trace
@@ -81,7 +81,7 @@ module Gitlab
if current_path
current_path
else
LiveTraceFile.new(job.id, "a+b")
ChunkedFile::LiveTrace.new(job.id, "a+b")
end
else
File.open(ensure_path, "a+b")
@@ -109,8 +109,8 @@ module Gitlab
raise ArchiveError, 'Already archived' if trace_artifact
raise ArchiveError, 'Job is not finished yet' unless job.complete?
if Feature.enabled?('ci_enable_live_trace') && LiveTraceFile.exists?(job.id)
LiveTraceFile.open(job.id, "wb") do |stream|
if Feature.enabled?('ci_enable_live_trace') && ChunkedFile::LiveTrace.exists?(job.id)
ChunkedFile::LiveTrace.open(job.id, "wb") do |stream|
archive_stream!(stream)
stream.truncate(0)
end
......
@@ -4,14 +4,10 @@ module Gitlab
module ChunkedFile
module ChunkStore
class Base
attr_reader :buffer_size
attr_reader :chunk_start
attr_reader :url
attr_reader :params
def initialize(*identifiers, **params)
@buffer_size = params[:buffer_size]
@chunk_start = params[:chunk_start]
@url = params[:url]
@params = params
end
def close
@@ -43,7 +39,7 @@ module Gitlab
end
def filled?
size == buffer_size
size == params[:buffer_size]
end
end
end
......
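With the attr_readers gone, every store option now travels in the params hash, so Base and its subclasses read params[:buffer_size] and friends directly (as filled? above now does). A sketch of the resulting contract; SomeChunkStore is a hypothetical concrete store, the signature mirrors Base#initialize from this diff:

    store = SomeChunkStore.new(job_id, chunk_index,
                               buffer_size: 128.kilobytes,
                               chunk_start: 0,
                               chunk_index: 0)
    store.params[:buffer_size] # => 131072
    store.filled?              # true once size == params[:buffer_size]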
@@ -52,7 +52,8 @@ module Gitlab
end
def write!(data)
raise NotImplementedError, 'Partial write is not supported' unless buffer_size == data&.length
puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}"
raise NotImplementedError, 'Partial write is not supported' unless params[:buffer_size] == data&.length
raise NotImplementedError, 'UPDATE is not supported' if job_trace_chunk.data
job_trace_chunk.data = data
@@ -66,10 +67,13 @@ module Gitlab
end
def truncate!(offset)
raise NotImplementedError
raise NotImplementedError, 'Partial truncate is not supported' unless offset == 0
delete!
end
def delete!
puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}"
job_trace_chunk.destroy!
end
end
......
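The database-backed store now supports only whole-chunk truncation: truncate!(0) simply delegates to delete!, and any other offset raises. A sketch of the behaviour this buys, assuming a store bound to an existing job_trace_chunk row:

    store.truncate!(0)   # destroys the underlying job_trace_chunk row (same as delete!)
    store.truncate!(10)  # raises NotImplementedError, 'Partial truncate is not supported'
    store.write!(data)   # raises unless data.length == params[:buffer_size];
                         # partial writes and UPDATEs are rejected above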
@@ -68,6 +68,7 @@ module Gitlab
end
def write!(data)
puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}"
Gitlab::Redis::Cache.with do |redis|
redis.set(buffer_key, data)
redis.strlen(buffer_key)
@@ -75,6 +76,7 @@ module Gitlab
end
def append!(data)
puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}"
Gitlab::Redis::Cache.with do |redis|
redis.append(buffer_key, data)
data.length
@@ -82,8 +84,10 @@ module Gitlab
end
def truncate!(offset)
puts "#{self.class.name} - #{__callee__}: offset: #{offset.inspect} params[:chunk_index]: #{params[:chunk_index]}"
Gitlab::Redis::Cache.with do |redis|
return unless redis.exists(buffer_key)
return 0 unless redis.exists(buffer_key)
return delete! if offset == 0
truncated_data = redis.getrange(buffer_key, 0, offset)
redis.set(buffer_key, truncated_data)
@@ -91,6 +95,7 @@ module Gitlab
end
def delete!
puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}"
Gitlab::Redis::Cache.with do |redis|
redis.del(buffer_key)
end
......
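The Redis store maps every chunk operation onto a plain Redis string command. A standalone sketch of the same semantics using the redis gem directly (the key layout is a hypothetical stand-in for whatever buffer_key actually derives):

    require 'redis'

    redis = Redis.new
    buffer_key = "job:#{job_id}:chunk:#{chunk_index}" # hypothetical key format

    redis.set(buffer_key, data)      # write!  : replace the chunk wholesale
    redis.append(buffer_key, more)   # append! : extend the chunk in place
    redis.strlen(buffer_key)         # size    : chunk length in bytes
    redis.getrange(buffer_key, 0, n) # truncate!(n): keep bytes 0..n (inclusive), then set
    redis.del(buffer_key)            # delete! / truncate!(0)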
##
# This class is designed as it's compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html)
# ChunkedIO Engine
#
# Choose a chunk_store with your purpose
# This class is designed to be compatible with the IO class (https://ruby-doc.org/core-2.3.1/IO.html)
module Gitlab
module Ci
class Trace
module ChunkedFile
class ChunkedIO
class << self
def open(job_id, size, mode)
stream = self.new(job_id, size, mode)
def open(*args)
stream = self.new(*args)
yield stream
ensure
stream.close
stream&.close
end
end
BUFFER_SIZE = 128.kilobytes
WriteError = Class.new(StandardError)
FailedToGetChunkError = Class.new(StandardError)
@@ -124,32 +126,16 @@ module Gitlab
raise WriteError, 'Could not write without lock' unless write_lock_uuid
raise WriteError, 'Could not write empty data' unless data.present?
data = data.dup
chunk_index_start = chunk_index
chunk_index_end = (tell + data.length) / BUFFER_SIZE
_data = data.dup
prev_tell = tell
(chunk_index_start..chunk_index_end).each do |c_index|
chunk_store.open(job_id, c_index, params_for_store) do |store|
writable_space = BUFFER_SIZE - chunk_offset
writing_size = [writable_space, data.length].min
break unless writing_size > 0
if store.size > 0
written_size = store.append!(data.slice!(0...writing_size))
else
written_size = store.write!(data.slice!(0...writing_size))
end
until _data.empty?
writable_space = buffer_size - chunk_offset
writing_size = [writable_space, _data.length].min
written_size = write_chunk!(_data.slice!(0...writing_size), &block)
raise WriteError, 'Written size mismatch' unless writing_size == written_size
@tell += written_size
@size = [tell, size].max
block.call(store, c_index) if block_given?
end
@tell += written_size
@size = [tell, size].max
end
tell - prev_tell
@@ -159,24 +145,19 @@ module Gitlab
raise WriteError, 'Could not write without lock' unless write_lock_uuid
raise WriteError, 'Offset is out of bound' if offset > size || offset < 0
chunk_index_start = (offset / BUFFER_SIZE)
chunk_index_end = chunks_count - 1
@tell = size - 1
(chunk_index_start..chunk_index_end).reverse_each do |c_index|
chunk_store.open(job_id, c_index, params_for_store) do |store|
c_index_start = c_index * BUFFER_SIZE
until size == offset
truncatable_space = size - chunk_start
_chunk_offset = (offset <= chunk_start) ? 0 : offset % buffer_size
removed_size = truncate_chunk!(_chunk_offset, &block)
if offset <= c_index_start
store.delete!
else
store.truncate!(offset - c_index_start) if store.size > 0
end
block.call(store, c_index) if block_given?
end
@tell -= removed_size
@size -= removed_size
end
@tell = @size = offset
@tell = [tell, 0].max
@size = [size, 0].max
end
def flush
@@ -198,48 +179,76 @@ module Gitlab
chunk_store.open(job_id, chunk_index, params_for_store) do |store|
@chunk = store.get
raise FailedToGetChunkError unless chunk
raise FailedToGetChunkError unless chunk && chunk.length > 0
@chunk_range = (chunk_start...(chunk_start + chunk.length))
end
end
@chunk[chunk_offset..BUFFER_SIZE]
@chunk[chunk_offset..buffer_size]
end
def write_chunk!(data, &block)
chunk_store.open(job_id, chunk_index, params_for_store) do |store|
written_size = if buffer_size == data.length
store.write!(data)
else
store.append!(data)
end
raise WriteError, 'Written size mismatch' unless data.length == written_size
block.call(store) if block_given?
written_size
end
end
def truncate_chunk!(offset, &block)
chunk_store.open(job_id, chunk_index, params_for_store) do |store|
removed_size = store.size - offset
store.truncate!(offset)
block.call(store) if block_given?
removed_size
end
end
def params_for_store
def params_for_store(c_index = chunk_index)
{
buffer_size: BUFFER_SIZE,
chunk_start: chunk_start
buffer_size: buffer_size,
chunk_start: c_index * buffer_size,
chunk_index: c_index
}
end
def chunk_offset
tell % BUFFER_SIZE
tell % buffer_size
end
def chunk_start
(tell / BUFFER_SIZE) * BUFFER_SIZE
chunk_index * buffer_size
end
def chunk_end
[chunk_start + BUFFER_SIZE, size].min
[chunk_start + buffer_size, size].min
end
def chunk_index
(tell / BUFFER_SIZE)
(tell / buffer_size)
end
def chunks_count
(size / BUFFER_SIZE) + (has_extra? ? 1 : 0)
(size / buffer_size) + (has_extra? ? 1 : 0)
end
def has_extra?
(size % BUFFER_SIZE) > 0
(size % buffer_size) > 0
end
def last_chunk?
chunk_index == (chunks_count - 1)
chunks_count == 0 || chunk_index == (chunks_count - 1) || chunk_index == chunks_count
end
def write_lock_key
@@ -249,6 +258,10 @@ module Gitlab
def chunk_store
raise NotImplementedError
end
def buffer_size
raise NotImplementedError
end
end
end
end
......
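All chunk geometry now derives from tell, size, and the overridable buffer_size instead of the BUFFER_SIZE constant. A worked example of the helpers above, assuming a 128-byte buffer for readability:

    buffer_size  = 128
    tell         = 300                        # current position
    size         = 300
    chunk_index  = tell / buffer_size         # => 2  (third chunk)
    chunk_start  = chunk_index * buffer_size  # => 256
    chunk_offset = tell % buffer_size         # => 44
    has_extra    = (size % buffer_size) > 0   # => true (44 trailing bytes)
    chunks_count = (size / buffer_size) + (has_extra ? 1 : 0) # => 3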
@@ -2,19 +2,9 @@ module Gitlab
module Ci
class Trace
module ChunkedFile
class Remote < ChunkedIO
class HttpIO < ChunkedIO
BUFFER_SIZE = 128.kilobytes
class << self
def open(job_id, mode)
stream = self.new(job_id, mode)
yield stream
ensure
stream.close
end
end
InvalidURLError = Class.new(StandardError)
attr_reader :uri
......
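Remote is renamed to HttpIO, and its hand-rolled class-level open disappears because ChunkedIO now provides a generic one that forwards *args and closes with safe navigation. A sketch of that inherited pattern and why the &. matters:

    def self.open(*args)
      stream = new(*args)
      yield stream
    ensure
      stream&.close # stream is nil when new raised, so plain close would mask the error
    end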
@@ -3,17 +3,7 @@ module Gitlab
class Trace
module ChunkedFile
class LiveTrace < ChunkedIO
BUFFER_SIZE = 128.kilobytes
class << self
def open(job_id, mode)
stream = self.new(job_id, mode)
yield stream
ensure
stream.close
end
def exists?(job_id)
ChunkStore::Redis.chunks_count(job_id) > 0 ||
ChunkStore::Database.chunks_count(job_id) > 0
@@ -21,7 +11,7 @@ module Gitlab
end
def initialize(job_id, mode)
super(job_id, calculate_size, mode)
super(job_id, calculate_size(job_id), mode)
end
def write(data)
@@ -29,30 +19,51 @@ module Gitlab
super(data) do |store|
if store.filled?
# Rotate data from redis to database
ChunkStores::Database.open(job_id, chunk_index, params_for_store) do |to_store|
# Once the chunk in Redis is filled, move its data to the database
ChunkStore::Database.open(job_id, chunk_index, params_for_store) do |to_store|
to_store.write!(store.get)
store.delete!
end
end
end
end
def truncate(offset)
super(offset) do |store|
next if chunk_index == 0
prev_chunk_index = chunk_index - 1
store.delete!
if ChunkStore::Database.exist?(job_id, prev_chunk_index)
# Swap data from Database back to Redis so the trace can be truncated at offsets smaller than buffer_size
ChunkStore::Database.open(job_id, prev_chunk_index, params_for_store(prev_chunk_index)) do |from_store|
ChunkStore::Redis.open(job_id, prev_chunk_index, params_for_store(prev_chunk_index)) do |to_store|
to_store.write!(from_store.get)
from_store.delete!
end
end
end
end
end
private
def calculate_size
ChunkStores::Redis.chunks_size(job_id) +
ChunkStores::Database.chunks_size(job_id)
def calculate_size(job_id)
ChunkStore::Redis.chunks_size(job_id) +
ChunkStore::Database.chunks_size(job_id)
end
def chunk_store
if last_chunk?
ChunkStores::Redis
ChunkStore::Redis
else
ChunkStores::Database
ChunkStore::Database
end
end
def buffer_size
128.kilobytes
end
end
end
end
......
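Putting the pieces together: the tail chunk of a live trace accumulates in Redis, rotates into the database once filled, and the whole file is drained and emptied on archive. A schematic of that lifecycle, assuming the classes in this diff (not runnable outside this branch):

    Gitlab::Ci::Trace::ChunkedFile::LiveTrace.open(job_id, 'a+b') do |stream|
      stream.write(fragment) # fills the Redis chunk; once filled?, write's block
                             # above copies it into ChunkStore::Database
    end

    # On completion, Trace#archive! copies everything out and truncates to zero:
    Gitlab::Ci::Trace::ChunkedFile::LiveTrace.open(job_id, 'wb') do |stream|
      archive_stream!(stream) # persist to the permanent trace artifact
      stream.truncate(0)      # drop all Redis and database chunks
    end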
@@ -14,6 +14,7 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do
before do
allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store)
allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes)
end
it_behaves_like 'ChunkedIO shared tests'
@@ -24,6 +25,7 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do
before do
allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store)
allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes)
end
it_behaves_like 'ChunkedIO shared tests'
......
require 'spec_helper'
describe Gitlab::Ci::Trace::ChunkedFile::LiveTrace, :clean_gitlab_redis_cache do
include LiveTraceHelpers
let(:chunked_io) { described_class.new(job_id, mode) }
let(:job) { create(:ci_build) }
let(:job_id) { job.id }
let(:size) { sample_trace_size }
let(:mode) { 'rb' }
describe '#write' do
subject { chunked_io.write(data) }
let(:data) { sample_trace_raw }
context 'when write mode' do
let(:mode) { 'wb' }
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'writes a trace' do
is_expected.to eq(data.length)
described_class.open(job_id, 'rb') do |stream|
expect(stream.read).to eq(data)
expect(total_chunks_count).to eq(stream.send(:chunks_count))
expect(total_chunks_size).to eq(data.length)
end
end
end
context 'when buffer size is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'writes a trace' do
is_expected.to eq(data.length)
described_class.open(job_id, 'rb') do |stream|
expect(stream.read).to eq(data)
expect(total_chunks_count).to eq(stream.send(:chunks_count))
expect(total_chunks_size).to eq(data.length)
end
end
end
context 'when data is nil' do
let(:data) { nil }
it 'raises an error' do
expect { subject }.to raise_error('Could not write empty data')
end
end
end
context 'when append mode' do
let(:original_data) { 'original data' }
let(:total_size) { original_data.length + data.length }
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
fill_trace_to_chunks(original_data)
end
it 'appends a trace' do
described_class.open(job_id, 'a+b') do |stream|
expect(stream.write(data)).to eq(data.length)
end
described_class.open(job_id, 'rb') do |stream|
expect(stream.read).to eq(original_data + data)
expect(total_chunks_count).to eq(stream.send(:chunks_count))
expect(total_chunks_size).to eq(total_size)
end
end
end
context 'when buffer size is larger than file size' do
before do
set_larger_buffer_size_than(size)
fill_trace_to_chunks(original_data)
end
it 'appends a trace' do
described_class.open(job_id, 'a+b') do |stream|
expect(stream.write(data)).to eq(data.length)
end
described_class.open(job_id, 'rb') do |stream|
expect(stream.read).to eq(original_data + data)
expect(total_chunks_count).to eq(stream.send(:chunks_count))
expect(total_chunks_size).to eq(total_size)
end
end
end
end
end
describe '#truncate' do
context 'when data exists' do
context 'when buffer size is smaller than file size' do
before do
puts "#{self.class.name} - #{__callee__}: ===== 1"
set_smaller_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
end
it 'truncates a trace' do
puts "#{self.class.name} - #{__callee__}: ===== 2"
described_class.open(job_id, 'rb') do |stream|
expect(stream.read).to eq(sample_trace_raw)
end
puts "#{self.class.name} - #{__callee__}: ===== 3"
described_class.open(job_id, 'wb') do |stream|
stream.truncate(0)
end
puts "#{self.class.name} - #{__callee__}: ===== 4"
expect(total_chunks_count).to eq(0)
expect(total_chunks_size).to eq(0)
puts "#{self.class.name} - #{__callee__}: ===== 5"
described_class.open(job_id, 'rb') do |stream|
expect(stream.read).to be_empty
end
end
context 'when offset is negative' do
it 'raises an error' do
described_class.open(job_id, 'wb') do |stream|
expect { stream.truncate(-1) }.to raise_error('Offset is out of bound')
end
end
end
context 'when offset is larger than file size' do
it 'raises an error' do
described_class.open(job_id, 'wb') do |stream|
expect { stream.truncate(size + 1) }.to raise_error('Offset is out of bound')
end
end
end
end
context 'when buffer size is larger than file size' do
before do
set_larger_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
end
it 'truncates a trace' do
described_class.open(job_id, 'rb') do |stream|
expect(stream.read).to eq(sample_trace_raw)
end
described_class.open(job_id, 'wb') do |stream|
stream.truncate(0)
end
described_class.open(job_id, 'rb') do |stream|
expect(stream.read).to be_empty
end
expect(total_chunks_count).to eq(0)
expect(total_chunks_size).to eq(0)
end
end
end
context 'when data does not exist' do
before do
set_smaller_buffer_size_than(size)
end
it 'truncates a trace' do
described_class.open(job_id, 'wb') do |stream|
stream.truncate(0)
expect(stream.send(:tell)).to eq(0)
expect(stream.send(:size)).to eq(0)
end
end
end
end
def total_chunks_count
Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis.chunks_count(job_id) +
Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database.chunks_count(job_id)
end
def total_chunks_size
Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis.chunks_size(job_id) +
Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database.chunks_size(job_id)
end
end
module ChunkedIOHelpers
def fill_trace_to_chunks(data)
stream = Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.new(job_id, data.length, 'wb')
stream = described_class.new(job_id, data.length, 'wb')
stream.write(data)
stream.close
end
@@ -17,6 +17,20 @@ module ChunkedIOHelpers
sample_trace_raw.length
end
def sample_trace_raw_for_live_trace
File.read(expand_fixture_path('trace/sample_trace'))
end
def sample_trace_size_for_live_trace
sample_trace_raw_for_live_trace.length
end
def fill_trace_to_chunks_for_live_trace(data)
stream = described_class.new(job_id, 'wb')
stream.write(data)
stream.close
end
def stub_chunk_store_get_failed
allow_any_instance_of(chunk_store).to receive(:get).and_return(nil)
end
@@ -24,12 +38,12 @@ module ChunkedIOHelpers
def set_smaller_buffer_size_than(file_size)
blocks = (file_size / 128)
new_size = (blocks / 2) * 128
stub_const("Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::BUFFER_SIZE", new_size)
allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
end
def set_larger_buffer_size_than(file_size)
blocks = (file_size / 128)
new_size = (blocks * 2) * 128
stub_const("Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::BUFFER_SIZE", new_size)
allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
end
end
module LiveTraceHelpers
def fill_trace_to_chunks(data)
stream = described_class.new(job_id, 'wb')
stream.write(data)
stream.close
end
def sample_trace_raw
File.read(expand_fixture_path('trace/sample_trace'))
end
def sample_trace_size
sample_trace_raw.length
end
def stub_chunk_store_get_failed
allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis).to receive(:get).and_return(nil)
allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database).to receive(:get).and_return(nil)
end
def set_smaller_buffer_size_than(file_size)
blocks = (file_size / 128)
new_size = (blocks / 2) * 128
allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
end
def set_larger_buffer_size_than(file_size)
blocks = (file_size / 128)
new_size = (blocks * 2) * 128
allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
end
end
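Both helper modules derive the stubbed buffer size from the sample trace in 128-byte blocks, halving or doubling the block count so the trace either spans several chunks or fits in one. A worked example, assuming a 1000-byte sample trace:

    file_size = 1000
    blocks    = file_size / 128    # => 7
    smaller   = (blocks / 2) * 128 # => 384   (trace spans multiple chunks)
    larger    = (blocks * 2) * 128 # => 1792  (whole trace fits in one chunk)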
@@ -82,7 +82,7 @@ shared_examples "ChunkedIO shared tests" do
describe '#each_line' do
let(:string_io) { StringIO.new(sample_trace_raw) }
context 'when BUFFER_SIZE is smaller than file size' do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
@@ -94,7 +94,7 @@ shared_examples "ChunkedIO shared tests" do
end
end
context 'when BUFFER_SIZE is larger than file size', :partial_support do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
@@ -114,7 +114,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when read whole size' do
let(:length) { nil }
context 'when BUFFER_SIZE is smaller than file size' do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
@@ -125,7 +125,7 @@ shared_examples "ChunkedIO shared tests" do
end
end
context 'when BUFFER_SIZE is larger than file size', :partial_support do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
@@ -140,7 +140,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when read only first 100 bytes' do
let(:length) { 100 }
context 'when BUFFER_SIZE is smaller than file size' do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
@@ -151,7 +151,7 @@ shared_examples "ChunkedIO shared tests" do
end
end
context 'when BUFFER_SIZE is larger than file size', :partial_support do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
@@ -166,7 +166,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when tries to read oversize' do
let(:length) { size + 1000 }
context 'when BUFFER_SIZE is smaller than file size' do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
@@ -177,7 +177,7 @@ shared_examples "ChunkedIO shared tests" do
end
end
context 'when BUFFER_SIZE is larger than file size', :partial_support do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
@@ -192,7 +192,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when tries to read 0 bytes' do
let(:length) { 0 }
context 'when BUFFER_SIZE is smaller than file size' do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
@@ -203,7 +203,7 @@ shared_examples "ChunkedIO shared tests" do
end
end
context 'when BUFFER_SIZE is larger than file size', :partial_support do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
@@ -226,7 +226,7 @@ shared_examples "ChunkedIO shared tests" do
end
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::FailedToGetChunkError)
expect { subject }.to raise_error(described_class::FailedToGetChunkError)
end
end
end
@@ -254,11 +254,11 @@ shared_examples "ChunkedIO shared tests" do
end
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Ci::Trace::ChunkedFile::ChunkedIO::FailedToGetChunkError)
expect { subject }.to raise_error(described_class::FailedToGetChunkError)
end
end
context 'when BUFFER_SIZE is smaller than file size' do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
@@ -267,7 +267,7 @@ shared_examples "ChunkedIO shared tests" do
it_behaves_like 'all line matching'
end
context 'when BUFFER_SIZE is larger than file size', :partial_support do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
@@ -296,10 +296,10 @@ shared_examples "ChunkedIO shared tests" do
let(:data) { sample_trace_raw }
context 'when write mdoe' do
context 'when write mode' do
let(:mode) { 'wb' }
context 'when BUFFER_SIZE is smaller than file size' do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
@@ -307,7 +307,7 @@ shared_examples "ChunkedIO shared tests" do
it 'writes a trace' do
is_expected.to eq(data.length)
Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.open(job_id, size, 'rb') do |stream|
described_class.open(job_id, size, 'rb') do |stream|
expect(stream.read).to eq(data)
expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
expect(chunk_store.chunks_size(job_id)).to eq(data.length)
@@ -315,7 +315,7 @@ shared_examples "ChunkedIO shared tests" do
end
end
context 'when BUFFER_SIZE is larger than file size', :partial_support do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(size)
end
@@ -323,7 +323,7 @@ shared_examples "ChunkedIO shared tests" do
it 'writes a trace' do
is_expected.to eq(data.length)
Gitlab::Ci::Trace::ChunkedFile::ChunkedIO.open(job_id, size, 'rb') do |stream|
described_class.open(job_id, size, 'rb') do |stream|
expect(stream.read).to eq(data)
expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
expect(chunk_store.chunks_size(job_id)).to eq(data.length)
@@ -340,11 +340,11 @@ shared_examples "ChunkedIO shared tests" do
end
end
context 'when append mdoe', :partial_support do
context 'when append mode', :partial_support do
let(:original_data) { 'original data' }
let(:total_size) { original_data.length + data.length }
context 'when BUFFER_SIZE is smaller than file size' do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
fill_trace_to_chunks(original_data)
@@ -363,7 +363,7 @@ shared_examples "ChunkedIO shared tests" do
end
end
context 'when BUFFER_SIZE is larger than file size' do
context 'when buffer size is larger than file size' do
before do
set_larger_buffer_size_than(size)
fill_trace_to_chunks(original_data)
@@ -386,7 +386,7 @@ shared_examples "ChunkedIO shared tests" do
describe '#truncate' do
context 'when data exists' do
context 'when BUFFER_SIZE is smaller than file size' do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
@@ -426,7 +426,7 @@ shared_examples "ChunkedIO shared tests" do
end
end
context 'when BUFFER_SIZE is larger than file size', :partial_support do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(size)
fill_trace_to_chunks(sample_trace_raw)
......