Commit b94c84e5 authored by Shinya Maeda

Add spec for ChunkedIO

parent f8f62ea5
@@ -35,14 +35,6 @@ module Gitlab
           true
         end

-        def path
-          nil
-        end
-
-        def url
-          nil
-        end
-
         def seek(pos, where = IO::SEEK_SET)
           new_pos =
             case where
@@ -74,18 +66,21 @@ module Gitlab
           end
         end

-        def read(length = (size - tell), outbuf = "")
+        def read(length = nil, outbuf = "")
           out = ""

-          end_tell = [tell + length, size].min
+          length = size - tell unless length

-          until end_tell <= tell
+          until length <= 0 || eof?
            data = chunk_slice_from_offset
            break if data.empty?

-            data = data[0, (length % CHUNK_SIZE)] if data.bytesize + tell >= end_tell
+            chunk_bytes = [CHUNK_SIZE - chunk_offset, length].min
+            chunk_data = data.byteslice(0, chunk_bytes)

-            out << data
-            @tell += data.bytesize
+            out << chunk_data
+            @tell += chunk_data.bytesize
+            length -= chunk_data.bytesize
           end

           # If outbuf is passed, we put the output into the buffer. This supports IO.copy_stream functionality
@@ -118,7 +113,10 @@ module Gitlab
         end

         def write(data)
+          raise 'Could not write empty data' unless data.present?
+
           start_pos = tell
+          data = data.force_encoding(Encoding::BINARY)

           while tell < start_pos + data.bytesize
             # get slice from current offset till the end where it falls into chunk
@@ -129,9 +127,13 @@ module Gitlab
             ensure_chunk.append(chunk_data, chunk_offset)

             # move offsets within buffer
-            @tell += chunk_bytes
+            @tell += chunk_data.bytesize
             @size = [size, tell].max
           end
+
+          tell - start_pos
+        ensure
+          invalidate_chunk_cache
         end

         def truncate(offset)
@@ -139,13 +141,14 @@ module Gitlab
           @tell = offset
           @size = offset
-          invalidate_chunk_cache

           # remove all next chunks
           job_chunks.where('chunk_index > ?', chunk_index).destroy_all

           # truncate current chunk
           current_chunk.truncate(chunk_offset) if chunk_offset != 0
+        ensure
+          invalidate_chunk_cache
         end

         def flush
@@ -158,6 +161,8 @@ module Gitlab
         def destroy!
           job_chunks.destroy_all
+          @tell = @size = 0
+        ensure
           invalidate_chunk_cache
         end
...
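The reworked read above walks the trace chunk by chunk: each pass takes at most CHUNK_SIZE - chunk_offset bytes from the current chunk, appends them to the output, advances the position, and decrements the remaining length. A minimal standalone sketch of that loop over an in-memory chunk store (the FakeChunkedIO class and its internals are illustrative assumptions, not the real Gitlab::Ci::Trace::ChunkedIO):

  # Illustrative sketch only: models chunked reads over an in-memory store.
  class FakeChunkedIO
    CHUNK_SIZE = 4

    def initialize(data)
      # split the data into fixed-size chunks, like the persisted trace chunks
      @chunks = data.bytes.each_slice(CHUNK_SIZE).map { |slice| slice.pack('C*') }
      @tell = 0
      @size = data.bytesize
    end

    def read(length = nil)
      length = @size - @tell unless length
      out = ""

      until length <= 0 || @tell >= @size
        chunk_index  = @tell / CHUNK_SIZE
        chunk_offset = @tell % CHUNK_SIZE

        # take at most what is left in the current chunk, capped by `length`
        chunk_bytes = [CHUNK_SIZE - chunk_offset, length].min
        chunk_data  = @chunks[chunk_index].byteslice(chunk_offset, chunk_bytes)

        out << chunk_data
        @tell += chunk_data.bytesize
        length -= chunk_data.bytesize
      end

      out
    end
  end

  io = FakeChunkedIO.new("ABCDEFGHIJ")
  io.read(6)  # => "ABCDEF" (spans two 4-byte chunks)
  io.read     # => "GHIJ"   (no length given: reads the remainder)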
require 'spec_helper'
describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
include ChunkedIOHelpers
set(:job) { create(:ci_build, :running) }
let(:chunked_io) { described_class.new(job) }
before do
stub_feature_flags(ci_enable_live_trace: true)
end
context "#initialize" do
context 'when a chunk exists' do
before do
job.trace.set('ABC')
end
it { expect(chunked_io.size).to eq(3) }
end
context 'when two chunks exist' do
before do
stub_buffer_size(4)
job.trace.set('ABCDEF')
end
it { expect(chunked_io.size).to eq(6) }
end
context 'when no chunks exist' do
it { expect(chunked_io.size).to eq(0) }
end
end
context "#seek" do
subject { chunked_io.seek(pos, where) }
before do
job.trace.set(sample_trace_raw)
end
context 'when pos is moved to the end of the file' do
let(:pos) { 0 }
let(:where) { IO::SEEK_END }
it { is_expected.to eq(sample_trace_raw.bytesize) }
end
context 'when pos is moved to the middle of the file' do
let(:pos) { sample_trace_raw.bytesize / 2 }
let(:where) { IO::SEEK_SET }
it { is_expected.to eq(pos) }
end
context 'when pos is moved around' do
it 'matches the result' do
expect(chunked_io.seek(0)).to eq(0)
expect(chunked_io.seek(100, IO::SEEK_CUR)).to eq(100)
expect { chunked_io.seek(sample_trace_raw.bytesize + 1, IO::SEEK_CUR) }
.to raise_error('new position is outside of file')
end
end
end
context "#eof?" do
subject { chunked_io.eof? }
before do
job.trace.set(sample_trace_raw)
end
context 'when current pos is at end of the file' do
before do
chunked_io.seek(sample_trace_raw.bytesize, IO::SEEK_SET)
end
it { is_expected.to be_truthy }
end
context 'when current pos is not at end of the file' do
before do
chunked_io.seek(0, IO::SEEK_SET)
end
it { is_expected.to be_falsey }
end
end
context "#each_line" do
let(:string_io) { StringIO.new(sample_trace_raw) }
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
end
it 'yields lines' do
expect { |b| chunked_io.each_line(&b) }
.to yield_successive_args(*string_io.each_line.to_a)
end
end
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw)
end
it 'calls get_chunk only once' do
expect_any_instance_of(Gitlab::Ci::Trace::ChunkedIO)
.to receive(:current_chunk).once.and_call_original
chunked_io.each_line { |line| }
end
end
end
context "#read" do
subject { chunked_io.read(length) }
context 'when reading the whole size' do
let(:length) { nil }
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
end
it { is_expected.to eq(sample_trace_raw) }
end
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw)
end
it { is_expected.to eq(sample_trace_raw) }
end
end
context 'when reading only the first 100 bytes' do
let(:length) { 100 }
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
end
it 'reads a trace' do
is_expected.to eq(sample_trace_raw.byteslice(0, length))
end
end
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw)
end
it 'reads a trace' do
is_expected.to eq(sample_trace_raw.byteslice(0, length))
end
end
end
context 'when trying to read more than the size' do
let(:length) { sample_trace_raw.bytesize + 1000 }
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
end
it 'reads a trace' do
is_expected.to eq(sample_trace_raw)
end
end
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw)
end
it 'reads a trace' do
is_expected.to eq(sample_trace_raw)
end
end
end
context 'when trying to read 0 bytes' do
let(:length) { 0 }
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
end
it 'returns an empty string' do
is_expected.to be_empty
end
end
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw)
end
it 'returns an empty string' do
is_expected.to be_empty
end
end
end
end
context "#readline" do
subject { chunked_io.readline }
let(:string_io) { StringIO.new(sample_trace_raw) }
shared_examples 'all line matching' do
it do
(0...sample_trace_raw.lines.count).each do
expect(chunked_io.readline).to eq(string_io.readline)
end
end
end
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
end
it_behaves_like 'all line matching'
end
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw)
end
it_behaves_like 'all line matching'
end
context 'when pos is at middle of the file' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
chunked_io.seek(chunked_io.size / 2)
string_io.seek(string_io.size / 2)
end
it 'reads from pos' do
expect(chunked_io.readline).to eq(string_io.readline)
end
end
end
context "#write" do
subject { chunked_io.write(data) }
let(:data) { sample_trace_raw }
context 'when data does not exist' do
shared_examples 'writes a trace' do
it do
is_expected.to eq(data.bytesize)
chunked_io.seek(0, IO::SEEK_SET)
expect(chunked_io.read).to eq(data)
end
end
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(data.bytesize / 2)
end
it_behaves_like 'writes a trace'
end
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(data.bytesize * 2)
end
it_behaves_like 'writes a trace'
end
context 'when data is nil' do
let(:data) { nil }
it 'raises an error' do
expect { subject }.to raise_error('Could not write empty data')
end
end
end
context 'when data already exists' do
let(:exist_data) { 'exist data' }
shared_examples 'appends a trace' do
it do
chunked_io.seek(0, IO::SEEK_END)
is_expected.to eq(data.bytesize)
chunked_io.seek(0, IO::SEEK_SET)
expect(chunked_io.read).to eq(exist_data + data)
end
end
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(exist_data)
end
it_behaves_like 'appends a trace'
end
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(exist_data)
end
it_behaves_like 'appends a trace'
end
end
end
context "#truncate" do
subject { chunked_io.truncate(offset) }
let(:offset) { 10 }
context 'when data does not exist' do
shared_examples 'truncates a trace' do
it do
subject
chunked_io.seek(0, IO::SEEK_SET)
expect(chunked_io.read).to eq(sample_trace_raw.byteslice(0, offset))
end
end
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
end
it_behaves_like 'truncates a trace'
end
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw)
end
it_behaves_like 'truncates a trace'
end
end
end
context "#destroy!" do
subject { chunked_io.destroy! }
before do
job.trace.set(sample_trace_raw)
end
it 'deletes' do
expect { subject }.to change { chunked_io.size }
.from(sample_trace_raw.bytesize).to(0)
expect(Ci::JobTraceChunk.where(job: job).count).to eq(0)
end
end
end
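The outbuf parameter mentioned in the read comment above is what lets the class duck-type as a source for IO.copy_stream, which repeatedly calls read(length, buffer) on whatever it copies from. A hypothetical usage sketch under that assumption (the output path is made up, and job is a running build as in the spec):

  # Hypothetical usage: stream a live trace out to a local file.
  chunked_io = Gitlab::Ci::Trace::ChunkedIO.new(job)

  File.open('trace_copy.log', 'wb') do |file|
    IO.copy_stream(chunked_io, file)  # pulls the trace chunk by chunk via #read
  end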
@@ -5,6 +5,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
   let(:chunk_index) { 0 }
   let(:data_store) { :redis }
   let(:raw_data) { nil }
+
   let(:job_trace_chunk) do
     described_class.new(job: job, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data)
   end
...
module ChunkedIOHelpers
def sample_trace_raw
@sample_trace_raw ||= File.read(expand_fixture_path('trace/sample_trace'))
.force_encoding(Encoding::BINARY)
end
def stub_buffer_size(size)
stub_const('Ci::JobTraceChunk::CHUNK_SIZE', size)
stub_const('Gitlab::Ci::Trace::ChunkedIO::CHUNK_SIZE', size)
end
end
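Stubbing both CHUNK_SIZE constants is what lets the specs above exercise multi-chunk behaviour with tiny fixtures. A hypothetical extra example in the same style (not part of the commit), reusing the spec's job and chunked_io lets:

  # With the chunk size stubbed to 4 bytes, a 6-byte trace is stored as two
  # chunks ("ABCD" and "EF"), so a 5-byte read has to cross the chunk boundary.
  context 'when the trace spans two chunks' do
    before do
      stub_buffer_size(4)
      job.trace.set('ABCDEF')
    end

    it 'reads across the chunk boundary' do
      expect(chunked_io.read(5)).to eq('ABCDE')
    end
  end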