Boxiang Sun / gitlab-ce · Commits

Commit 1de5b8db, authored Apr 02, 2018 by Shinya Maeda
Fix Live trace

Parent: 3a99a6b9
Showing 7 changed files with 146 additions and 292 deletions (+146 / -292)
Files changed:

  lib/gitlab/ci/trace/chunked_file/chunked_io.rb (+2 / -17)
  lib/gitlab/ci/trace/chunked_file/live_trace.rb (+5 / -10)
  spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb (+4 / -4)
  spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb (+52 / -164)
  spec/support/chunked_io/chunked_io_helpers.rb (+8 / -21)
  spec/support/chunked_io/live_trace_helpers.rb (+0 / -32, deleted)
  spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb (+75 / -44)
lib/gitlab/ci/trace/chunked_file/chunked_io.rb (+2 / -17)

@@ -202,17 +202,6 @@ module Gitlab
       written_size
     end

-    def truncate_chunk(offset)
-      chunk_store.open(job_id, chunk_index, params_for_store) do |store|
-        with_callbacks(:truncate_chunk, store) do
-          removed_size = store.size - offset
-          store.truncate!(offset)
-          removed_size
-        end
-      end
-    end
-
     def params_for_store(c_index = chunk_index)
       {
         buffer_size: buffer_size,

@@ -241,12 +230,8 @@ module Gitlab
       (size / buffer_size.to_f).ceil
     end

-    def first_chunk?
-      chunk_index == 0
-    end
-
-    def last_chunk?
-      (chunk_start...chunk_end).include?(tell)
+    def last_range
+      ((size / buffer_size) * buffer_size..size)
     end

     def chunk_store
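The new last_range method replaces the first_chunk?/last_chunk? predicates with a byte range covering the trailing, not-yet-full chunk. A minimal sketch of the arithmetic, using an assumed 128-byte buffer and a 300-byte trace (illustrative values, not from the commit):

    # Illustration only: the same arithmetic as last_range, evaluated standalone.
    buffer_size = 128
    size        = 300

    last_range = ((size / buffer_size) * buffer_size..size)
    # (300 / 128) * 128 => 256, so last_range is 256..300: the final,
    # partially filled chunk of the trace.

    puts last_range.include?(0)    # => false (offset 0 sits in a completed chunk)
    puts last_range.include?(299)  # => true  (offset 299 sits in the growing tail)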
lib/gitlab/ci/trace/chunked_file/live_trace.rb (+5 / -10)

@@ -5,8 +5,7 @@ module Gitlab
     class LiveTrace < ChunkedIO
       class << self
         def exist?(job_id)
-          ChunkStores::Redis.chunks_count(job_id) > 0 ||
-            ChunkStores::Database.chunks_count(job_id) > 0
+          ChunkStore::Redis.chunks_count(job_id) > 0 || ChunkStore::Database.chunks_count(job_id) > 0
         end
       end

@@ -22,7 +21,7 @@ module Gitlab
        end
      end

-      # Efficient process than iterating each
+      # This is more efficient than iterating each chunk store and deleting
      def truncate(offset)
        if offset == 0
          delete

@@ -33,13 +32,9 @@ module Gitlab
        end
      end

-      def present?
-        self.exist?(job_id)
-      end
-
      def delete
-        ChunkStores::Redis.delete_all(job_id)
-        ChunkStores::Database.delete_all(job_id)
+        ChunkStore::Redis.delete_all(job_id)
+        ChunkStore::Database.delete_all(job_id)
      end

      private

@@ -50,7 +45,7 @@ module Gitlab
      end

      def chunk_store
-        if last_chunk? || eof?
+        if last_range.include?(tell)
          ChunkStore::Redis
        else
          ChunkStore::Database
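With that range, chunk_store now chooses a backend from the current position (tell) instead of last_chunk? || eof?. A rough sketch of the dispatch under the same assumed sizes, with symbols standing in for ChunkStore::Redis and ChunkStore::Database:

    # Sketch of the selection logic only; :redis and :database stand in for the
    # real ChunkStore classes used by LiveTrace#chunk_store.
    def store_for(tell, size:, buffer_size: 128)
      last_range = ((size / buffer_size) * buffer_size..size)
      last_range.include?(tell) ? :redis : :database
    end

    p store_for(0,   size: 300)  # => :database (completed chunk)
    p store_for(200, size: 300)  # => :database (completed chunk)
    p store_for(256, size: 300)  # => :redis    (trailing chunk still being written)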
spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb (+4 / -4)

@@ -9,10 +9,10 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do
   let(:mode) { 'rb' }

   describe 'ChunkStore is Redis', :partial_support do
-    let(:chunk_store) { Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis }
+    let(:chunk_stores) { [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis] }

     before do
-      allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store)
+      allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_stores.first)
       allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes)
     end

@@ -20,10 +20,10 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do
   end

   describe 'ChunkStore is Database' do
-    let(:chunk_store) { Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database }
+    let(:chunk_stores) { [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database] }

     before do
-      allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store)
+      allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_stores.first)
       allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes)
     end
spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb (+52 / -164)

 require 'spec_helper'

 describe Gitlab::Ci::Trace::ChunkedFile::LiveTrace, :clean_gitlab_redis_cache do
-  include LiveTraceHelpers
+  include ChunkedIOHelpers

-  let(:chunked_io) { described_class.new(job_id, mode) }
+  let(:chunked_io) { described_class.new(job_id, nil, mode) }
   let(:job) { create(:ci_build) }
   let(:job_id) { job.id }
   let(:size) { sample_trace_size }
   let(:mode) { 'rb' }

+  let(:chunk_stores) do
+    [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis,
+     Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database]
+  end
+
+  describe 'ChunkStores are Redis and Database', :partial_support do
+    it_behaves_like 'ChunkedIO shared tests'
+  end
+
-  describe '#write' do
-    subject { chunked_io.write(data) }
-
-    let(:data) { sample_trace_raw }
-
-    context 'when write mode' do
-      let(:mode) { 'wb' }
-
-      context 'when buffer size is smaller than file size' do
-        before do
-          set_smaller_buffer_size_than(size)
-        end
-
-        it 'writes a trace' do
-          is_expected.to eq(data.length)
-
-          described_class.open(job_id, 'rb') do |stream|
-            expect(stream.read).to eq(data)
-            expect(total_chunks_count).to eq(stream.send(:chunks_count))
-            expect(total_chunks_size).to eq(data.length)
-          end
-        end
-      end
-
-      context 'when buffer size is larger than file size' do
-        before do
-          set_larger_buffer_size_than(size)
-        end
-
-        it 'writes a trace' do
-          is_expected.to eq(data.length)
-
-          described_class.open(job_id, 'rb') do |stream|
-            expect(stream.read).to eq(data)
-            expect(total_chunks_count).to eq(stream.send(:chunks_count))
-            expect(total_chunks_size).to eq(data.length)
-          end
-        end
-      end
-
-      context 'when data is nil' do
-        let(:data) { nil }
-
-        it 'writes a trace' do
-          expect { subject }.to raise_error('Could not write empty data')
-        end
-      end
-    end
-
-    context 'when append mode' do
-      let(:original_data) { 'original data' }
-      let(:total_size) { original_data.length + data.length }
-
-      context 'when buffer size is smaller than file size' do
-        before do
-          set_smaller_buffer_size_than(size)
-          fill_trace_to_chunks(original_data)
-        end
-
-        it 'appends a trace' do
-          described_class.open(job_id, 'a+b') do |stream|
-            expect(stream.write(data)).to eq(data.length)
-          end
-
-          described_class.open(job_id, 'rb') do |stream|
-            expect(stream.read).to eq(original_data + data)
-            expect(total_chunks_count).to eq(stream.send(:chunks_count))
-            expect(total_chunks_size).to eq(total_size)
-          end
-        end
-      end
-
-      context 'when buffer size is larger than file size' do
-        before do
-          set_larger_buffer_size_than(size)
-          fill_trace_to_chunks(original_data)
-        end
-
-        it 'appends a trace' do
-          described_class.open(job_id, 'a+b') do |stream|
-            expect(stream.write(data)).to eq(data.length)
-          end
-
-          described_class.open(job_id, 'rb') do |stream|
-            expect(stream.read).to eq(original_data + data)
-            expect(total_chunks_count).to eq(stream.send(:chunks_count))
-            expect(total_chunks_size).to eq(total_size)
-          end
-        end
-      end
-    end
-  end
+  describe '.exist?' do
+    subject { described_class.exist?(job_id) }
+
+    context 'when a chunk exists in a store' do
+      before do
+        fill_trace_to_chunks(sample_trace_raw)
+      end
+
+      it { is_expected.to be_truthy }
+    end
+
+    context 'when chunks do not exists in any store' do
+      it { is_expected.to be_falsey }
+    end
+  end

-  describe '#truncate' do
-    context 'when data exists' do
-      context 'when buffer size is smaller than file size' do
-        before do
-          puts "#{self.class.name} - #{__callee__}: ===== 1"
-          set_smaller_buffer_size_than(size)
-          fill_trace_to_chunks(sample_trace_raw)
-        end
-
-        it 'truncates a trace' do
-          puts "#{self.class.name} - #{__callee__}: ===== 2"
-          described_class.open(job_id, 'rb') do |stream|
-            expect(stream.read).to eq(sample_trace_raw)
-          end
-
-          puts "#{self.class.name} - #{__callee__}: ===== 3"
-          described_class.open(job_id, 'wb') do |stream|
-            stream.truncate(0)
-          end
-
-          puts "#{self.class.name} - #{__callee__}: ===== 4"
-          expect(total_chunks_count).to eq(0)
-          expect(total_chunks_size).to eq(0)
-
-          puts "#{self.class.name} - #{__callee__}: ===== 5"
-          described_class.open(job_id, 'rb') do |stream|
-            expect(stream.read).to be_empty
-          end
-        end
-
-        context 'when offset is negative' do
-          it 'raises an error' do
-            described_class.open(job_id, 'wb') do |stream|
-              expect { stream.truncate(-1) }.to raise_error('Offset is out of bound')
-            end
-          end
-        end
-
-        context 'when offset is larger than file size' do
-          it 'raises an error' do
-            described_class.open(job_id, 'wb') do |stream|
-              expect { stream.truncate(size + 1) }.to raise_error('Offset is out of bound')
-            end
-          end
-        end
-      end
-
-      context 'when buffer size is larger than file size' do
-        before do
-          set_larger_buffer_size_than(size)
-          fill_trace_to_chunks(sample_trace_raw)
-        end
-
-        it 'truncates a trace' do
-          described_class.open(job_id, 'rb') do |stream|
-            expect(stream.read).to eq(sample_trace_raw)
-          end
-
-          described_class.open(job_id, 'wb') do |stream|
-            stream.truncate(0)
-          end
-
-          described_class.open(job_id, 'rb') do |stream|
-            expect(stream.read).to be_empty
-          end
-
-          expect(total_chunks_count).to eq(0)
-          expect(total_chunks_size).to eq(0)
-        end
-      end
-    end
-
-    context 'when data does not exist' do
-      before do
-        set_smaller_buffer_size_than(size)
-      end
-
-      it 'truncates a trace' do
-        described_class.open(job_id, 'wb') do |stream|
-          stream.truncate(0)
-          expect(stream.send(:tell)).to eq(0)
-          expect(stream.send(:size)).to eq(0)
-        end
-      end
-    end
-  end
+  describe '#truncate' do
+    subject { chunked_io.truncate(offset) }
+
+    let(:mode) { 'a+b' }
+
+    before do
+      fill_trace_to_chunks(sample_trace_raw)
+    end
+
+    context 'when offset is 0' do
+      let(:offset) { 0 }
+
+      it 'deletes all chunks' do
+        expect { subject }.to change { described_class.exist?(job_id) }.from(true).to(false)
+      end
+    end
+
+    context 'when offset is size' do
+      let(:offset) { sample_trace_raw.length }
+
+      it 'does nothing' do
+        expect { subject }.not_to change { described_class.exist?(job_id) }
+      end
+    end
+
+    context 'when offset is else' do
+      let(:offset) { 10 }
+
+      it 'raises an error' do
+        expect { subject }.to raise_error('Unexpected operation')
+      end
+    end
+  end
+
+  describe '#delete' do
+    subject { chunked_io.delete }
+
+    context 'when a chunk exists in a store' do
+      before do
+        fill_trace_to_chunks(sample_trace_raw)
+      end
+
+      it 'deletes' do
+        expect { subject }.to change { described_class.exist?(job_id) }.from(true).to(false)
+      end
+    end
+
+    context 'when chunks do not exists in any store' do
+      it 'deletes' do
+        expect { subject }.not_to change { described_class.exist?(job_id) }
+      end
+    end
+  end

   def total_chunks_count
     Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis.chunks_count(job_id) +
       Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database.chunks_count(job_id)
   end

   def total_chunks_size
     Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis.chunks_size(job_id) +
       Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database.chunks_size(job_id)
   end
 end
spec/support/chunked_io/chunked_io_helpers.rb (+8 / -21)

@@ -6,31 +6,14 @@ module ChunkedIOHelpers
   end

   def sample_trace_raw
-    if chunk_store == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis
-      File.read(expand_fixture_path('trace/sample_trace'))
+    # ChunkStore::Database doesn't support appending, so the test data size has to be least common multiple
+    if chunk_stores.first == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database
+      '01234567' * 32 # 256 bytes
     else
-      '01234567' * 32
+      File.read(expand_fixture_path('trace/sample_trace'))
     end
   end

-  # def sample_trace_raw_for_live_trace
-  #   File.read(expand_fixture_path('trace/sample_trace'))
-  # end
-
-  # def sample_trace_size_for_live_trace
-  #   sample_trace_raw_for_live_trace.length
-  # end
-
-  # def fill_trace_to_chunks_for_live_trace(data)
-  #   stream = described_class.new(job_id, 'a+b')
-  #   stream.write(data)
-  #   stream.close
-  # end
-
-  # def stub_chunk_store_get_failed
-  #   allow_any_instance_of(chunk_store).to receive(:get).and_return(nil)
-  # end
-
   def set_smaller_buffer_size_than(file_size)
     blocks = (file_size / 128)
     new_size = (blocks / 2) * 128

@@ -42,4 +25,8 @@ module ChunkedIOHelpers
     new_size = (blocks * 2) * 128
     allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
   end
+
+  def set_half_buffer_size_of(file_size)
+    allow_any_instance_of(described_class).to receive(:buffer_size).and_return(file_size / 2)
+  end
 end
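The buffer-size helpers stub buffer_size relative to the size of the test data. A standalone sketch of the values they produce for a hypothetical 1000-byte trace (the 128-byte block size comes from the helpers themselves):

    # Same arithmetic as set_smaller_buffer_size_than, set_larger_buffer_size_than
    # and set_half_buffer_size_of, evaluated for file_size = 1000.
    file_size = 1000
    blocks    = file_size / 128      # => 7

    smaller = (blocks / 2) * 128     # => 384
    larger  = (blocks * 2) * 128     # => 1792
    half    = file_size / 2          # => 500

    puts [smaller, larger, half].inspect  # => [384, 1792, 500]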
spec/support/chunked_io/live_trace_helpers.rb (deleted, 100644 → 0)

-module LiveTraceHelpers
-  def fill_trace_to_chunks(data)
-    stream = described_class.new(job_id, 'wb')
-    stream.write(data)
-    stream.close
-  end
-
-  def sample_trace_raw
-    File.read(expand_fixture_path('trace/sample_trace'))
-  end
-
-  def sample_trace_size
-    sample_trace_raw.length
-  end
-
-  def stub_chunk_store_get_failed
-    allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis).to receive(:get).and_return(nil)
-    allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database).to receive(:get).and_return(nil)
-  end
-
-  def set_smaller_buffer_size_than(file_size)
-    blocks = (file_size / 128)
-    new_size = (blocks / 2) * 128
-    allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
-  end
-
-  def set_larger_buffer_size_than(file_size)
-    blocks = (file_size / 128)
-    new_size = (blocks * 2) * 128
-    allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
-  end
-end
spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb (+75 / -44)

 shared_examples "ChunkedIO shared tests" do
   around(:each, :partial_support) do |example|
-    example.run if chunk_store == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis
+    example.run if chunk_stores.first == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis
   end

   describe '#new' do

@@ -165,7 +165,7 @@ shared_examples "ChunkedIO shared tests" do
     end

     it 'calls get_chunk only once' do
-      expect(chunk_store).to receive(:open).once.and_call_original
+      expect(chunk_stores.first).to receive(:open).once.and_call_original

       described_class.new(job_id, nil, 'rb').each_line { |line| }
     end

@@ -178,15 +178,19 @@ shared_examples "ChunkedIO shared tests" do
     context 'when read the whole size' do
       let(:length) { nil }

+      shared_examples 'reads a trace' do
+        it do
+          is_expected.to eq(sample_trace_raw)
+        end
+      end
+
       context 'when buffer size is smaller than file size' do
         before do
           set_smaller_buffer_size_than(sample_trace_raw.length)
           fill_trace_to_chunks(sample_trace_raw)
         end

-        it 'reads a trace' do
-          is_expected.to eq(sample_trace_raw)
-        end
+        it_behaves_like 'reads a trace'
       end

       context 'when buffer size is larger than file size', :partial_support do

@@ -195,9 +199,16 @@ shared_examples "ChunkedIO shared tests" do
           fill_trace_to_chunks(sample_trace_raw)
         end

-        it 'reads a trace' do
-          is_expected.to eq(sample_trace_raw)
-        end
+        it_behaves_like 'reads a trace'
       end
+
+      context 'when buffer size is half of file size' do
+        before do
+          set_half_buffer_size_of(sample_trace_raw.length)
+          fill_trace_to_chunks(sample_trace_raw)
+        end
+
+        it_behaves_like 'reads a trace'
+      end
     end

@@ -286,7 +297,7 @@ shared_examples "ChunkedIO shared tests" do
     let(:string_io) { StringIO.new(sample_trace_raw) }

     shared_examples 'all line matching' do
-      it 'reads a line' do
+      it do
         (0...sample_trace_raw.lines.count).each do
           expect(chunked_io.readline).to eq(string_io.readline)
         end

@@ -311,6 +322,15 @@ shared_examples "ChunkedIO shared tests" do
       it_behaves_like 'all line matching'
     end

+    context 'when buffer size is half of file size' do
+      before do
+        set_half_buffer_size_of(sample_trace_raw.length)
+        fill_trace_to_chunks(sample_trace_raw)
+      end
+
+      it_behaves_like 'all line matching'
+    end
+
     context 'when pos is at middle of the file' do
       before do
         set_smaller_buffer_size_than(sample_trace_raw.length)

@@ -331,40 +351,46 @@ shared_examples "ChunkedIO shared tests" do
     let(:data) { sample_trace_raw }

-    context 'when append mode', :partial_support do
+    context 'when append mode' do
       let(:mode) { 'a+b' }

       context 'when data does not exist' do
-        context 'when buffer size is smaller than file size' do
-          before do
-            set_smaller_buffer_size_than(sample_trace_raw.length)
-          end
-
-          it 'writes a trace' do
+        shared_examples 'writes a trace' do
+          it do
             is_expected.to eq(data.length)

             described_class.new(job_id, nil, 'rb') do |stream|
               expect(stream.read).to eq(data)
-              expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
-              expect(chunk_store.chunks_size(job_id)).to eq(data.length)
+              expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_count(job_id) })
+                .to eq(stream.send(:chunks_count))
+              expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_size(job_id) })
+                .to eq(data.length)
             end
           end
         end

+        context 'when buffer size is smaller than file size' do
+          before do
+            set_smaller_buffer_size_than(data.length)
+          end
+
+          it_behaves_like 'writes a trace'
+        end
+
         context 'when buffer size is larger than file size', :partial_support do
           before do
             set_larger_buffer_size_than(data.length)
           end

-          it 'writes a trace' do
-            is_expected.to eq(data.length)
-
-            described_class.new(job_id, nil, 'rb') do |stream|
-              expect(stream.read).to eq(data)
-              expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
-              expect(chunk_store.chunks_size(job_id)).to eq(data.length)
-            end
-          end
+          it_behaves_like 'writes a trace'
+        end
+
+        context 'when buffer size is half of file size' do
+          before do
+            set_half_buffer_size_of(data.length)
+          end
+
+          it_behaves_like 'writes a trace'
         end

         context 'when data is nil' do

@@ -376,46 +402,51 @@ shared_examples "ChunkedIO shared tests" do
         end
       end

-      context 'when data already exists' do
+      context 'when data already exists', :partial_support do
         let(:exist_data) { 'exist data' }
         let(:total_size) { exist_data.length + data.length }

-        context 'when buffer size is smaller than file size' do
-          before do
-            set_smaller_buffer_size_than(data.length)
-            fill_trace_to_chunks(exist_data)
-          end
-
-          it 'appends a trace' do
+        shared_examples 'appends a trace' do
+          it do
             described_class.new(job_id, nil, 'a+b') do |stream|
               expect(stream.write(data)).to eq(data.length)
             end

             described_class.new(job_id, nil, 'rb') do |stream|
               expect(stream.read).to eq(exist_data + data)
-              expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
-              expect(chunk_store.chunks_size(job_id)).to eq(total_size)
+              expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_count(job_id) })
+                .to eq(stream.send(:chunks_count))
+              expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_size(job_id) })
+                .to eq(total_size)
             end
           end
         end

+        context 'when buffer size is smaller than file size' do
+          before do
+            set_smaller_buffer_size_than(data.length)
+            fill_trace_to_chunks(exist_data)
+          end
+
+          it_behaves_like 'appends a trace'
+        end
+
-        context 'when buffer size is larger than file size' do
+        context 'when buffer size is larger than file size', :partial_support do
           before do
             set_larger_buffer_size_than(data.length)
             fill_trace_to_chunks(exist_data)
           end

-          it 'appends a trace' do
-            described_class.new(job_id, nil, 'a+b') do |stream|
-              expect(stream.write(data)).to eq(data.length)
-            end
-
-            described_class.new(job_id, nil, 'rb') do |stream|
-              expect(stream.read).to eq(exist_data + data)
-              expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
-              expect(chunk_store.chunks_size(job_id)).to eq(total_size)
-            end
-          end
+          it_behaves_like 'appends a trace'
+        end
+
+        context 'when buffer size is half of file size' do
+          before do
+            set_half_buffer_size_of(data.length)
+            fill_trace_to_chunks(exist_data)
+          end
+
+          it_behaves_like 'appends a trace'
+        end
       end
     end
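Since the shared examples can now run against several stores at once, the expectations sum chunk counts and sizes across chunk_stores with inject. A self-contained sketch of that aggregation pattern, with a fake store standing in for ChunkStore::Redis and ChunkStore::Database:

    # FakeStore is a stand-in for the real chunk store classes; only the
    # aggregation pattern from the shared examples is shown here.
    FakeStore = Struct.new(:counts) do
      def chunks_count(job_id)
        counts.fetch(job_id, 0)
      end
    end

    chunk_stores = [FakeStore.new({ 42 => 2 }), FakeStore.new({ 42 => 3 })]
    job_id = 42

    total = chunk_stores.inject(0) { |sum, store| sum + store.chunks_count(job_id) }
    puts total  # => 5 (chunks spread across both stores are counted together)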