Commit 1de5b8db authored Apr 02, 2018 by Shinya Maeda

Fix Live trace

parent 3a99a6b9

Showing 7 changed files with 146 additions and 292 deletions
lib/gitlab/ci/trace/chunked_file/chunked_io.rb                                                +2   -17
lib/gitlab/ci/trace/chunked_file/live_trace.rb                                                +5   -10
spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb                                      +4    -4
spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb                                     +52  -164
spec/support/chunked_io/chunked_io_helpers.rb                                                 +8   -21
spec/support/chunked_io/live_trace_helpers.rb                                                 +0   -32
spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb  +75   -44
lib/gitlab/ci/trace/chunked_file/chunked_io.rb

@@ -202,17 +202,6 @@ module Gitlab
       written_size
     end
 
-    def truncate_chunk(offset)
-      chunk_store.open(job_id, chunk_index, params_for_store) do |store|
-        with_callbacks(:truncate_chunk, store) do
-          removed_size = store.size - offset
-          store.truncate!(offset)
-          removed_size
-        end
-      end
-    end
-
     def params_for_store(c_index = chunk_index)
       {
         buffer_size: buffer_size,
@@ -241,12 +230,8 @@ module Gitlab
       (size / buffer_size.to_f).ceil
     end
 
-    def first_chunk?
-      chunk_index == 0
-    end
-
-    def last_chunk?
-      (chunk_start...chunk_end).include?(tell)
+    def last_range
+      ((size / buffer_size) * buffer_size..size)
     end
 
     def chunk_store
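For context, a standalone sketch (not code from this commit) of what the new last_range helper evaluates to: the byte range of the trailing, still-growing chunk, computed from the current trace size and the buffer size. The numeric values below are arbitrary examples.

# Standalone sketch, assumed values; mirrors the `last_range` expression added above.
size        = 300  # total bytes written so far (arbitrary example)
buffer_size = 128  # chunk size in bytes (arbitrary example)

last_range = ((size / buffer_size) * buffer_size..size)

puts last_range.inspect        # => 256..300, the trailing partial chunk
puts last_range.include?(280)  # => true,  a position still being appended to
puts last_range.include?(100)  # => false, a position inside an already-full chunk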
lib/gitlab/ci/trace/chunked_file/live_trace.rb

@@ -5,8 +5,7 @@ module Gitlab
     class LiveTrace < ChunkedIO
       class << self
         def exist?(job_id)
-          ChunkStores::Redis.chunks_count(job_id) > 0 ||
-            ChunkStores::Database.chunks_count(job_id) > 0
+          ChunkStore::Redis.chunks_count(job_id) > 0 || ChunkStore::Database.chunks_count(job_id) > 0
         end
       end
@@ -22,7 +21,7 @@ module Gitlab
         end
       end
 
-      # Efficient process than iterating each
+      # This is more efficient than iterating each chunk store and deleting
       def truncate(offset)
         if offset == 0
           delete
@@ -33,13 +32,9 @@ module Gitlab
         end
       end
 
-      def present?
-        self.exist?(job_id)
-      end
-
       def delete
-        ChunkStores::Redis.delete_all(job_id)
-        ChunkStores::Database.delete_all(job_id)
+        ChunkStore::Redis.delete_all(job_id)
+        ChunkStore::Database.delete_all(job_id)
       end
 
       private
@@ -50,7 +45,7 @@ module Gitlab
       end
 
       def chunk_store
-        if last_chunk? || eof?
+        if last_range.include?(tell)
           ChunkStore::Redis
         else
          ChunkStore::Database
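A minimal sketch (an assumption for illustration, not the commit's code) of the routing decision the fixed chunk_store method makes: positions inside last_range, i.e. the live, still-growing chunk, go to the Redis store, while everything before it goes to the Database store. store_for is a hypothetical helper.

# Hypothetical helper; restates the `last_range.include?(tell)` branch above.
def store_for(tell, size, buffer_size)
  last_range = ((size / buffer_size) * buffer_size..size)
  last_range.include?(tell) ? :redis : :database
end

puts store_for(280, 300, 128)  # => redis    (current position is in the trailing chunk)
puts store_for(40,  300, 128)  # => database (position falls in an already-full chunk)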
spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb

@@ -9,10 +9,10 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do
   let(:mode) { 'rb' }
 
   describe 'ChunkStore is Redis', :partial_support do
-    let(:chunk_store) { Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis }
+    let(:chunk_stores) { [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis] }
 
     before do
-      allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store)
+      allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_stores.first)
       allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes)
     end
@@ -20,10 +20,10 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do
   end
 
   describe 'ChunkStore is Database' do
-    let(:chunk_store) { Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database }
+    let(:chunk_stores) { [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database] }
 
     before do
-      allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_store)
+      allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_stores.first)
       allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes)
     end
spec/lib/gitlab/ci/trace/chunked_file/live_trace_spec.rb

This spec is largely rewritten (+52 -164). The previous version, driven by LiveTraceHelpers, is removed: the #write and #truncate examples with per-buffer-size contexts, the debugging puts statements, and the total_chunks_count / total_chunks_size helper methods. After this commit the spec reads:

require 'spec_helper'

describe Gitlab::Ci::Trace::ChunkedFile::LiveTrace, :clean_gitlab_redis_cache do
  include ChunkedIOHelpers

  let(:chunked_io) { described_class.new(job_id, nil, mode) }
  let(:job) { create(:ci_build) }
  let(:job_id) { job.id }
  let(:mode) { 'rb' }

  let(:chunk_stores) do
    [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis,
     Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database]
  end

  describe 'ChunkStores are Redis and Database', :partial_support do
    it_behaves_like 'ChunkedIO shared tests'
  end

  describe '.exist?' do
    subject { described_class.exist?(job_id) }

    context 'when a chunk exists in a store' do
      before do
        fill_trace_to_chunks(sample_trace_raw)
      end

      it { is_expected.to be_truthy }
    end

    context 'when chunks do not exists in any store' do
      it { is_expected.to be_falsey }
    end
  end

  describe '#truncate' do
    subject { chunked_io.truncate(offset) }

    let(:mode) { 'a+b' }

    before do
      fill_trace_to_chunks(sample_trace_raw)
    end

    context 'when offset is 0' do
      let(:offset) { 0 }

      it 'deletes all chunks' do
        expect { subject }.to change { described_class.exist?(job_id) }.from(true).to(false)
      end
    end

    context 'when offset is size' do
      let(:offset) { sample_trace_raw.length }

      it 'does nothing' do
        expect { subject }.not_to change { described_class.exist?(job_id) }
      end
    end

    context 'when offset is else' do
      let(:offset) { 10 }

      it 'raises an error' do
        expect { subject }.to raise_error('Unexpected operation')
      end
    end
  end

  describe '#delete' do
    subject { chunked_io.delete }

    context 'when a chunk exists in a store' do
      before do
        fill_trace_to_chunks(sample_trace_raw)
      end

      it 'deletes' do
        expect { subject }.to change { described_class.exist?(job_id) }.from(true).to(false)
      end
    end

    context 'when chunks do not exists in any store' do
      it 'deletes' do
        expect { subject }.not_to change { described_class.exist?(job_id) }
      end
    end
  end
end
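The rewritten spec pins down a narrow truncate contract for a live trace. The sketch below is an assumed simplification of that contract, not the implementation: truncating to offset 0 deletes every chunk, truncating to the current size is a no-op, and any other offset raises 'Unexpected operation'.

# Assumed simplification of the behaviour the spec describes; not the actual class.
def truncate_action(offset, size)
  case offset
  when 0    then :delete_all_chunks
  when size then :noop
  else raise 'Unexpected operation'
  end
end

puts truncate_action(0, 300)    # => delete_all_chunks
puts truncate_action(300, 300)  # => noop
begin
  truncate_action(10, 300)
rescue RuntimeError => e
  puts e.message                # => Unexpected operation
end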
spec/support/chunked_io/chunked_io_helpers.rb

@@ -6,31 +6,14 @@ module ChunkedIOHelpers
   end
 
   def sample_trace_raw
-    if chunk_store == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis
-      File.read(expand_fixture_path('trace/sample_trace'))
+    # ChunkStore::Database doesn't support appending, so the test data size has to be least common multiple
+    if chunk_stores.first == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database
+      '01234567' * 32 # 256 bytes
     else
-      '01234567' * 32
+      File.read(expand_fixture_path('trace/sample_trace'))
     end
   end
 
-  # def sample_trace_raw_for_live_trace
-  #   File.read(expand_fixture_path('trace/sample_trace'))
-  # end
-
-  # def sample_trace_size_for_live_trace
-  #   sample_trace_raw_for_live_trace.length
-  # end
-
-  # def fill_trace_to_chunks_for_live_trace(data)
-  #   stream = described_class.new(job_id, 'a+b')
-  #   stream.write(data)
-  #   stream.close
-  # end
-
-  # def stub_chunk_store_get_failed
-  #   allow_any_instance_of(chunk_store).to receive(:get).and_return(nil)
-  # end
-
   def set_smaller_buffer_size_than(file_size)
     blocks = (file_size / 128)
     new_size = (blocks / 2) * 128
@@ -42,4 +25,8 @@ module ChunkedIOHelpers
     new_size = (blocks * 2) * 128
     allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
   end
+
+  def set_half_buffer_size_of(file_size)
+    allow_any_instance_of(described_class).to receive(:buffer_size).and_return(file_size / 2)
+  end
 end
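A worked example (assumed input values) of the buffer-size arithmetic these helpers stub, including the newly added set_half_buffer_size_of:

# Assumed example; 256 bytes matches the '01234567' * 32 fixture used for the Database store.
file_size = 256
blocks    = file_size / 128   # => 2 full 128-byte blocks

smaller = (blocks / 2) * 128  # => 128, what set_smaller_buffer_size_than stubs
larger  = (blocks * 2) * 128  # => 512, what set_larger_buffer_size_than stubs
half    = file_size / 2       # => 128, what the new set_half_buffer_size_of stubs

p [smaller, larger, half]     # => [128, 512, 128]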
spec/support/chunked_io/live_trace_helpers.rb  (deleted, 100644 → 0)

module LiveTraceHelpers
  def fill_trace_to_chunks(data)
    stream = described_class.new(job_id, 'wb')
    stream.write(data)
    stream.close
  end

  def sample_trace_raw
    File.read(expand_fixture_path('trace/sample_trace'))
  end

  def sample_trace_size
    sample_trace_raw.length
  end

  def stub_chunk_store_get_failed
    allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis).to receive(:get).and_return(nil)
    allow_any_instance_of(Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database).to receive(:get).and_return(nil)
  end

  def set_smaller_buffer_size_than(file_size)
    blocks = (file_size / 128)
    new_size = (blocks / 2) * 128
    allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
  end

  def set_larger_buffer_size_than(file_size)
    blocks = (file_size / 128)
    new_size = (blocks * 2) * 128
    allow_any_instance_of(described_class).to receive(:buffer_size).and_return(new_size)
  end
end
spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb

 shared_examples "ChunkedIO shared tests" do
   around(:each, :partial_support) do |example|
-    example.run if chunk_store == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis
+    example.run if chunk_stores.first == Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis
   end
 
   describe '#new' do
@@ -165,7 +165,7 @@ shared_examples "ChunkedIO shared tests" do
     end
 
     it 'calls get_chunk only once' do
-      expect(chunk_store).to receive(:open).once.and_call_original
+      expect(chunk_stores.first).to receive(:open).once.and_call_original
 
       described_class.new(job_id, nil, 'rb').each_line { |line| }
     end
@@ -178,15 +178,19 @@ shared_examples "ChunkedIO shared tests" do
     context 'when read the whole size' do
       let(:length) { nil }
 
+      shared_examples 'reads a trace' do
+        it do
+          is_expected.to eq(sample_trace_raw)
+        end
+      end
+
       context 'when buffer size is smaller than file size' do
         before do
           set_smaller_buffer_size_than(sample_trace_raw.length)
           fill_trace_to_chunks(sample_trace_raw)
         end
 
-        it 'reads a trace' do
-          is_expected.to eq(sample_trace_raw)
-        end
+        it_behaves_like 'reads a trace'
       end
 
       context 'when buffer size is larger than file size', :partial_support do
@@ -195,9 +199,16 @@ shared_examples "ChunkedIO shared tests" do
           fill_trace_to_chunks(sample_trace_raw)
         end
 
-        it 'reads a trace' do
-          is_expected.to eq(sample_trace_raw)
-        end
+        it_behaves_like 'reads a trace'
+      end
+
+      context 'when buffer size is half of file size' do
+        before do
+          set_half_buffer_size_of(sample_trace_raw.length)
+          fill_trace_to_chunks(sample_trace_raw)
+        end
+
+        it_behaves_like 'reads a trace'
       end
     end
@@ -286,7 +297,7 @@ shared_examples "ChunkedIO shared tests" do
     let(:string_io) { StringIO.new(sample_trace_raw) }
 
     shared_examples 'all line matching' do
-      it 'reads a line' do
+      it do
         (0...sample_trace_raw.lines.count).each do
           expect(chunked_io.readline).to eq(string_io.readline)
         end
@@ -311,6 +322,15 @@ shared_examples "ChunkedIO shared tests" do
       it_behaves_like 'all line matching'
     end
 
+    context 'when buffer size is half of file size' do
+      before do
+        set_half_buffer_size_of(sample_trace_raw.length)
+        fill_trace_to_chunks(sample_trace_raw)
+      end
+
+      it_behaves_like 'all line matching'
+    end
+
     context 'when pos is at middle of the file' do
       before do
         set_smaller_buffer_size_than(sample_trace_raw.length)
@@ -331,40 +351,46 @@ shared_examples "ChunkedIO shared tests" do
     let(:data) { sample_trace_raw }
 
-    context 'when append mode', :partial_support do
+    context 'when append mode' do
       let(:mode) { 'a+b' }
 
       context 'when data does not exist' do
-        context 'when buffer size is smaller than file size' do
-          before do
-            set_smaller_buffer_size_than(sample_trace_raw.length)
-          end
-
-          it 'writes a trace' do
+        shared_examples 'writes a trace' do
+          it do
             is_expected.to eq(data.length)
 
             described_class.new(job_id, nil, 'rb') do |stream|
               expect(stream.read).to eq(data)
-              expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
-              expect(chunk_store.chunks_size(job_id)).to eq(data.length)
+              expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_count(job_id) })
+                .to eq(stream.send(:chunks_count))
+              expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_size(job_id) })
+                .to eq(data.length)
             end
           end
         end
 
+        context 'when buffer size is smaller than file size' do
+          before do
+            set_smaller_buffer_size_than(data.length)
+          end
+
+          it_behaves_like 'writes a trace'
+        end
+
         context 'when buffer size is larger than file size', :partial_support do
           before do
            set_larger_buffer_size_than(data.length)
          end
 
-          it 'writes a trace' do
-            is_expected.to eq(data.length)
-
-            described_class.new(job_id, nil, 'rb') do |stream|
-              expect(stream.read).to eq(data)
-              expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
-              expect(chunk_store.chunks_size(job_id)).to eq(data.length)
-            end
-          end
+          it_behaves_like 'writes a trace'
+        end
+
+        context 'when buffer size is half of file size' do
+          before do
+            set_half_buffer_size_of(data.length)
+          end
+
+          it_behaves_like 'writes a trace'
        end
      end
 
      context 'when data is nil' do
@@ -376,46 +402,51 @@ shared_examples "ChunkedIO shared tests" do
         end
       end
 
-      context 'when data already exists' do
+      context 'when data already exists', :partial_support do
         let(:exist_data) { 'exist data' }
         let(:total_size) { exist_data.length + data.length }
 
-        context 'when buffer size is smaller than file size' do
-          before do
-            set_smaller_buffer_size_than(data.length)
-            fill_trace_to_chunks(exist_data)
-          end
-
-          it 'appends a trace' do
+        shared_examples 'appends a trace' do
+          it do
             described_class.new(job_id, nil, 'a+b') do |stream|
               expect(stream.write(data)).to eq(data.length)
             end
 
             described_class.new(job_id, nil, 'rb') do |stream|
               expect(stream.read).to eq(exist_data + data)
-              expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
-              expect(chunk_store.chunks_size(job_id)).to eq(total_size)
+              expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_count(job_id) })
+                .to eq(stream.send(:chunks_count))
+              expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_size(job_id) })
+                .to eq(total_size)
             end
           end
         end
 
-        context 'when buffer size is larger than file size' do
+        context 'when buffer size is smaller than file size' do
           before do
-            set_larger_buffer_size_than(data.length)
+            set_smaller_buffer_size_than(data.length)
             fill_trace_to_chunks(exist_data)
           end
 
-          it 'appends a trace' do
-            described_class.new(job_id, nil, 'a+b') do |stream|
-              expect(stream.write(data)).to eq(data.length)
-            end
-
-            described_class.new(job_id, nil, 'rb') do |stream|
-              expect(stream.read).to eq(exist_data + data)
-              expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
-              expect(chunk_store.chunks_size(job_id)).to eq(total_size)
-            end
-          end
+          it_behaves_like 'appends a trace'
+        end
+
+        context 'when buffer size is larger than file size', :partial_support do
+          before do
+            set_larger_buffer_size_than(data.length)
+            fill_trace_to_chunks(exist_data)
+          end
+
+          it_behaves_like 'appends a trace'
+        end
+
+        context 'when buffer size is half of file size' do
+          before do
+            set_half_buffer_size_of(data.length)
+            fill_trace_to_chunks(exist_data)
+          end
+
+          it_behaves_like 'appends a trace'
        end
      end
    end
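Because the shared examples now receive a list of stores (chunk_stores) rather than a single chunk_store, the expectations sum chunk counts and sizes across stores with inject. A self-contained sketch of that aggregation pattern; FakeStore is a made-up stand-in, not anything from the commit:

# FakeStore is for illustration only; the real stores are ChunkStore::Redis and ChunkStore::Database.
FakeStore = Struct.new(:chunks_by_job) do
  def chunks_count(job_id)
    chunks_by_job.fetch(job_id, []).length
  end

  def chunks_size(job_id)
    chunks_by_job.fetch(job_id, []).sum(&:bytesize)
  end
end

chunk_stores = [
  FakeStore.new(42 => ['a' * 128, 'b' * 128]),  # e.g. full chunks held in the database store
  FakeStore.new(42 => ['c' * 44])               # e.g. the live trailing chunk held in the redis store
]

total_count = chunk_stores.inject(0) { |sum, store| sum + store.chunks_count(42) }
total_size  = chunk_stores.inject(0) { |sum, store| sum + store.chunks_size(42) }

p total_count  # => 3
p total_size   # => 300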