Commit 3a99a6b9, authored Apr 02, 2018 by Shinya Maeda
Consolidate ChunkedIO

parent ebf69adc
Showing 12 changed files with 427 additions and 436 deletions
lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb          +14   −0
lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb       +8   −2
lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb         +28  −11
lib/gitlab/ci/trace/chunked_file/chunked_io.rb                +23   −9
lib/gitlab/ci/trace/chunked_file/concerns/hooks.rb             +0  −63
lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb      +49  −20
lib/gitlab/ci/trace/chunked_file/live_trace.rb                 +0   −4
spec/lib/gitlab/ci/trace/chunked_file/chunk_store/database_spec.rb  +58  −37
spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb     +77  −70
spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb             +1   −2
spec/support/chunked_io/chunked_io_helpers.rb                       +15  −19
spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb  +154  −199
lib/gitlab/ci/trace/chunked_file/chunk_store/base.rb

@@ -22,18 +22,32 @@ module Gitlab
         raise NotImplementedError
       end

+      # Write data to chunk store. Always overwrite.
+      #
+      # @param [String] data
+      # @return [Fixnum] length of the data after writing
       def write!(data)
         raise NotImplementedError
       end

+      # Append data to chunk store
+      #
+      # @param [String] data
+      # @return [Fixnum] length of the appended
       def append!(data)
         raise NotImplementedError
       end

+      # Truncate data to chunk store
+      #
+      # @param [String] offset
       def truncate!(offset)
         raise NotImplementedError
       end

+      # Delete data from chunk store
+      #
+      # @param [String] offset
       def delete!
         raise NotImplementedError
       end
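The added comments make the chunk-store contract explicit: write! always overwrites and returns the stored length, append! returns the length of the appended part, truncate! and delete! mutate in place. Purely as an illustration of that contract (this class is hypothetical and not part of the commit), a minimal in-memory store could look like this:

# Hypothetical sketch only: an in-memory store following the
# write!/append!/truncate!/delete! contract documented in Base above.
class InMemoryChunkStore
  def initialize
    @data = nil
  end

  # Always overwrite; return the length of the stored data.
  def write!(data)
    @data = data.dup
    @data.length
  end

  # Append; return the length of the appended part.
  def append!(data)
    @data = (@data || '') + data
    data.length
  end

  # Keep only the first `offset` bytes.
  def truncate!(offset)
    @data = @data.to_s[0, offset]
  end

  def delete!
    @data = nil
  end
end

store = InMemoryChunkStore.new
store.write!('hello')   # => 5
store.append!(' world') # => 6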
lib/gitlab/ci/trace/chunked_file/chunk_store/database.rb

@@ -48,6 +48,8 @@ module Gitlab
       end

       def get
+        puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}"
+
         job_trace_chunk.data
       end

@@ -56,9 +58,10 @@ module Gitlab
       end

       def write!(data)
+        raise NotImplementedError, 'Partial writing is not supported' unless params[:buffer_size] == data&.length
+        raise NotImplementedError, 'UPDATE (Overwriting data) is not supported' if job_trace_chunk.data
+
         puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}"
-        raise NotImplementedError, 'Partial write is not supported' unless params[:buffer_size] == data&.length
-        raise NotImplementedError, 'UPDATE is not supported' if job_trace_chunk.data

         job_trace_chunk.data = data
         job_trace_chunk.save!

@@ -75,7 +78,10 @@ module Gitlab
       end

       def delete!
+        raise ActiveRecord::RecordNotFound, 'Could not find deletable record' unless job_trace_chunk.persisted?
+
         puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}"

         job_trace_chunk.destroy!
       end
     end
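A rough sketch of the behaviour the reworded errors encode, written as a hypothetical Rails-console session on this branch (the job and parameter values are invented; the expected errors come from the updated spec below):

# Requires the GitLab Rails environment; illustrative only.
chunk = ::Ci::JobTraceChunk.new(job_id: job.id, chunk_index: 0)
store = Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database.new(chunk, buffer_size: 256)

store.write!('A' * 256)  # full-buffer write succeeds, returns 256
store.write!('A' * 10)   # would raise NotImplementedError, 'Partial writing is not supported'
store.write!('A' * 256)  # on an existing chunk: NotImplementedError, 'UPDATE (Overwriting data) is not supported'
store.delete!            # on a chunk that was never persisted: 'Could not find deletable record'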
lib/gitlab/ci/trace/chunked_file/chunk_store/redis.rb

@@ -51,6 +51,9 @@ module Gitlab
         end
       end

+      BufferKeyNotFoundError = Class.new(StandardError)
+      WriteError = Class.new(StandardError)
+
       attr_reader :buffer_key

       def initialize(buffer_key, **params)

@@ -64,6 +67,8 @@ module Gitlab
       end

       def get
+        puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}"
+
         Gitlab::Redis::Cache.with do |redis|
           redis.get(buffer_key)
         end

@@ -76,35 +81,47 @@ module Gitlab
       end

       def write!(data)
+        raise ArgumentError, 'Could not write empty data' unless data.present?
+
         puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}"

         Gitlab::Redis::Cache.with do |redis|
-          redis.set(buffer_key, data)
+          unless redis.set(buffer_key, data) == 'OK'
+            raise WriteError, 'Failed to write'
+          end

           redis.strlen(buffer_key)
         end
       end

       def append!(data)
+        raise ArgumentError, 'Could not write empty data' unless data.present?
+
         puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}"

         Gitlab::Redis::Cache.with do |redis|
-          redis.append(buffer_key, data)
+          raise BufferKeyNotFoundError, 'Buffer key is not found' unless redis.exists(buffer_key)

-          data.length
+          original_size = size
+          new_size = redis.append(buffer_key, data)
+          appended_size = new_size - original_size
+
+          raise WriteError, 'Failed to append' unless appended_size == data.length
+
+          appended_size
         end
       end

       def truncate!(offset)
-        puts "#{self.class.name} - #{__callee__}: offset: #{offset.inspect} params[:chunk_index]: #{params[:chunk_index]}"
-
-        Gitlab::Redis::Cache.with do |redis|
-          return 0 unless redis.exists(buffer_key)
-
-          truncated_data = redis.getrange(buffer_key, 0, offset)
-          redis.set(buffer_key, truncated_data)
-        end
+        raise NotImplementedError
       end

       def delete!
         puts "#{self.class.name} - #{__callee__}: params[:chunk_index]: #{params[:chunk_index]}"

         Gitlab::Redis::Cache.with do |redis|
-          redis.del(buffer_key)
+          raise BufferKeyNotFoundError, 'Buffer key is not found' unless redis.exists(buffer_key)
+
+          unless redis.del(buffer_key) == 1
+            raise WriteError, 'Failed to delete'
+          end
         end
       end
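The Redis store now fails loudly instead of silently returning Redis primitives. A hypothetical console session (GitLab Rails environment plus Redis required; values are invented, outcomes follow the updated spec below):

key   = Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis.buffer_key(job.id, 0)
store = Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis.new(key, buffer_size: 128.kilobytes)

store.append!('x')       # would raise BufferKeyNotFoundError: the key was never written
store.write!('')         # would raise ArgumentError, 'Could not write empty data'
store.write!('hello')    # => 5 (STRLEN after a verified SET)
store.append!(' world')  # => 6 (appended size, checked against data.length)
store.truncate!(3)       # would raise NotImplementedError: truncation is no longer supported here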
lib/gitlab/ci/trace/chunked_file/chunked_io.rb

@@ -8,7 +8,7 @@ module Gitlab
   class Trace
     module ChunkedFile
       class ChunkedIO
-        extend ChunkedFile::Concerns::Opener
+        # extend ChunkedFile::Concerns::Opener
         include ChunkedFile::Concerns::Errors
         include ChunkedFile::Concerns::Hooks
         include ChunkedFile::Concerns::Callbacks

@@ -22,13 +22,21 @@ module Gitlab
         alias_method :pos, :tell

-        def initialize(job_id, size, mode = 'rb')
-          @size = size
+        def initialize(job_id, size = nil, mode = 'rb', &block)
+          raise NotImplementedError, "Mode 'w' is not supported" if mode.include?('w')
+
+          @size = size || calculate_size(job_id)
           @tell = 0
           @job_id = job_id
           @mode = mode

-          raise NotImplementedError, "Mode 'w' is not supported" if mode.include?('w')
+          if block_given?
+            begin
+              yield self
+            ensure
+              self.close
+            end
+          end
         end

         def close

@@ -128,7 +136,7 @@ module Gitlab
         end

         def present?
-          chunk_store.chunks_count(job_id) > 0
+          chunks_count > 0
         end

         def delete

@@ -177,19 +185,21 @@ module Gitlab
         end

         def write_chunk(data)
+          written_size = 0
+
           chunk_store.open(job_id, chunk_index, params_for_store) do |store|
             with_callbacks(:write_chunk, store) do
-              written_size = if buffer_size == data.length
+              written_size = if buffer_size == data.length || store.size == 0
                                store.write!(data)
                              else
                                store.append!(data)
                              end

               raise WriteError, 'Written size mismatch' unless data.length == written_size
-
-              written_size
             end
           end
+
+          written_size
         end

         def truncate_chunk(offset)

@@ -228,7 +238,7 @@ module Gitlab
         end

         def chunks_count
-          (size / buffer_size)
+          (size / buffer_size.to_f).ceil
         end

         def first_chunk?

@@ -246,6 +256,10 @@ module Gitlab
         def buffer_size
           raise NotImplementedError
         end
+
+        def calculate_size(job_id)
+          chunk_store.chunks_size(job_id)
+        end
       end
     end
   end
 end
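The constructor change is the core of the consolidation: size becomes optional (it is computed from the chunk store when omitted) and a block form closes the stream automatically, replacing the Opener concern. A stand-alone sketch of that pattern, using an invented class so it runs outside GitLab:

# Hypothetical sketch, not GitLab code: mirrors the new ChunkedIO
# constructor shape (optional size, block form that guarantees close).
class SketchIO
  attr_reader :size

  def initialize(job_id, size = nil, mode = 'rb')
    raise NotImplementedError, "Mode 'w' is not supported" if mode.include?('w')

    @size = size || calculate_size(job_id)

    if block_given?
      begin
        yield self
      ensure
        close
      end
    end
  end

  def close
    puts 'closed'
  end

  private

  # Stand-in for ChunkedIO#calculate_size, which asks the chunk store.
  def calculate_size(_job_id)
    42
  end
end

SketchIO.new(1) { |io| puts io.size } # prints 42, then "closed"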
lib/gitlab/ci/trace/chunked_file/concerns/hooks.rb
deleted 100644 → 0

-module Gitlab
-  module Ci
-    class Trace
-      module ChunkedFile
-        module Concerns
-          module Hooks
-            extend ActiveSupport::Concern
-
-            included do
-              class_attribute :_before_methods, :_after_methods,
-                :instance_writer => false
-
-              self._before_methods = Hash.new []
-              self._after_methods = Hash.new []
-            end
-
-            class_methods do
-              def before_method(kind, callback)
-                self._before_methods = self._before_methods.
-                  merge kind => _before_methods[kind] + [callback]
-              end
-
-              def after_method(kind, callback)
-                self._after_methods = self._after_methods.
-                  merge kind => _after_methods[kind] + [callback]
-              end
-            end
-
-            def method_added(method_name)
-              return if self.class._before_methods.values.include?(method_name)
-              return if self.class._after_methods.values.include?(method_name)
-              return if hooked_methods.include?(method_name)
-
-              add_hooks_to(method_name)
-            end
-
-            private
-
-            def hooked_methods
-              @hooked_methods ||= []
-            end
-
-            def add_hooks_to(method_name)
-              hooked_methods << method_name
-
-              original_method = instance_method(method_name)
-
-              # re-define the method, but notice how we reference the original
-              # method definition
-              define_method(method_name) do |*args, &block|
-                self.class._before_methods[method_name].each { |hook| method(hook).call }
-
-                # now invoke the original method
-                original_method.bind(self).call(*args, &block).tap do
-                  self.class._after_methods[method_name].each { |hook| method(hook).call }
-                end
-              end
-            end
-          end
-        end
-      end
-    end
-  end
-end
lib/gitlab/ci/trace/chunked_file/concerns/permissions.rb

@@ -6,28 +6,19 @@ module Gitlab
         module Permissions
           extend ActiveSupport::Concern

+          WRITABLE_MODE = %w[a]
+          READABLE_MODE = %w[r +]
+
           included do
             PermissionError = Class.new(StandardError)

             attr_reader :write_lock_uuid
-
-            # mode checks
-            before_method :read, :can_read!
-            before_method :readline, :can_read!
-            before_method :each_line, :can_read!
-            before_method :write, :can_write!
-            before_method :truncate, :can_write!
-
-            # write_lock
-            before_method :write, :check_lock!
-            before_method :truncate, :check_lock!
-            before_method :delete, :check_lock!
           end

           def initialize(job_id, size, mode = 'rb')
-            if /(w|a)/ =~ mode
+            if WRITABLE_MODE.any? { |m| mode.include?(m) }
               @write_lock_uuid = Gitlab::ExclusiveLease
-                .new(write_lock_key, timeout: 1.hour.to_i).try_obtain
+                .new(write_lock_key(job_id), timeout: 1.hour.to_i).try_obtain

               raise PermissionError, 'Already opened by another process' unless write_lock_uuid
             end

@@ -37,25 +28,63 @@ module Gitlab
           def close
             if write_lock_uuid
-              Gitlab::ExclusiveLease.cancel(write_lock_key, write_lock_uuid)
+              Gitlab::ExclusiveLease.cancel(write_lock_key(job_id), write_lock_uuid)
             end

             super
           end

-          def check_lock!
-            raise PermissionError, 'Could not modify the file without lock' unless write_lock_uuid
+          def read(*args)
+            can_read!
+
+            super
+          end
+
+          def readline(*args)
+            can_read!
+
+            super
+          end
+
+          def each_line(*args)
+            can_read!
+
+            super
+          end
+
+          def write(*args)
+            can_write!
+
+            super
+          end
+
+          def truncate(*args)
+            can_write!
+
+            super
+          end
+
+          def delete(*args)
+            can_write!
+
+            super
           end

+          private
+
           def can_read!
-            raise IOError, 'not opened for reading' unless /(r|+)/ =~ mode
+            unless READABLE_MODE.any? { |m| mode.include?(m) }
+              raise IOError, 'not opened for reading'
+            end
           end

           def can_write!
-            raise IOError, 'not opened for writing' unless /(w|a)/ =~ mode
+            unless WRITABLE_MODE.any? { |m| mode.include?(m) }
+              raise IOError, 'not opened for writing'
+            end
           end

-          def write_lock_key
+          def write_lock_key(job_id)
             "live_trace:operation:write:#{job_id}"
           end
         end
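With the Hooks concern gone, the permission checks become ordinary method overrides that call super. A reduced, stand-alone sketch of the mode guards (the surrounding class is invented for illustration; the constants and error messages mirror the concern above):

# Hypothetical sketch, runnable outside GitLab.
class GuardedStream
  WRITABLE_MODE = %w[a].freeze
  READABLE_MODE = %w[r +].freeze

  def initialize(mode)
    @mode = mode
  end

  def read
    can_read!
    'some data'
  end

  def write(data)
    can_write!
    data.length
  end

  private

  def can_read!
    unless READABLE_MODE.any? { |m| @mode.include?(m) }
      raise IOError, 'not opened for reading'
    end
  end

  def can_write!
    unless WRITABLE_MODE.any? { |m| @mode.include?(m) }
      raise IOError, 'not opened for writing'
    end
  end
end

GuardedStream.new('rb').read        # => "some data"
GuardedStream.new('a+b').write('x') # => 1
GuardedStream.new('rb').write('x')  # raises IOError, 'not opened for writing'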
lib/gitlab/ci/trace/chunked_file/live_trace.rb

@@ -12,10 +12,6 @@ module Gitlab
       after_callback :write_chunk, :stash_to_database

-      def initialize(job_id, mode)
-        super(job_id, calculate_size(job_id), mode)
-      end
-
       def stash_to_database(store)
         # Once data is filled into redis, move the data to database
         if store.filled?
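The LiveTrace override becomes redundant because ChunkedIO#initialize now defaults the size to calculate_size(job_id) itself. A hypothetical Rails-console call on this branch, assuming LiveTrace supplies the concrete chunk store and buffer size:

Gitlab::Ci::Trace::ChunkedFile::LiveTrace.new(job.id, nil, 'a+b') do |stream|
  stream.write('appended by the runner')  # stream is closed automatically by the block form
end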
spec/lib/gitlab/ci/trace/chunked_file/chunk_store/database_spec.rb

 require 'spec_helper'

 describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database do
+  let(:job) { create(:ci_build) }
   let(:job_id) { job.id }
   let(:chunk_index) { 0 }
   let(:buffer_size) { 256 }
   let(:job_trace_chunk) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index) }
   let(:params) { { buffer_size: buffer_size } }
-  let(:trace) { 'A' * buffer_size }
-  let(:job) { create(:ci_build) }
+  let(:data) { 'A' * buffer_size }

   describe '.open' do
     subject { described_class.open(job_id, chunk_index, params) }

@@ -35,7 +35,7 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database do
     context 'when job_trace_chunk exists' do
       before do
-        described_class.new(job_trace_chunk, params).write!(trace)
+        described_class.new(job_trace_chunk, params).write!(data)
       end

       it { is_expected.to be_truthy }

@@ -51,17 +51,17 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database do
     context 'when job_trace_chunk exists' do
       before do
-        described_class.new(job_trace_chunk, params).write!(trace)
+        described_class.new(job_trace_chunk, params).write!(data)
       end

       it { is_expected.to eq(1) }

       context 'when two chunks exists' do
         let(:job_trace_chunk_2) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index + 1) }
-        let(:trace_2) { 'B' * buffer_size }
+        let(:data_2) { 'B' * buffer_size }

         before do
-          described_class.new(job_trace_chunk_2, params).write!(trace_2)
+          described_class.new(job_trace_chunk_2, params).write!(data_2)
         end

         it { is_expected.to eq(2) }

@@ -78,18 +78,18 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database do
     context 'when job_trace_chunk exists' do
       before do
-        described_class.new(job_trace_chunk, params).write!(trace)
+        described_class.new(job_trace_chunk, params).write!(data)
       end

-      it { is_expected.to eq(trace.length) }
+      it { is_expected.to eq(data.length) }

       context 'when two chunks exists' do
         let(:job_trace_chunk_2) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index + 1) }
-        let(:trace_2) { 'B' * buffer_size }
-        let(:chunks_size) { trace.length + trace_2.length }
+        let(:data_2) { 'B' * buffer_size }
+        let(:chunks_size) { data.length + data_2.length }

         before do
-          described_class.new(job_trace_chunk_2, params).write!(trace_2)
+          described_class.new(job_trace_chunk_2, params).write!(data_2)
         end

         it { is_expected.to eq(chunks_size) }

@@ -101,15 +101,48 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database do
     end
   end

+  describe '.delete_all' do
+    subject { described_class.delete_all(job_id) }
+
+    context 'when job_trace_chunk exists' do
+      before do
+        described_class.new(job_trace_chunk, params).write!(data)
+      end
+
+      it 'deletes all' do
+        expect { subject }.to change { described_class.chunks_count(job_id) }.by(-1)
+      end
+
+      context 'when two chunks exists' do
+        let(:job_trace_chunk_2) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index + 1) }
+        let(:data_2) { 'B' * buffer_size }
+
+        before do
+          described_class.new(job_trace_chunk_2, params).write!(data_2)
+        end
+
+        it 'deletes all' do
+          expect { subject }.to change { described_class.chunks_count(job_id) }.by(-2)
+        end
+      end
+    end
+
+    context 'when buffer_key does not exist' do
+      it 'deletes all' do
+        expect { subject }.not_to change { described_class.chunks_count(job_id) }
+      end
+    end
+  end
+
   describe '#get' do
     subject { described_class.new(job_trace_chunk, params).get }

     context 'when job_trace_chunk exists' do
       before do
-        described_class.new(job_trace_chunk, params).write!(trace)
+        described_class.new(job_trace_chunk, params).write!(data)
       end

-      it { is_expected.to eq(trace) }
+      it { is_expected.to eq(data) }
     end

     context 'when job_trace_chunk does not exist' do

@@ -122,10 +155,10 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database do
     context 'when job_trace_chunk exists' do
       before do
-        described_class.new(job_trace_chunk, params).write!(trace)
+        described_class.new(job_trace_chunk, params).write!(data)
       end

-      it { is_expected.to eq(trace.length) }
+      it { is_expected.to eq(data.length) }
     end

     context 'when job_trace_chunk does not exist' do

@@ -134,37 +167,31 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database do
   end

   describe '#write!' do
-    subject { described_class.new(job_trace_chunk, params).write!(trace) }
+    subject { described_class.new(job_trace_chunk, params).write!(data) }

     context 'when job_trace_chunk exists' do
       before do
-        described_class.new(job_trace_chunk, params).write!(trace)
+        described_class.new(job_trace_chunk, params).write!(data)
       end

-      it { expect { subject }.to raise_error('UPDATE is not supported') }
+      it { expect { subject }.to raise_error('UPDATE (Overwriting data) is not supported') }
     end

     context 'when job_trace_chunk does not exist' do
       let(:expected_data) { ::Ci::JobTraceChunk.find_by(job_id: job_id, chunk_index: chunk_index).data }

       it 'writes' do
-        is_expected.to eq(trace.length)
+        is_expected.to eq(data.length)

-        expect(expected_data).to eq(trace)
+        expect(expected_data).to eq(data)
       end
     end

     context 'when data is nil' do
-      let(:trace) { nil }
+      let(:data) { nil }

-      it { expect { subject }.to raise_error('Partial write is not supported') }
+      it { expect { subject }.to raise_error('Partial writing is not supported') }
     end
   end

-  describe '#truncate!' do
-    subject { described_class.new(job_trace_chunk, params).truncate!(0) }
-
-    it { expect { subject }.to raise_error(NotImplementedError) }
-  end
-
   describe '#delete!' do

@@ -172,7 +199,7 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database do
     context 'when job_trace_chunk exists' do
       before do
-        described_class.new(job_trace_chunk, params).write!(trace)
+        described_class.new(job_trace_chunk, params).write!(data)
       end

       it 'deletes' do

@@ -187,14 +214,8 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database do
     end

     context 'when job_trace_chunk does not exist' do
-      it 'deletes' do
-        expect(::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index))
-          .to be_falsy
-
-        subject
-
-        expect(::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index))
-          .to be_falsy
+      it 'raises an error' do
+        expect { subject }.to raise_error('Could not find deletable record')
       end
     end
   end
spec/lib/gitlab/ci/trace/chunked_file/chunk_store/redis_spec.rb

 require 'spec_helper'

 describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_cache do
-  let(:job_id) { 1 }
+  let(:job) { create(:ci_build) }
+  let(:job_id) { job.id }
   let(:chunk_index) { 0 }
   let(:buffer_size) { 128.kilobytes }
   let(:buffer_key) { described_class.buffer_key(job_id, chunk_index) }
   let(:params) { { buffer_size: buffer_size } }
-  let(:trace) { 'Here is the trace' }
+  let(:data) { 'Here is the trace' }

   describe '.open' do
     subject { described_class.open(job_id, chunk_index, params) }

@@ -34,7 +35,7 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_
     context 'when buffer_key exists' do
       before do
-        described_class.new(buffer_key, params).write!(trace)
+        described_class.new(buffer_key, params).write!(data)
       end

       it { is_expected.to be_truthy }

@@ -50,17 +51,17 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_
     context 'when buffer_key exists' do
       before do
-        described_class.new(buffer_key, params).write!(trace)
+        described_class.new(buffer_key, params).write!(data)
       end

       it { is_expected.to eq(1) }

       context 'when two chunks exists' do
         let(:buffer_key_2) { described_class.buffer_key(job_id, chunk_index + 1) }
-        let(:trace_2) { 'Another trace' }
+        let(:data_2) { 'Another data' }

         before do
-          described_class.new(buffer_key_2, params).write!(trace_2)
+          described_class.new(buffer_key_2, params).write!(data_2)
         end

         it { is_expected.to eq(2) }

@@ -77,18 +78,18 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_
     context 'when buffer_key exists' do
       before do
-        described_class.new(buffer_key, params).write!(trace)
+        described_class.new(buffer_key, params).write!(data)
       end

-      it { is_expected.to eq(trace.length) }
+      it { is_expected.to eq(data.length) }

       context 'when two chunks exists' do
         let(:buffer_key_2) { described_class.buffer_key(job_id, chunk_index + 1) }
-        let(:trace_2) { 'Another trace' }
-        let(:chunks_size) { trace.length + trace_2.length }
+        let(:data_2) { 'Another data' }
+        let(:chunks_size) { data.length + data_2.length }

         before do
-          described_class.new(buffer_key_2, params).write!(trace_2)
+          described_class.new(buffer_key_2, params).write!(data_2)
         end

         it { is_expected.to eq(chunks_size) }

@@ -100,6 +101,39 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_
     end
   end

+  describe '.delete_all' do
+    subject { described_class.delete_all(job_id) }
+
+    context 'when buffer_key exists' do
+      before do
+        described_class.new(buffer_key, params).write!(data)
+      end
+
+      it 'deletes all' do
+        expect { subject }.to change { described_class.chunks_count(job_id) }.by(-1)
+      end
+
+      context 'when two chunks exists' do
+        let(:buffer_key_2) { described_class.buffer_key(job_id, chunk_index + 1) }
+        let(:data_2) { 'Another data' }
+
+        before do
+          described_class.new(buffer_key_2, params).write!(data_2)
+        end
+
+        it 'deletes all' do
+          expect { subject }.to change { described_class.chunks_count(job_id) }.by(-2)
+        end
+      end
+    end
+
+    context 'when buffer_key does not exist' do
+      it 'deletes all' do
+        expect { subject }.not_to change { described_class.chunks_count(job_id) }
+      end
+    end
+  end
+
   describe '.buffer_key' do
     subject { described_class.buffer_key(job_id, chunk_index) }

@@ -111,10 +145,10 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_
     context 'when buffer_key exists' do
       before do
-        described_class.new(buffer_key, params).write!(trace)
+        described_class.new(buffer_key, params).write!(data)
       end

-      it { is_expected.to eq(trace) }
+      it { is_expected.to eq(data) }
     end

     context 'when buffer_key does not exist' do

@@ -127,10 +161,10 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_
     context 'when buffer_key exists' do
       before do
-        described_class.new(buffer_key, params).write!(trace)
+        described_class.new(buffer_key, params).write!(data)
       end

-      it { is_expected.to eq(trace.length) }
+      it { is_expected.to eq(data.length) }
     end

     context 'when buffer_key does not exist' do

@@ -139,91 +173,72 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_
   end

   describe '#write!' do
-    subject { described_class.new(buffer_key, params).write!(trace) }
+    subject { described_class.new(buffer_key, params).write!(data) }

     context 'when buffer_key exists' do
       before do
-        described_class.new(buffer_key, params).write!('Already data in the chunk')
+        described_class.new(buffer_key, params).write!('Already data in the data')
       end

       it 'overwrites' do
-        is_expected.to eq(trace.length)
+        is_expected.to eq(data.length)

         Gitlab::Redis::Cache.with do |redis|
-          expect(redis.get(buffer_key)).to eq(trace)
+          expect(redis.get(buffer_key)).to eq(data)
         end
       end
     end

     context 'when buffer_key does not exist' do
       it 'writes' do
-        is_expected.to eq(trace.length)
+        is_expected.to eq(data.length)

         Gitlab::Redis::Cache.with do |redis|
-          expect(redis.get(buffer_key)).to eq(trace)
+          expect(redis.get(buffer_key)).to eq(data)
         end
       end
     end

     context 'when data is nil' do
-      let(:trace) { nil }
+      let(:data) { nil }

-      it 'clears value' do
-        is_expected.to eq(0)
+      it 'raises an error' do
+        expect { described_class.new(buffer_key, params).write!(data) }
+          .to raise_error('Could not write empty data')
       end
     end
   end

-  describe '#truncate!' do
-    subject { described_class.new(buffer_key, params).truncate!(offset) }
-
-    let(:offset) { 5 }
-
-    context 'when buffer_key exists' do
-      before do
-        described_class.new(buffer_key, params).write!(trace)
-      end
-
-      it 'truncates' do
-        Gitlab::Redis::Cache.with do |redis|
-          expect(redis.get(buffer_key)).to eq(trace)
-        end
-
-        subject
-
-        Gitlab::Redis::Cache.with do |redis|
-          expect(redis.get(buffer_key)).to eq(trace.slice(0..offset))
-        end
-      end
-
-      context 'when offset is larger than data size' do
-        let(:offset) { 100 }
-
-        it 'truncates' do
-          Gitlab::Redis::Cache.with do |redis|
-            expect(redis.get(buffer_key)).to eq(trace)
-          end
-
-          subject
-
-          Gitlab::Redis::Cache.with do |redis|
-            expect(redis.get(buffer_key)).to eq(trace.slice(0..offset))
-          end
-        end
-      end
-    end
-
-    context 'when buffer_key does not exist' do
-      it 'truncates' do
-        Gitlab::Redis::Cache.with do |redis|
-          expect(redis.get(buffer_key)).to be_nil
-        end
-
-        subject
-
-        Gitlab::Redis::Cache.with do |redis|
-          expect(redis.get(buffer_key)).to be_nil
-        end
-      end
-    end
-  end
+  describe '#append!' do
+    subject { described_class.new(buffer_key, params).append!(data) }
+
+    context 'when buffer_key exists' do
+      let(:written_chunk) { 'Already data in the data' }
+
+      before do
+        described_class.new(buffer_key, params).write!(written_chunk)
+      end
+
+      it 'appends' do
+        is_expected.to eq(data.length)
+
+        Gitlab::Redis::Cache.with do |redis|
+          expect(redis.get(buffer_key)).to eq(written_chunk + data)
+        end
+      end
+    end
+
+    context 'when buffer_key does not exist' do
+      it 'raises an error' do
+        expect { subject }.to raise_error(described_class::BufferKeyNotFoundError)
+      end
+    end
+
+    context 'when data is nil' do
+      let(:data) { nil }
+
+      it 'raises an error' do
+        expect { subject }.to raise_error('Could not write empty data')
+      end
+    end
+  end

@@ -233,7 +248,7 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_
     context 'when buffer_key exists' do
       before do
-        described_class.new(buffer_key, params).write!(trace)
+        described_class.new(buffer_key, params).write!(data)
       end

       it 'deletes' do

@@ -250,16 +265,8 @@ describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_
     end

     context 'when buffer_key does not exist' do
-      it 'deletes' do
-        Gitlab::Redis::Cache.with do |redis|
-          expect(redis.exists(buffer_key)).to be_falsy
-        end
-
-        subject
-
-        Gitlab::Redis::Cache.with do |redis|
-          expect(redis.exists(buffer_key)).to be_falsy
-        end
+      it 'raises an error' do
+        expect { subject }.to raise_error(described_class::BufferKeyNotFoundError)
       end
     end
   end
spec/lib/gitlab/ci/trace/chunked_file/chunked_io_spec.rb

@@ -3,10 +3,9 @@ require 'spec_helper'
 describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do
   include ChunkedIOHelpers

-  let(:chunked_io) { described_class.new(job_id, size, mode) }
+  let(:chunked_io) { described_class.new(job_id, nil, mode) }
   let(:job) { create(:ci_build) }
   let(:job_id) { job.id }
-  let(:size) { sample_trace_size }
   let(:mode) { 'rb' }

   describe 'ChunkStore is Redis', :partial_support do
spec/support/chunked_io/chunked_io_helpers.rb

 module ChunkedIOHelpers
   def fill_trace_to_chunks(data)
-    stream = described_class.new(job_id, data.length, 'wb')
+    stream = described_class.new(job_id, nil, 'a+b')
     stream.write(data)
     stream.close
   end

@@ -13,27 +13,23 @@ module ChunkedIOHelpers
     end
   end

-  def sample_trace_size
-    sample_trace_raw.length
-  end
-
-  def sample_trace_raw_for_live_trace
-    File.read(expand_fixture_path('trace/sample_trace'))
-  end
-
-  def sample_trace_size_for_live_trace
-    sample_trace_raw_for_live_trace.length
-  end
-
-  def fill_trace_to_chunks_for_live_trace(data)
-    stream = described_class.new(job_id, 'wb')
-    stream.write(data)
-    stream.close
-  end
-
-  def stub_chunk_store_get_failed
-    allow_any_instance_of(chunk_store).to receive(:get).and_return(nil)
-  end
+  # def sample_trace_raw_for_live_trace
+  #   File.read(expand_fixture_path('trace/sample_trace'))
+  # end
+
+  # def sample_trace_size_for_live_trace
+  #   sample_trace_raw_for_live_trace.length
+  # end
+
+  # def fill_trace_to_chunks_for_live_trace(data)
+  #   stream = described_class.new(job_id, 'a+b')
+  #   stream.write(data)
+  #   stream.close
+  # end
+
+  # def stub_chunk_store_get_failed
+  #   allow_any_instance_of(chunk_store).to receive(:get).and_return(nil)
+  # end

   def set_smaller_buffer_size_than(file_size)
     blocks = (file_size / 128)
spec/support/shared_examples/lib/gitlab/ci/trace/chunked_file/chunked_io_shared_examples.rb

@@ -8,53 +8,112 @@ shared_examples "ChunkedIO shared tests" do
       let(:mode) { 'rb' }

       it 'raises no exception' do
-        described_class.new(job_id, size, mode)
-
-        expect { described_class.new(job_id, size, mode) }.not_to raise_error
+        expect { described_class.new(job_id, nil, mode) }.not_to raise_error
+        expect { described_class.new(job_id, nil, mode) }.not_to raise_error
       end
     end

-    context 'when mode is write' do
+    context 'when mode is append' do
       let(:mode) { 'a+b' }

       it 'raises an exception' do
-        described_class.new(job_id, size, mode)
-
-        expect { described_class.new(job_id, size, mode) }.to raise_error('Already opened by another process')
+        expect { described_class.new(job_id, nil, mode) }.not_to raise_error
+        expect { described_class.new(job_id, nil, mode) }.to raise_error('Already opened by another process')
       end

       context 'when closed after open' do
         it 'does not raise an exception' do
-          described_class.new(job_id, size, mode).close
-
-          expect { described_class.new(job_id, size, mode) }.not_to raise_error
+          expect { described_class.new(job_id, nil, mode).close }.not_to raise_error
+          expect { described_class.new(job_id, nil, mode) }.not_to raise_error
         end
       end
     end
+
+    context 'when mode is write' do
+      let(:mode) { 'wb' }
+
+      it 'raises no exception' do
+        expect { described_class.new(job_id, nil, mode) }.to raise_error("Mode 'w' is not supported")
+      end
+    end
   end
+
+  describe 'Permissions', :partial_support do
+    before do
+      fill_trace_to_chunks(sample_trace_raw)
+    end
+
+    context "when mode is 'a+b'" do
+      let(:mode) { 'a+b' }
+
+      it 'can write' do
+        expect { described_class.new(job_id, nil, mode).write('abc') }
+          .not_to raise_error
+      end
+
+      it 'can read' do
+        expect { described_class.new(job_id, nil, mode).read(10) }
+          .not_to raise_error
+      end
+    end
+
+    context "when mode is 'ab'" do
+      let(:mode) { 'ab' }
+
+      it 'can write' do
+        expect { described_class.new(job_id, nil, mode).write('abc') }
+          .not_to raise_error
+      end
+
+      it 'can not read' do
+        expect { described_class.new(job_id, nil, mode).read(10) }
+          .to raise_error('not opened for reading')
+      end
+    end
+
+    context "when mode is 'rb'" do
+      let(:mode) { 'rb' }
+
+      it 'can not write' do
+        expect { described_class.new(job_id, nil, mode).write('abc') }
+          .to raise_error('not opened for writing')
+      end
+
+      it 'can read' do
+        expect { described_class.new(job_id, nil, mode).read(10) }
+          .not_to raise_error
+      end
+    end
+  end

   describe '#seek' do
     subject { chunked_io.seek(pos, where) }

+    before do
+      set_smaller_buffer_size_than(sample_trace_raw.length)
+      fill_trace_to_chunks(sample_trace_raw)
+    end
+
     context 'when moves pos to end of the file' do
       let(:pos) { 0 }
       let(:where) { IO::SEEK_END }

-      it { is_expected.to eq(size) }
+      it { is_expected.to eq(sample_trace_raw.length) }
     end

     context 'when moves pos to middle of the file' do
-      let(:pos) { size / 2 }
+      let(:pos) { sample_trace_raw.length / 2 }
       let(:where) { IO::SEEK_SET }

-      it { is_expected.to eq(size / 2) }
+      it { is_expected.to eq(pos) }
     end

     context 'when moves pos around' do
       it 'matches the result' do
         expect(chunked_io.seek(0)).to eq(0)
         expect(chunked_io.seek(100, IO::SEEK_CUR)).to eq(100)
-        expect { chunked_io.seek(size + 1, IO::SEEK_CUR) }.to raise_error('new position is outside of file')
+        expect { chunked_io.seek(sample_trace_raw.length + 1, IO::SEEK_CUR) }
+          .to raise_error('new position is outside of file')
       end
     end
   end

@@ -62,9 +121,14 @@ shared_examples "ChunkedIO shared tests" do
   describe '#eof?' do
     subject { chunked_io.eof? }

+    before do
+      set_smaller_buffer_size_than(sample_trace_raw.length)
+      fill_trace_to_chunks(sample_trace_raw)
+    end
+
     context 'when current pos is at end of the file' do
       before do
-        chunked_io.seek(size, IO::SEEK_SET)
+        chunked_io.seek(sample_trace_raw.length, IO::SEEK_SET)
       end

       it { is_expected.to be_truthy }

@@ -84,39 +148,39 @@ shared_examples "ChunkedIO shared tests" do
     context 'when buffer size is smaller than file size' do
       before do
-        set_smaller_buffer_size_than(size)
+        set_smaller_buffer_size_than(sample_trace_raw.length)
         fill_trace_to_chunks(sample_trace_raw)
       end

       it 'yields lines' do
-        expect { |b| described_class.new(job_id, size, 'rb').each_line(&b) }
+        expect { |b| described_class.new(job_id, nil, 'rb').each_line(&b) }
           .to yield_successive_args(*string_io.each_line.to_a)
       end
     end

     context 'when buffer size is larger than file size', :partial_support do
       before do
-        set_larger_buffer_size_than(size)
+        set_larger_buffer_size_than(sample_trace_raw.length)
         fill_trace_to_chunks(sample_trace_raw)
       end

       it 'calls get_chunk only once' do
         expect(chunk_store).to receive(:open).once.and_call_original

-        described_class.new(job_id, size, 'rb').each_line { |line| }
+        described_class.new(job_id, nil, 'rb').each_line { |line| }
       end
     end
   end

   describe '#read' do
-    subject { described_class.new(job_id, size, 'rb').read(length) }
+    subject { described_class.new(job_id, nil, 'rb').read(length) }

-    context 'when read whole size' do
+    context 'when read the whole size' do
       let(:length) { nil }

       context 'when buffer size is smaller than file size' do
         before do
-          set_smaller_buffer_size_than(size)
+          set_smaller_buffer_size_than(sample_trace_raw.length)
           fill_trace_to_chunks(sample_trace_raw)
         end

@@ -127,7 +191,7 @@ shared_examples "ChunkedIO shared tests" do
       context 'when buffer size is larger than file size', :partial_support do
         before do
-          set_larger_buffer_size_than(size)
+          set_larger_buffer_size_than(sample_trace_raw.length)
           fill_trace_to_chunks(sample_trace_raw)
         end

@@ -142,7 +206,7 @@ shared_examples "ChunkedIO shared tests" do
       context 'when buffer size is smaller than file size' do
         before do
-          set_smaller_buffer_size_than(size)
+          set_smaller_buffer_size_than(sample_trace_raw.length)
           fill_trace_to_chunks(sample_trace_raw)
         end

@@ -153,7 +217,7 @@ shared_examples "ChunkedIO shared tests" do
       context 'when buffer size is larger than file size', :partial_support do
         before do
-          set_larger_buffer_size_than(size)
+          set_larger_buffer_size_than(sample_trace_raw.length)
          fill_trace_to_chunks(sample_trace_raw)
         end

@@ -164,11 +228,11 @@ shared_examples "ChunkedIO shared tests" do
       end

       context 'when tries to read oversize' do
-        let(:length) { size + 1000 }
+        let(:length) { sample_trace_raw.length + 1000 }

         context 'when buffer size is smaller than file size' do
           before do
-            set_smaller_buffer_size_than(size)
+            set_smaller_buffer_size_than(sample_trace_raw.length)
             fill_trace_to_chunks(sample_trace_raw)
           end

@@ -179,7 +243,7 @@ shared_examples "ChunkedIO shared tests" do
         context 'when buffer size is larger than file size', :partial_support do
           before do
-            set_larger_buffer_size_than(size)
+            set_larger_buffer_size_than(sample_trace_raw.length)
             fill_trace_to_chunks(sample_trace_raw)
           end

@@ -194,7 +258,7 @@ shared_examples "ChunkedIO shared tests" do
         context 'when buffer size is smaller than file size' do
           before do
-            set_smaller_buffer_size_than(size)
+            set_smaller_buffer_size_than(sample_trace_raw.length)
             fill_trace_to_chunks(sample_trace_raw)
           end

@@ -205,7 +269,7 @@ shared_examples "ChunkedIO shared tests" do
         context 'when buffer size is larger than file size', :partial_support do
           before do
-            set_larger_buffer_size_than(size)
+            set_larger_buffer_size_than(sample_trace_raw.length)
             fill_trace_to_chunks(sample_trace_raw)
           end

@@ -214,21 +278,6 @@ shared_examples "ChunkedIO shared tests" do
         end
       end
     end
-
-    context 'when chunk store failed to get chunk' do
-      let(:length) { nil }
-
-      before do
-        set_smaller_buffer_size_than(size)
-        fill_trace_to_chunks(sample_trace_raw)
-        stub_chunk_store_get_failed
-      end
-
-      it 'reads a trace' do
-        expect { subject }.to raise_error(described_class::FailedToGetChunkError)
-      end
-    end
   end

   describe '#readline' do

@@ -244,23 +293,9 @@ shared_examples "ChunkedIO shared tests" do
     end

-    context 'when chunk store failed to get chunk' do
-      let(:length) { nil }
-
-      before do
-        set_smaller_buffer_size_than(size)
-        fill_trace_to_chunks(sample_trace_raw)
-        stub_chunk_store_get_failed
-      end
-
-      it 'reads a trace' do
-        expect { subject }.to raise_error(described_class::FailedToGetChunkError)
-      end
-    end
-
     context 'when buffer size is smaller than file size' do
       before do
-        set_smaller_buffer_size_than(size)
+        set_smaller_buffer_size_than(sample_trace_raw.length)
         fill_trace_to_chunks(sample_trace_raw)
       end

@@ -269,7 +304,7 @@ shared_examples "ChunkedIO shared tests" do
     context 'when buffer size is larger than file size', :partial_support do
       before do
-        set_larger_buffer_size_than(size)
+        set_larger_buffer_size_than(sample_trace_raw.length)
         fill_trace_to_chunks(sample_trace_raw)
       end

@@ -278,11 +313,11 @@ shared_examples "ChunkedIO shared tests" do
     context 'when pos is at middle of the file' do
       before do
-        set_smaller_buffer_size_than(size)
+        set_smaller_buffer_size_than(sample_trace_raw.length)
         fill_trace_to_chunks(sample_trace_raw)

-        chunked_io.seek(size / 2)
-        string_io.seek(size / 2)
+        chunked_io.seek(chunked_io.size / 2)
+        string_io.seek(string_io.size / 2)
       end

       it 'reads from pos' do

@@ -296,18 +331,19 @@ shared_examples "ChunkedIO shared tests" do
     let(:data) { sample_trace_raw }

-    context 'when write mode' do
-      let(:mode) { 'wb' }
+    context 'when append mode', :partial_support do
+      let(:mode) { 'a+b' }

       context 'when data does not exist' do
         context 'when buffer size is smaller than file size' do
           before do
-            set_smaller_buffer_size_than(size)
+            set_smaller_buffer_size_than(sample_trace_raw.length)
           end

           it 'writes a trace' do
             is_expected.to eq(data.length)

-            described_class.open(job_id, size, 'rb') do |stream|
+            described_class.new(job_id, nil, 'rb') do |stream|
               expect(stream.read).to eq(data)
               expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
               expect(chunk_store.chunks_size(job_id)).to eq(data.length)

@@ -317,13 +353,13 @@ shared_examples "ChunkedIO shared tests" do
         context 'when buffer size is larger than file size', :partial_support do
           before do
-            set_larger_buffer_size_than(size)
+            set_larger_buffer_size_than(data.length)
           end

           it 'writes a trace' do
             is_expected.to eq(data.length)

-            described_class.open(job_id, size, 'rb') do |stream|
+            described_class.new(job_id, nil, 'rb') do |stream|
               expect(stream.read).to eq(data)
               expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
               expect(chunk_store.chunks_size(job_id)).to eq(data.length)

@@ -340,23 +376,23 @@ shared_examples "ChunkedIO shared tests" do
       end
     end

-    context 'when append mode', :partial_support do
-      let(:original_data) { 'original data' }
-      let(:total_size) { original_data.length + data.length }
+    context 'when data already exists' do
+      let(:exist_data) { 'exist data' }
+      let(:total_size) { exist_data.length + data.length }

       context 'when buffer size is smaller than file size' do
         before do
-          set_smaller_buffer_size_than(size)
-          fill_trace_to_chunks(original_data)
+          set_smaller_buffer_size_than(data.length)
+          fill_trace_to_chunks(exist_data)
         end

         it 'appends a trace' do
-          described_class.open(job_id, original_data.length, 'a+b') do |stream|
+          described_class.new(job_id, nil, 'a+b') do |stream|
             expect(stream.write(data)).to eq(data.length)
           end

-          described_class.open(job_id, total_size, 'rb') do |stream|
-            expect(stream.read).to eq(original_data + data)
+          described_class.new(job_id, nil, 'rb') do |stream|
+            expect(stream.read).to eq(exist_data + data)
             expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
             expect(chunk_store.chunks_size(job_id)).to eq(total_size)
           end

@@ -365,17 +401,17 @@ shared_examples "ChunkedIO shared tests" do
       context 'when buffer size is larger than file size' do
         before do
-          set_larger_buffer_size_than(size)
-          fill_trace_to_chunks(original_data)
+          set_larger_buffer_size_than(data.length)
+          fill_trace_to_chunks(exist_data)
         end

         it 'appends a trace' do
-          described_class.open(job_id, original_data.length, 'a+b') do |stream|
+          described_class.new(job_id, nil, 'a+b') do |stream|
             expect(stream.write(data)).to eq(data.length)
           end

-          described_class.open(job_id, total_size, 'rb') do |stream|
-            expect(stream.read).to eq(original_data + data)
+          described_class.new(job_id, nil, 'rb') do |stream|
+            expect(stream.read).to eq(exist_data + data)
             expect(chunk_store.chunks_count(job_id)).to eq(stream.send(:chunks_count))
             expect(chunk_store.chunks_size(job_id)).to eq(total_size)
           end

@@ -383,86 +419,5 @@ shared_examples "ChunkedIO shared tests" do
       end
     end
   end
-
-  describe '#truncate' do
-    context 'when data exists' do
-      context 'when buffer size is smaller than file size' do
-        before do
-          set_smaller_buffer_size_than(size)
-          fill_trace_to_chunks(sample_trace_raw)
-        end
-
-        it 'truncates a trace' do
-          described_class.open(job_id, size, 'rb') do |stream|
-            expect(stream.read).to eq(sample_trace_raw)
-          end
-
-          described_class.open(job_id, size, 'wb') do |stream|
-            stream.truncate(0)
-          end
-
-          described_class.open(job_id, 0, 'rb') do |stream|
-            expect(stream.read).to be_empty
-          end
-
-          expect(chunk_store.chunks_count(job_id)).to eq(0)
-          expect(chunk_store.chunks_size(job_id)).to eq(0)
-        end
-
-        context 'when offset is negative' do
-          it 'raises an error' do
-            described_class.open(job_id, size, 'wb') do |stream|
-              expect { stream.truncate(-1) }.to raise_error('Offset is out of bound')
-            end
-          end
-        end
-
-        context 'when offset is larger than file size' do
-          it 'raises an error' do
-            described_class.open(job_id, size, 'wb') do |stream|
-              expect { stream.truncate(size + 1) }.to raise_error('Offset is out of bound')
-            end
-          end
-        end
-      end
-
-      context 'when buffer size is larger than file size', :partial_support do
-        before do
-          set_larger_buffer_size_than(size)
-          fill_trace_to_chunks(sample_trace_raw)
-        end
-
-        it 'truncates a trace' do
-          described_class.open(job_id, size, 'rb') do |stream|
-            expect(stream.read).to eq(sample_trace_raw)
-          end
-
-          described_class.open(job_id, size, 'wb') do |stream|
-            stream.truncate(0)
-          end
-
-          described_class.open(job_id, 0, 'rb') do |stream|
-            expect(stream.read).to be_empty
-          end
-
-          expect(chunk_store.chunks_count(job_id)).to eq(0)
-          expect(chunk_store.chunks_size(job_id)).to eq(0)
-        end
-      end
-    end
-
-    context 'when data does not exist' do
-      before do
-        set_smaller_buffer_size_than(size)
-      end
-
-      it 'truncates a trace' do
-        described_class.open(job_id, size, 'wb') do |stream|
-          stream.truncate(0)
-          expect(stream.send(:tell)).to eq(0)
-          expect(stream.send(:size)).to eq(0)
-        end
-      end
-    end
-  end
 end