nexedi / gitlab-ce · Commits

Commit 723f74d4 authored Jul 11, 2018 by Jacob Vosmaer
Remove last flags from Blob and Workhorse
parent 7a3a52b4
Showing 4 changed files with 96 additions and 223 deletions
lib/gitlab/git/blob.rb (+3, -86)
lib/gitlab/workhorse.rb (+6, -18)
spec/lib/gitlab/git/blob_spec.rb (+72, -92)
spec/lib/gitlab/workhorse_spec.rb (+15, -27)

lib/gitlab/git/blob.rb
@@ -61,17 +61,8 @@ module Gitlab
         # Keep in mind that this method may allocate a lot of memory. It is up
         # to the caller to limit the number of blobs and blob_size_limit.
         #
-        # Gitaly migration issue: https://gitlab.com/gitlab-org/gitaly/issues/798
         def batch(repository, blob_references, blob_size_limit: MAX_DATA_DISPLAY_SIZE)
-          Gitlab::GitalyClient.migrate(:list_blobs_by_sha_path) do |is_enabled|
-            if is_enabled
-              repository.gitaly_blob_client.get_blobs(blob_references, blob_size_limit).to_a
-            else
-              blob_references.map do |sha, path|
-                find(repository, sha, path, limit: blob_size_limit)
-              end
-            end
-          end
+          repository.gitaly_blob_client.get_blobs(blob_references, blob_size_limit).to_a
         end

         # Returns an array of Blob instances just with the metadata, that means
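
With the flag removed, Gitlab::Git::Blob.batch always takes the Gitaly path. A minimal usage sketch, mirroring the '.batch' examples in spec/lib/gitlab/git/blob_spec.rb further down in this commit (the repository object and blob references are whatever the caller already has; the values here reuse the spec's fixtures):

# Illustrative only; values taken from the spec in this commit.
blob_references = [
  [SeedRepo::Commit::ID, "files/ruby/popen.rb"],
  [SeedRepo::Commit::ID, 'six']
]

# One Gitaly round trip for all references; data is truncated to 10 bytes per blob.
blobs = Gitlab::Git::Blob.batch(repository, blob_references, blob_size_limit: 10)
blobs.first.data.size # => 10, per the spec's 'limiting' context
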
@@ -84,16 +75,8 @@ module Gitlab
         # Returns array of Gitlab::Git::Blob
         # Does not guarantee blob data will be set
         def batch_lfs_pointers(repository, blob_ids)
-          repository.gitaly_migrate(:batch_lfs_pointers) do |is_enabled|
-            if is_enabled
-              repository.gitaly_blob_client.batch_lfs_pointers(blob_ids.to_a)
-            else
-              blob_ids.lazy
-                      .select { |sha| possible_lfs_blob?(repository, sha) }
-                      .map { |sha| rugged_raw(repository, sha, limit: LFS_POINTER_MAX_SIZE) }
-                      .select(&:lfs_pointer?)
-                      .force
-            end
+          repository.wrapped_gitaly_errors do
+            repository.gitaly_blob_client.batch_lfs_pointers(blob_ids.to_a)
           end
         end

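
Note that the new body still calls blob_ids.to_a before handing the list to the Gitaly client, so callers can keep passing any enumerable. A small sketch of the two call styles the spec below exercises (lfs_blob_id stands in for a real blob SHA):

# Illustrative only; both calls behave the same because of the .to_a above.
Gitlab::Git::Blob.batch_lfs_pointers(repository, [lfs_blob_id])
Gitlab::Git::Blob.batch_lfs_pointers(repository, [lfs_blob_id].lazy)
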
@@ -104,72 +87,6 @@ module Gitlab
         def size_could_be_lfs?(size)
           size.between?(LFS_POINTER_MIN_SIZE, LFS_POINTER_MAX_SIZE)
         end
-
-        private
-
-        # Recursive search of blob id by path
-        #
-        # Ex.
-        #   blog/            # oid: 1a
-        #     app/           # oid: 2a
-        #       models/      # oid: 3a
-        #       file.rb      # oid: 4a
-        #
-        #
-        # Blob.find_entry_by_path(repo, '1a', 'blog', 'app', 'file.rb') # => '4a'
-        #
-        def find_entry_by_path(repository, root_id, *path_parts)
-          root_tree = repository.lookup(root_id)
-
-          entry = root_tree.find do |entry|
-            entry[:name] == path_parts[0]
-          end
-
-          return nil unless entry
-
-          if path_parts.size > 1
-            return nil unless entry[:type] == :tree
-
-            path_parts.shift
-            find_entry_by_path(repository, entry[:oid], *path_parts)
-          else
-            [:blob, :commit].include?(entry[:type]) ? entry : nil
-          end
-        end
-
-        def submodule_blob(blob_entry, path, sha)
-          new(
-            id: blob_entry[:oid],
-            name: blob_entry[:name],
-            size: 0,
-            data: '',
-            path: path,
-            commit_id: sha
-          )
-        end
-
-        def rugged_raw(repository, sha, limit:)
-          blob = repository.lookup(sha)
-
-          return unless blob.is_a?(Rugged::Blob)
-
-          new(
-            id: blob.oid,
-            size: blob.size,
-            data: blob.content(limit),
-            binary: blob.binary?
-          )
-        end
-
-        # Efficient lookup to determine if object size
-        # and type make it a possible LFS blob without loading
-        # blob content into memory with repository.lookup(sha)
-        def possible_lfs_blob?(repository, sha)
-          object_header = repository.rugged.read_header(sha)
-
-          object_header[:type] == :blob &&
-            size_could_be_lfs?(object_header[:len])
-        end
       end

       def initialize(options)

lib/gitlab/workhorse.rb
@@ -98,16 +98,12 @@ module Gitlab
       end

       def send_git_patch(repository, diff_refs)
-        params = if Gitlab::GitalyClient.feature_enabled?(:workhorse_send_git_patch, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT)
-                   {
-                     'GitalyServer' => gitaly_server_hash(repository),
-                     'RawPatchRequest' => Gitaly::RawPatchRequest.new(
-                       gitaly_diff_or_patch_hash(repository, diff_refs)
-                     ).to_json
-                   }
-                 else
-                   workhorse_diff_or_patch_hash(repository, diff_refs)
-                 end
+        params = {
+          'GitalyServer' => gitaly_server_hash(repository),
+          'RawPatchRequest' => Gitaly::RawPatchRequest.new(
+            gitaly_diff_or_patch_hash(repository, diff_refs)
+          ).to_json
+        }

         [
           SEND_DATA_HEADER,
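
For context, the params hash above is what ends up in the Gitlab-Workhorse-Send-Data response header. A rough sketch of how the two return values fit together, assuming the usual encoding (JSON, then URL-safe Base64) that the spec's decode_workhorse_header helper reverses; SEND_DATA_HEADER and encode are simplified stand-ins, not the literal definitions elsewhere in workhorse.rb:

require 'base64'
require 'json'

# Simplified stand-ins for the real constant and helper in workhorse.rb.
SEND_DATA_HEADER = 'Gitlab-Workhorse-Send-Data'

def encode(hash)
  Base64.urlsafe_encode64(JSON.dump(hash))
end

# send_git_patch returns a [header_name, header_value] pair along these lines:
def send_data_pair(params)
  [SEND_DATA_HEADER, "git-format-patch:#{encode(params)}"]
end
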
@@ -220,14 +216,6 @@ module Gitlab
         }
       end

-      def workhorse_diff_or_patch_hash(repository, diff_refs)
-        {
-          'RepoPath' => repository.path_to_repo,
-          'ShaFrom' => diff_refs.base_sha,
-          'ShaTo' => diff_refs.head_sha
-        }
-      end
-
       def gitaly_diff_or_patch_hash(repository, diff_refs)
         {
           repository: repository.gitaly_repository,
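
The removed workhorse_diff_or_patch_hash was the non-Gitaly payload (RepoPath/ShaFrom/ShaTo). Its Gitaly counterpart, gitaly_diff_or_patch_hash, is only partially visible above; judging from the RawPatchRequest expectations in spec/lib/gitlab/workhorse_spec.rb below, it presumably produces a hash along these lines (a sketch inferred from the spec, not the verbatim method body):

# Sketch inferred from the spec expectations in this commit.
def gitaly_diff_or_patch_hash(repository, diff_refs)
  {
    repository: repository.gitaly_repository,
    left_commit_id: diff_refs.base_sha,
    right_commit_id: diff_refs.head_sha
  }
end
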

spec/lib/gitlab/git/blob_spec.rb
@@ -178,77 +178,67 @@ describe Gitlab::Git::Blob, seed_helper: true do
   end

   describe '.batch' do
-    shared_examples 'loading blobs in batch' do
-      let(:blob_references) do
-        [
-          [SeedRepo::Commit::ID, "files/ruby/popen.rb"],
-          [SeedRepo::Commit::ID, 'six']
-        ]
-      end
-
-      subject { described_class.batch(repository, blob_references) }
-
-      it { expect(subject.size).to eq(blob_references.size) }
-
-      context 'first blob' do
-        let(:blob) { subject[0] }
-
-        it { expect(blob.id).to eq(SeedRepo::RubyBlob::ID) }
-        it { expect(blob.name).to eq(SeedRepo::RubyBlob::NAME) }
-        it { expect(blob.path).to eq("files/ruby/popen.rb") }
-        it { expect(blob.commit_id).to eq(SeedRepo::Commit::ID) }
-        it { expect(blob.data[0..10]).to eq(SeedRepo::RubyBlob::CONTENT[0..10]) }
-        it { expect(blob.size).to eq(669) }
-        it { expect(blob.mode).to eq("100644") }
-      end
-
-      context 'second blob' do
-        let(:blob) { subject[1] }
-
-        it { expect(blob.id).to eq('409f37c4f05865e4fb208c771485f211a22c4c2d') }
-        it { expect(blob.data).to eq('') }
-        it 'does not mark the blob as binary' do
-          expect(blob).not_to be_binary
-        end
-      end
-
-      context 'limiting' do
-        subject { described_class.batch(repository, blob_references, blob_size_limit: blob_size_limit) }
-
-        context 'positive' do
-          let(:blob_size_limit) { 10 }
-
-          it { expect(subject.first.data.size).to eq(10) }
-        end
-
-        context 'zero' do
-          let(:blob_size_limit) { 0 }
-
-          it 'only loads the metadata' do
-            expect(subject.first.size).not_to be(0)
-            expect(subject.first.data).to eq('')
-          end
-        end
-
-        context 'negative' do
-          let(:blob_size_limit) { -1 }
-
-          it 'ignores MAX_DATA_DISPLAY_SIZE' do
-            stub_const('Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE', 100)
-
-            expect(subject.first.data.size).to eq(669)
-          end
-        end
-      end
-    end
-
-    context 'when Gitaly list_blobs_by_sha_path feature is enabled' do
-      it_behaves_like 'loading blobs in batch'
-    end
-
-    context 'when Gitaly list_blobs_by_sha_path feature is disabled', :disable_gitaly do
-      it_behaves_like 'loading blobs in batch'
-    end
+    let(:blob_references) do
+      [
+        [SeedRepo::Commit::ID, "files/ruby/popen.rb"],
+        [SeedRepo::Commit::ID, 'six']
+      ]
+    end
+
+    subject { described_class.batch(repository, blob_references) }
+
+    it { expect(subject.size).to eq(blob_references.size) }
+
+    context 'first blob' do
+      let(:blob) { subject[0] }
+
+      it { expect(blob.id).to eq(SeedRepo::RubyBlob::ID) }
+      it { expect(blob.name).to eq(SeedRepo::RubyBlob::NAME) }
+      it { expect(blob.path).to eq("files/ruby/popen.rb") }
+      it { expect(blob.commit_id).to eq(SeedRepo::Commit::ID) }
+      it { expect(blob.data[0..10]).to eq(SeedRepo::RubyBlob::CONTENT[0..10]) }
+      it { expect(blob.size).to eq(669) }
+      it { expect(blob.mode).to eq("100644") }
+    end
+
+    context 'second blob' do
+      let(:blob) { subject[1] }
+
+      it { expect(blob.id).to eq('409f37c4f05865e4fb208c771485f211a22c4c2d') }
+      it { expect(blob.data).to eq('') }
+      it 'does not mark the blob as binary' do
+        expect(blob).not_to be_binary
+      end
+    end
+
+    context 'limiting' do
+      subject { described_class.batch(repository, blob_references, blob_size_limit: blob_size_limit) }
+
+      context 'positive' do
+        let(:blob_size_limit) { 10 }
+
+        it { expect(subject.first.data.size).to eq(10) }
+      end
+
+      context 'zero' do
+        let(:blob_size_limit) { 0 }
+
+        it 'only loads the metadata' do
+          expect(subject.first.size).not_to be(0)
+          expect(subject.first.data).to eq('')
+        end
+      end
+
+      context 'negative' do
+        let(:blob_size_limit) { -1 }
+
+        it 'ignores MAX_DATA_DISPLAY_SIZE' do
+          stub_const('Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE', 100)
+
+          expect(subject.first.data.size).to eq(669)
+        end
+      end
+    end
   end

   describe '.batch_metadata' do
@@ -294,58 +284,48 @@ describe Gitlab::Git::Blob, seed_helper: true do
       )
     end

-    shared_examples 'fetching batch of LFS pointers' do
-      it 'returns a list of Gitlab::Git::Blob' do
-        blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id])
-
-        expect(blobs.count).to eq(1)
-        expect(blobs).to all( be_a(Gitlab::Git::Blob) )
-        expect(blobs).to be_an(Array)
-      end
-
-      it 'accepts blob IDs as a lazy enumerator' do
-        blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id].lazy)
-
-        expect(blobs.count).to eq(1)
-        expect(blobs).to all( be_a(Gitlab::Git::Blob) )
-      end
-
-      it 'handles empty list of IDs gracefully' do
-        blobs_1 = described_class.batch_lfs_pointers(repository, [].lazy)
-        blobs_2 = described_class.batch_lfs_pointers(repository, [])
-
-        expect(blobs_1).to eq([])
-        expect(blobs_2).to eq([])
-      end
-
-      it 'silently ignores tree objects' do
-        blobs = described_class.batch_lfs_pointers(repository, [tree_object.oid])
-
-        expect(blobs).to eq([])
-      end
-
-      it 'silently ignores non lfs objects' do
-        blobs = described_class.batch_lfs_pointers(repository, [non_lfs_blob.id])
-
-        expect(blobs).to eq([])
-      end
-
-      it 'avoids loading large blobs into memory' do
-        # This line could call `lookup` on `repository`, so do here before mocking.
-        non_lfs_blob_id = non_lfs_blob.id
-
-        expect(repository).not_to receive(:lookup)
-
-        described_class.batch_lfs_pointers(repository, [non_lfs_blob_id])
-      end
-    end
-
-    context 'when Gitaly batch_lfs_pointers is enabled' do
-      it_behaves_like 'fetching batch of LFS pointers'
-    end
-
-    context 'when Gitaly batch_lfs_pointers is disabled', :disable_gitaly do
-      it_behaves_like 'fetching batch of LFS pointers'
-    end
+    it 'returns a list of Gitlab::Git::Blob' do
+      blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id])
+
+      expect(blobs.count).to eq(1)
+      expect(blobs).to all( be_a(Gitlab::Git::Blob) )
+      expect(blobs).to be_an(Array)
+    end
+
+    it 'accepts blob IDs as a lazy enumerator' do
+      blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id].lazy)
+
+      expect(blobs.count).to eq(1)
+      expect(blobs).to all( be_a(Gitlab::Git::Blob) )
+    end
+
+    it 'handles empty list of IDs gracefully' do
+      blobs_1 = described_class.batch_lfs_pointers(repository, [].lazy)
+      blobs_2 = described_class.batch_lfs_pointers(repository, [])
+
+      expect(blobs_1).to eq([])
+      expect(blobs_2).to eq([])
+    end
+
+    it 'silently ignores tree objects' do
+      blobs = described_class.batch_lfs_pointers(repository, [tree_object.oid])
+
+      expect(blobs).to eq([])
+    end
+
+    it 'silently ignores non lfs objects' do
+      blobs = described_class.batch_lfs_pointers(repository, [non_lfs_blob.id])
+
+      expect(blobs).to eq([])
+    end
+
+    it 'avoids loading large blobs into memory' do
+      # This line could call `lookup` on `repository`, so do here before mocking.
+      non_lfs_blob_id = non_lfs_blob.id
+
+      expect(repository).not_to receive(:lookup)
+
+      described_class.batch_lfs_pointers(repository, [non_lfs_blob_id])
+    end
   end
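
The lfs_blob, non_lfs_blob and tree_object fixtures used above are defined earlier in the spec, outside this hunk. For readers unfamiliar with Git LFS, a pointer blob is just a small text file, which is what batch_lfs_pointers detects. A hypothetical fixture in the same spirit (the oid and size values are made up):

# Hypothetical fixture; the real lfs_blob let block sits above this hunk and is
# not shown in the diff. LFS pointer files follow this three-line format.
lfs_pointer_content = <<~POINTER
  version https://git-lfs.github.com/spec/v1
  oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897
  size 177735
POINTER

lfs_blob_id = repository.rugged.write(lfs_pointer_content, :blob)
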

spec/lib/gitlab/workhorse_spec.rb
@@ -68,34 +68,22 @@ describe Gitlab::Workhorse do
     let(:diff_refs) { double(base_sha: "base", head_sha: "head") }
     subject { described_class.send_git_patch(repository, diff_refs) }

-    context 'when Gitaly workhorse_send_git_patch feature is enabled' do
-      it 'sets the header correctly' do
-        key, command, params = decode_workhorse_header(subject)
-
-        expect(key).to eq("Gitlab-Workhorse-Send-Data")
-        expect(command).to eq("git-format-patch")
-        expect(params).to eq({
-          'GitalyServer' => {
-            address: Gitlab::GitalyClient.address(project.repository_storage),
-            token: Gitlab::GitalyClient.token(project.repository_storage)
-          },
-          'RawPatchRequest' => Gitaly::RawPatchRequest.new(
-            repository: repository.gitaly_repository,
-            left_commit_id: 'base',
-            right_commit_id: 'head'
-          ).to_json
-        }.deep_stringify_keys)
-      end
-    end
-
-    context 'when Gitaly workhorse_send_git_patch feature is disabled', :disable_gitaly do
-      it 'sets the header correctly' do
-        key, command, params = decode_workhorse_header(subject)
-
-        expect(key).to eq("Gitlab-Workhorse-Send-Data")
-        expect(command).to eq("git-format-patch")
-        expect(params).to eq("RepoPath" => repository.path_to_repo,
-                             "ShaFrom" => "base",
-                             "ShaTo" => "head")
-      end
+    it 'sets the header correctly' do
+      key, command, params = decode_workhorse_header(subject)
+
+      expect(key).to eq("Gitlab-Workhorse-Send-Data")
+      expect(command).to eq("git-format-patch")
+      expect(params).to eq({
+        'GitalyServer' => {
+          address: Gitlab::GitalyClient.address(project.repository_storage),
+          token: Gitlab::GitalyClient.token(project.repository_storage)
+        },
+        'RawPatchRequest' => Gitaly::RawPatchRequest.new(
+          repository: repository.gitaly_repository,
+          left_commit_id: 'base',
+          right_commit_id: 'head'
+        ).to_json
+      }.deep_stringify_keys)
     end
   end
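
Both the old and the new expectations go through decode_workhorse_header, a spec support helper that is not part of this diff. A rough sketch of what such a helper does, assuming the header value has the "<command>:<URL-safe Base64 JSON>" shape produced by workhorse.rb above:

require 'base64'
require 'json'

# Sketch of the spec support helper; not part of this commit.
def decode_workhorse_header(array)
  key, value = array
  command, encoded_params = value.split(':', 2)
  params = JSON.parse(Base64.urlsafe_decode64(encoded_params))

  [key, command, params]
end
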