Commit 6e70870a
authored Jan 15, 2016 by Kamil Trzcinski
committed by James Edwards-Jones, Jan 31, 2017
Move most of PagesWorker logic to UpdatePagesService
parent 1d159ffb
Showing 6 changed files with 152 additions and 144 deletions (+152, -144)
app/models/ci/build.rb                               +1    -1
app/services/pages_service.rb                        +1    -1
app/services/projects/update_pages_service.rb        +132  -0
app/workers/pages_worker.rb                          +2    -131
spec/services/pages_service_spec.rb                  +2    -2
spec/services/projects/update_pages_worker_spec.rb   +14   -9
app/models/ci/build.rb
@@ -457,7 +457,7 @@ module Ci
       build_data = Gitlab::DataBuilder::Build.build(self)
       project.execute_hooks(build_data.dup, :build_hooks)
       project.execute_services(build_data.dup, :build_hooks)
-      UpdatePagesService.new(build_data).execute
+      PagesService.new(build_data).execute
       project.running_or_pending_build_count(force: true)
     end
app/services/update_pages_service.rb → app/services/pages_service.rb
-class UpdatePagesService
+class PagesService
   attr_reader :data

   def initialize(data)
app/services/projects/update_pages_service.rb (new file, 0 → 100644)

module Projects
  class UpdatePagesService < BaseService
    BLOCK_SIZE = 32.kilobytes
    MAX_SIZE = 1.terabyte

    attr_reader :build

    def initialize(project, build)
      @project, @build = project, build
    end

    def execute
      # Create status notifying the deployment of pages
      @status = create_status
      @status.run!

      raise 'missing pages artifacts' unless build.artifacts_file?
      raise 'pages are outdated' unless latest?

      # Create temporary directory in which we will extract the artifacts
      FileUtils.mkdir_p(tmp_path)
      Dir.mktmpdir(nil, tmp_path) do |archive_path|
        extract_archive!(archive_path)

        # Check if we did extract public directory
        archive_public_path = File.join(archive_path, 'public')
        raise 'pages miss the public folder' unless Dir.exists?(archive_public_path)
        raise 'pages are outdated' unless latest?

        deploy_page!(archive_public_path)
        success
      end
    rescue => e
      error(e.message)
    end

    private

    def success
      @status.success
      super
    end

    def error(message, http_status = nil)
      @status.allow_failure = !latest?
      @status.description = message
      @status.drop
      super
    end

    def create_status
      GenericCommitStatus.new(
        project: project,
        commit: build.commit,
        user: build.user,
        ref: build.ref,
        stage: 'deploy',
        name: 'pages:deploy')
    end

    def extract_archive!(temp_path)
      results = Open3.pipeline(%W(gunzip -c #{artifacts}),
                               %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
                               %W(tar -x -C #{temp_path} public/),
                               err: '/dev/null')
      raise 'pages failed to extract' unless results.compact.all?(&:success?)
    end

    def deploy_page!(archive_public_path)
      # Do atomic move of pages
      # Move and removal may not be atomic, but they are significantly faster then extracting and removal
      # 1. We move deployed public to previous public path (file removal is slow)
      # 2. We move temporary public to be deployed public
      # 3. We remove previous public path
      FileUtils.mkdir_p(pages_path)
      begin
        FileUtils.move(public_path, previous_public_path)
      rescue
      end
      FileUtils.move(archive_public_path, public_path)
    ensure
      FileUtils.rm_r(previous_public_path, force: true)
    end

    def latest?
      # check if sha for the ref is still the most recent one
      # this helps in case when multiple deployments happens
      sha == latest_sha
    end

    def blocks
      # Calculate dd parameters: we limit the size of pages
      max_size = current_application_settings.max_pages_size.megabytes
      max_size ||= MAX_SIZE
      blocks = 1 + max_size / BLOCK_SIZE
      blocks
    end

    def tmp_path
      @tmp_path ||= File.join(Settings.pages.path, 'tmp')
    end

    def pages_path
      @pages_path ||= project.pages_path
    end

    def public_path
      @public_path ||= File.join(pages_path, 'public')
    end

    def previous_public_path
      @previous_public_path ||= File.join(pages_path, "public.#{SecureRandom.hex}")
    end

    def ref
      build.ref
    end

    def artifacts
      build.artifacts_file.path
    end

    def latest_sha
      project.commit(build.ref).try(:sha).to_s
    end

    def sha
      build.sha
    end
  end
end
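The extract_archive! pipeline (gunzip | dd | tar) relies on blocks to keep dd from writing an unbounded amount of decompressed data. A minimal standalone sketch of that calculation, with the ActiveSupport numeric helpers replaced by plain integers and a purely illustrative 100 MB max_pages_size:

# Standalone sketch of the size cap used by #blocks / #extract_archive! above.
# 32.kilobytes and .megabytes are replaced with plain integers; the 100 MB
# figure is only an example setting, not a value from this commit.
BLOCK_SIZE = 32 * 1024                  # 32.kilobytes
max_size   = 100 * 1024 * 1024          # hypothetical max_pages_size of 100 MB

blocks = 1 + max_size / BLOCK_SIZE      # integer division, plus one block of slack
puts blocks                             # => 3201
puts blocks * BLOCK_SIZE                # => 104890368 bytes (~100 MB + 32 KB)

# dd copies at most blocks * BLOCK_SIZE bytes out of the gunzip stream, so an
# archive that decompresses past the limit is truncated, tar fails, and the
# service raises 'pages failed to extract'.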
app/workers/pages_worker.rb
 class PagesWorker
   include Sidekiq::Worker
-  include Gitlab::CurrentSettings
-
-  BLOCK_SIZE = 32.kilobytes
-  MAX_SIZE = 1.terabyte

   sidekiq_options queue: :pages, retry: false

@@ -12,137 +8,12 @@ class PagesWorker
   end

   def deploy(build_id)
-    @build_id = build_id
-    return unless valid?
-
-    # Create status notifying the deployment of pages
-    @status = create_status
-    @status.run!
-
-    raise 'pages are outdated' unless latest?
-
-    # Create temporary directory in which we will extract the artifacts
-    FileUtils.mkdir_p(tmp_path)
-    Dir.mktmpdir(nil, tmp_path) do |archive_path|
-      extract_archive!(archive_path)
-
-      # Check if we did extract public directory
-      archive_public_path = File.join(archive_path, 'public')
-      raise 'pages miss the public folder' unless Dir.exists?(archive_public_path)
-      raise 'pages are outdated' unless latest?
-      deploy_page!(archive_public_path)
-      @status.success
-    end
-  rescue => e
-    fail(e.message, !latest?)
-    return false
+    build = Ci::Build.find_by(id: build_id)
+    Projects::UpdatePagesService.new(build.project, build).execute
   end

   def remove(namespace_path, project_path)
     full_path = File.join(Settings.pages.path, namespace_path, project_path)
     FileUtils.rm_r(full_path, force: true)
   end
-
-  private
-
-  def create_status
-    GenericCommitStatus.new(
-      project: project,
-      commit: build.commit,
-      user: build.user,
-      ref: build.ref,
-      stage: 'deploy',
-      name: 'pages:deploy')
-  end
-
-  def extract_archive!(temp_path)
-    results = Open3.pipeline(%W(gunzip -c #{artifacts}),
-                             %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
-                             %W(tar -x -C #{temp_path} public/),
-                             err: '/dev/null')
-    raise 'pages failed to extract' unless results.compact.all?(&:success?)
-  end
-
-  def deploy_page!(archive_public_path)
-    # Do atomic move of pages
-    # Move and removal may not be atomic, but they are significantly faster then extracting and removal
-    # 1. We move deployed public to previous public path (file removal is slow)
-    # 2. We move temporary public to be deployed public
-    # 3. We remove previous public path
-    FileUtils.mkdir_p(pages_path)
-    begin
-      FileUtils.move(public_path, previous_public_path)
-    rescue
-    end
-    FileUtils.move(archive_public_path, public_path)
-  ensure
-    FileUtils.rm_r(previous_public_path, force: true)
-  end
-
-  def fail(message, allow_failure = true)
-    @status.allow_failure = allow_failure
-    @status.description = message
-    @status.drop
-  end
-
-  def valid?
-    build && build.artifacts_file?
-  end
-
-  def latest?
-    # check if sha for the ref is still the most recent one
-    # this helps in case when multiple deployments happens
-    sha == latest_sha
-  end
-
-  def blocks
-    # Calculate dd parameters: we limit the size of pages
-    max_size = current_application_settings.max_pages_size.megabytes
-    max_size ||= MAX_SIZE
-    blocks = 1 + max_size / BLOCK_SIZE
-    blocks
-  end
-
-  def build
-    @build ||= Ci::Build.find_by(id: @build_id)
-  end
-
-  def project
-    @project ||= build.project
-  end
-
-  def tmp_path
-    @tmp_path ||= File.join(Settings.pages.path, 'tmp')
-  end
-
-  def pages_path
-    @pages_path ||= project.pages_path
-  end
-
-  def public_path
-    @public_path ||= File.join(pages_path, 'public')
-  end
-
-  def previous_public_path
-    @previous_public_path ||= File.join(pages_path, "public.#{SecureRandom.hex}")
-  end
-
-  def ref
-    build.ref
-  end
-
-  def artifacts
-    build.artifacts_file.path
-  end
-
-  def latest_sha
-    project.commit(build.ref).try(:sha).to_s
-  end
-
-  def sha
-    build.sha
-  end
 end
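The deploy_page! strategy that leaves the worker here (and reappears in the service above) trades a strict atomicity guarantee for speed: renames are fast, so the live site is only in flux for the duration of two moves, and the slow recursive delete happens last. A toy, self-contained sketch of the same three steps, using hypothetical directory names under a temp dir rather than the real pages paths:

# Toy illustration of the move-aside / move-in / delete-last pattern.
require 'fileutils'
require 'securerandom'
require 'tmpdir'

def deploy_toy(pages_path, new_public)
  public_path          = File.join(pages_path, 'public')
  previous_public_path = File.join(pages_path, "public.#{SecureRandom.hex}")

  begin
    FileUtils.move(public_path, previous_public_path)  # 1. rename live site aside (fast)
  rescue
    # first deployment: nothing to move aside
  end
  FileUtils.move(new_public, public_path)              # 2. new content goes live (fast)
ensure
  FileUtils.rm_r(previous_public_path, force: true)    # 3. delete the old copy last (slow)
end

Dir.mktmpdir do |root|
  FileUtils.mkdir_p(File.join(root, 'public'))             # pretend a site is already deployed
  FileUtils.mkdir_p(File.join(root, 'extracted_public'))   # pretend tar just extracted this
  deploy_toy(root, File.join(root, 'extracted_public'))
  puts Dir.exist?(File.join(root, 'public'))               # => true
end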
spec/services/update_pages_service_spec.rb → spec/services/pages_service_spec.rb
 require 'spec_helper'

-describe UpdatePagesService, services: true do
+describe PagesService, services: true do
   let(:build) { create(:ci_build) }
   let(:data) { Gitlab::BuildDataBuilder.build(build) }
-  let(:service) { UpdatePagesService.new(data) }
+  let(:service) { PagesService.new(data) }

   before do
     allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
spec/workers/pages_worker_spec.rb → spec/services/projects/update_pages_worker_spec.rb
 require "spec_helper"

-describe PagesWorker do
+describe Projects::UpdatePagesService do
   let(:project) { create :project }
   let(:commit) { create :ci_commit, project: project, sha: project.commit('HEAD').sha }
   let(:build) { create :ci_build, commit: commit, ref: 'HEAD' }
-  let(:worker) { PagesWorker.new }
   let(:file) { fixture_file_upload(Rails.root + 'spec/fixtures/pages.tar.gz', 'application/octet-stream') }
   let(:empty_file) { fixture_file_upload(Rails.root + 'spec/fixtures/pages_empty.tar.gz', 'application/octet-stream') }
   let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'application/octet-stream') }

+  subject { described_class.new(project, build) }
+
   before do
     project.remove_pages

@@ -18,19 +19,19 @@ describe PagesWorker do
     it 'succeeds' do
       expect(project.pages_url).to be_nil
-      expect(worker.deploy(build.id)).to be_truthy
+      expect(execute).to eq(:success)
       expect(project.pages_url).to_not be_nil
     end

     it 'limits pages size' do
       stub_application_setting(max_pages_size: 1)
-      expect(worker.deploy(build.id)).to_not be_truthy
+      expect(execute).to_not eq(:success)
     end

     it 'removes pages after destroy' do
       expect(PagesWorker).to receive(:perform_in)
       expect(project.pages_url).to be_nil
-      expect(worker.deploy(build.id)).to be_truthy
+      expect(execute).to eq(:success)
       expect(project.pages_url).to_not be_nil
       project.destroy
       expect(Dir.exist?(project.public_pages_path)).to be_falsey

@@ -44,22 +45,26 @@ describe PagesWorker do
   end

   it 'fails if no artifacts' do
-    expect(worker.deploy(build.id)).to_not be_truthy
+    expect(execute).to_not eq(:success)
   end

   it 'fails for empty file fails' do
     build.update_attributes(artifacts_file: empty_file)
-    expect(worker.deploy(build.id)).to_not be_truthy
+    expect(execute).to_not eq(:success)
   end

   it 'fails for invalid archive' do
     build.update_attributes(artifacts_file: invalid_file)
-    expect(worker.deploy(build.id)).to_not be_truthy
+    expect(execute).to_not eq(:success)
   end

   it 'fails if sha on branch is not latest' do
     commit.update_attributes(sha: 'old_sha')
     build.update_attributes(artifacts_file: file)
-    expect(worker.deploy(build.id)).to_not be_truthy
+    expect(execute).to_not eq(:success)
   end
+
+  def execute
+    subject.execute[:status]
+  end
 end
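The spec's new execute helper digs [:status] out of whatever the service returns. That only works because the service's success and error paths end in the BaseService-style helpers, which (assuming the usual helpers of that era; the diff itself only shows the calls to super) reduce to returning plain hashes, roughly:

# Hedged sketch, not the actual BaseService source: the shape assumed by
# subject.execute[:status] in the spec above.
def success
  { status: :success }
end

def error(message, http_status = nil)
  { status: :error, message: message, http_status: http_status }
end

puts success[:status]                       # => success
puts error('pages are outdated')[:status]   # => error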