Commit 120f9aba authored by Kamil Trzcinski's avatar Kamil Trzcinski Committed by James Edwards-Jones

Add GitLab Pages

- Pages are created when build artifacts for the `pages` job are uploaded
- Pages serve the content under: http://group.pages.domain.com/project
- Pages can also serve the group page from a special project named after the host: group.pages.domain.com
- Users can provide their own 403 and 404 error pages by creating 403.html and 404.html in the group page project
- Pages can be explicitly removed from a project by clicking Remove Pages in Project Settings
- The size of pages is limited by the application setting "max pages size", which caps the size of the unpacked archive (default: 100 MB)
- The public/ directory is extracted from the artifacts and its content is served as static pages
- The asynchronous Pages worker uses `dd` to limit the size of the unpacked tar (see the sketch after this list)
- Pages need to be explicitly enabled and the domain needs to be specified in gitlab.yml
- Pages are part of backups
- Pages notify the deployment status using the Commit Status API
- Pages use a new Sidekiq queue: pages
- Pages use a separate Nginx config which needs to be explicitly added
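
For illustration, a minimal sketch of the `dd`-based size limit described above (paths and sizes are assumptions; the real implementation is the PagesWorker further down in this commit):

    require 'open3'

    BLOCK_SIZE = 32 * 1024
    max_size   = 100 * 1024 * 1024            # assumed "max pages size" in bytes
    blocks     = 1 + max_size / BLOCK_SIZE    # dd stops copying after this many blocks

    # gunzip | dd | tar: dd truncates the stream so the unpacked size cannot exceed max_size
    results = Open3.pipeline(
      %W(gunzip -c artifacts.tar.gz),
      %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
      %W(tar -x -C /tmp/pages-extract public/)
    )
    abort 'extraction failed or archive exceeded the size limit' unless results.compact.all?(&:success?)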
parent 659cceb0
......@@ -109,6 +109,7 @@ class Admin::ApplicationSettingsController < Admin::ApplicationController
:plantuml_url,
:max_artifacts_size,
:max_attachment_size,
:max_pages_size,
:metrics_enabled,
:metrics_host,
:metrics_method_call_threshold,
......
......@@ -151,6 +151,16 @@ class ProjectsController < Projects::ApplicationController
end
end
def remove_pages
return access_denied! unless can?(current_user, :remove_pages, @project)
@project.remove_pages
respond_to do |format|
format.html { redirect_to project_path(@project) }
end
end
def housekeeping
::Projects::HousekeepingService.new(@project).execute
......
......@@ -256,7 +256,7 @@ module Ci
end
def project_id
pipeline.project_id
gl_project_id
end
def project_name
......@@ -457,6 +457,7 @@ module Ci
build_data = Gitlab::DataBuilder::Build.build(self)
project.execute_hooks(build_data.dup, :build_hooks)
project.execute_services(build_data.dup, :build_hooks)
UpdatePagesService.new(build_data).execute
project.running_or_pending_build_count(force: true)
end
......
......@@ -53,6 +53,8 @@ class Project < ActiveRecord::Base
update_column(:last_activity_at, self.created_at)
end
after_destroy :remove_pages
# update visibility_level of forks
after_update :update_forks_visibility_level
def update_forks_visibility_level
......@@ -1160,6 +1162,29 @@ class Project < ActiveRecord::Base
ensure_runners_token!
end
def pages_url
if Dir.exist?(public_pages_path)
host = "#{namespace.path}.#{Settings.pages.domain}"
# If the project path is the same as host, leave the short version
return "http://#{host}" if host == path
"http://#{host}/#{path}"
end
end
def pages_path
File.join(Settings.pages.path, path_with_namespace)
end
def public_pages_path
File.join(pages_path, 'public')
end
def remove_pages
FileUtils.rm_r(pages_path, force: true)
end
def wiki
@wiki ||= ProjectWiki.new(self, self.owner)
end
......
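
A quick usage sketch of the new Project methods above (group, project, and domain values are assumptions):

    # with Settings.pages.domain == "example.com", group "acme", project "blog"
    project.pages_path          # => ".../shared/pages/acme/blog"
    project.public_pages_path   # => ".../shared/pages/acme/blog/public"
    project.pages_url           # => "http://acme.example.com/blog" once public/ exists
    # a project named "acme.example.com" inside group "acme" is treated as the group page:
    # project.pages_url         # => "http://acme.example.com"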
......@@ -136,6 +136,7 @@ class ProjectPolicy < BasePolicy
can! :remove_fork_project
can! :destroy_merge_request
can! :destroy_issue
can! :remove_pages
end
def team_member_owner_access!
......
class UpdatePagesService
attr_reader :data
def initialize(data)
@data = data
end
def execute
return unless Settings.pages.enabled
return unless data[:build_name] == 'pages'
return unless data[:build_status] == 'success'
PagesWorker.perform_async(data[:build_id])
end
end
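
A hedged usage sketch for the service above; the hash keys are the ones the service reads, the values are made up:

    data = { build_id: 42, build_name: 'pages', build_status: 'success' }
    UpdatePagesService.new(data).execute   # enqueues PagesWorker.perform_async(42) when Pages are enabled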
......@@ -186,6 +186,14 @@
= f.text_area :help_page_text, class: 'form-control', rows: 4
.help-block Markdown enabled
%fieldset
%legend Pages
.form-group
= f.label :max_pages_size, 'Maximum size of pages (MB)', class: 'control-label col-sm-2'
.col-sm-10
= f.number_field :max_pages_size, class: 'form-control'
.help-block Zero for unlimited
%fieldset
%legend Continuous Integration
.form-group
......
......@@ -133,6 +133,41 @@
%hr
= link_to 'Remove avatar', namespace_project_avatar_path(@project.namespace, @project), data: { confirm: "Project avatar will be removed. Are you sure?"}, method: :delete, class: "btn btn-remove btn-sm remove-avatar"
= f.submit 'Save changes', class: "btn btn-save"
- if Settings.pages.enabled
.pages-settings
.panel.panel-default
.panel-heading Pages
.errors-holder
.panel-body
- if @project.pages_url
%strong
Congratulations. Your pages are served at:
%p= link_to @project.pages_url, @project.pages_url
- else
%p
To publish pages, create a .gitlab-ci.yml with a
%strong pages job
and upload the public/ folder to GitLab.
%p
Use existing tools:
%ul
%li
%pre
:plain
pages:
image: jekyll
script: jekyll build
artifacts:
paths:
- public
- if @project.pages_url && can?(current_user, :remove_pages, @project)
.form-actions
= link_to 'Remove pages', remove_pages_namespace_project_path(@project.namespace, @project),
data: { confirm: "Are you sure that you want to remove pages for this project?" },
method: :post, class: "btn btn-warning"
.row.prepend-top-default
%hr
.row.prepend-top-default
......
class PagesWorker
include Sidekiq::Worker
include Gitlab::CurrentSettings
BLOCK_SIZE = 32.kilobytes
MAX_SIZE = 1.terabyte
sidekiq_options queue: :pages
def perform(build_id)
@build_id = build_id
return unless valid?
# Create status notifying the deployment of pages
@status = GenericCommitStatus.new(
project: project,
commit: build.commit,
user: build.user,
ref: build.ref,
stage: 'deploy',
name: 'pages:deploy'
)
@status.run!
FileUtils.mkdir_p(tmp_path)
# Calculate dd parameters: we limit the size of pages
max_size = current_application_settings.max_pages_size.megabytes
# "Zero for unlimited" in the admin settings; 0 is truthy in Ruby, so ||= would never catch it
max_size = MAX_SIZE if max_size.zero?
blocks = 1 + max_size / BLOCK_SIZE
# Create temporary directory in which we will extract the artifacts
Dir.mktmpdir(nil, tmp_path) do |temp_path|
# We manually extract the archive and limit the archive size with dd
results = Open3.pipeline(%W(gunzip -c #{artifacts}),
%W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
%W(tar -x -C #{temp_path} public/))
return unless results.compact.all?(&:success?)
# Check if we did extract public directory
temp_public_path = File.join(temp_path, 'public')
return unless Dir.exists?(temp_public_path)
FileUtils.mkdir_p(pages_path)
# Take a lock for the duration of the deployment to prevent two processes from deploying concurrently
File.open(lock_path, File::RDWR|File::CREAT, 0644) do |f|
f.flock(File::LOCK_EX)
return unless valid?
# Deploy the pages with a near-atomic move
# Moves and removals may not be atomic, but they are significantly faster than extraction and removal
# 1. Move the currently deployed public to the previous public path (file removal is slow)
# 2. Move the temporary public into place as the deployed public
# 3. Remove the previous public path
if File.exists?(public_path)
FileUtils.move(public_path, previous_public_path)
end
FileUtils.move(temp_public_path, public_path)
end
if File.exists?(previous_public_path)
FileUtils.rm_r(previous_public_path, force: true)
end
@status.success
end
ensure
@status.drop if @status && @status.active?
end
private
def valid?
# check if the sha for the ref is still the most recent one
# this helps in cases when multiple deployments happen
build && build.artifacts_file? && sha == latest_sha
end
def build
@build ||= Ci::Build.find_by(id: @build_id)
end
def project
@project ||= build.project
end
def tmp_path
@tmp_path ||= File.join(Settings.pages.path, 'tmp')
end
def pages_path
@pages_path ||= project.pages_path
end
def public_path
@public_path ||= File.join(pages_path, 'public')
end
def previous_public_path
@previous_public_path ||= File.join(pages_path, "public.#{SecureRandom.hex}")
end
def lock_path
@lock_path ||= File.join(pages_path, 'deploy.lock')
end
def ref
build.ref
end
def artifacts
build.artifacts_file.path
end
def latest_sha
project.commit(build.ref).try(:sha).to_s
end
def sha
build.sha
end
end
......@@ -153,6 +153,17 @@ production: &base
# The location where LFS objects are stored (default: shared/lfs-objects).
# storage_path: shared/lfs-objects
## GitLab Pages
pages:
enabled: false
# The location where pages are stored (default: shared/pages).
# path: shared/pages
# The domain under which the pages are served:
# http://group.example.com/project
# or, when the project path matches the host, the group page: group.example.com
domain: example.com
## Mattermost
## For enabling Add to Mattermost button
mattermost:
......
......@@ -254,6 +254,12 @@ Settings.registry['issuer'] ||= nil
Settings.registry['host_port'] ||= [Settings.registry['host'], Settings.registry['port']].compact.join(':')
Settings.registry['path'] = File.expand_path(Settings.registry['path'] || File.join(Settings.shared['path'], 'registry'), Rails.root)
# Pages
Settings['pages'] ||= Settingslogic.new({})
Settings.pages['enabled'] = false if Settings.pages['enabled'].nil?
Settings.pages['path'] = File.expand_path('shared/pages/', Rails.root)
Settings.pages['domain'] ||= "example.com"
#
# Git LFS
#
......
......@@ -329,6 +329,7 @@ constraints(ProjectUrlConstrainer.new) do
post :archive
post :unarchive
post :housekeeping
post :remove_pages
post :toggle_star
post :preview_markdown
post :export
......
class AddPagesSizeToApplicationSettings < ActiveRecord::Migration
def up
add_column :application_settings, :max_pages_size, :integer, default: 100, null: false
end
end
......@@ -61,6 +61,7 @@ ActiveRecord::Schema.define(version: 20170130204620) do
t.boolean "shared_runners_enabled", default: true, null: false
t.integer "max_artifacts_size", default: 100, null: false
t.string "runners_registration_token"
t.integer "max_pages_size", default: 100, null: false
t.boolean "require_two_factor_authentication", default: false
t.integer "two_factor_grace_period", default: 48
t.boolean "metrics_enabled", default: false
......
......@@ -12,6 +12,7 @@
- [GitLab as OAuth2 authentication service provider](integration/oauth_provider.md). It allows you to login to other applications from GitLab.
- [Container Registry](user/project/container_registry.md) Learn how to use GitLab Container Registry.
- [GitLab basics](gitlab-basics/README.md) Find step by step how to start working on your commandline and on GitLab.
- [GitLab Pages](pages/README.md) Using GitLab Pages.
- [Importing to GitLab](workflow/importing/README.md) Import your projects from GitHub, Bitbucket, GitLab.com, FogBugz and SVN into GitLab.
- [Importing and exporting projects between instances](user/project/settings/import_export.md).
- [Markdown](user/markdown.md) GitLab's advanced formatting system.
......
......@@ -313,6 +313,9 @@ sudo usermod -aG redis git
# Change the permissions of the directory where CI artifacts are stored
sudo chmod -R u+rwX shared/artifacts/
# Change the permissions of the directory where GitLab Pages are stored
sudo chmod -R ug+rwX shared/pages/
# Copy the example Unicorn config
sudo -u git -H cp config/unicorn.rb.example config/unicorn.rb
......@@ -484,6 +487,16 @@ Make sure to edit the config file to match your setup. Also, ensure that you mat
# or else sudo rm -f /etc/nginx/sites-enabled/default
sudo editor /etc/nginx/sites-available/gitlab
Copy the GitLab pages site config:
sudo cp lib/support/nginx/gitlab-pages /etc/nginx/sites-available/gitlab-pages
sudo ln -s /etc/nginx/sites-available/gitlab-pages /etc/nginx/sites-enabled/gitlab-pages
# Change YOUR_GITLAB_PAGES\.DOMAIN to the fully-qualified
# domain name under which the pages will be served,
# replacing each . (dot) with \. (backslash + dot)
sudo editor /etc/nginx/sites-available/gitlab-pages
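
As a small sketch of that escaping step (the domain value here is an assumption), Ruby's Regexp.escape produces the backslash-dot form the server_name regex expects:

    puts Regexp.escape('pages.example.io')   # => pages\.example\.io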
**Note:** If you want to use HTTPS, replace the `gitlab` Nginx config with `gitlab-ssl`. See [Using HTTPS](#using-https) for HTTPS configuration details.
### Test Configuration
......
# GitLab Pages
To start using GitLab Pages, add a special `pages` job to your project's `.gitlab-ci.yml`, for example:

    pages:
      image: jekyll
      script: jekyll build
      artifacts:
        paths:
        - public
TODO
require 'backup/files'
module Backup
class Pages < Files
def initialize
super('pages', Gitlab.config.pages.path)
end
def create_files_dir
Dir.mkdir(app_files_dir, 0700)
end
end
end
## Pages serving host
server {
listen 0.0.0.0:80;
listen [::]:80 ipv6only=on;
## Replace this with something like pages.gitlab.com
server_name ~^(?<group>.*)\.YOUR_GITLAB_PAGES\.DOMAIN$;
root /home/git/gitlab/shared/pages/${group};
## Individual nginx logs for GitLab pages
access_log /var/log/nginx/gitlab_pages_access.log;
error_log /var/log/nginx/gitlab_pages_error.log;
# 1. Try to get /project/ to => shared/pages/${group}/public/ or index.html
# 2. Try to get / to => shared/pages/${group}/${host}/public/ or index.html
location ~ ^/([^/]*)(/.*)?$ {
try_files "/$1/public$2"
"/$1/public$2/index.html"
"/${host}/public/${uri}"
"/${host}/public/${uri}/index.html"
=404;
}
# Define custom error pages
error_page 403 /403.html;
error_page 404 /404.html;
}
......@@ -13,6 +13,7 @@ namespace :gitlab do
Rake::Task["gitlab:backup:uploads:create"].invoke
Rake::Task["gitlab:backup:builds:create"].invoke
Rake::Task["gitlab:backup:artifacts:create"].invoke
Rake::Task["gitlab:backup:pages:create"].invoke
Rake::Task["gitlab:backup:lfs:create"].invoke
Rake::Task["gitlab:backup:registry:create"].invoke
......@@ -56,6 +57,7 @@ namespace :gitlab do
Rake::Task['gitlab:backup:uploads:restore'].invoke unless backup.skipped?('uploads')
Rake::Task['gitlab:backup:builds:restore'].invoke unless backup.skipped?('builds')
Rake::Task['gitlab:backup:artifacts:restore'].invoke unless backup.skipped?('artifacts')
Rake::Task["gitlab:backup:pages:restore"].invoke unless backup.skipped?("pages")
Rake::Task['gitlab:backup:lfs:restore'].invoke unless backup.skipped?('lfs')
Rake::Task['gitlab:backup:registry:restore'].invoke unless backup.skipped?('registry')
Rake::Task['gitlab:shell:setup'].invoke
......@@ -159,6 +161,25 @@ namespace :gitlab do
end
end
namespace :pages do
task create: :environment do
$progress.puts "Dumping pages ... ".blue
if ENV["SKIP"] && ENV["SKIP"].include?("pages")
$progress.puts "[SKIPPED]".cyan
else
Backup::Pages.new.dump
$progress.puts "done".green
end
end
task restore: :environment do
$progress.puts "Restoring pages ... ".blue
Backup::Pages.new.restore
$progress.puts "done".green
end
end
namespace :lfs do
task create: :environment do
$progress.puts "Dumping lfs objects ... ".color(:blue)
......
require 'spec_helper'
describe UpdatePagesService, services: true do
let(:build) { create(:ci_build) }
let(:data) { Gitlab::BuildDataBuilder.build(build) }
let(:service) { UpdatePagesService.new(data) }
context 'execute asynchronously for pages job' do
before { build.name = 'pages' }
context 'on success' do
before { build.success }
it 'should execute worker' do
expect(PagesWorker).to receive(:perform_async)
service.execute
end
end
%w(pending running failed canceled).each do |status|
context "on #{status}" do
before { build.status = status }
it 'should not execute worker' do
expect(PagesWorker).to_not receive(:perform_async)
service.execute
end
end
end
end
context 'for other jobs' do
before do
build.name = 'other job'
build.success
end
it 'should not execute worker' do
expect(PagesWorker).to_not receive(:perform_async)
service.execute
end
end
end
......@@ -28,7 +28,7 @@ describe 'gitlab:app namespace rake task' do
end
def reenable_backup_sub_tasks
%w{db repo uploads builds artifacts lfs registry}.each do |subtask|
%w{db repo uploads builds artifacts pages lfs registry}.each do |subtask|
Rake::Task["gitlab:backup:#{subtask}:create"].reenable
end
end
......@@ -71,6 +71,7 @@ describe 'gitlab:app namespace rake task' do
expect(Rake::Task['gitlab:backup:builds:restore']).to receive(:invoke)
expect(Rake::Task['gitlab:backup:uploads:restore']).to receive(:invoke)
expect(Rake::Task['gitlab:backup:artifacts:restore']).to receive(:invoke)
expect(Rake::Task['gitlab:backup:pages:restore']).to receive(:invoke)
expect(Rake::Task['gitlab:backup:lfs:restore']).to receive(:invoke)
expect(Rake::Task['gitlab:backup:registry:restore']).to receive(:invoke)
expect(Rake::Task['gitlab:shell:setup']).to receive(:invoke)
......@@ -202,7 +203,7 @@ describe 'gitlab:app namespace rake task' do
it 'sets correct permissions on the tar contents' do
tar_contents, exit_status = Gitlab::Popen.popen(
%W{tar -tvf #{@backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz lfs.tar.gz registry.tar.gz}
%W{tar -tvf #{@backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz registry.tar.gz}
)
expect(exit_status).to eq(0)
expect(tar_contents).to match('db/')
......@@ -210,14 +211,15 @@ describe 'gitlab:app namespace rake task' do
expect(tar_contents).to match('repositories/')
expect(tar_contents).to match('builds.tar.gz')
expect(tar_contents).to match('artifacts.tar.gz')
expect(tar_contents).to match('pages.tar.gz')
expect(tar_contents).to match('lfs.tar.gz')
expect(tar_contents).to match('registry.tar.gz')
expect(tar_contents).not_to match(/^.{4,9}[rwx].* (database.sql.gz|uploads.tar.gz|repositories|builds.tar.gz|artifacts.tar.gz|registry.tar.gz)\/$/)
expect(tar_contents).not_to match(/^.{4,9}[rwx].* (database.sql.gz|uploads.tar.gz|repositories|builds.tar.gz|pages.tar.gz|artifacts.tar.gz|registry.tar.gz)\/$/)
end
it 'deletes temp directories' do
temp_dirs = Dir.glob(
File.join(Gitlab.config.backup.path, '{db,repositories,uploads,builds,artifacts,lfs,registry}')
File.join(Gitlab.config.backup.path, '{db,repositories,uploads,builds,artifacts,pages,lfs,registry}')
)
expect(temp_dirs).to be_empty
......@@ -304,7 +306,7 @@ describe 'gitlab:app namespace rake task' do
it "does not contain skipped item" do
tar_contents, _exit_status = Gitlab::Popen.popen(
%W{tar -tvf #{@backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz lfs.tar.gz registry.tar.gz}
%W{tar -tvf #{@backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz registry.tar.gz}
)
expect(tar_contents).to match('db/')
......@@ -312,6 +314,7 @@ describe 'gitlab:app namespace rake task' do
expect(tar_contents).to match('builds.tar.gz')
expect(tar_contents).to match('artifacts.tar.gz')
expect(tar_contents).to match('lfs.tar.gz')
expect(tar_contents).to match('pages.tar.gz')
expect(tar_contents).to match('registry.tar.gz')
expect(tar_contents).not_to match('repositories/')
end
......@@ -327,6 +330,7 @@ describe 'gitlab:app namespace rake task' do
expect(Rake::Task['gitlab:backup:uploads:restore']).not_to receive :invoke
expect(Rake::Task['gitlab:backup:builds:restore']).to receive :invoke
expect(Rake::Task['gitlab:backup:artifacts:restore']).to receive :invoke
expect(Rake::Task['gitlab:backup:pages:restore']).to receive :invoke
expect(Rake::Task['gitlab:backup:lfs:restore']).to receive :invoke
expect(Rake::Task['gitlab:backup:registry:restore']).to receive :invoke
expect(Rake::Task['gitlab:shell:setup']).to receive :invoke
......
require "spec_helper"
describe PagesWorker do
let(:project) { create :project }
let(:commit) { create :ci_commit, project: project, sha: project.commit('HEAD').sha }
let(:build) { create :ci_build, commit: commit, ref: 'HEAD' }
let(:worker) { PagesWorker.new }
let(:file) { fixture_file_upload(Rails.root + 'spec/fixtures/pages.tar.gz', 'application/octet-stream') }
let(:empty_file) { fixture_file_upload(Rails.root + 'spec/fixtures/pages_empty.tar.gz', 'application/octet-stream') }
let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'application/octet-stream') }
before do
project.remove_pages
end
context 'for valid file' do
before { build.update_attributes(artifacts_file: file) }
it 'succeeds' do
expect(project.pages_url).to be_nil
expect(worker.perform(build.id)).to be_truthy
expect(project.pages_url).to_not be_nil
end
it 'limits pages size' do
stub_application_setting(max_pages_size: 1)
expect(worker.perform(build.id)).to_not be_truthy
end
it 'removes pages after destroy' do
expect(project.pages_url).to be_nil
expect(worker.perform(build.id)).to be_truthy
expect(project.pages_url).to_not be_nil
project.destroy
expect(Dir.exist?(project.public_pages_path)).to be_falsey
end
end
it 'fails if no artifacts' do
expect(worker.perform(build.id)).to_not be_truthy
end
it 'fails for an empty file' do
build.update_attributes(artifacts_file: empty_file)
expect(worker.perform(build.id)).to_not be_truthy
end
it 'fails for invalid archive' do
build.update_attributes(artifacts_file: invalid_file)
expect(worker.perform(build.id)).to_not be_truthy
end
it 'fails if sha on branch is not latest' do
commit.update_attributes(sha: 'old_sha')
build.update_attributes(artifacts_file: file)
expect(worker.perform(build.id)).to_not be_truthy
end
end