Commit f715d682 authored by Vladimir Shushlin

Add uploader for GitLab Pages deployments

Use the GitLab uploader and override only dynamic_segment
Add settings to gitlab.yml.example
Use the pages path as the root directory for local storage
Add settings for pages object storage (not set anywhere at the moment)
parent d6f2a64d
@@ -2,10 +2,14 @@
# PagesDeployment stores a zip archive containing a GitLab Pages website
class PagesDeployment < ApplicationRecord
  include FileStoreMounter

  belongs_to :project, optional: false
  belongs_to :ci_build, class_name: 'Ci::Build', optional: true

  validates :file, presence: true
  validates :file_store, presence: true, inclusion: { in: ObjectStorage::SUPPORTED_STORES }
  validates :size, presence: true, numericality: { greater_than: 0, only_integer: true }

  mount_file_store_uploader ::Pages::DeploymentUploader
end
# frozen_string_literal: true

module Pages
  class DeploymentUploader < GitlabUploader
    include ObjectStorage::Concern

    storage_options Gitlab.config.pages

    alias_method :upload, :model

    private

    def dynamic_segment
      Gitlab::HashedPath.new('pages_deployments', model.id, root_hash: model.project_id)
    end

    # @hashed is chosen to avoid conflicts with namespace names, because we use the same
    # directory for storage. @ is not a valid character in a namespace.
    def base_dir
      "@hashed"
    end
  end
end
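For illustration only, here is roughly what a locally stored deployment path looks like once base_dir and dynamic_segment are combined; the id and digest placeholders below are hypothetical, and the root comes from the Pages storage path configured further down:

# Illustrative sketch only -- the id is made up and the digest placeholders
# stand in for the hashed path derived from the project id.
deployment = PagesDeployment.find(42)
deployment.file.path
# => "#{Settings.pages['storage_path']}/@hashed/<2 hex>/<2 hex>/<64 hex>/pages_deployments/42/pages.zip"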
@@ -394,6 +394,14 @@ production: &base
# File that contains the shared secret key for verifying access for gitlab-pages.
# Default is '.gitlab_pages_secret' relative to Rails.root (i.e. root of the GitLab app).
# secret_file: /home/git/gitlab/.gitlab_pages_secret
object_store:
enabled: false
remote_directory: pages # The bucket name
connection:
provider: AWS
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: us-east-1
## Mattermost
## For enabling Add to Mattermost button
@@ -1318,6 +1326,14 @@ test:
# user: YOUR_USERNAME
pages:
path: tmp/tests/pages
object_store:
enabled: false
remote_directory: pages # The bucket name
connection:
provider: AWS
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: us-east-1
repositories:
storages:
default:
......
@@ -297,6 +297,10 @@ Settings.pages['external_http'] ||= false unless Settings.pages['external_http']
Settings.pages['external_https'] ||= false unless Settings.pages['external_https'].present?
Settings.pages['artifacts_server'] ||= Settings.pages['enabled'] if Settings.pages['artifacts_server'].nil?
Settings.pages['secret_file'] ||= Rails.root.join('.gitlab_pages_secret')
# We want Pages zip archives to be stored in the same directory as the old Pages
# hierarchical structure; this will make it easier to migrate existing instances that use NFS.
Settings.pages['storage_path'] = Settings.pages['path']
Settings.pages['object_store'] = ObjectStoreSettings.legacy_parse(Settings.pages['object_store'])
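As a rough sketch of what the runtime settings look like after this initializer runs (values shown are the gitlab.yml.example defaults, and Gitlab.config is the same Settings object), this is what the uploader's storage_options call reads:

# Illustrative only -- values are the gitlab.yml.example defaults shown above.
Gitlab.config.pages.storage_path                      # => Settings.pages['path']
Gitlab.config.pages.object_store['enabled']           # => false
Gitlab.config.pages.object_store['remote_directory']  # => "pages"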
#
# Geo
......
@@ -4,9 +4,12 @@ FactoryBot.define do
factory :pages_deployment, class: 'PagesDeployment' do
project
file_store { ObjectStorage::SUPPORTED_STORES.first }
size { 1.megabytes }
# TODO: replace with proper file uploaded in https://gitlab.com/gitlab-org/gitlab/-/issues/245295
file { "dummy string" }
after(:build) do |deployment, _evaluator|
deployment.file = fixture_file_upload(
Rails.root.join("spec/fixtures/pages.zip")
)
deployment.size = deployment.file.size
end
end
end
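A hedged example of how a spec might consume this factory now that it attaches a real archive; the expectations below are illustrative and are not part of this commit:

# Illustrative spec snippet only (not taken from the changed specs).
deployment = create(:pages_deployment)

deployment.file.filename  # => "pages.zip"
deployment.size           # => byte size of the fixture, satisfying the size validation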
@@ -89,6 +89,13 @@ module StubObjectStorage
**params)
end
def stub_pages_object_storage(uploader = described_class, **params)
stub_object_storage_uploader(config: Gitlab.config.pages.object_store,
uploader: uploader,
remote_directory: 'pages',
**params)
end
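For context, a sketch of how this helper could be called from a spec where described_class is not the uploader, passing the uploader class explicitly; this usage is assumed rather than shown in the diff:

# Hypothetical usage -- stubs object storage for the 'pages' bucket so
# remote-storage code paths can run without real credentials.
before do
  stub_pages_object_storage(::Pages::DeploymentUploader)
end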
def stub_object_storage_multipart_init(endpoint, upload_id = "upload_id")
stub_request(:post, %r{\A#{endpoint}tmp/uploads/[a-z0-9-]*\?uploads\z})
.to_return status: 200, body: <<-EOS.strip_heredoc
......
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Pages::DeploymentUploader do
  let(:pages_deployment) { create(:pages_deployment) }
  let(:uploader) { described_class.new(pages_deployment, :file) }

  subject { uploader }

  it_behaves_like "builds correct paths",
                  store_dir: %r[/\h{2}/\h{2}/\h{64}/pages_deployments/\d+],
                  cache_dir: %r[pages/@hashed/tmp/cache],
                  work_dir: %r[pages/@hashed/tmp/work]

  context 'when object store is REMOTE' do
    before do
      stub_pages_object_storage
    end

    include_context 'with storage', described_class::Store::REMOTE

    it_behaves_like 'builds correct paths', store_dir: %r[\A\h{2}/\h{2}/\h{64}/pages_deployments/\d+\z]
  end

  context 'when file is stored in valid local_path' do
    let(:file) do
      fixture_file_upload("spec/fixtures/pages.zip")
    end

    before do
      uploader.store!(file)
    end

    subject { uploader.file.path }

    it { is_expected.to match(%r[#{uploader.root}/@hashed/\h{2}/\h{2}/\h{64}/pages_deployments/#{pages_deployment.id}/pages.zip]) }
  end
end