Commit 081ed443 authored by Robert Speicher's avatar Robert Speicher

Merge branch 'ce-to-ee-2018-07-11' into 'master'

CE upstream - 2018-07-11 15:22 UTC

Closes gitlab-qa#231, gitaly#798, gitaly#529, and gitaly#875

See merge request gitlab-org/gitlab-ee!6464
parents 6ca17152 eb127ba6
...@@ -6,14 +6,14 @@ ...@@ -6,14 +6,14 @@
= sprite_icon('admin', size: 24) = sprite_icon('admin', size: 24)
.sidebar-context-title Admin Area .sidebar-context-title Admin Area
%ul.sidebar-top-level-items %ul.sidebar-top-level-items
= nav_link(controller: %w(dashboard admin projects users groups jobs runners cohorts conversational_development_index), html_options: {class: 'home'}) do = nav_link(controller: %w(dashboard admin projects users groups jobs runners gitaly_servers cohorts conversational_development_index), html_options: {class: 'home'}) do
= link_to admin_root_path, class: 'shortcuts-tree' do = link_to admin_root_path, class: 'shortcuts-tree' do
.nav-icon-container .nav-icon-container
= sprite_icon('overview') = sprite_icon('overview')
%span.nav-item-name %span.nav-item-name
Overview Overview
%ul.sidebar-sub-level-items %ul.sidebar-sub-level-items
= nav_link(controller: %w(dashboard admin projects users groups jobs runners cohorts conversational_development_index), html_options: { class: "fly-out-top-item" } ) do = nav_link(controller: %w(dashboard admin projects users groups jobs runners gitaly_servers cohorts conversational_development_index), html_options: { class: "fly-out-top-item" } ) do
= link_to admin_root_path do = link_to admin_root_path do
%strong.fly-out-top-item-name %strong.fly-out-top-item-name
#{ _('Overview') } #{ _('Overview') }
...@@ -42,6 +42,10 @@ ...@@ -42,6 +42,10 @@
= link_to admin_runners_path, title: 'Runners' do = link_to admin_runners_path, title: 'Runners' do
%span %span
Runners Runners
= nav_link(controller: :gitaly_servers) do
= link_to admin_gitaly_servers_path, title: 'Gitaly Servers' do
%span
Gitaly Servers
= nav_link path: 'cohorts#index' do = nav_link path: 'cohorts#index' do
= link_to admin_cohorts_path, title: 'Cohorts' do = link_to admin_cohorts_path, title: 'Cohorts' do
%span %span
......
...@@ -122,7 +122,7 @@ ...@@ -122,7 +122,7 @@
= render_if_exists 'projects/sidebar/issues_service_desk' = render_if_exists 'projects/sidebar/issues_service_desk'
= nav_link(controller: :milestones) do = nav_link(controller: :milestones) do
= link_to project_milestones_path(@project), title: 'Milestones' do = link_to project_milestones_path(@project), title: 'Milestones', class: 'qa-milestones-link' do
%span %span
= _('Milestones') = _('Milestones')
- if project_nav_tab? :external_issue_tracker - if project_nav_tab? :external_issue_tracker
......
...@@ -7,12 +7,12 @@ ...@@ -7,12 +7,12 @@
.form-group.row .form-group.row
= f.label :title, "Title", class: "col-form-label col-sm-2" = f.label :title, "Title", class: "col-form-label col-sm-2"
.col-sm-10 .col-sm-10
= f.text_field :title, maxlength: 255, class: "form-control", required: true, autofocus: true = f.text_field :title, maxlength: 255, class: "qa-milestone-title form-control", required: true, autofocus: true
.form-group.row.milestone-description .form-group.row.milestone-description
= f.label :description, "Description", class: "col-form-label col-sm-2" = f.label :description, "Description", class: "col-form-label col-sm-2"
.col-sm-10 .col-sm-10
= render layout: 'projects/md_preview', locals: { url: preview_markdown_path(@project) } do = render layout: 'projects/md_preview', locals: { url: preview_markdown_path(@project) } do
= render 'projects/zen', f: f, attr: :description, classes: 'note-textarea', placeholder: 'Write milestone description...' = render 'projects/zen', f: f, attr: :description, classes: 'qa-milestone-description note-textarea', placeholder: 'Write milestone description...'
= render 'shared/notes/hints' = render 'shared/notes/hints'
.clearfix .clearfix
.error-alert .error-alert
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
.form-actions .form-actions
- if @milestone.new_record? - if @milestone.new_record?
= f.submit 'Create milestone', class: "btn-create btn" = f.submit 'Create milestone', class: "btn-create btn qa-milestone-create-button"
= link_to "Cancel", project_milestones_path(@project), class: "btn btn-cancel" = link_to "Cancel", project_milestones_path(@project), class: "btn btn-cancel"
- else - else
= f.submit 'Save changes', class: "btn-save btn" = f.submit 'Save changes', class: "btn-save btn"
......
...@@ -8,7 +8,7 @@ ...@@ -8,7 +8,7 @@
.nav-controls .nav-controls
= render 'shared/milestones_sort_dropdown' = render 'shared/milestones_sort_dropdown'
- if can?(current_user, :admin_milestone, @project) - if can?(current_user, :admin_milestone, @project)
= link_to new_project_milestone_path(@project), class: "btn btn-new", title: 'New milestone' do = link_to new_project_milestone_path(@project), class: "btn btn-new qa-new-project-milestone", title: 'New milestone' do
New milestone New milestone
.milestones .milestones
......
...@@ -60,7 +60,7 @@ ...@@ -60,7 +60,7 @@
= icon('angle-double-left') = icon('angle-double-left')
.detail-page-description.milestone-detail .detail-page-description.milestone-detail
%h2.title %h2.title.qa-milestone-title
= markdown_field(@milestone, :title) = markdown_field(@milestone, :title)
%div %div
......
- if any_projects?(@projects) - if any_projects?(@projects)
.project-item-select-holder.btn-group .project-item-select-holder.btn-group
%a.btn.btn-new.new-project-item-link{ href: '', data: { label: local_assigns[:label], type: local_assigns[:type] } } %a.btn.btn-new.new-project-item-link.qa-new-project-item-link{ href: '', data: { label: local_assigns[:label], type: local_assigns[:type] } }
= icon('spinner spin') = icon('spinner spin')
= project_select_tag :project_path, class: "project-item-select", data: { include_groups: local_assigns[:include_groups], order_by: 'last_activity_at', relative_path: local_assigns[:path] }, with_feature_enabled: local_assigns[:with_feature_enabled] = project_select_tag :project_path, class: "project-item-select", data: { include_groups: local_assigns[:include_groups], order_by: 'last_activity_at', relative_path: local_assigns[:path] }, with_feature_enabled: local_assigns[:with_feature_enabled]
%button.btn.btn-new.new-project-item-select-button %button.btn.btn-new.new-project-item-select-button.qa-new-project-item-select-button
= icon('caret-down') = icon('caret-down')
...@@ -7,7 +7,7 @@ ...@@ -7,7 +7,7 @@
- dropdown_title = local_assigns.fetch(:dropdown_title, "Filter by milestone") - dropdown_title = local_assigns.fetch(:dropdown_title, "Filter by milestone")
- if selected.present? || params[:milestone_title].present? - if selected.present? || params[:milestone_title].present?
= hidden_field_tag(name, name == :milestone_title ? selected_text : selected.id) = hidden_field_tag(name, name == :milestone_title ? selected_text : selected.id)
= dropdown_tag(milestone_dropdown_label(selected_text), options: { title: dropdown_title, toggle_class: "js-milestone-select js-filter-submit #{extra_class}", filter: true, dropdown_class: "dropdown-menu-selectable dropdown-menu-milestone", = dropdown_tag(milestone_dropdown_label(selected_text), options: { title: dropdown_title, toggle_class: "qa-issuable-milestone-dropdown js-milestone-select js-filter-submit #{extra_class}", filter: true, dropdown_class: "qa-issuable-dropdown-menu-milestone dropdown-menu-selectable dropdown-menu-milestone",
placeholder: "Search milestones", footer_content: project.present?, data: { show_no: true, show_menu_above: show_menu_above, show_any: show_any, show_upcoming: show_upcoming, show_started: show_started, field_name: name, selected: selected_text, project_id: project.try(:id), milestones: milestones_filter_dropdown_path, default_label: "Milestone" } }) do placeholder: "Search milestones", footer_content: project.present?, data: { show_no: true, show_menu_above: show_menu_above, show_any: show_any, show_upcoming: show_upcoming, show_started: show_started, field_name: name, selected: selected_text, project_id: project.try(:id), milestones: milestones_filter_dropdown_path, default_label: "Milestone" } }) do
- if project - if project
%ul.dropdown-footer-list %ul.dropdown-footer-list
......
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
= form.label :milestone_id, "Milestone", class: "col-form-label #{has_due_date ? "col-md-2 col-lg-4" : "col-sm-2"}" = form.label :milestone_id, "Milestone", class: "col-form-label #{has_due_date ? "col-md-2 col-lg-4" : "col-sm-2"}"
.col-sm-10{ class: ("col-md-8" if has_due_date) } .col-sm-10{ class: ("col-md-8" if has_due_date) }
.issuable-form-select-holder .issuable-form-select-holder
= render "shared/issuable/milestone_dropdown", selected: issuable.milestone, name: "#{issuable.class.model_name.param_key}[milestone_id]", show_any: false, show_upcoming: false, show_started: false, extra_class: "js-issuable-form-dropdown js-dropdown-keep-input", dropdown_title: "Select milestone" = render "shared/issuable/milestone_dropdown", selected: issuable.milestone, name: "#{issuable.class.model_name.param_key}[milestone_id]", show_any: false, show_upcoming: false, show_started: false, extra_class: "qa-issuable-milestone-dropdown js-issuable-form-dropdown js-dropdown-keep-input", dropdown_title: "Select milestone"
.form-group.row .form-group.row
- has_labels = @labels && @labels.any? - has_labels = @labels && @labels.any?
= form.label :label_ids, "Labels", class: "col-form-label #{has_due_date ? "col-md-2 col-lg-4" : "col-sm-2"}" = form.label :label_ids, "Labels", class: "col-form-label #{has_due_date ? "col-md-2 col-lg-4" : "col-sm-2"}"
......
...@@ -79,9 +79,10 @@ class ProcessCommitWorker ...@@ -79,9 +79,10 @@ class ProcessCommitWorker
# Avoid reprocessing commits that already exist in the upstream # Avoid reprocessing commits that already exist in the upstream
# when project is forked. This will also prevent duplicated system notes. # when project is forked. This will also prevent duplicated system notes.
def commit_exists_in_upstream?(project, commit_hash) def commit_exists_in_upstream?(project, commit_hash)
return false unless project.forked? upstream_project = project.fork_source
return false unless upstream_project
upstream_project = project.forked_from_project
commit_id = commit_hash.with_indifferent_access[:id] commit_id = commit_hash.with_indifferent_access[:id]
upstream_project.commit(commit_id).present? upstream_project.commit(commit_id).present?
end end
......
---
title: Add Gitaly Servers link to the Admin > Overview navigation menu
merge_request: 20550
author:
type: added
---
title: Process commits as normal in forks when the upstream project is deleted
merge_request: 20534
author:
type: fixed
...@@ -30,14 +30,14 @@ class Gitlab::Seeder::Environments ...@@ -30,14 +30,14 @@ class Gitlab::Seeder::Environments
def create_merge_request_review_deployments! def create_merge_request_review_deployments!
@project @project
.merge_requests .merge_requests
.select { |mr| mr.source_branch.match(/\p{Alnum}+/) } .select { |mr| mr.source_branch.match(/[^a-zA-Z0-9]+/) }
.sample(4) .sample(4)
.each do |merge_request| .each do |merge_request|
next unless merge_request.diff_head_sha next unless merge_request.diff_head_sha
create_deployment!( create_deployment!(
merge_request.source_project, merge_request.source_project,
"review/#{merge_request.source_branch.gsub(/[^a-zA-Z0-9]/, '')}", "review/#{merge_request.source_branch.gsub(/[^a-zA-Z0-9]+/, '')}",
merge_request.source_branch, merge_request.source_branch,
merge_request.diff_head_sha merge_request.diff_head_sha
) )
......
class EnqueueDeleteDiffFilesWorkers < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false
  SCHEDULER = 'ScheduleDiffFilesDeletion'.freeze
  TMP_INDEX = 'tmp_partial_diff_id_with_files_index'.freeze

  # add_concurrent_index must run outside a transaction block.
  disable_ddl_transaction!

  # Creates a temporary partial index over diffs that still have files,
  # then kicks off the background scheduler that enqueues deletion jobs.
  def up
    create_tmp_index unless index_exists_by_name?(:merge_request_diffs, TMP_INDEX)

    BackgroundMigrationWorker.perform_async(SCHEDULER)

    # NOTE: the temporary index is deliberately kept here — the
    # DeleteDiffFiles worker still relies on it. It should be dropped
    # in an upcoming release.
  end

  # Rolls back only the index; already-scheduled background jobs are
  # not (and cannot be) undone here.
  def down
    return unless index_exists_by_name?(:merge_request_diffs, TMP_INDEX)

    remove_concurrent_index_by_name(:merge_request_diffs, TMP_INDEX)
  end

  private

  # Adds the partial index used to locate diffs whose files are still present.
  def create_tmp_index
    add_concurrent_index(
      :merge_request_diffs,
      :id,
      where: "(state NOT IN ('without_files', 'empty'))",
      name: TMP_INDEX
    )
  end
end
...@@ -4,41 +4,77 @@ ...@@ -4,41 +4,77 @@
module Gitlab module Gitlab
module BackgroundMigration module BackgroundMigration
class DeleteDiffFiles class DeleteDiffFiles
def perform(merge_request_diff_id) class MergeRequestDiff < ActiveRecord::Base
merge_request_diff = MergeRequestDiff.find_by(id: merge_request_diff_id) self.table_name = 'merge_request_diffs'
return unless merge_request_diff belongs_to :merge_request
return unless should_delete_diff_files?(merge_request_diff) has_many :merge_request_diff_files
end
MergeRequestDiff.transaction do class MergeRequestDiffFile < ActiveRecord::Base
merge_request_diff.update_column(:state, 'without_files') self.table_name = 'merge_request_diff_files'
end
# explain (analyze, buffers) when deleting 453 diff files:
# DEAD_TUPLES_THRESHOLD = 50_000
# Delete on merge_request_diff_files (cost=0.57..8487.35 rows=4846 width=6) (actual time=43.265..43.265 rows=0 loops=1) VACUUM_WAIT_TIME = 5.minutes
# Buffers: shared hit=2043 read=259 dirtied=254
# -> Index Scan using index_merge_request_diff_files_on_mr_diff_id_and_order on merge_request_diff_files (cost=0.57..8487.35 rows=4846 width=6) (actu def perform(ids)
# al time=0.466..26.317 rows=453 loops=1) @ids = ids
# Index Cond: (merge_request_diff_id = 463448)
# Buffers: shared hit=17 read=84 # We should reschedule until deadtuples get in a desirable
# Planning time: 0.107 ms # state (e.g. < 50_000). That may take more than one reschedule.
# Execution time: 43.287 ms #
# if should_wait_deadtuple_vacuum?
MergeRequestDiffFile.where(merge_request_diff_id: merge_request_diff.id).delete_all reschedule
return
end end
prune_diff_files
end
def should_wait_deadtuple_vacuum?
return false unless Gitlab::Database.postgresql?
diff_files_dead_tuples_count >= DEAD_TUPLES_THRESHOLD
end end
private private
def should_delete_diff_files?(merge_request_diff) def reschedule
return false if merge_request_diff.state == 'without_files' BackgroundMigrationWorker.perform_in(VACUUM_WAIT_TIME, self.class.name.demodulize, [@ids])
end
def diffs_collection
MergeRequestDiff.where(id: @ids)
end
def diff_files_dead_tuples_count
dead_tuple =
execute_statement("SELECT n_dead_tup FROM pg_stat_all_tables "\
"WHERE relname = 'merge_request_diff_files'")[0]
merge_request = merge_request_diff.merge_request dead_tuple&.fetch('n_dead_tup', 0).to_i
end
def prune_diff_files
removed = 0
updated = 0
return false unless merge_request.state == 'merged' MergeRequestDiff.transaction do
return false if merge_request_diff.id == merge_request.latest_merge_request_diff_id updated = diffs_collection.update_all(state: 'without_files')
removed = MergeRequestDiffFile.where(merge_request_diff_id: @ids).delete_all
end
log_info("Removed #{removed} merge_request_diff_files rows, "\
"updated #{updated} merge_request_diffs rows")
end
def execute_statement(sql)
ActiveRecord::Base.connection.execute(sql)
end
true def log_info(message)
Rails.logger.info("BackgroundMigration::DeleteDiffFiles - #{message}")
end end
end end
end end
......
# frozen_string_literal: true
# rubocop:disable Style/Documentation

module Gitlab
  module BackgroundMigration
    # Finds merge request diffs whose files can be deleted (diffs of merged
    # MRs that are not the MR's latest diff) and schedules DeleteDiffFiles
    # background jobs over them in batches.
    class ScheduleDiffFilesDeletion
      # Migration-local AR model so this code does not depend on the
      # application's MergeRequestDiff class, which may change over time.
      class MergeRequestDiff < ActiveRecord::Base
        self.table_name = 'merge_request_diffs'

        belongs_to :merge_request

        include EachBatch
      end

      DIFF_BATCH_SIZE = 5_000       # diff ids handed to each scheduled job
      INTERVAL = 5.minutes          # spacing between successive jobs
      MIGRATION = 'DeleteDiffFiles' # demodulized worker class to enqueue

      def perform
        # Wrap the candidate rows in a FROM-subquery so the outer query can
        # compare each diff id against its MR's latest diff id (selected by
        # diffs_collection); the latest diff must keep its files.
        diffs = MergeRequestDiff
          .from("(#{diffs_collection.to_sql}) merge_request_diffs")
          .where('merge_request_diffs.id != merge_request_diffs.latest_merge_request_diff_id')
          .select(:id)

        diffs.each_batch(of: DIFF_BATCH_SIZE) do |relation, index|
          ids = relation.pluck(:id)

          # Stagger jobs INTERVAL apart so deletions don't all hit the
          # database at once.
          BackgroundMigrationWorker.perform_in(index * INTERVAL, MIGRATION, [ids])
        end
      end

      private

      # Diffs of merged MRs that still have files (state not terminal).
      # Also selects the MR's latest_merge_request_diff_id so the caller's
      # outer query can filter on it.
      def diffs_collection
        MergeRequestDiff
          .joins(:merge_request)
          .where("merge_requests.state = 'merged'")
          .where('merge_requests.latest_merge_request_diff_id IS NOT NULL')
          .where("merge_request_diffs.state NOT IN ('without_files', 'empty')")
          .select('merge_requests.latest_merge_request_diff_id, merge_request_diffs.id')
      end
    end
  end
end
...@@ -61,17 +61,8 @@ module Gitlab ...@@ -61,17 +61,8 @@ module Gitlab
# Keep in mind that this method may allocate a lot of memory. It is up # Keep in mind that this method may allocate a lot of memory. It is up
# to the caller to limit the number of blobs and blob_size_limit. # to the caller to limit the number of blobs and blob_size_limit.
# #
# Gitaly migration issue: https://gitlab.com/gitlab-org/gitaly/issues/798
def batch(repository, blob_references, blob_size_limit: MAX_DATA_DISPLAY_SIZE) def batch(repository, blob_references, blob_size_limit: MAX_DATA_DISPLAY_SIZE)
Gitlab::GitalyClient.migrate(:list_blobs_by_sha_path) do |is_enabled| repository.gitaly_blob_client.get_blobs(blob_references, blob_size_limit).to_a
if is_enabled
repository.gitaly_blob_client.get_blobs(blob_references, blob_size_limit).to_a
else
blob_references.map do |sha, path|
find(repository, sha, path, limit: blob_size_limit)
end
end
end
end end
# Returns an array of Blob instances just with the metadata, that means # Returns an array of Blob instances just with the metadata, that means
...@@ -84,16 +75,8 @@ module Gitlab ...@@ -84,16 +75,8 @@ module Gitlab
# Returns array of Gitlab::Git::Blob # Returns array of Gitlab::Git::Blob
# Does not guarantee blob data will be set # Does not guarantee blob data will be set
def batch_lfs_pointers(repository, blob_ids) def batch_lfs_pointers(repository, blob_ids)
repository.gitaly_migrate(:batch_lfs_pointers) do |is_enabled| repository.wrapped_gitaly_errors do
if is_enabled repository.gitaly_blob_client.batch_lfs_pointers(blob_ids.to_a)
repository.gitaly_blob_client.batch_lfs_pointers(blob_ids.to_a)
else
blob_ids.lazy
.select { |sha| possible_lfs_blob?(repository, sha) }
.map { |sha| rugged_raw(repository, sha, limit: LFS_POINTER_MAX_SIZE) }
.select(&:lfs_pointer?)
.force
end
end end
end end
...@@ -104,72 +87,6 @@ module Gitlab ...@@ -104,72 +87,6 @@ module Gitlab
def size_could_be_lfs?(size) def size_could_be_lfs?(size)
size.between?(LFS_POINTER_MIN_SIZE, LFS_POINTER_MAX_SIZE) size.between?(LFS_POINTER_MIN_SIZE, LFS_POINTER_MAX_SIZE)
end end
private
# Recursive search of blob id by path
#
# Ex.
# blog/ # oid: 1a
# app/ # oid: 2a
# models/ # oid: 3a
# file.rb # oid: 4a
#
#
# Blob.find_entry_by_path(repo, '1a', 'blog', 'app', 'file.rb') # => '4a'
#
def find_entry_by_path(repository, root_id, *path_parts)
root_tree = repository.lookup(root_id)
entry = root_tree.find do |entry|
entry[:name] == path_parts[0]
end
return nil unless entry
if path_parts.size > 1
return nil unless entry[:type] == :tree
path_parts.shift
find_entry_by_path(repository, entry[:oid], *path_parts)
else
[:blob, :commit].include?(entry[:type]) ? entry : nil
end
end
def submodule_blob(blob_entry, path, sha)
new(
id: blob_entry[:oid],
name: blob_entry[:name],
size: 0,
data: '',
path: path,
commit_id: sha
)
end
def rugged_raw(repository, sha, limit:)
blob = repository.lookup(sha)
return unless blob.is_a?(Rugged::Blob)
new(
id: blob.oid,
size: blob.size,
data: blob.content(limit),
binary: blob.binary?
)
end
# Efficient lookup to determine if object size
# and type make it a possible LFS blob without loading
# blob content into memory with repository.lookup(sha)
def possible_lfs_blob?(repository, sha)
object_header = repository.rugged.read_header(sha)
object_header[:type] == :blob &&
size_could_be_lfs?(object_header[:len])
end
end end
def initialize(options) def initialize(options)
......
...@@ -98,16 +98,12 @@ module Gitlab ...@@ -98,16 +98,12 @@ module Gitlab
end end
def send_git_patch(repository, diff_refs) def send_git_patch(repository, diff_refs)
params = if Gitlab::GitalyClient.feature_enabled?(:workhorse_send_git_patch, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) params = {
{ 'GitalyServer' => gitaly_server_hash(repository),
'GitalyServer' => gitaly_server_hash(repository), 'RawPatchRequest' => Gitaly::RawPatchRequest.new(
'RawPatchRequest' => Gitaly::RawPatchRequest.new( gitaly_diff_or_patch_hash(repository, diff_refs)
gitaly_diff_or_patch_hash(repository, diff_refs) ).to_json
).to_json }
}
else
workhorse_diff_or_patch_hash(repository, diff_refs)
end
[ [
SEND_DATA_HEADER, SEND_DATA_HEADER,
...@@ -220,14 +216,6 @@ module Gitlab ...@@ -220,14 +216,6 @@ module Gitlab
} }
end end
def workhorse_diff_or_patch_hash(repository, diff_refs)
{
'RepoPath' => repository.path_to_repo,
'ShaFrom' => diff_refs.base_sha,
'ShaTo' => diff_refs.head_sha
}
end
def gitaly_diff_or_patch_hash(repository, diff_refs) def gitaly_diff_or_patch_hash(repository, diff_refs)
{ {
repository: repository.gitaly_repository, repository: repository.gitaly_repository,
......
...@@ -47,6 +47,7 @@ module QA ...@@ -47,6 +47,7 @@ module QA
autoload :Runner, 'qa/factory/resource/runner' autoload :Runner, 'qa/factory/resource/runner'
autoload :PersonalAccessToken, 'qa/factory/resource/personal_access_token' autoload :PersonalAccessToken, 'qa/factory/resource/personal_access_token'
autoload :KubernetesCluster, 'qa/factory/resource/kubernetes_cluster' autoload :KubernetesCluster, 'qa/factory/resource/kubernetes_cluster'
autoload :ProjectMilestone, 'qa/factory/resource/project_milestone'
autoload :Wiki, 'qa/factory/resource/wiki' autoload :Wiki, 'qa/factory/resource/wiki'
end end
...@@ -166,6 +167,11 @@ module QA ...@@ -166,6 +167,11 @@ module QA
autoload :Index, 'qa/page/project/issue/index' autoload :Index, 'qa/page/project/issue/index'
end end
module Milestone
autoload :New, 'qa/page/project/milestone/new'
autoload :Index, 'qa/page/project/milestone/index'
end
module Operations module Operations
module Kubernetes module Kubernetes
autoload :Index, 'qa/page/project/operations/kubernetes/index' autoload :Index, 'qa/page/project/operations/kubernetes/index'
......
...@@ -7,7 +7,10 @@ module QA ...@@ -7,7 +7,10 @@ module QA
attr_accessor :title, attr_accessor :title,
:description, :description,
:source_branch, :source_branch,
:target_branch :target_branch,
:assignee,
:milestone,
:labels
product :project do |factory| product :project do |factory|
factory.project factory.project
...@@ -42,16 +45,18 @@ module QA ...@@ -42,16 +45,18 @@ module QA
@description = 'This is a test merge request' @description = 'This is a test merge request'
@source_branch = "qa-test-feature-#{SecureRandom.hex(8)}" @source_branch = "qa-test-feature-#{SecureRandom.hex(8)}"
@target_branch = "master" @target_branch = "master"
@assignee = nil
@milestone = nil
@labels = []
end end
def fabricate! def fabricate!
project.visit! project.visit!
Page::Project::Show.act { new_merge_request } Page::Project::Show.act { new_merge_request }
Page::MergeRequest::New.perform do |page| Page::MergeRequest::New.perform do |page|
page.fill_title(@title) page.fill_title(@title)
page.fill_description(@description) page.fill_description(@description)
page.choose_milestone(@milestone) if @milestone
page.create_merge_request page.create_merge_request
end end
end end
......
module QA
  module Factory
    module Resource
      # Fabricates a project milestone through the UI
      # (Project > Issues > Milestones > New milestone).
      class ProjectMilestone < Factory::Base
        attr_accessor :description
        attr_reader :title

        dependency Factory::Resource::Project, as: :project

        product(:title) { |factory| factory.title }

        # Suffixes a random hex string so milestones created by concurrent
        # test runs never collide on title.
        #
        # Bug fix: previously this setter unconditionally reset @description
        # to the default, silently discarding any description assigned
        # before the title. `||=` keeps the default for callers that never
        # set a description while preserving an explicitly assigned one.
        def title=(title)
          @title = "#{title}-#{SecureRandom.hex(4)}"
          @description ||= 'A milestone'
        end

        # Drives the browser through the milestone creation flow.
        def fabricate!
          project.visit!

          Page::Menu::Side.act do
            click_issues
            click_milestones
          end

          Page::Project::Milestone::Index.act { click_new_milestone }

          Page::Project::Milestone::New.perform do |milestone_new|
            milestone_new.set_title(@title)
            milestone_new.set_description(@description)
            milestone_new.create_new_milestone
          end
        end
      end
    end
  end
end
...@@ -16,6 +16,7 @@ module QA ...@@ -16,6 +16,7 @@ module QA
element :operations_section, "class: 'shortcuts-operations'" element :operations_section, "class: 'shortcuts-operations'"
element :activity_link, "title: 'Activity'" element :activity_link, "title: 'Activity'"
element :wiki_link_text, "Wiki" element :wiki_link_text, "Wiki"
element :milestones_link
end end
view 'app/assets/javascripts/fly_out_nav.js' do view 'app/assets/javascripts/fly_out_nav.js' do
...@@ -70,6 +71,12 @@ module QA ...@@ -70,6 +71,12 @@ module QA
end end
end end
def click_milestones
within_sidebar do
click_element :milestones_link
end
end
def click_wiki def click_wiki
within_sidebar do within_sidebar do
click_link('Wiki') click_link('Wiki')
......
...@@ -10,10 +10,18 @@ module QA ...@@ -10,10 +10,18 @@ module QA
element :issuable_form_title element :issuable_form_title
end end
view 'app/views/shared/issuable/form/_metadata.html.haml' do
element :issuable_milestone_dropdown
end
view 'app/views/shared/form_elements/_description.html.haml' do view 'app/views/shared/form_elements/_description.html.haml' do
element :issuable_form_description element :issuable_form_description
end end
view 'app/views/shared/issuable/_milestone_dropdown.html.haml' do
element :issuable_dropdown_menu_milestone
end
def create_merge_request def create_merge_request
click_element :issuable_create_button click_element :issuable_create_button
end end
...@@ -25,6 +33,13 @@ module QA ...@@ -25,6 +33,13 @@ module QA
def fill_description(description) def fill_description(description)
fill_element :issuable_form_description, description fill_element :issuable_form_description, description
end end
def choose_milestone(milestone)
click_element :issuable_milestone_dropdown
within_element(:issuable_dropdown_menu_milestone) do
click_on milestone.title
end
end
end end
end end
end end
......
...@@ -81,6 +81,12 @@ module QA ...@@ -81,6 +81,12 @@ module QA
click_element :squash_checkbox click_element :squash_checkbox
end end
def has_milestone?(milestone_title)
page.within('.issuable-sidebar') do
!!find("[href*='/milestones/']", text: milestone_title, wait: 1)
end
end
end end
end end
end end
......
module QA
  module Page
    module Project
      module Milestone
        # Page object for the project milestones list
        # (Project > Issues > Milestones).
        class Index < Page::Base
          # Binds the :new_project_milestone element to the view that
          # renders it, so the selector is validated against the template.
          view 'app/views/projects/milestones/index.html.haml' do
            element :new_project_milestone
          end

          # Opens the "New milestone" form.
          def click_new_milestone
            click_element :new_project_milestone
          end
        end
      end
    end
  end
end
module QA
  module Page
    module Project
      module Milestone
        # Page object for the "New milestone" form.
        class New < Page::Base
          # Elements backed by qa- classes in the milestone form partial;
          # declaring them here lets the QA framework verify the selectors
          # exist in the template.
          view 'app/views/projects/milestones/_form.html.haml' do
            element :milestone_create_button
            element :milestone_title
            element :milestone_description
          end

          # Fills in the milestone title field.
          def set_title(title)
            fill_element :milestone_title, title
          end

          # Fills in the milestone description field.
          def set_description(description)
            fill_element :milestone_description, description
          end

          # Submits the form, creating the milestone.
          def create_new_milestone
            click_element :milestone_create_button
          end
        end
      end
    end
  end
end
...@@ -4,14 +4,28 @@ module QA ...@@ -4,14 +4,28 @@ module QA
Runtime::Browser.visit(:gitlab, Page::Main::Login) Runtime::Browser.visit(:gitlab, Page::Main::Login)
Page::Main::Login.act { sign_in_using_credentials } Page::Main::Login.act { sign_in_using_credentials }
current_project = Factory::Resource::Project.fabricate! do |project|
project.name = 'project-with-merge-request-and-milestone'
end
current_milestone = Factory::Resource::ProjectMilestone.fabricate! do |milestone|
milestone.title = 'unique-milestone'
milestone.project = current_project
end
Factory::Resource::MergeRequest.fabricate! do |merge_request| Factory::Resource::MergeRequest.fabricate! do |merge_request|
merge_request.title = 'This is a merge request' merge_request.title = 'This is a merge request with a milestone'
merge_request.description = 'Great feature' merge_request.description = 'Great feature with milestone'
merge_request.project = current_project
merge_request.milestone = current_milestone
end end
expect(page).to have_content('This is a merge request') Page::MergeRequest::Show.perform do |merge_request|
expect(page).to have_content('Great feature') expect(page).to have_content('This is a merge request with a milestone')
expect(page).to have_content(/Opened [\w\s]+ ago/) expect(page).to have_content('Great feature with milestone')
expect(page).to have_content(/Opened [\w\s]+ ago/)
expect(merge_request).to have_milestone(current_milestone.title)
end
end end
end end
end end
require 'spec_helper' require 'spec_helper'
describe Gitlab::BackgroundMigration::DeleteDiffFiles, :migration, schema: 20180626125654 do describe Gitlab::BackgroundMigration::DeleteDiffFiles, :migration, schema: 20180619121030 do
describe '#perform' do describe '#perform' do
context 'when diff files can be deleted' do context 'when diff files can be deleted' do
let(:merge_request) { create(:merge_request, :merged) } let(:merge_request) { create(:merge_request, :merged) }
let(:merge_request_diff) do let!(:merge_request_diff) do
merge_request.create_merge_request_diff merge_request.create_merge_request_diff
merge_request.merge_request_diffs.first merge_request.merge_request_diffs.first
end end
let(:perform) do
described_class.new.perform(MergeRequestDiff.pluck(:id))
end
it 'deletes all merge request diff files' do it 'deletes all merge request diff files' do
expect { described_class.new.perform(merge_request_diff.id) } expect { perform }
.to change { merge_request_diff.merge_request_diff_files.count } .to change { merge_request_diff.merge_request_diff_files.count }
.from(20).to(0) .from(20).to(0)
end end
it 'updates state to without_files' do it 'updates state to without_files' do
expect { described_class.new.perform(merge_request_diff.id) } expect { perform }
.to change { merge_request_diff.reload.state } .to change { merge_request_diff.reload.state }
.from('collected').to('without_files') .from('collected').to('without_files')
end end
it 'rollsback if something goes wrong' do it 'rollsback if something goes wrong' do
expect(MergeRequestDiffFile).to receive_message_chain(:where, :delete_all) expect(described_class::MergeRequestDiffFile).to receive_message_chain(:where, :delete_all)
.and_raise .and_raise
expect { described_class.new.perform(merge_request_diff.id) } expect { perform }
.to raise_error .to raise_error
merge_request_diff.reload merge_request_diff.reload
...@@ -35,35 +39,35 @@ describe Gitlab::BackgroundMigration::DeleteDiffFiles, :migration, schema: 20180 ...@@ -35,35 +39,35 @@ describe Gitlab::BackgroundMigration::DeleteDiffFiles, :migration, schema: 20180
end end
end end
it 'deletes no merge request diff files when MR is not merged' do it 'reschedules itself when should_wait_deadtuple_vacuum' do
merge_request = create(:merge_request, :opened)
merge_request.create_merge_request_diff
merge_request_diff = merge_request.merge_request_diffs.first
expect { described_class.new.perform(merge_request_diff.id) }
.not_to change { merge_request_diff.merge_request_diff_files.count }
.from(20)
end
it 'deletes no merge request diff files when diff is marked as "without_files"' do
merge_request = create(:merge_request, :merged) merge_request = create(:merge_request, :merged)
merge_request.create_merge_request_diff first_diff = merge_request.merge_request_diff
merge_request_diff = merge_request.merge_request_diffs.first second_diff = merge_request.create_merge_request_diff
merge_request_diff.clean! Sidekiq::Testing.fake! do
worker = described_class.new
allow(worker).to receive(:should_wait_deadtuple_vacuum?) { true }
expect { described_class.new.perform(merge_request_diff.id) } worker.perform([first_diff.id, second_diff.id])
.not_to change { merge_request_diff.merge_request_diff_files.count }
.from(20) expect(described_class.name.demodulize).to be_scheduled_delayed_migration(5.minutes, [first_diff.id, second_diff.id])
expect(BackgroundMigrationWorker.jobs.size).to eq(1)
end
end end
end
it 'deletes no merge request diff files when diff is the latest' do describe '#should_wait_deadtuple_vacuum?' do
merge_request = create(:merge_request, :merged) it 'returns true when hitting merge_request_diff_files hits DEAD_TUPLES_THRESHOLD', :postgresql do
merge_request_diff = merge_request.merge_request_diff worker = described_class.new
threshold_query_result = [{ "n_dead_tup" => described_class::DEAD_TUPLES_THRESHOLD.to_s }]
normal_query_result = [{ "n_dead_tup" => '3' }]
allow(worker)
.to receive(:execute_statement)
.with(/SELECT n_dead_tup */)
.and_return(threshold_query_result, normal_query_result)
expect { described_class.new.perform(merge_request_diff.id) } expect(worker.should_wait_deadtuple_vacuum?).to be(true)
.not_to change { merge_request_diff.merge_request_diff_files.count }
.from(20)
end end
end end
end end
require 'spec_helper'

# Migration spec: verifies that the ScheduleDiffFilesDeletion background
# migration enqueues DeleteDiffFiles jobs in correctly sized, correctly
# delayed batches, while skipping diffs that must not be deleted.
describe Gitlab::BackgroundMigration::ScheduleDiffFilesDeletion, :migration, schema: 20180619121030 do
  describe '#perform' do
    # Raw table helpers — migration specs operate on the schema at the
    # pinned version, not on the application models.
    let(:merge_request_diffs) { table(:merge_request_diffs) }
    let(:merge_requests) { table(:merge_requests) }
    let(:namespaces) { table(:namespaces) }
    let(:projects) { table(:projects) }

    before do
      # Shrink the batch size so batching behavior is observable with a
      # small fixture set.
      stub_const("#{described_class.name}::DIFF_BATCH_SIZE", 3)

      namespaces.create!(id: 1, name: 'gitlab', path: 'gitlab')
      projects.create!(id: 1, namespace_id: 1, name: 'gitlab', path: 'gitlab')
      merge_requests.create!(id: 1, target_project_id: 1, source_project_id: 1, target_branch: 'feature', source_branch: 'master', state: 'merged')

      # Only 'collected' diffs are candidates for deletion; 'empty' (id 2)
      # and 'without_files' (id 3) must be ignored by the scheduler.
      merge_request_diffs.create!(id: 1, merge_request_id: 1, state: 'collected')
      merge_request_diffs.create!(id: 2, merge_request_id: 1, state: 'empty')
      merge_request_diffs.create!(id: 3, merge_request_id: 1, state: 'without_files')
      merge_request_diffs.create!(id: 4, merge_request_id: 1, state: 'collected')
      merge_request_diffs.create!(id: 5, merge_request_id: 1, state: 'collected')
      merge_request_diffs.create!(id: 6, merge_request_id: 1, state: 'collected')
      merge_request_diffs.create!(id: 7, merge_request_id: 1, state: 'collected')

      # Mark diff 7 as the MR's latest diff — the latest diff is always
      # kept, so it must be excluded from scheduling.
      merge_requests.update(1, latest_merge_request_diff_id: 7)
    end

    it 'correctly schedules diff file deletion workers' do
      # fake! queues jobs without executing them; Timecop.freeze makes the
      # relative delays (5/10 minutes) assertable.
      Sidekiq::Testing.fake! do
        Timecop.freeze do
          described_class.new.perform

          # Candidates are ids 1, 4, 5, 6 (collected, non-latest); with a
          # batch size of 3 that yields two delayed batches.
          expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, [1, 4, 5])
          expect(described_class::MIGRATION).to be_scheduled_delayed_migration(10.minutes, [6])
          expect(BackgroundMigrationWorker.jobs.size).to eq(2)
        end
      end
    end
  end
end
...@@ -178,77 +178,67 @@ describe Gitlab::Git::Blob, seed_helper: true do ...@@ -178,77 +178,67 @@ describe Gitlab::Git::Blob, seed_helper: true do
end end
describe '.batch' do describe '.batch' do
shared_examples 'loading blobs in batch' do let(:blob_references) do
let(:blob_references) do [
[ [SeedRepo::Commit::ID, "files/ruby/popen.rb"],
[SeedRepo::Commit::ID, "files/ruby/popen.rb"], [SeedRepo::Commit::ID, 'six']
[SeedRepo::Commit::ID, 'six'] ]
] end
end
subject { described_class.batch(repository, blob_references) } subject { described_class.batch(repository, blob_references) }
it { expect(subject.size).to eq(blob_references.size) } it { expect(subject.size).to eq(blob_references.size) }
context 'first blob' do context 'first blob' do
let(:blob) { subject[0] } let(:blob) { subject[0] }
it { expect(blob.id).to eq(SeedRepo::RubyBlob::ID) } it { expect(blob.id).to eq(SeedRepo::RubyBlob::ID) }
it { expect(blob.name).to eq(SeedRepo::RubyBlob::NAME) } it { expect(blob.name).to eq(SeedRepo::RubyBlob::NAME) }
it { expect(blob.path).to eq("files/ruby/popen.rb") } it { expect(blob.path).to eq("files/ruby/popen.rb") }
it { expect(blob.commit_id).to eq(SeedRepo::Commit::ID) } it { expect(blob.commit_id).to eq(SeedRepo::Commit::ID) }
it { expect(blob.data[0..10]).to eq(SeedRepo::RubyBlob::CONTENT[0..10]) } it { expect(blob.data[0..10]).to eq(SeedRepo::RubyBlob::CONTENT[0..10]) }
it { expect(blob.size).to eq(669) } it { expect(blob.size).to eq(669) }
it { expect(blob.mode).to eq("100644") } it { expect(blob.mode).to eq("100644") }
end end
context 'second blob' do context 'second blob' do
let(:blob) { subject[1] } let(:blob) { subject[1] }
it { expect(blob.id).to eq('409f37c4f05865e4fb208c771485f211a22c4c2d') } it { expect(blob.id).to eq('409f37c4f05865e4fb208c771485f211a22c4c2d') }
it { expect(blob.data).to eq('') } it { expect(blob.data).to eq('') }
it 'does not mark the blob as binary' do it 'does not mark the blob as binary' do
expect(blob).not_to be_binary expect(blob).not_to be_binary
end
end end
end
context 'limiting' do context 'limiting' do
subject { described_class.batch(repository, blob_references, blob_size_limit: blob_size_limit) } subject { described_class.batch(repository, blob_references, blob_size_limit: blob_size_limit) }
context 'positive' do context 'positive' do
let(:blob_size_limit) { 10 } let(:blob_size_limit) { 10 }
it { expect(subject.first.data.size).to eq(10) } it { expect(subject.first.data.size).to eq(10) }
end end
context 'zero' do context 'zero' do
let(:blob_size_limit) { 0 } let(:blob_size_limit) { 0 }
it 'only loads the metadata' do it 'only loads the metadata' do
expect(subject.first.size).not_to be(0) expect(subject.first.size).not_to be(0)
expect(subject.first.data).to eq('') expect(subject.first.data).to eq('')
end
end end
end
context 'negative' do context 'negative' do
let(:blob_size_limit) { -1 } let(:blob_size_limit) { -1 }
it 'ignores MAX_DATA_DISPLAY_SIZE' do it 'ignores MAX_DATA_DISPLAY_SIZE' do
stub_const('Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE', 100) stub_const('Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE', 100)
expect(subject.first.data.size).to eq(669) expect(subject.first.data.size).to eq(669)
end
end end
end end
end end
context 'when Gitaly list_blobs_by_sha_path feature is enabled' do
it_behaves_like 'loading blobs in batch'
end
context 'when Gitaly list_blobs_by_sha_path feature is disabled', :disable_gitaly do
it_behaves_like 'loading blobs in batch'
end
end end
describe '.batch_metadata' do describe '.batch_metadata' do
...@@ -294,58 +284,48 @@ describe Gitlab::Git::Blob, seed_helper: true do ...@@ -294,58 +284,48 @@ describe Gitlab::Git::Blob, seed_helper: true do
) )
end end
shared_examples 'fetching batch of LFS pointers' do it 'returns a list of Gitlab::Git::Blob' do
it 'returns a list of Gitlab::Git::Blob' do blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id])
blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id])
expect(blobs.count).to eq(1)
expect(blobs).to all( be_a(Gitlab::Git::Blob) )
expect(blobs).to be_an(Array)
end
it 'accepts blob IDs as a lazy enumerator' do
blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id].lazy)
expect(blobs.count).to eq(1)
expect(blobs).to all( be_a(Gitlab::Git::Blob) )
end
it 'handles empty list of IDs gracefully' do expect(blobs.count).to eq(1)
blobs_1 = described_class.batch_lfs_pointers(repository, [].lazy) expect(blobs).to all( be_a(Gitlab::Git::Blob) )
blobs_2 = described_class.batch_lfs_pointers(repository, []) expect(blobs).to be_an(Array)
end
expect(blobs_1).to eq([]) it 'accepts blob IDs as a lazy enumerator' do
expect(blobs_2).to eq([]) blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id].lazy)
end
it 'silently ignores tree objects' do expect(blobs.count).to eq(1)
blobs = described_class.batch_lfs_pointers(repository, [tree_object.oid]) expect(blobs).to all( be_a(Gitlab::Git::Blob) )
end
expect(blobs).to eq([]) it 'handles empty list of IDs gracefully' do
end blobs_1 = described_class.batch_lfs_pointers(repository, [].lazy)
blobs_2 = described_class.batch_lfs_pointers(repository, [])
it 'silently ignores non lfs objects' do expect(blobs_1).to eq([])
blobs = described_class.batch_lfs_pointers(repository, [non_lfs_blob.id]) expect(blobs_2).to eq([])
end
expect(blobs).to eq([]) it 'silently ignores tree objects' do
end blobs = described_class.batch_lfs_pointers(repository, [tree_object.oid])
it 'avoids loading large blobs into memory' do expect(blobs).to eq([])
# This line could call `lookup` on `repository`, so do here before mocking. end
non_lfs_blob_id = non_lfs_blob.id
expect(repository).not_to receive(:lookup) it 'silently ignores non lfs objects' do
blobs = described_class.batch_lfs_pointers(repository, [non_lfs_blob.id])
described_class.batch_lfs_pointers(repository, [non_lfs_blob_id]) expect(blobs).to eq([])
end
end end
context 'when Gitaly batch_lfs_pointers is enabled' do it 'avoids loading large blobs into memory' do
it_behaves_like 'fetching batch of LFS pointers' # This line could call `lookup` on `repository`, so do here before mocking.
end non_lfs_blob_id = non_lfs_blob.id
expect(repository).not_to receive(:lookup)
context 'when Gitaly batch_lfs_pointers is disabled', :disable_gitaly do described_class.batch_lfs_pointers(repository, [non_lfs_blob_id])
it_behaves_like 'fetching batch of LFS pointers'
end end
end end
......
...@@ -68,34 +68,22 @@ describe Gitlab::Workhorse do ...@@ -68,34 +68,22 @@ describe Gitlab::Workhorse do
let(:diff_refs) { double(base_sha: "base", head_sha: "head") } let(:diff_refs) { double(base_sha: "base", head_sha: "head") }
subject { described_class.send_git_patch(repository, diff_refs) } subject { described_class.send_git_patch(repository, diff_refs) }
context 'when Gitaly workhorse_send_git_patch feature is enabled' do it 'sets the header correctly' do
it 'sets the header correctly' do key, command, params = decode_workhorse_header(subject)
key, command, params = decode_workhorse_header(subject)
expect(key).to eq("Gitlab-Workhorse-Send-Data")
expect(command).to eq("git-format-patch")
expect(params).to eq({
'GitalyServer' => {
address: Gitlab::GitalyClient.address(project.repository_storage),
token: Gitlab::GitalyClient.token(project.repository_storage)
},
'RawPatchRequest' => Gitaly::RawPatchRequest.new(
repository: repository.gitaly_repository,
left_commit_id: 'base',
right_commit_id: 'head'
).to_json
}.deep_stringify_keys)
end
end
context 'when Gitaly workhorse_send_git_patch feature is disabled', :disable_gitaly do
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(subject)
expect(key).to eq("Gitlab-Workhorse-Send-Data") expect(key).to eq("Gitlab-Workhorse-Send-Data")
expect(command).to eq("git-format-patch") expect(command).to eq("git-format-patch")
expect(params).to eq("RepoPath" => repository.path_to_repo, "ShaFrom" => "base", "ShaTo" => "head") expect(params).to eq({
end 'GitalyServer' => {
address: Gitlab::GitalyClient.address(project.repository_storage),
token: Gitlab::GitalyClient.token(project.repository_storage)
},
'RawPatchRequest' => Gitaly::RawPatchRequest.new(
repository: repository.gitaly_repository,
left_commit_id: 'base',
right_commit_id: 'head'
).to_json
}.deep_stringify_keys)
end end
end end
......
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180619121030_enqueue_delete_diff_files_workers.rb')

# Post-deployment migration spec: verifies that running the migration
# enqueues exactly one background job for the diff-files deletion scheduler.
describe EnqueueDeleteDiffFilesWorkers, :migration, :sidekiq do
  it 'correctly schedules diff files deletion schedulers' do
    # fake! queues Sidekiq jobs instead of running them, so we can assert
    # on what was enqueued.
    Sidekiq::Testing.fake! do
      # The migration should enqueue the SCHEDULER class (which in turn
      # batches up the actual deletion workers).
      expect(BackgroundMigrationWorker)
        .to receive(:perform_async)
        .with(described_class::SCHEDULER)
        .and_call_original

      migrate!

      # Exactly one job: the scheduler itself.
      expect(BackgroundMigrationWorker.jobs.size).to eq(1)
    end
  end
end
require 'spec_helper' require 'spec_helper'
describe ProcessCommitWorker do describe ProcessCommitWorker do
include ProjectForksHelper
let(:worker) { described_class.new } let(:worker) { described_class.new }
let(:user) { create(:user) } let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) } let(:project) { create(:project, :public, :repository) }
...@@ -32,15 +34,41 @@ describe ProcessCommitWorker do ...@@ -32,15 +34,41 @@ describe ProcessCommitWorker do
worker.perform(project.id, user.id, commit.to_hash) worker.perform(project.id, user.id, commit.to_hash)
end end
context 'when commit already exists in upstream project' do context 'when the project is forked' do
let(:forked) { create(:project, :public, :repository) } context 'when commit already exists in the upstream project' do
it 'does not process the commit message' do
forked = fork_project(project, user, repository: true)
expect(worker).not_to receive(:process_commit_message)
worker.perform(forked.id, user.id, forked.commit.to_hash)
end
end
context 'when the commit does not exist in the upstream project' do
it 'processes the commit message' do
empty_project = create(:project, :public)
forked = fork_project(empty_project, user, repository: true)
TestEnv.copy_repo(forked,
bare_repo: TestEnv.factory_repo_path_bare,
refs: TestEnv::BRANCH_SHA)
expect(worker).to receive(:process_commit_message)
worker.perform(forked.id, user.id, forked.commit.to_hash)
end
end
it 'does not process commit message' do context 'when the upstream project no longer exists' do
create(:forked_project_link, forked_to_project: forked, forked_from_project: project) it 'processes the commit message' do
forked = fork_project(project, user, repository: true)
project.destroy!
expect(worker).not_to receive(:process_commit_message) expect(worker).to receive(:process_commit_message)
worker.perform(forked.id, user.id, forked.commit.to_hash) worker.perform(forked.id, user.id, forked.commit.to_hash)
end
end end
end end
end end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment