Commit 9363a476 authored by Rémy Coutable's avatar Rémy Coutable

Merge branch 'ce-to-ee-2017-09-27' into 'master'

CE upstream - Wednesday

Closes gitlab-ce#34415

See merge request gitlab-org/gitlab-ee!3026
parents c49ad301 738de26c
@@ -26,7 +26,7 @@ gem 'doorkeeper', '~> 4.2.0'
 gem 'doorkeeper-openid_connect', '~> 1.1.0'
 gem 'omniauth', '~> 1.4.2'
 gem 'omniauth-auth0', '~> 1.4.1'
-gem 'omniauth-azure-oauth2', '~> 0.0.6'
+gem 'omniauth-azure-oauth2', '~> 0.0.9'
 gem 'omniauth-cas3', '~> 1.1.4'
 gem 'omniauth-facebook', '~> 4.0.0'
 gem 'omniauth-github', '~> 1.1.1'
...
@@ -215,7 +215,7 @@ GEM
     factory_girl_rails (4.7.0)
       factory_girl (~> 4.7.0)
       railties (>= 3.0.0)
-    faraday (0.12.1)
+    faraday (0.12.2)
       multipart-post (>= 1.2, < 3)
     faraday_middleware (0.11.0.1)
       faraday (>= 0.7.4, < 1.0)
@@ -545,10 +545,10 @@ GEM
       omniauth-oauth2 (~> 1.1)
     omniauth-authentiq (0.3.1)
       omniauth-oauth2 (~> 1.3, >= 1.3.1)
-    omniauth-azure-oauth2 (0.0.6)
+    omniauth-azure-oauth2 (0.0.9)
       jwt (~> 1.0)
       omniauth (~> 1.0)
-      omniauth-oauth2 (~> 1.1)
+      omniauth-oauth2 (~> 1.4)
     omniauth-cas3 (1.1.4)
       addressable (~> 2.3)
       nokogiri (~> 1.7, >= 1.7.1)
@@ -574,7 +574,7 @@ GEM
     omniauth-oauth (1.1.0)
       oauth
       omniauth (~> 1.0)
-    omniauth-oauth2 (1.3.1)
+    omniauth-oauth2 (1.4.0)
       oauth2 (~> 1.0)
       omniauth (~> 1.2)
     omniauth-oauth2-generic (0.2.2)
@@ -1113,7 +1113,7 @@ DEPENDENCIES
   omniauth (~> 1.4.2)
   omniauth-auth0 (~> 1.4.1)
   omniauth-authentiq (~> 0.3.1)
-  omniauth-azure-oauth2 (~> 0.0.6)
+  omniauth-azure-oauth2 (~> 0.0.9)
   omniauth-cas3 (~> 1.1.4)
   omniauth-facebook (~> 4.0.0)
   omniauth-github (~> 1.1.1)
...
...@@ -28,148 +28,149 @@ ...@@ -28,148 +28,149 @@
// </div> // </div>
// </div> // </div>
// //
(function() {
this.LineHighlighter = (function() {
// CSS class applied to highlighted lines
LineHighlighter.prototype.highlightClass = 'hll';
// Internal copy of location.hash so we're not dependent on `location` in tests
LineHighlighter.prototype._hash = '';
function LineHighlighter(hash) {
if (hash == null) {
// Initialize a LineHighlighter object
//
// hash - String URL hash for dependency injection in tests
hash = location.hash;
}
this.setHash = this.setHash.bind(this);
this.highlightLine = this.highlightLine.bind(this);
this.clickHandler = this.clickHandler.bind(this);
this.highlightHash = this.highlightHash.bind(this);
this._hash = hash;
this.bindEvents();
this.highlightHash();
}
LineHighlighter.prototype.bindEvents = function() { const LineHighlighter = function(options = {}) {
const $fileHolder = $('.file-holder'); options.highlightLineClass = options.highlightLineClass || 'hll';
$fileHolder.on('click', 'a[data-line-number]', this.clickHandler); options.fileHolderSelector = options.fileHolderSelector || '.file-holder';
$fileHolder.on('highlight:line', this.highlightHash); options.scrollFileHolder = options.scrollFileHolder || false;
}; options.hash = options.hash || location.hash;
LineHighlighter.prototype.highlightHash = function() {
var range;
if (this._hash !== '') {
range = this.hashToRange(this._hash);
if (range[0]) {
this.highlightRange(range);
$.scrollTo("#L" + range[0], {
// Scroll to the first highlighted line on initial load
// Offset -50 for the sticky top bar, and another -100 for some context
offset: -150
});
}
}
};
LineHighlighter.prototype.clickHandler = function(event) {
var current, lineNumber, range;
event.preventDefault();
this.clearHighlight();
lineNumber = $(event.target).closest('a').data('line-number');
current = this.hashToRange(this._hash);
if (!(current[0] && event.shiftKey)) {
// If there's no current selection, or there is but Shift wasn't held,
// treat this like a single-line selection.
this.setHash(lineNumber);
return this.highlightLine(lineNumber);
} else if (event.shiftKey) {
if (lineNumber < current[0]) {
range = [lineNumber, current[0]];
} else {
range = [current[0], lineNumber];
}
this.setHash(range[0], range[1]);
return this.highlightRange(range);
}
};
LineHighlighter.prototype.clearHighlight = function() {
return $("." + this.highlightClass).removeClass(this.highlightClass);
// Unhighlight previously highlighted lines
};
// Convert a URL hash String into line numbers
//
// hash - Hash String
//
// Examples:
//
// hashToRange('#L5') # => [5, null]
// hashToRange('#L5-15') # => [5, 15]
// hashToRange('#foo') # => [null, null]
//
// Returns an Array
LineHighlighter.prototype.hashToRange = function(hash) {
var first, last, matches;
// ?L(\d+)(?:-(\d+))?$/)
matches = hash.match(/^#?L(\d+)(?:-(\d+))?$/);
if (matches && matches.length) {
first = parseInt(matches[1], 10);
last = matches[2] ? parseInt(matches[2], 10) : null;
return [first, last];
} else {
return [null, null];
}
};
// Highlight a single line
//
// lineNumber - Line number to highlight
LineHighlighter.prototype.highlightLine = function(lineNumber) {
return $("#LC" + lineNumber).addClass(this.highlightClass);
};
// Highlight all lines within a range
//
// range - Array containing the starting and ending line numbers
LineHighlighter.prototype.highlightRange = function(range) {
var i, lineNumber, ref, ref1, results;
if (range[1]) {
results = [];
for (lineNumber = i = ref = range[0], ref1 = range[1]; ref <= ref1 ? i <= ref1 : i >= ref1; lineNumber = ref <= ref1 ? (i += 1) : (i -= 1)) {
results.push(this.highlightLine(lineNumber));
}
return results;
} else {
return this.highlightLine(range[0]);
}
};
// Set the URL hash string this.options = options;
LineHighlighter.prototype.setHash = function(firstLineNumber, lastLineNumber) { this._hash = options.hash;
var hash; this.highlightLineClass = options.highlightLineClass;
if (lastLineNumber) { this.setHash = this.setHash.bind(this);
hash = "#L" + firstLineNumber + "-" + lastLineNumber; this.highlightLine = this.highlightLine.bind(this);
this.clickHandler = this.clickHandler.bind(this);
this.highlightHash = this.highlightHash.bind(this);
this.bindEvents();
this.highlightHash();
};
LineHighlighter.prototype.bindEvents = function() {
const $fileHolder = $(this.options.fileHolderSelector);
$fileHolder.on('click', 'a[data-line-number]', this.clickHandler);
$fileHolder.on('highlight:line', this.highlightHash);
};
LineHighlighter.prototype.highlightHash = function() {
var range;
if (this._hash !== '') {
range = this.hashToRange(this._hash);
if (range[0]) {
this.highlightRange(range);
const lineSelector = `#L${range[0]}`;
const scrollOptions = {
// Scroll to the first highlighted line on initial load
// Offset -50 for the sticky top bar, and another -100 for some context
offset: -150
};
if (this.options.scrollFileHolder) {
$(this.options.fileHolderSelector).scrollTo(lineSelector, scrollOptions);
} else { } else {
hash = "#L" + firstLineNumber; $.scrollTo(lineSelector, scrollOptions);
} }
this._hash = hash; }
return this.__setLocationHash__(hash); }
}; };
// Make the actual hash change in the browser LineHighlighter.prototype.clickHandler = function(event) {
// var current, lineNumber, range;
// This method is stubbed in tests. event.preventDefault();
LineHighlighter.prototype.__setLocationHash__ = function(value) { this.clearHighlight();
return history.pushState({ lineNumber = $(event.target).closest('a').data('line-number');
url: value current = this.hashToRange(this._hash);
// We're using pushState instead of assigning location.hash directly to if (!(current[0] && event.shiftKey)) {
// prevent the page from scrolling on the hashchange event // If there's no current selection, or there is but Shift wasn't held,
}, document.title, value); // treat this like a single-line selection.
}; this.setHash(lineNumber);
return this.highlightLine(lineNumber);
return LineHighlighter; } else if (event.shiftKey) {
})(); if (lineNumber < current[0]) {
}).call(window); range = [lineNumber, current[0]];
} else {
range = [current[0], lineNumber];
}
this.setHash(range[0], range[1]);
return this.highlightRange(range);
}
};
LineHighlighter.prototype.clearHighlight = function() {
return $("." + this.highlightLineClass).removeClass(this.highlightLineClass);
};
// Convert a URL hash String into line numbers
//
// hash - Hash String
//
// Examples:
//
// hashToRange('#L5') # => [5, null]
// hashToRange('#L5-15') # => [5, 15]
// hashToRange('#foo') # => [null, null]
//
// Returns an Array
LineHighlighter.prototype.hashToRange = function(hash) {
var first, last, matches;
// ?L(\d+)(?:-(\d+))?$/)
matches = hash.match(/^#?L(\d+)(?:-(\d+))?$/);
if (matches && matches.length) {
first = parseInt(matches[1], 10);
last = matches[2] ? parseInt(matches[2], 10) : null;
return [first, last];
} else {
return [null, null];
}
};
// Highlight a single line
//
// lineNumber - Line number to highlight
LineHighlighter.prototype.highlightLine = function(lineNumber) {
return $("#LC" + lineNumber).addClass(this.highlightLineClass);
};
// Highlight all lines within a range
//
// range - Array containing the starting and ending line numbers
LineHighlighter.prototype.highlightRange = function(range) {
var i, lineNumber, ref, ref1, results;
if (range[1]) {
results = [];
for (lineNumber = i = ref = range[0], ref1 = range[1]; ref <= ref1 ? i <= ref1 : i >= ref1; lineNumber = ref <= ref1 ? (i += 1) : (i -= 1)) {
results.push(this.highlightLine(lineNumber));
}
return results;
} else {
return this.highlightLine(range[0]);
}
};
// Set the URL hash string
LineHighlighter.prototype.setHash = function(firstLineNumber, lastLineNumber) {
var hash;
if (lastLineNumber) {
hash = "#L" + firstLineNumber + "-" + lastLineNumber;
} else {
hash = "#L" + firstLineNumber;
}
this._hash = hash;
return this.__setLocationHash__(hash);
};
// Make the actual hash change in the browser
//
// This method is stubbed in tests.
LineHighlighter.prototype.__setLocationHash__ = function(value) {
return history.pushState({
url: value
// We're using pushState instead of assigning location.hash directly to
// prevent the page from scrolling on the hashchange event
}, document.title, value);
};
window.LineHighlighter = LineHighlighter;
@@ -352,7 +352,7 @@ import {
   }

   expandViewContainer() {
-    const $wrapper = $('.content-wrapper .container-fluid');
+    const $wrapper = $('.content-wrapper .container-fluid').not('.breadcrumbs');
     if (this.fixedLayoutPref === null) {
       this.fixedLayoutPref = $wrapper.hasClass('container-limited');
     }
...
 <script>
+/* global LineHighlighter */
 import Store from '../stores/repo_store';

 export default {
   data: () => Store,
-  mounted() {
-    this.highlightFile();
-  },
   computed: {
     html() {
       return this.activeFile.html;
     },
   },
   methods: {
     highlightFile() {
       $(this.$el).find('.file-content').syntaxHighlight();
     },
   },
+  mounted() {
+    this.highlightFile();
+    this.lineHighlighter = new LineHighlighter({
+      fileHolderSelector: '.blob-viewer-container',
+      scrollFileHolder: true,
+    });
+  },
   watch: {
     html() {
       this.$nextTick(() => {
...
@@ -306,6 +306,8 @@ header.navbar-gitlab-new {
     display: flex;
     width: 100%;
     position: relative;
+    padding-top: $gl-padding / 2;
+    padding-bottom: $gl-padding / 2;
     align-items: center;
     border-bottom: 1px solid $border-color;
   }
@@ -317,11 +319,6 @@ header.navbar-gitlab-new {
     align-self: center;
     color: $gl-text-color-secondary;

-    @media (max-width: $screen-xs-max) {
-      padding-left: 17px;
-      border-left: 1px solid $gl-text-color-quaternary;
-    }
-
     .avatar-tile {
       margin-right: 4px;
       border: 1px solid $border-color;
@@ -351,6 +348,7 @@ header.navbar-gitlab-new {
     display: flex;
     align-items: center;
     position: relative;
+    padding: 2px 0;

     &:not(:last-child) {
       margin-right: 20px;
@@ -386,7 +384,7 @@ header.navbar-gitlab-new {
     margin: 0;
     font-size: 12px;
     font-weight: 600;
-    line-height: 1;
+    line-height: 16px;

     a {
       color: $gl-text-color;
...
@@ -461,6 +461,13 @@ $new-sidebar-collapsed-width: 50px;
       font-size: 18px;
     }
   }
+
+  @media (max-width: $screen-xs-max) {
+    + .breadcrumbs-links {
+      padding-left: 17px;
+      border-left: 1px solid $gl-text-color-quaternary;
+    }
+  }
 }

 @media (max-width: $screen-xs-max) {
...
@@ -54,6 +54,10 @@
   border-radius: $border-radius-default;
   color: $almost-black;

+  .code.white pre .hll {
+    background-color: $well-light-border !important;
+  }
+
   .tree-content-holder {
     display: flex;
     min-height: 300px;
...
@@ -447,7 +447,7 @@ module Ci
     def update_duration
       return unless started_at

-      self.duration = Gitlab::Ci::PipelineDuration.from_pipeline(self)
+      self.duration = Gitlab::Ci::Pipeline::Duration.from_pipeline(self)
     end

     def execute_hooks
...
@@ -25,8 +25,8 @@ class Commit
   DIFF_HARD_LIMIT_FILES = 1000
   DIFF_HARD_LIMIT_LINES = 50000

-  # The SHA can be between 7 and 40 hex characters.
-  COMMIT_SHA_PATTERN = '\h{7,40}'.freeze
+  MIN_SHA_LENGTH = 7
+  COMMIT_SHA_PATTERN = /\h{#{MIN_SHA_LENGTH},40}/.freeze

   def banzai_render_context(field)
     context = { pipeline: :single_line, project: self.project }
@@ -53,7 +53,7 @@ class Commit
   # Truncate sha to 8 characters
   def truncate_sha(sha)
-    sha[0..7]
+    sha[0..MIN_SHA_LENGTH]
   end

   def max_diff_options
@@ -100,7 +100,7 @@ class Commit
   def self.reference_pattern
     @reference_pattern ||= %r{
       (?:#{Project.reference_pattern}#{reference_prefix})?
-      (?<commit>\h{7,40})
+      (?<commit>#{COMMIT_SHA_PATTERN})
     }x
   end
@@ -216,9 +216,8 @@ class Commit
     @raw.respond_to?(method, include_private) || super
   end

-  # Truncate sha to 8 characters
   def short_id
-    @raw.short_id(7)
+    @raw.short_id(MIN_SHA_LENGTH)
   end

   def diff_refs
...
@@ -541,8 +541,11 @@ class Repository
   cache_method :tag_count, fallback: 0

   def avatar
-    if tree = file_on_head(:avatar)
-      tree.path
+    # n+1: https://gitlab.com/gitlab-org/gitlab-ce/issues/38327
+    Gitlab::GitalyClient.allow_n_plus_1_calls do
+      if tree = file_on_head(:avatar)
+        tree.path
+      end
     end
   end
   cache_method :avatar
...
...@@ -2,114 +2,57 @@ module Ci ...@@ -2,114 +2,57 @@ module Ci
class CreatePipelineService < BaseService class CreatePipelineService < BaseService
attr_reader :pipeline attr_reader :pipeline
def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil, mirror_update: false) SEQUENCE = [Gitlab::Ci::Pipeline::Chain::Validate::Abilities,
Gitlab::Ci::Pipeline::Chain::Validate::Repository,
Gitlab::Ci::Pipeline::Chain::Validate::Config,
Gitlab::Ci::Pipeline::Chain::Skip,
Gitlab::Ci::Pipeline::Chain::Create].freeze
def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil, mirror_update: false, &block)
@pipeline = Ci::Pipeline.new( @pipeline = Ci::Pipeline.new(
source: source, source: source,
project: project, project: project,
ref: ref, ref: ref,
sha: sha, sha: sha,
before_sha: before_sha, before_sha: before_sha,
tag: tag?, tag: tag_exists?,
trigger_requests: Array(trigger_request), trigger_requests: Array(trigger_request),
user: current_user, user: current_user,
pipeline_schedule: schedule, pipeline_schedule: schedule,
protected: project.protected_for?(ref) protected: project.protected_for?(ref)
) )
result = validate_project_and_git_items(mirror_update: mirror_update) || # VALIDATE mirror_update!
validate_pipeline(ignore_skip_ci: ignore_skip_ci, command = OpenStruct.new(ignore_skip_ci: ignore_skip_ci,
save_on_errors: save_on_errors) save_incompleted: save_on_errors,
allow_mirror_update: mirror_update,
seeds_block: block,
project: project,
current_user: current_user)
return result if result sequence = Gitlab::Ci::Pipeline::Chain::Sequence
.new(pipeline, command, SEQUENCE)
begin sequence.build! do |pipeline, sequence|
Ci::Pipeline.transaction do update_merge_requests_head_pipeline if pipeline.persisted?
pipeline.save!
yield(pipeline) if block_given? if sequence.complete?
cancel_pending_pipelines if project.auto_cancel_pending_pipelines?
pipeline_created_counter.increment(source: source)
Ci::CreatePipelineStagesService pipeline.process!
.new(project, current_user)
.execute(pipeline)
end end
rescue ActiveRecord::RecordInvalid => e
return error("Failed to persist the pipeline: #{e}")
end end
update_merge_requests_head_pipeline
cancel_pending_pipelines if project.auto_cancel_pending_pipelines?
pipeline_created_counter.increment(source: source)
pipeline.tap(&:process!)
end end
private private
def validate_project_and_git_items(mirror_update: false) def commit
unless project.builds_enabled? @commit ||= project.commit(origin_sha || origin_ref)
return error('Pipeline is disabled')
end
if mirror_update && !project.mirror_trigger_builds?
return error('Pipeline is disabled for mirror updates')
end
unless allowed_to_trigger_pipeline?
if can?(current_user, :create_pipeline, project)
return error("Insufficient permissions for protected ref '#{ref}'")
else
return error('Insufficient permissions to create a new pipeline')
end
end
unless branch? || tag?
return error('Reference not found')
end
unless commit
return error('Commit not found')
end
end
def validate_pipeline(ignore_skip_ci:, save_on_errors:)
unless pipeline.config_processor
unless pipeline.ci_yaml_file
return error("Missing #{pipeline.ci_yaml_file_path} file")
end
return error(pipeline.yaml_errors, save: save_on_errors)
end
if !ignore_skip_ci && skip_ci?
pipeline.skip if save_on_errors
return pipeline
end
unless pipeline.has_stage_seeds?
return error('No stages / jobs for this pipeline.')
end
end
def allowed_to_trigger_pipeline?
if current_user
allowed_to_create?
else # legacy triggers don't have a corresponding user
!project.protected_for?(ref)
end
end end
def allowed_to_create? def sha
return unless can?(current_user, :create_pipeline, project) commit.try(:id)
access = Gitlab::UserAccess.new(current_user, project: project)
if branch?
access.can_update_branch?(ref)
elsif tag?
access.can_create_tag?(ref)
else
true # Allow it for now and we'll reject when we check ref existence
end
end end
def update_merge_requests_head_pipeline def update_merge_requests_head_pipeline
...@@ -119,11 +62,6 @@ module Ci ...@@ -119,11 +62,6 @@ module Ci
.update_all(head_pipeline_id: @pipeline.id) .update_all(head_pipeline_id: @pipeline.id)
end end
def skip_ci?
return false unless pipeline.git_commit_message
pipeline.git_commit_message =~ /\[(ci[ _-]skip|skip[ _-]ci)\]/i
end
def cancel_pending_pipelines def cancel_pending_pipelines
Gitlab::OptimisticLocking.retry_lock(auto_cancelable_pipelines) do |cancelables| Gitlab::OptimisticLocking.retry_lock(auto_cancelable_pipelines) do |cancelables|
cancelables.find_each do |cancelable| cancelables.find_each do |cancelable|
...@@ -140,14 +78,6 @@ module Ci ...@@ -140,14 +78,6 @@ module Ci
.created_or_pending .created_or_pending
end end
def commit
@commit ||= project.commit(origin_sha || origin_ref)
end
def sha
commit.try(:id)
end
def before_sha def before_sha
params[:checkout_sha] || params[:before] || Gitlab::Git::BLANK_SHA params[:checkout_sha] || params[:before] || Gitlab::Git::BLANK_SHA
end end
...@@ -160,41 +90,17 @@ module Ci ...@@ -160,41 +90,17 @@ module Ci
params[:ref] params[:ref]
end end
def branch? def tag_exists?
return @is_branch if defined?(@is_branch) project.repository.tag_exists?(ref)
@is_branch =
project.repository.ref_exists?(Gitlab::Git::BRANCH_REF_PREFIX + ref)
end
def tag?
return @is_tag if defined?(@is_tag)
@is_tag =
project.repository.ref_exists?(Gitlab::Git::TAG_REF_PREFIX + ref)
end end
def ref def ref
@ref ||= Gitlab::Git.ref_name(origin_ref) @ref ||= Gitlab::Git.ref_name(origin_ref)
end end
def valid_sha?
origin_sha && origin_sha != Gitlab::Git::BLANK_SHA
end
def error(message, save: false)
pipeline.tap do
pipeline.errors.add(:base, message)
if save
pipeline.drop
update_merge_requests_head_pipeline
end
end
end
def pipeline_created_counter def pipeline_created_counter
@pipeline_created_counter ||= Gitlab::Metrics.counter(:pipelines_created_total, "Counter of pipelines created") @pipeline_created_counter ||= Gitlab::Metrics
.counter(:pipelines_created_total, "Counter of pipelines created")
end end
end end
end end
@@ -6,10 +6,10 @@
     .svg-container
       = custom_icon('icon_autodevops')
     .user-callout-copy
-      %h4= _('Auto DevOps (Beta)')
-      %p= _('Auto DevOps can be activated for this project. It will automatically build, test, and deploy your application based on a predefined CI/CD configuration.')
+      %h4= s_('AutoDevOps|Auto DevOps (Beta)')
+      %p= s_('AutoDevOps|Auto DevOps can be activated for this project. It will automatically build, test, and deploy your application based on a predefined CI/CD configuration.')
       %p
-        #{s_('AutoDevOps|Learn more in the')}
-        = link_to _('Auto DevOps documentation'), help_page_path('topics/autodevops/index.md'), target: '_blank', rel: 'noopener noreferrer'
-      = link_to _('Enable in settings'), project_settings_ci_cd_path(@project, anchor: 'js-general-pipeline-settings'), class: 'btn btn-primary js-close-callout'
+        - link = link_to(s_('AutoDevOps|Auto DevOps documentation'), help_page_path('topics/autodevops/index.md'), target: '_blank', rel: 'noopener noreferrer')
+        = s_('AutoDevOps|Learn more in the %{link_to_documentation}').html_safe % { link_to_documentation: link }
+      = link_to s_('AutoDevOps|Enable in settings'), project_settings_ci_cd_path(@project, anchor: 'js-general-pipeline-settings'), class: 'btn btn-primary js-close-callout'
---
title: Improves i18n for Auto Devops callout
merge_request:
author:
type: other
---
title: find_user Users helper method no longer overrides find_user API helper method.
merge_request: 14418
author:
type: fixed
---
title: Notes will not show an empty bubble when the author isn't a member.
merge_request: 14450
author:
type: fixed
---
title: Some checks in `rake gitlab:check` were failing with 'undefined method `run_command`'
merge_request: 14469
author:
type: fixed
---
title: Breadcrumbs receive padding when double lined
merge_request:
author:
type: changed
---
title: Fix bug that caused merge requests with diff notes imported from Bitbucket
  to raise errors
merge_request:
author:
type: fixed
---
title: Expose avatar_url when requesting list of projects from API with simple=true
merge_request:
author:
type: added
---
title: Expose last pipeline details in API response when getting a single commit
merge_request: 13521
author: Mehdi Lahmam (@mehlah)
type: added
---
title: Make locked setting of Runner to not affect jobs scheduling
merge_request: 14483
author:
type: fixed
---
title: Re-allow `name` attribute on user-provided anchor HTML
merge_request:
author:
type: fixed
---
title: Fixed breadcrumbs container expanding in side-by-side diff view
merge_request:
author:
type: fixed
---
title: Remove an index on ci_builds meant to be only temporary
merge_request:
author:
type: other
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class RemoveTemporaryCiBuildsIndex < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
# To use create/remove index concurrently
disable_ddl_transaction!
def up
return unless index_exists?(:ci_builds, :id, name: 'index_for_ci_builds_retried_migration')
remove_concurrent_index(:ci_builds, :id, name: "index_for_ci_builds_retried_migration")
end
def down
# this was a temporary index for a migration that was never
# present previously so this probably shouldn't be here but it's
# easier to test the drop if we have a way to create it.
add_concurrent_index("ci_builds", ["id"],
name: "index_for_ci_builds_retried_migration",
where: "(retried IS NULL)",
using: :btree)
end
end
@@ -317,7 +317,6 @@ ActiveRecord::Schema.define(version: 20170921115009) do
   add_index "ci_builds", ["commit_id", "status", "type"], name: "index_ci_builds_on_commit_id_and_status_and_type", using: :btree
   add_index "ci_builds", ["commit_id", "type", "name", "ref"], name: "index_ci_builds_on_commit_id_and_type_and_name_and_ref", using: :btree
   add_index "ci_builds", ["commit_id", "type", "ref"], name: "index_ci_builds_on_commit_id_and_type_and_ref", using: :btree
-  add_index "ci_builds", ["id"], name: "index_for_ci_builds_retried_migration", where: "(retried IS NULL)", using: :btree
   add_index "ci_builds", ["project_id"], name: "index_ci_builds_on_project_id", using: :btree
   add_index "ci_builds", ["protected"], name: "index_ci_builds_on_protected", using: :btree
   add_index "ci_builds", ["runner_id"], name: "index_ci_builds_on_runner_id", using: :btree
...
@@ -181,6 +181,12 @@ Example response:
   "parent_ids": [
     "ae1d9fb46aa2b07ee9836d49862ec4e2c46fbbba"
   ],
+  "last_pipeline" : {
+    "id": 8,
+    "ref": "master",
+    "sha": "2dc6aa325a317eda67812f05600bdf0fcdc70ab0",
+    "status": "created"
+  },
   "stats": {
     "additions": 15,
     "deletions": 10,
...
@@ -1570,6 +1570,11 @@ Read more on [GitLab Pages user documentation](../../user/project/pages/index.md
 Each instance of GitLab CI has an embedded debug tool called Lint.
 You can find the link under `/ci/lint` of your gitlab instance.

+## Using reserved keywords
+
+If you get a validation error when using specific values (e.g., `true` or `false`),
+try to quote them, or change them to a different form (e.g., `/bin/true`).
+
 ## Skipping jobs

 If your commit message contains `[ci skip]` or `[skip ci]`, using any
...
@@ -45,6 +45,7 @@
 - [Building a package for testing purposes](build_test_package.md)
 - [Manage feature flags](feature_flags.md)
 - [View sent emails or preview mailers](emails.md)
+- [Working with Gitaly](gitaly.md)

 ## Databases
...
@@ -29,34 +29,6 @@ For our currently-supported browsers, see our [requirements][requirements].
 ## Development Process

-When you are assigned an issue please follow the next steps:
-
-### Divide a big feature into small Merge Requests
-
-1. Big Merge Request are painful to review. In order to make this process easier we
-   must break a big feature into smaller ones and create a Merge Request for each step.
-1. First step is to create a branch from `master`, let's call it `new-feature`. This branch
-   will be the recipient of all the smaller Merge Requests. Only this one will be merged to master.
-1. Don't do any work on this one, let's keep it synced with master.
-1. Create a new branch from `new-feature`, let's call it `new-feature-step-1`. We advise you
-   to clearly identify which step the branch represents.
-1. Do the first part of the modifications in this branch. The target branch of this Merge Request
-   should be `new-feature`.
-1. Once `new-feature-step-1` gets merged into `new-feature` we can continue our work. Create a new
-   branch from `new-feature`, let's call it `new-feature-step-2` and repeat the process done before.
-
-```shell
-master
-└─ new-feature
-   ├─ new-feature-step-1
-   ├─ new-feature-step-2
-   └─ new-feature-step-3
-```
-
-**Tips**
-
-- Make sure `new-feature` branch is always synced with `master`: merge master frequently.
-- Do the same for the feature branch you have opened. This can be accomplished by merging `master` into `new-feature` and `new-feature` into `new-feature-step-*`
-- Avoid rewriting history.
-
 ### Share your work early

 1. Before writing code guarantee your vision of the architecture is aligned with
    GitLab's architecture.
...
# GitLab Developers Guide to Working with Gitaly
[Gitaly](https://gitlab.com/gitlab-org/gitaly) is a high-level Git RPC service used by GitLab CE/EE,
Workhorse and GitLab-Shell. All Rugged operations in GitLab CE/EE are currently being phased out to
be replaced by Gitaly API calls.
Visit the [Gitaly Migration Board](https://gitlab.com/gitlab-org/gitaly/boards/331341) for current
status of the migration.
## Feature Flags
Gitaly makes heavy use of [feature flags](feature_flags.md).
Each Rugged-to-Gitaly migration goes through a [series of phases](https://gitlab.com/gitlab-org/gitaly/blob/master/doc/MIGRATION_PROCESS.md):
* **Opt-In**: by default the Rugged implementation is used.
* Production instances can choose to enable the Gitaly endpoint by enabling the feature flag.
* For testing purposes, you may wish to enable all feature flags by default. This can be done by exporting the following
environment variable: `GITALY_FEATURE_DEFAULT_ON=1`.
* On developer instances (i.e., when `Rails.env.development?` is true), the Gitaly endpoint
  is enabled by default, but can be _disabled_ using feature flags.
* **Opt-Out**: by default, the Gitaly endpoint is used, but the feature can be explicitly disabled using the feature flag.
* **Mandatory**: the migration is complete and cannot be disabled. The old codepath is removed.

### Enabling and Disabling Features
In the Rails console, type:
```ruby
Feature.enable(:gitaly_feature_name)
Feature.disable(:gitaly_feature_name)
```
Where `gitaly_feature_name` is the name of the Gitaly feature. This can be determined by finding the appropriate
`gitaly_migrate` code block, for example:
```ruby
gitaly_migrate(:tag_names) do
...
end
```
Since Gitaly features are always prefixed with `gitaly_`, the name of the feature flag in this case would be `gitaly_tag_names`.
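
A concrete instance of the naming rule, using the same `Feature` calls shown above:

```ruby
# gitaly_migrate(:tag_names) is controlled by the :gitaly_tag_names flag.
Feature.enable(:gitaly_tag_names)   # use the Gitaly endpoint for tag names
Feature.disable(:gitaly_tag_names)  # fall back to the Rugged implementation
```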
## Gitaly-Related Test Failures
If your test-suite is failing with Gitaly issues, as a first step, try running:
```shell
rm -rf tmp/tests/gitaly
```
---
[Return to Development documentation](README.md)
@@ -65,6 +65,7 @@ Libraries with the following licenses are unacceptable for use:
 - [GNU AGPLv3][AGPLv3]: AGPL-licensed libraries cannot be linked to from non-GPL projects.
 - [Open Software License (OSL)][OSL]: is a copyleft license. In addition, the FSF [recommend against its use][OSL-GNU].
 - [Facebook BSD + PATENTS][Facebook]: is a 3-clause BSD license with a patent grant that has been deemed [Category X][x-list] by the Apache foundation.
+- [WTFPL][WTFPL]: is a public domain dedication [rejected by the OSI (3.2)][WTFPL-OSI]. It also uses strong language that is not in accordance with our diversity policy.

 ## Requesting Approval for Licenses
@@ -108,3 +109,5 @@ Gems which are included only in the "development" or "test" groups by Bundler ar
 [x-list]: https://www.apache.org/legal/resolved.html#category-x
 [Acceptable-Licenses]: #acceptable-licenses
 [Unacceptable-Licenses]: #unacceptable-licenses
+[WTFPL]: https://wtfpl.net
+[WTFPL-OSI]: https://opensource.org/minutes20090304
@@ -74,6 +74,9 @@ To enable the Microsoft Azure OAuth2 OmniAuth provider you must register your ap
       tenant_id: "TENANT ID" } }
   ```

+The `base_azure_url` is optional and can be added for different locales;
+e.g. `base_azure_url: "https://login.microsoftonline.de"`.
+
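For Omnibus installations the same option goes into the provider's `args` hash. The snippet below is only a sketch of that layout (the keys mirror the placeholders above), so verify the exact structure against your own `/etc/gitlab/gitlab.rb`:

```ruby
gitlab_rails['omniauth_providers'] = [
  {
    "name" => "azure_oauth2",
    "args" => {
      "client_id"      => "CLIENT ID",
      "client_secret"  => "CLIENT SECRET",
      "tenant_id"      => "TENANT ID",
      # Optional: point at a locale-specific endpoint
      "base_azure_url" => "https://login.microsoftonline.de"
    }
  }
]
```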
 1. Replace 'CLIENT ID', 'CLIENT SECRET' and 'TENANT ID' with the values you got above.

 1. Save the configuration file.
...
@@ -13,38 +13,39 @@ template, see the [Services Templates](services_templates.md) document.
 ## Configuration

 Navigate to the [Integrations page](project_services.md#accessing-the-project-services)
-of your project and select the **Kubernetes** service to configure it.
+of your project and select the **Kubernetes** service to configure it. Fill in
+all the needed parameters, check the "Active" checkbox and hit **Save changes**
+for the changes to take effect.

 ![Kubernetes configuration settings](img/kubernetes_configuration.png)

-The Kubernetes service takes the following arguments:
-
-1. API URL
-1. Custom CA bundle
-1. Kubernetes namespace
-1. Service token
-
-The API URL is the URL that GitLab uses to access the Kubernetes API. Kubernetes
-exposes several APIs - we want the "base" URL that is common to all of them,
-e.g., `https://kubernetes.example.com` rather than `https://kubernetes.example.com/api/v1`.
-
-A [namespace] is just a logical grouping of resources. This is mostly for ease of
-management, so you can group things together. For example, if you have 50
-projects using the same cluster, providing a simple list of all pods would be
-really difficult to work with. In that case, you can provide a separate
-namespace to group things, as well as reduce name collision issues.
-
-GitLab authenticates against Kubernetes using service tokens, which are
-scoped to a particular `namespace`. If you don't have a service token yet,
-you can follow the
-[Kubernetes documentation](http://kubernetes.io/docs/user-guide/service-accounts/)
-to create one. You can also view or create service tokens in the
-[Kubernetes dashboard](http://kubernetes.io/docs/user-guide/ui/) - visit
-**Config ➔ Secrets**.
-
-Fill in the service token and namespace according to the values you just got.
-If the API is using a self-signed TLS certificate, you'll also need to include
-the `ca.crt` contents as the `Custom CA bundle`.
+The Kubernetes service takes the following parameters:
+
+- **API URL** -
+  It's the URL that GitLab uses to access the Kubernetes API. Kubernetes
+  exposes several APIs, we want the "base" URL that is common to all of them,
+  e.g., `https://kubernetes.example.com` rather than `https://kubernetes.example.com/api/v1`.
+- **CA certificate** (optional) -
+  If the API is using a self-signed TLS certificate, you'll also need to include
+  the `ca.crt` contents here.
+- **Project namespace** (optional) - The following apply:
+    - By default you don't have to fill it in; by leaving it blank, GitLab will
+      create one for you.
+    - Each project should have a unique namespace.
+    - The project namespace is not necessarily the namespace of the secret, if
+      you're using a secret with broader permissions, like the secret from `default`.
+    - You should **not** use `default` as the project namespace.
+    - If you or someone created a secret specifically for the project, usually
+      with limited permissions, the secret's namespace and project namespace may
+      be the same.
+- **Token** -
+  GitLab authenticates against Kubernetes using service tokens, which are
+  scoped to a particular `namespace`. If you don't have a service token yet,
+  you can follow the
+  [Kubernetes documentation](https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/)
+  to create one. You can also view or create service tokens in the
+  [Kubernetes dashboard](https://kubernetes.io/docs/tasks/access-application-cluster/web-ui-dashboard/#config)
+  (under **Config > Secrets**).

 [namespace]: https://kubernetes.io/docs/user-guide/namespaces/
@@ -67,7 +68,7 @@ GitLab CI build environment:
 ## Web terminals

->**NOTE:**
+NOTE: **Note:**
 Added in GitLab 8.15. You must be the project owner or have `master` permissions
 to use terminals. Support is currently limited to the first container in the
 first pod of your environment.
...
@@ -99,6 +99,9 @@ module API
       expose :ssh_url_to_repo, :http_url_to_repo, :web_url
       expose :name, :name_with_namespace
       expose :path, :path_with_namespace
+      expose :avatar_url do |project, options|
+        project.avatar_url(only_path: false)
+      end
       expose :star_count, :forks_count
       expose :created_at, :last_activity_at
     end
@@ -156,9 +159,7 @@ module API
       expose :forked_from_project, using: Entities::BasicProjectDetails, if: lambda { |project, options| project.forked? }
       expose :import_status
       expose :import_error, if: lambda { |_project, options| options[:user_can_admin_project] }
-      expose :avatar_url do |user, options|
-        user.avatar_url(only_path: false)
-      end
       expose :open_issues_count, if: lambda { |project, options| project.feature_available?(:issues, options[:current_user]) }
      expose :runners_token, if: lambda { |_project, options| options[:user_can_admin_project] }
      expose :public_builds, as: :public_jobs
@@ -219,8 +220,8 @@ module API
       ## EE-only
       expose :lfs_enabled?, as: :lfs_enabled
-      expose :avatar_url do |user, options|
-        user.avatar_url(only_path: false)
+      expose :avatar_url do |group, options|
+        group.avatar_url(only_path: false)
       end
       expose :web_url
       expose :request_access_enabled
@@ -263,6 +264,7 @@ module API
     class RepoCommitDetail < RepoCommit
       expose :stats, using: Entities::RepoCommitStats
       expose :status
+      expose :last_pipeline, using: 'API::Entities::PipelineBasic'
     end

     class RepoBranch < Grape::Entity
...
@@ -11,7 +11,7 @@ module API
     end

     helpers do
-      def find_user(params)
+      def find_user_by_id(params)
         id = params[:user_id] || params[:id]
         User.find_by(id: id) || not_found!('User')
       end
@@ -436,7 +436,7 @@ module API
     resource :impersonation_tokens do
       helpers do
         def finder(options = {})
-          user = find_user(params)
+          user = find_user_by_id(params)
           PersonalAccessTokensFinder.new({ user: user, impersonation: true }.merge(options))
         end
...
 module Github
   class Client
     TIMEOUT = 60
+    DEFAULT_PER_PAGE = 100

     attr_reader :connection, :rate_limit
@@ -20,7 +21,7 @@ module Github
       exceed, reset_in = rate_limit.get
       sleep reset_in if exceed

-      Github::Response.new(connection.get(url, query))
+      Github::Response.new(connection.get(url, { per_page: DEFAULT_PER_PAGE }.merge(query)))
     end

     private
...
@@ -202,13 +202,8 @@ module Github
           merge_request.save!(validate: false)
           merge_request.merge_request_diffs.create

-          # Fetch review comments
           review_comments_url = "/repos/#{repo}/pulls/#{pull_request.iid}/comments"
           fetch_comments(merge_request, :review_comment, review_comments_url, LegacyDiffNote)
-
-          # Fetch comments
-          comments_url = "/repos/#{repo}/issues/#{pull_request.iid}/comments"
-          fetch_comments(merge_request, :comment, comments_url)
         rescue => e
           error(:pull_request, pull_request.url, e.message)
         ensure
@@ -241,12 +236,17 @@ module Github
           # for both features, like manipulating assignees, labels
           # and milestones, are provided within the Issues API.
           if representation.pull_request?
-            return unless representation.has_labels?
+            return unless representation.has_labels? || representation.has_comments?

             merge_request = MergeRequest.find_by!(target_project_id: project.id, iid: representation.iid)
-            merge_request.update_attribute(:label_ids, label_ids(representation.labels))
+
+            if representation.has_labels?
+              merge_request.update_attribute(:label_ids, label_ids(representation.labels))
+            end
+
+            fetch_comments_conditionally(merge_request, representation)
           else
-            return if Issue.where(iid: representation.iid, project_id: project.id).exists?
+            return if Issue.exists?(iid: representation.iid, project_id: project.id)

             author_id = user_id(representation.author, project.creator_id)
             issue = Issue.new
@@ -263,17 +263,20 @@ module Github
             issue.updated_at = representation.updated_at
             issue.save!(validate: false)

-            # Fetch comments
-            if representation.has_comments?
-              comments_url = "/repos/#{repo}/issues/#{issue.iid}/comments"
-              fetch_comments(issue, :comment, comments_url)
-            end
+            fetch_comments_conditionally(issue, representation)
           end
         rescue => e
           error(:issue, representation.url, e.message)
         end
       end

+      def fetch_comments_conditionally(issuable, representation)
+        if representation.has_comments?
+          comments_url = "/repos/#{repo}/issues/#{issuable.iid}/comments"
+          fetch_comments(issuable, :comment, comments_url)
+        end
+      end
+
       def fetch_comments(noteable, type, url, klass = Note)
         while url
           comments = Github::Client.new(options).get(url)
...
@@ -149,16 +149,21 @@ module Gitlab
         description += @formatter.author_line(pull_request.author) unless find_user_id(pull_request.author)
         description += pull_request.description

+        source_branch_sha = pull_request.source_branch_sha
+        target_branch_sha = pull_request.target_branch_sha
+        source_branch_sha = project.repository.commit(source_branch_sha)&.sha || source_branch_sha
+        target_branch_sha = project.repository.commit(target_branch_sha)&.sha || target_branch_sha
+
         merge_request = project.merge_requests.create!(
           iid: pull_request.iid,
           title: pull_request.title,
           description: description,
           source_project: project,
           source_branch: pull_request.source_branch_name,
-          source_branch_sha: pull_request.source_branch_sha,
+          source_branch_sha: source_branch_sha,
           target_project: project,
           target_branch: pull_request.target_branch_name,
-          target_branch_sha: pull_request.target_branch_sha,
+          target_branch_sha: target_branch_sha,
           state: pull_request.state,
           author_id: gitlab_user_id(project, pull_request.author),
           assignee_id: nil,
...
module Gitlab
module Ci
module Pipeline
module Chain
class Base
attr_reader :pipeline, :project, :current_user
def initialize(pipeline, command)
@pipeline = pipeline
@command = command
@project = command.project
@current_user = command.current_user
end
def perform!
raise NotImplementedError
end
def break?
raise NotImplementedError
end
end
end
end
end
end
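
Every step only has to implement `perform!` and `break?` on top of `Chain::Base`. As a purely illustrative sketch (this hypothetical step is not part of the merge request's `SEQUENCE`), a step that rejects pipelines for archived projects could look like this:

```ruby
module Gitlab
  module Ci
    module Pipeline
      module Chain
        # Hypothetical example step, shown only to illustrate the interface.
        class RejectArchived < Chain::Base
          include Chain::Helpers

          def perform!
            # Helpers#error records the message on the pipeline's errors.
            error('Project is archived') if project.archived?
          end

          def break?
            # Sequence#build! stops running further steps once this is true.
            pipeline.errors.any?
          end
        end
      end
    end
  end
end
```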
module Gitlab
module Ci
module Pipeline
module Chain
class Create < Chain::Base
include Chain::Helpers
def perform!
::Ci::Pipeline.transaction do
pipeline.save!
@command.seeds_block&.call(pipeline)
::Ci::CreatePipelineStagesService
.new(project, current_user)
.execute(pipeline)
end
rescue ActiveRecord::RecordInvalid => e
error("Failed to persist the pipeline: #{e}")
end
def break?
!pipeline.persisted?
end
end
end
end
end
end
module Gitlab
module Ci
module Pipeline
module Chain
module Helpers
def branch_exists?
return @is_branch if defined?(@is_branch)
@is_branch = project.repository.branch_exists?(pipeline.ref)
end
def tag_exists?
return @is_tag if defined?(@is_tag)
@is_tag = project.repository.tag_exists?(pipeline.ref)
end
def error(message)
pipeline.errors.add(:base, message)
end
end
end
end
end
end
module Gitlab
module Ci
module Pipeline
module Chain
class Sequence
def initialize(pipeline, command, sequence)
@pipeline = pipeline
@completed = []
@sequence = sequence.map do |chain|
chain.new(pipeline, command)
end
end
def build!
@sequence.each do |step|
step.perform!
break if step.break?
@completed << step
end
@pipeline.tap do
yield @pipeline, self if block_given?
end
end
def complete?
@completed.size == @sequence.size
end
end
end
end
end
end
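
A sequence is driven the way `Ci::CreatePipelineService` drives it: build a command object, pass an ordered list of step classes, and call `build!`. A condensed sketch, assuming `pipeline`, `project`, and `current_user` are already in scope:

```ruby
command = OpenStruct.new(ignore_skip_ci: false,
                         save_incompleted: true,
                         seeds_block: nil,
                         project: project,
                         current_user: current_user)

sequence = Gitlab::Ci::Pipeline::Chain::Sequence
  .new(pipeline, command, Ci::CreatePipelineService::SEQUENCE)

sequence.build! do |built_pipeline, seq|
  # The block runs once, after the steps; seq.complete? is true only when
  # no step requested a break.
  built_pipeline.process! if seq.complete?
end
```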
module Gitlab
module Ci
module Pipeline
module Chain
class Skip < Chain::Base
SKIP_PATTERN = /\[(ci[ _-]skip|skip[ _-]ci)\]/i
def perform!
if skipped?
@pipeline.skip if @command.save_incompleted
end
end
def skipped?
!@command.ignore_skip_ci && commit_message_skips_ci?
end
def break?
skipped?
end
private
def commit_message_skips_ci?
return false unless @pipeline.git_commit_message
@skipped ||= !!(@pipeline.git_commit_message =~ SKIP_PATTERN)
end
end
end
end
end
end
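
The skip check is just a regular expression over the commit message, so it is easy to sanity-check in isolation, for example:

```ruby
pattern = Gitlab::Ci::Pipeline::Chain::Skip::SKIP_PATTERN

pattern =~ '[ci skip] Fix typo in README'  #=> 0   (match: pipeline would be skipped)
pattern =~ '[skip-ci] Fix typo in README'  #=> 0   (separator and word order both vary)
pattern =~ 'Fix typo in README'            #=> nil (no match: pipeline runs)
```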
module Gitlab
module Ci
module Pipeline
module Chain
module Validate
class Abilities < Chain::Base
include Gitlab::Allowable
include Chain::Helpers
def perform!
unless project.builds_enabled?
return error('Pipelines are disabled!')
end
if @command.allow_mirror_update && !project.mirror_trigger_builds?
return error('Pipeline is disabled for mirror updates')
end
unless allowed_to_trigger_pipeline?
if can?(current_user, :create_pipeline, project)
return error("Insufficient permissions for protected ref '#{pipeline.ref}'")
else
return error('Insufficient permissions to create a new pipeline')
end
end
end
def break?
@pipeline.errors.any?
end
def allowed_to_trigger_pipeline?
if current_user
allowed_to_create?
else # legacy triggers don't have a corresponding user
!project.protected_for?(@pipeline.ref)
end
end
def allowed_to_create?
return unless can?(current_user, :create_pipeline, project)
access = Gitlab::UserAccess.new(current_user, project: project)
if branch_exists?
access.can_update_branch?(@pipeline.ref)
elsif tag_exists?
access.can_create_tag?(@pipeline.ref)
else
true # Allow it for now and we'll reject when we check ref existence
end
end
end
end
end
end
end
end
module Gitlab
module Ci
module Pipeline
module Chain
module Validate
class Config < Chain::Base
include Chain::Helpers
def perform!
unless @pipeline.config_processor
unless @pipeline.ci_yaml_file
return error("Missing #{@pipeline.ci_yaml_file_path} file")
end
if @command.save_incompleted && @pipeline.has_yaml_errors?
@pipeline.drop
end
return error(@pipeline.yaml_errors)
end
unless @pipeline.has_stage_seeds?
return error('No stages / jobs for this pipeline.')
end
end
def break?
@pipeline.errors.any? || @pipeline.persisted?
end
end
end
end
end
end
end
module Gitlab
module Ci
module Pipeline
module Chain
module Validate
class Repository < Chain::Base
include Chain::Helpers
def perform!
unless branch_exists? || tag_exists?
return error('Reference not found')
end
## TODO, we check commit in the service, that is why
# there is no repository access here.
#
unless pipeline.sha
return error('Commit not found')
end
end
def break?
@pipeline.errors.any?
end
end
end
end
end
end
end
module Gitlab
module Ci
module Pipeline
# # Introduction - total running time
#
# The problem this module is trying to solve is finding the total running
# time amongst all the jobs, excluding retries and pending (queue) time.
# We could reduce this problem down to finding the union of periods.
#
# So each job would be represented as a `Period`, which consists of
# `Period#first` as when the job started and `Period#last` as when the
# job was finished. A simple example here would be:
#
# * A (1, 3)
# * B (2, 4)
# * C (6, 7)
#
# Here A begins from 1, and ends to 3. B begins from 2, and ends to 4.
# C begins from 6, and ends to 7. Visually it could be viewed as:
#
# 0 1 2 3 4 5 6 7
# AAAAAAA
# BBBBBBB
# CCCC
#
# The union of A, B, and C would be (1, 4) and (6, 7), therefore the
# total running time should be:
#
# (4 - 1) + (7 - 6) => 4
#
# # The Algorithm
#
# The algorithm used here for union would be described as follow.
# First we make sure that all periods are sorted by `Period#first`.
# Then we try to merge periods by iterating through the first period
# to the last period. The goal would be merging all overlapped periods
# so that in the end all the periods are discrete. When all periods
# are discrete, we're free to just sum all the periods to get real
# running time.
#
# Here we begin from A, and compare it to B. We could find that
# before A ends, B already started. That is `B.first <= A.last`
# that is `2 <= 3` which means A and B are overlapping!
#
# When we found that two periods are overlapping, we would need to merge
# them into a new period and disregard the old periods. To make a new
# period, we take `A.first` as the new first because remember? we sorted
# them, so `A.first` must be smaller or equal to `B.first`. And we take
# `[A.last, B.last].max` as the new last because we want whoever ended
# later. This could be broken into two cases:
#
# 0 1 2 3 4
# AAAAAAA
# BBBBBBB
#
# Or:
#
# 0 1 2 3 4
# AAAAAAAAAA
# BBBB
#
# So that we need to take whoever ends later. Back to our example,
# after merging and discard A and B it could be visually viewed as:
#
# 0 1 2 3 4 5 6 7
# DDDDDDDDDD
# CCCC
#
# Now we could go on and compare the newly created D and the old C.
# We could figure out that D and C are not overlapping by checking
# `C.first <= D.last` is `false`. Therefore we need to keep both C
# and D. The example would end here because there are no more jobs.
#
# After having the union of all periods, we just need to sum the length
# of all periods to get total time.
#
# (4 - 1) + (7 - 6) => 4
#
# That is 4 is the answer in the example.
module Duration
extend self
Period = Struct.new(:first, :last) do
def duration
last - first
end
end
def from_pipeline(pipeline)
status = %w[success failed running canceled]
builds = pipeline.builds.latest
.where(status: status).where.not(started_at: nil).order(:started_at)
from_builds(builds)
end
def from_builds(builds)
now = Time.now
periods = builds.map do |b|
Period.new(b.started_at, b.finished_at || now)
end
from_periods(periods)
end
# periods should be sorted by `first`
def from_periods(periods)
process_duration(process_periods(periods))
end
private
def process_periods(periods)
return periods if periods.empty?
periods.drop(1).inject([periods.first]) do |result, current|
previous = result.last
if overlap?(previous, current)
result[-1] = merge(previous, current)
result
else
result << current
end
end
end
def overlap?(previous, current)
current.first <= previous.last
end
def merge(previous, current)
Period.new(previous.first, [previous.last, current.last].max)
end
def process_duration(periods)
periods.sum(&:duration)
end
end
end
end
end
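
The worked example from the comment block can be reproduced directly with `from_periods` (a console sketch; `Period` receives plain integers here, while in production it gets job timestamps):

```ruby
period = Gitlab::Ci::Pipeline::Duration::Period

periods = [
  period.new(1, 3), # job A
  period.new(2, 4), # job B, overlaps A
  period.new(6, 7)  # job C, discrete
]

# Union is (1, 4) and (6, 7), so the total is (4 - 1) + (7 - 6) = 4
Gitlab::Ci::Pipeline::Duration.from_periods(periods) #=> 4
```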
module Gitlab
module Ci
# # Introduction - total running time
#
# The problem this module is trying to solve is finding the total running
# time amongst all the jobs, excluding retries and pending (queue) time.
# We could reduce this problem down to finding the union of periods.
#
# So each job would be represented as a `Period`, which consists of
# `Period#first` as when the job started and `Period#last` as when the
# job was finished. A simple example here would be:
#
# * A (1, 3)
# * B (2, 4)
# * C (6, 7)
#
# Here A begins at 1 and ends at 3, B begins at 2 and ends at 4, and
# C begins at 6 and ends at 7. Visually it could be viewed as:
#
# 0 1 2 3 4 5 6 7
# AAAAAAA
# BBBBBBB
# CCCC
#
# The union of A, B, and C would be (1, 4) and (6, 7), so the
# total running time should be:
#
# (4 - 1) + (7 - 6) => 4
#
# # The Algorithm
#
# The algorithm used here for computing the union is described as follows.
# First we make sure that all periods are sorted by `Period#first`.
# Then we merge periods by iterating from the first period to the last.
# The goal is to merge all overlapping periods so that, in the end, all
# the remaining periods are disjoint. Once all periods are disjoint,
# we're free to just sum their lengths to get the real running time.
#
# Here we begin with A and compare it to B. We find that B starts
# before A ends, that is `B.first <= A.last` (`2 <= 3`), which means
# A and B overlap.
#
# When two periods overlap, we merge them into a new period and discard
# the old ones. For the new period we take `A.first` as the new first:
# because the periods are sorted, `A.first` must be smaller than or equal
# to `B.first`. We take `[A.last, B.last].max` as the new last, because
# we want whichever period ends later. This breaks down into two cases:
#
# 0 1 2 3 4
# AAAAAAA
# BBBBBBB
#
# Or:
#
# 0 1 2 3 4
# AAAAAAAAAA
# BBBB
#
# Either way, we take whichever ends later. Back to our example,
# after merging and discarding A and B, it can be viewed as:
#
# 0 1 2 3 4 5 6 7
# DDDDDDDDDD
# CCCC
#
# Now we can go on and compare the newly created D with the old C.
# We can see that D and C do not overlap because `C.first <= D.last`
# is `false`, so we need to keep both C and D. The example ends here
# because there are no more jobs.
#
# After computing the union of all periods, we just need to sum the
# lengths of all periods to get the total time.
#
# (4 - 1) + (7 - 6) => 4
#
# So 4 is the answer in this example.
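#
# A short usage sketch (times shown here as plain integers, as in the
# example above, rather than timestamps):
#
#   periods = [PipelineDuration::Period.new(1, 3),
#              PipelineDuration::Period.new(2, 4),
#              PipelineDuration::Period.new(6, 7)]
#
#   PipelineDuration.from_periods(periods.sort_by(&:first)) # => 4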
module PipelineDuration
extend self
Period = Struct.new(:first, :last) do
def duration
last - first
end
end
def from_pipeline(pipeline)
status = %w[success failed running canceled]
builds = pipeline.builds.latest
.where(status: status).where.not(started_at: nil).order(:started_at)
from_builds(builds)
end
def from_builds(builds)
now = Time.now
periods = builds.map do |b|
Period.new(b.started_at, b.finished_at || now)
end
from_periods(periods)
end
# periods should be sorted by `first`
def from_periods(periods)
process_duration(process_periods(periods))
end
private
def process_periods(periods)
return periods if periods.empty?
periods.drop(1).inject([periods.first]) do |result, current|
previous = result.last
if overlap?(previous, current)
result[-1] = merge(previous, current)
result
else
result << current
end
end
end
def overlap?(previous, current)
current.first <= previous.last
end
def merge(previous, current)
Period.new(previous.first, [previous.last, current.last].max)
end
def process_duration(periods)
periods.sum(&:duration)
end
end
end
end
...@@ -13,9 +13,9 @@ module Gitlab ...@@ -13,9 +13,9 @@ module Gitlab
def ==(other) def ==(other)
other.is_a?(self.class) && other.is_a?(self.class) &&
base_sha == other.base_sha && shas_equal?(base_sha, other.base_sha) &&
start_sha == other.start_sha && shas_equal?(start_sha, other.start_sha) &&
head_sha == other.head_sha shas_equal?(head_sha, other.head_sha)
end end
alias_method :eql?, :== alias_method :eql?, :==
...@@ -47,6 +47,22 @@ module Gitlab ...@@ -47,6 +47,22 @@ module Gitlab
CompareService.new(project, head_sha).execute(project, start_sha, straight: straight) CompareService.new(project, head_sha).execute(project, start_sha, straight: straight)
end end
end end
private
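# Compares two SHAs by their common prefix, so that a full SHA and a
# sufficiently long abbreviation of it are considered equal.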
def shas_equal?(sha1, sha2)
return true if sha1 == sha2
return false if sha1.nil? || sha2.nil?
return false unless sha1.class == sha2.class
length = [sha1.length, sha2.length].min
# If either of the shas is below the minimum length, we cannot be sure
# that they actually refer to the same commit, because of possible hash collisions.
return false if length < Commit::MIN_SHA_LENGTH
sha1[0, length] == sha2[0, length]
end
end end
end end
end end
...@@ -49,12 +49,13 @@ module Gitlab ...@@ -49,12 +49,13 @@ module Gitlab
coder['attributes'] = self.to_h coder['attributes'] = self.to_h
end end
def key
@key ||= [base_sha, start_sha, head_sha, Digest::SHA1.hexdigest(old_path || ""), Digest::SHA1.hexdigest(new_path || ""), old_line, new_line]
end
def ==(other) def ==(other)
other.is_a?(self.class) && key == other.key other.is_a?(self.class) &&
other.diff_refs == diff_refs &&
other.old_path == old_path &&
other.new_path == new_path &&
other.old_line == old_line &&
other.new_line == new_line
end end
def to_h def to_h
......
...@@ -140,25 +140,28 @@ msgstr "" ...@@ -140,25 +140,28 @@ msgstr ""
msgid "Authentication Log" msgid "Authentication Log"
msgstr "" msgstr ""
msgid "Auto DevOps (Beta)" msgid "Auto Review Apps and Auto Deploy need a domain name and the %{kubernetes} to work correctly."
msgstr "" msgstr ""
msgid "Auto DevOps can be activated for this project. It will automatically build, test, and deploy your application based on a predefined CI/CD configuration." msgid "Auto Review Apps and Auto Deploy need a domain name to work correctly."
msgstr "" msgstr ""
msgid "Auto DevOps documentation" msgid "Auto Review Apps and Auto Deploy need the %{kubernetes} to work correctly."
msgstr "" msgstr ""
msgid "Auto Review Apps and Auto Deploy need a domain name and the %{kubernetes} to work correctly." msgid "AutoDevOps|Auto DevOps (Beta)"
msgstr "" msgstr ""
msgid "Auto Review Apps and Auto Deploy need a domain name to work correctly." msgid "AutoDevOps|Auto DevOps can be activated for this project. It will automatically build, test, and deploy your application based on a predefined CI/CD configuration."
msgstr "" msgstr ""
msgid "Auto Review Apps and Auto Deploy need the %{kubernetes} to work correctly." msgid "AutoDevOps|Auto DevOps documentation"
msgstr ""
msgid "AutoDevOps|Enable in settings"
msgstr "" msgstr ""
msgid "AutoDevOps|Learn more in the" msgid "AutoDevOps|Learn more in the %{link_to_documentation}"
msgstr "" msgstr ""
msgid "Billing" msgid "Billing"
...@@ -552,6 +555,12 @@ msgstr "" ...@@ -552,6 +555,12 @@ msgstr ""
msgid "CycleAnalyticsStage|Test" msgid "CycleAnalyticsStage|Test"
msgstr "" msgstr ""
msgid "DashboardProjects|All"
msgstr ""
msgid "DashboardProjects|Personal"
msgstr ""
msgid "Define a custom pattern with cron syntax" msgid "Define a custom pattern with cron syntax"
msgstr "" msgstr ""
...@@ -623,9 +632,6 @@ msgstr "" ...@@ -623,9 +632,6 @@ msgstr ""
msgid "Emails" msgid "Emails"
msgstr "" msgstr ""
msgid "Enable in settings"
msgstr ""
msgid "EventFilterBy|Filter by all" msgid "EventFilterBy|Filter by all"
msgstr "" msgstr ""
......
...@@ -14,6 +14,7 @@ ...@@ -14,6 +14,7 @@
"type": "object", "type": "object",
"required": [ "required": [
"id", "id",
"avatar_url",
"description", "description",
"default_branch", "default_branch",
"tag_list", "tag_list",
...@@ -31,6 +32,7 @@ ...@@ -31,6 +32,7 @@
], ],
"properties": { "properties": {
"id": { "type": "integer" }, "id": { "type": "integer" },
"avatar_url": { "type": ["string", "null"] },
"description": { "type": ["string", "null"] }, "description": { "type": ["string", "null"] },
"default_branch": { "type": ["string", "null"] }, "default_branch": { "type": ["string", "null"] },
"tag_list": { "type": "array" }, "tag_list": { "type": "array" },
......
...@@ -5,11 +5,18 @@ ...@@ -5,11 +5,18 @@
{ {
"required" : [ "required" : [
"stats", "stats",
"status" "status",
"last_pipeline"
], ],
"properties": { "properties": {
"stats": { "$ref": "../commit_stats.json" }, "stats": { "$ref": "../commit_stats.json" },
"status": { "type": ["string", "null"] } "status": { "type": ["string", "null"] },
"last_pipeline": {
"oneOf": [
{ "type": "null" },
{ "$ref": "../pipeline/basic.json" }
]
}
} }
} }
] ]
......
{
"type": "object",
"required" : [
"id",
"sha",
"ref",
"status"
],
"properties" : {
"id": { "type": "integer" },
"sha": { "type": "string" },
"ref": { "type": "string" },
"status": { "type": "string" }
},
"additionalProperties": false
}
...@@ -18,19 +18,25 @@ import '~/line_highlighter'; ...@@ -18,19 +18,25 @@ import '~/line_highlighter';
beforeEach(function() { beforeEach(function() {
loadFixtures('static/line_highlighter.html.raw'); loadFixtures('static/line_highlighter.html.raw');
this["class"] = new LineHighlighter(); this["class"] = new LineHighlighter();
this.css = this["class"].highlightClass; this.css = this["class"].highlightLineClass;
return this.spies = { return this.spies = {
__setLocationHash__: spyOn(this["class"], '__setLocationHash__').and.callFake(function() {}) __setLocationHash__: spyOn(this["class"], '__setLocationHash__').and.callFake(function() {})
}; };
}); });
describe('behavior', function() { describe('behavior', function() {
it('highlights one line given in the URL hash', function() { it('highlights one line given in the URL hash', function() {
new LineHighlighter('#L13'); new LineHighlighter({ hash: '#L13' });
return expect($('#LC13')).toHaveClass(this.css); return expect($('#LC13')).toHaveClass(this.css);
}); });
it('highlights one line given in the URL hash with given CSS class name', function() {
const hiliter = new LineHighlighter({ hash: '#L13', highlightLineClass: 'hilite' });
expect(hiliter.highlightLineClass).toBe('hilite');
expect($('#LC13')).toHaveClass('hilite');
expect($('#LC13')).not.toHaveClass('hll');
});
it('highlights a range of lines given in the URL hash', function() { it('highlights a range of lines given in the URL hash', function() {
var line, results; var line, results;
new LineHighlighter('#L5-25'); new LineHighlighter({ hash: '#L5-25' });
expect($("." + this.css).length).toBe(21); expect($("." + this.css).length).toBe(21);
results = []; results = [];
for (line = 5; line <= 25; line += 1) { for (line = 5; line <= 25; line += 1) {
...@@ -41,7 +47,7 @@ import '~/line_highlighter'; ...@@ -41,7 +47,7 @@ import '~/line_highlighter';
it('scrolls to the first highlighted line on initial load', function() { it('scrolls to the first highlighted line on initial load', function() {
var spy; var spy;
spy = spyOn($, 'scrollTo'); spy = spyOn($, 'scrollTo');
new LineHighlighter('#L5-25'); new LineHighlighter({ hash: '#L5-25' });
return expect(spy).toHaveBeenCalledWith('#L5', jasmine.anything()); return expect(spy).toHaveBeenCalledWith('#L5', jasmine.anything());
}); });
it('discards click events', function() { it('discards click events', function() {
...@@ -50,10 +56,10 @@ import '~/line_highlighter'; ...@@ -50,10 +56,10 @@ import '~/line_highlighter';
clickLine(13); clickLine(13);
return expect(spy).toHaveBeenPrevented(); return expect(spy).toHaveBeenPrevented();
}); });
return it('handles garbage input from the hash', function() { it('handles garbage input from the hash', function() {
var func; var func;
func = function() { func = function() {
return new LineHighlighter('#blob-content-holder'); return new LineHighlighter({ fileHolderSelector: '#blob-content-holder' });
}; };
return expect(func).not.toThrow(); return expect(func).not.toThrow();
}); });
......
...@@ -416,5 +416,28 @@ import 'vendor/jquery.scrollTo'; ...@@ -416,5 +416,28 @@ import 'vendor/jquery.scrollTo';
}); });
}); });
}); });
describe('expandViewContainer', function () {
beforeEach(() => {
$('body').append('<div class="content-wrapper"><div class="container-fluid container-limited"></div></div>');
});
afterEach(() => {
$('.content-wrapper').remove();
});
it('removes container-limited from containers', function () {
this.class.expandViewContainer();
expect($('.content-wrapper')).not.toContainElement('.container-limited');
});
it('does not remove container-limited from breadcrumbs', function () {
$('.container-limited').addClass('breadcrumbs');
this.class.expandViewContainer();
expect($('.content-wrapper')).toContainElement('.container-limited');
});
});
}); });
}).call(window); }).call(window);
require 'spec_helper'
describe Github::Client do
let(:connection) { spy }
let(:rate_limit) { double(get: [false, 1]) }
let(:client) { described_class.new({}) }
let(:results) { double }
let(:response) { double }
before do
allow(Faraday).to receive(:new).and_return(connection)
allow(Github::RateLimit).to receive(:new).with(connection).and_return(rate_limit)
end
describe '#get' do
before do
allow(Github::Response).to receive(:new).with(results).and_return(response)
end
it 'uses a default per_page param' do
expect(connection).to receive(:get).with('/foo', per_page: 100).and_return(results)
expect(client.get('/foo')).to eq(response)
end
context 'with per_page given' do
it 'overwrites the default per_page' do
expect(connection).to receive(:get).with('/foo', per_page: 30).and_return(results)
expect(client.get('/foo', per_page: 30)).to eq(response)
end
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Create do
set(:project) { create(:project) }
set(:user) { create(:user) }
let(:pipeline) do
build(:ci_pipeline_with_one_job, project: project,
ref: 'master')
end
let(:command) do
double('command', project: project,
current_user: user,
seeds_block: nil)
end
let(:step) { described_class.new(pipeline, command) }
before do
step.perform!
end
context 'when pipeline is ready to be saved' do
it 'saves a pipeline' do
expect(pipeline).to be_persisted
end
it 'does not break the chain' do
expect(step.break?).to be false
end
it 'creates stages' do
expect(pipeline.reload.stages).to be_one
end
end
context 'when pipeline has validation errors' do
let(:pipeline) do
build(:ci_pipeline, project: project, ref: nil)
end
it 'breaks the chain' do
expect(step.break?).to be true
end
it 'appends validation error' do
expect(pipeline.errors.to_a)
.to include /Failed to persist the pipeline/
end
end
context 'when there is a seed block present' do
let(:seeds) { spy('pipeline seeds') }
let(:command) do
double('command', project: project,
current_user: user,
seeds_block: seeds)
end
it 'executes the block' do
expect(seeds).to have_received(:call).with(pipeline)
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Sequence do
set(:project) { create(:project) }
set(:user) { create(:user) }
let(:pipeline) { build_stubbed(:ci_pipeline) }
let(:command) { double('command' ) }
let(:first_step) { spy('first step') }
let(:second_step) { spy('second step') }
let(:sequence) { [first_step, second_step] }
subject do
described_class.new(pipeline, command, sequence)
end
context 'when one of steps breaks the chain' do
before do
allow(first_step).to receive(:break?).and_return(true)
end
it 'does not process the second step' do
subject.build! do |pipeline, sequence|
expect(sequence).not_to be_complete
end
expect(second_step).not_to have_received(:perform!)
end
it 'returns a pipeline object' do
expect(subject.build!).to eq pipeline
end
end
context 'when all chains are executed correctly' do
before do
sequence.each do |step|
allow(step).to receive(:break?).and_return(false)
end
end
it 'iterates through entire sequence' do
subject.build! do |pipeline, sequence|
expect(sequence).to be_complete
end
expect(first_step).to have_received(:perform!)
expect(second_step).to have_received(:perform!)
end
it 'returns a pipeline object' do
expect(subject.build!).to eq pipeline
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Skip do
set(:project) { create(:project) }
set(:user) { create(:user) }
set(:pipeline) { create(:ci_pipeline, project: project) }
let(:command) do
double('command', project: project,
current_user: user,
ignore_skip_ci: false,
save_incompleted: true)
end
let(:step) { described_class.new(pipeline, command) }
context 'when pipeline has been skipped by a user' do
before do
allow(pipeline).to receive(:git_commit_message)
.and_return('commit message [ci skip]')
step.perform!
end
it 'should break the chain' do
expect(step.break?).to be true
end
it 'skips the pipeline' do
expect(pipeline.reload).to be_skipped
end
end
context 'when pipeline has not been skipped' do
before do
step.perform!
end
it 'should not break the chain' do
expect(step.break?).to be false
end
it 'should not skip a pipeline chain' do
expect(pipeline.reload).not_to be_skipped
end
end
context 'when [ci skip] should be ignored' do
let(:command) do
double('command', project: project,
current_user: user,
ignore_skip_ci: true)
end
it 'does not break the chain' do
step.perform!
expect(step.break?).to be false
end
end
context 'when pipeline should be skipped but not persisted' do
let(:command) do
double('command', project: project,
current_user: user,
ignore_skip_ci: false,
save_incompleted: false)
end
before do
allow(pipeline).to receive(:git_commit_message)
.and_return('commit message [ci skip]')
step.perform!
end
it 'breaks the chain' do
expect(step.break?).to be true
end
it 'does not skip pipeline' do
expect(pipeline.reload).not_to be_skipped
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Validate::Abilities do
set(:project) { create(:project, :repository) }
set(:user) { create(:user) }
let(:pipeline) do
build_stubbed(:ci_pipeline, ref: ref, project: project)
end
let(:command) do
double('command', project: project,
current_user: user,
allow_mirror_update: false)
end
let(:step) { described_class.new(pipeline, command) }
let(:ref) { 'master' }
context 'when user has no ability to run a pipeline' do
before do
step.perform!
end
it 'adds an error about insufficient permissions' do
expect(pipeline.errors.to_a)
.to include /Insufficient permissions/
end
it 'breaks the pipeline builder chain' do
expect(step.break?).to eq true
end
end
context 'when user has ability to create a pipeline' do
before do
project.add_developer(user)
step.perform!
end
it 'does not invalidate the pipeline' do
expect(pipeline).to be_valid
end
it 'does not break the chain' do
expect(step.break?).to eq false
end
end
describe '#allowed_to_create?' do
subject { step.allowed_to_create? }
context 'when user is a developer' do
before do
project.add_developer(user)
end
it { is_expected.to be_truthy }
context 'when the branch is protected' do
let!(:protected_branch) do
create(:protected_branch, project: project, name: ref)
end
it { is_expected.to be_falsey }
context 'when developers are allowed to merge' do
let!(:protected_branch) do
create(:protected_branch,
:developers_can_merge,
project: project,
name: ref)
end
it { is_expected.to be_truthy }
end
end
context 'when the tag is protected' do
let(:ref) { 'v1.0.0' }
let!(:protected_tag) do
create(:protected_tag, project: project, name: ref)
end
it { is_expected.to be_falsey }
context 'when developers are allowed to create the tag' do
let!(:protected_tag) do
create(:protected_tag,
:developers_can_create,
project: project,
name: ref)
end
it { is_expected.to be_truthy }
end
end
end
context 'when user is a master' do
before do
project.add_master(user)
end
it { is_expected.to be_truthy }
context 'when the branch is protected' do
let!(:protected_branch) do
create(:protected_branch, project: project, name: ref)
end
it { is_expected.to be_truthy }
end
context 'when the tag is protected' do
let(:ref) { 'v1.0.0' }
let!(:protected_tag) do
create(:protected_tag, project: project, name: ref)
end
it { is_expected.to be_truthy }
context 'when no one can create the tag' do
let!(:protected_tag) do
create(:protected_tag,
:no_one_can_create,
project: project,
name: ref)
end
it { is_expected.to be_falsey }
end
end
end
context 'when owner cannot create pipeline' do
it { is_expected.to be_falsey }
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Validate::Config do
set(:project) { create(:project) }
set(:user) { create(:user) }
let(:command) do
double('command', project: project,
current_user: user,
save_incompleted: true)
end
let!(:step) { described_class.new(pipeline, command) }
before do
step.perform!
end
context 'when pipeline has no YAML configuration' do
let(:pipeline) do
build_stubbed(:ci_pipeline, project: project)
end
it 'appends errors about missing configuration' do
expect(pipeline.errors.to_a)
.to include 'Missing .gitlab-ci.yml file'
end
it 'breaks the chain' do
expect(step.break?).to be true
end
end
context 'when YAML configuration contains errors' do
let(:pipeline) do
build(:ci_pipeline, project: project, config: 'invalid YAML')
end
it 'appends errors about YAML errors' do
expect(pipeline.errors.to_a)
.to include 'Invalid configuration format'
end
it 'breaks the chain' do
expect(step.break?).to be true
end
context 'when saving incomplete pipeline is allowed' do
let(:command) do
double('command', project: project,
current_user: user,
save_incompleted: true)
end
it 'fails the pipeline' do
expect(pipeline.reload).to be_failed
end
end
context 'when saving incomplete pipeline is not allowed' do
let(:command) do
double('command', project: project,
current_user: user,
save_incompleted: false)
end
it 'does not drop pipeline' do
expect(pipeline).not_to be_failed
expect(pipeline).not_to be_persisted
end
end
end
context 'when pipeline has no stages / jobs' do
let(:config) do
{ rspec: {
script: 'ls',
only: ['something']
} }
end
let(:pipeline) do
build(:ci_pipeline, project: project, config: config)
end
it 'appends an error about missing stages' do
expect(pipeline.errors.to_a)
.to include 'No stages / jobs for this pipeline.'
end
it 'breaks the chain' do
expect(step.break?).to be true
end
end
context 'when pipeline contains configuration validation errors' do
let(:config) { { rspec: {} } }
let(:pipeline) do
build(:ci_pipeline, project: project, config: config)
end
it 'appends configuration validation errors to pipeline errors' do
expect(pipeline.errors.to_a)
.to include "jobs:rspec config can't be blank"
end
it 'breaks the chain' do
expect(step.break?).to be true
end
end
context 'when pipeline is correct and complete' do
let(:pipeline) do
build(:ci_pipeline_with_one_job, project: project)
end
it 'does not invalidate the pipeline' do
expect(pipeline).to be_valid
end
it 'does not break the chain' do
expect(step.break?).to be false
end
end
end
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Validate::Repository do
set(:project) { create(:project, :repository) }
set(:user) { create(:user) }
let(:command) do
double('command', project: project, current_user: user)
end
let!(:step) { described_class.new(pipeline, command) }
before do
step.perform!
end
context 'when pipeline ref and sha exists' do
let(:pipeline) do
build_stubbed(:ci_pipeline, ref: 'master', sha: '123', project: project)
end
it 'does not break the chain' do
expect(step.break?).to be false
end
it 'does not append pipeline errors' do
expect(pipeline.errors).to be_empty
end
end
context 'when pipeline ref does not exist' do
let(:pipeline) do
build_stubbed(:ci_pipeline, ref: 'something', project: project)
end
it 'breaks the chain' do
expect(step.break?).to be true
end
it 'adds an error about missing ref' do
expect(pipeline.errors.to_a)
.to include 'Reference not found'
end
end
context 'when pipeline does not have SHA set' do
let(:pipeline) do
build_stubbed(:ci_pipeline, ref: 'master', sha: nil, project: project)
end
it 'breaks the chain' do
expect(step.break?).to be true
end
it 'adds an error about missing SHA' do
expect(pipeline.errors.to_a)
.to include 'Commit not found'
end
end
end
require 'spec_helper' require 'spec_helper'
describe Gitlab::Ci::PipelineDuration do describe Gitlab::Ci::Pipeline::Duration do
let(:calculated_duration) { calculate(data) } let(:calculated_duration) { calculate(data) }
shared_examples 'calculating duration' do shared_examples 'calculating duration' do
...@@ -107,9 +107,9 @@ describe Gitlab::Ci::PipelineDuration do ...@@ -107,9 +107,9 @@ describe Gitlab::Ci::PipelineDuration do
def calculate(data) def calculate(data)
periods = data.shuffle.map do |(first, last)| periods = data.shuffle.map do |(first, last)|
Gitlab::Ci::PipelineDuration::Period.new(first, last) described_class::Period.new(first, last)
end end
Gitlab::Ci::PipelineDuration.from_periods(periods.sort_by(&:first)) described_class.from_periods(periods.sort_by(&:first))
end end
end end
...@@ -3,6 +3,61 @@ require 'spec_helper' ...@@ -3,6 +3,61 @@ require 'spec_helper'
describe Gitlab::Diff::DiffRefs do describe Gitlab::Diff::DiffRefs do
let(:project) { create(:project, :repository) } let(:project) { create(:project, :repository) }
describe '#==' do
let(:commit) { project.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863') }
subject { commit.diff_refs }
context 'when shas are missing' do
let(:other) { described_class.new(base_sha: subject.base_sha, start_sha: subject.start_sha, head_sha: nil) }
it 'returns false' do
expect(subject).not_to eq(other)
end
end
context 'when shas are equal' do
let(:other) { described_class.new(base_sha: subject.base_sha, start_sha: subject.start_sha, head_sha: subject.head_sha) }
it 'returns true' do
expect(subject).to eq(other)
end
end
context 'when shas are unequal' do
let(:other) { described_class.new(base_sha: subject.base_sha, start_sha: subject.start_sha, head_sha: subject.head_sha.reverse) }
it 'returns false' do
expect(subject).not_to eq(other)
end
end
context 'when shas are truncated' do
context 'when sha prefixes are too short' do
let(:other) { described_class.new(base_sha: subject.base_sha[0, 4], start_sha: subject.start_sha[0, 4], head_sha: subject.head_sha[0, 4]) }
it 'returns false' do
expect(subject).not_to eq(other)
end
end
context 'when sha prefixes are equal' do
let(:other) { described_class.new(base_sha: subject.base_sha[0, 10], start_sha: subject.start_sha[0, 10], head_sha: subject.head_sha[0, 10]) }
it 'returns true' do
expect(subject).to eq(other)
end
end
context 'when sha prefixes are unequal' do
let(:other) { described_class.new(base_sha: subject.base_sha[0, 10], start_sha: subject.start_sha[0, 10], head_sha: subject.head_sha[0, 10].reverse) }
it 'returns false' do
expect(subject).not_to eq(other)
end
end
end
end
describe '#compare_in' do describe '#compare_in' do
context 'with diff refs for the initial commit' do context 'with diff refs for the initial commit' do
let(:commit) { project.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863') } let(:commit) { project.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863') }
......
...@@ -429,6 +429,44 @@ describe Gitlab::Diff::Position do ...@@ -429,6 +429,44 @@ describe Gitlab::Diff::Position do
end end
end end
describe '#==' do
let(:commit) { project.commit("570e7b2abdd848b95f2f578043fc23bd6f6fd24d") }
subject do
described_class.new(
old_path: "files/ruby/popen.rb",
new_path: "files/ruby/popen.rb",
old_line: nil,
new_line: 14,
diff_refs: commit.diff_refs
)
end
context 'when positions are equal' do
let(:other) { described_class.new(subject.to_h) }
it 'returns true' do
expect(subject).to eq(other)
end
end
context 'when positions are equal, except for truncated shas' do
let(:other) { described_class.new(subject.to_h.merge(start_sha: subject.start_sha[0, 10])) }
it 'returns true' do
expect(subject).to eq(other)
end
end
context 'when positions are unequal' do
let(:other) { described_class.new(subject.to_h.merge(start_sha: subject.start_sha.reverse)) }
it 'returns false' do
expect(subject).not_to eq(other)
end
end
end
describe "#to_json" do describe "#to_json" do
let(:hash) do let(:hash) do
{ {
......
...@@ -491,6 +491,7 @@ describe API::Commits do ...@@ -491,6 +491,7 @@ describe API::Commits do
expect(json_response['stats']['deletions']).to eq(commit.stats.deletions) expect(json_response['stats']['deletions']).to eq(commit.stats.deletions)
expect(json_response['stats']['total']).to eq(commit.stats.total) expect(json_response['stats']['total']).to eq(commit.stats.total)
expect(json_response['status']).to be_nil expect(json_response['status']).to be_nil
expect(json_response['last_pipeline']).to be_nil
end end
context 'when ref does not exist' do context 'when ref does not exist' do
...@@ -573,6 +574,10 @@ describe API::Commits do ...@@ -573,6 +574,10 @@ describe API::Commits do
expect(response).to have_http_status(200) expect(response).to have_http_status(200)
expect(response).to match_response_schema('public_api/v4/commit/detail') expect(response).to match_response_schema('public_api/v4/commit/detail')
expect(json_response['status']).to eq('created') expect(json_response['status']).to eq('created')
expect(json_response['last_pipeline']['id']).to eq(pipeline.id)
expect(json_response['last_pipeline']['ref']).to eq(pipeline.ref)
expect(json_response['last_pipeline']['sha']).to eq(pipeline.sha)
expect(json_response['last_pipeline']['status']).to eq(pipeline.status)
end end
context 'when pipeline succeeds' do context 'when pipeline succeeds' do
......
...@@ -20,6 +20,7 @@ describe API::Environments do ...@@ -20,6 +20,7 @@ describe API::Environments do
path path_with_namespace path path_with_namespace
star_count forks_count star_count forks_count
created_at last_activity_at created_at last_activity_at
avatar_url
) )
get api("/projects/#{project.id}/environments", user) get api("/projects/#{project.id}/environments", user)
......
...@@ -193,6 +193,7 @@ describe API::Projects do ...@@ -193,6 +193,7 @@ describe API::Projects do
path path_with_namespace path path_with_namespace
star_count forks_count star_count forks_count
created_at last_activity_at created_at last_activity_at
avatar_url
) )
get api('/projects?simple=true', user) get api('/projects?simple=true', user)
......
...@@ -125,6 +125,15 @@ describe API::Users do ...@@ -125,6 +125,15 @@ describe API::Users do
end end
context "when admin" do context "when admin" do
context 'when sudo is defined' do
it 'does not return 500' do
admin_personal_access_token = create(:personal_access_token, user: admin).token
get api("/users?private_token=#{admin_personal_access_token}&sudo=#{user.id}", admin)
expect(response).to have_http_status(:success)
end
end
it "returns an array of users" do it "returns an array of users" do
get api("/users", admin) get api("/users", admin)
......
...@@ -89,6 +89,7 @@ describe API::V3::Projects do ...@@ -89,6 +89,7 @@ describe API::V3::Projects do
path path_with_namespace path path_with_namespace
star_count forks_count star_count forks_count
created_at last_activity_at created_at last_activity_at
avatar_url
) )
get v3_api('/projects?simple=true', user) get v3_api('/projects?simple=true', user)
......
...@@ -133,6 +133,26 @@ describe Ci::CreatePipelineService do ...@@ -133,6 +133,26 @@ describe Ci::CreatePipelineService do
expect(merge_request.reload.head_pipeline).to eq head_pipeline expect(merge_request.reload.head_pipeline).to eq head_pipeline
end end
end end
context 'when pipeline has been skipped' do
before do
allow_any_instance_of(Ci::Pipeline)
.to receive(:git_commit_message)
.and_return('some commit [ci skip]')
end
it 'updates merge request head pipeline' do
merge_request = create(:merge_request, source_branch: 'master',
target_branch: 'feature',
source_project: project)
head_pipeline = execute_service
expect(head_pipeline).to be_skipped
expect(head_pipeline).to be_persisted
expect(merge_request.reload.head_pipeline).to eq head_pipeline
end
end
end end
context 'auto-cancel enabled' do context 'auto-cancel enabled' do
...@@ -481,104 +501,4 @@ describe Ci::CreatePipelineService do ...@@ -481,104 +501,4 @@ describe Ci::CreatePipelineService do
end end
end end
end end
describe '#allowed_to_create?' do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:ref) { 'master' }
subject do
described_class.new(project, user, ref: ref)
.send(:allowed_to_create?)
end
context 'when user is a developer' do
before do
project.add_developer(user)
end
it { is_expected.to be_truthy }
context 'when the branch is protected' do
let!(:protected_branch) do
create(:protected_branch, project: project, name: ref)
end
it { is_expected.to be_falsey }
context 'when developers are allowed to merge' do
let!(:protected_branch) do
create(:protected_branch,
:developers_can_merge,
project: project,
name: ref)
end
it { is_expected.to be_truthy }
end
end
context 'when the tag is protected' do
let(:ref) { 'v1.0.0' }
let!(:protected_tag) do
create(:protected_tag, project: project, name: ref)
end
it { is_expected.to be_falsey }
context 'when developers are allowed to create the tag' do
let!(:protected_tag) do
create(:protected_tag,
:developers_can_create,
project: project,
name: ref)
end
it { is_expected.to be_truthy }
end
end
end
context 'when user is a master' do
before do
project.add_master(user)
end
it { is_expected.to be_truthy }
context 'when the branch is protected' do
let!(:protected_branch) do
create(:protected_branch, project: project, name: ref)
end
it { is_expected.to be_truthy }
end
context 'when the tag is protected' do
let(:ref) { 'v1.0.0' }
let!(:protected_tag) do
create(:protected_tag, project: project, name: ref)
end
it { is_expected.to be_truthy }
context 'when no one can create the tag' do
let!(:protected_tag) do
create(:protected_tag,
:no_one_can_create,
project: project,
name: ref)
end
it { is_expected.to be_falsey }
end
end
end
context 'when owner cannot create pipeline' do
it { is_expected.to be_falsey }
end
end
end end
...@@ -308,6 +308,9 @@ module TestEnv ...@@ -308,6 +308,9 @@ module TestEnv
ensure_component_dir_name_is_correct!(component, install_dir) ensure_component_dir_name_is_correct!(component, install_dir)
# On CI, once installed, components never need update
return if File.exist?(install_dir) && ENV['CI']
if component_needs_update?(install_dir, version) if component_needs_update?(install_dir, version)
# Cleanup the component entirely to ensure we start fresh # Cleanup the component entirely to ensure we start fresh
FileUtils.rm_rf(install_dir) FileUtils.rm_rf(install_dir)
......
...@@ -70,12 +70,15 @@ describe PostReceive do ...@@ -70,12 +70,15 @@ describe PostReceive do
context "creates a Ci::Pipeline for every change" do context "creates a Ci::Pipeline for every change" do
before do before do
allow_any_instance_of(Ci::CreatePipelineService).to receive(:commit) do
OpenStruct.new(id: '123456')
end
allow_any_instance_of(Ci::CreatePipelineService).to receive(:branch?).and_return(true)
allow_any_instance_of(Repository).to receive(:ref_exists?).and_return(true)
stub_ci_pipeline_to_return_yaml_file stub_ci_pipeline_to_return_yaml_file
# TODO: don't stub private methods
#
allow_any_instance_of(Ci::CreatePipelineService)
.to receive(:commit).and_return(OpenStruct.new(id: '123456'))
allow_any_instance_of(Repository)
.to receive(:branch_exists?).and_return(true)
end end
it { expect { subject }.to change { Ci::Pipeline.count }.by(2) } it { expect { subject }.to change { Ci::Pipeline.count }.by(2) }
......