Commit 43ec8c81 authored by Stan Hu

Merge branch 'ce-to-ee-2017-11-03' into 'master'

CE upstream - Friday

Closes gitaly#693, #3890, #3274, and gitlab-ce#34435

See merge request gitlab-org/gitlab-ee!3261
parents 807f4b16 6c01376c
7.5
\ No newline at end of file
9.0.0
......@@ -105,8 +105,7 @@ the remaining issues on the GitHub issue tracker.
## I want to contribute!
If you want to contribute to GitLab, but are not sure where to start,
look for [issues with the label `Accepting Merge Requests` and small weight][accepting-mrs-weight].
If you want to contribute to GitLab, [issues with the label `Accepting Merge Requests` and small weight][accepting-mrs-weight] is a great place to start. Issues with a lower weight (1 or 2) are deemed suitable for beginners.
These issues will be of reasonable size and challenge, for anyone to start
contributing to GitLab.
......
......@@ -16,7 +16,7 @@ import CILintEditor from './ci_lint_editor';
import groupsSelect from './groups_select';
/* global Search */
/* global Admin */
/* global NamespaceSelects */
import NamespaceSelect from './namespace_select';
/* global NewCommitForm */
/* global NewBranchForm */
/* global Project */
......@@ -650,7 +650,8 @@ import initGroupAnalytics from './init_group_analytics';
new UsersSelect();
break;
case 'projects':
new NamespaceSelects();
document.querySelectorAll('.js-namespace-select')
.forEach(dropdown => new NamespaceSelect({ dropdown }));
break;
case 'labels':
switch (path[2]) {
......
......@@ -119,11 +119,9 @@ export default function dropzoneInput(form) {
// removeAllFiles(true) stops uploading files (if any)
// and removes them from the dropzone files queue.
$cancelButton.on('click', (e) => {
const target = e.target.closest('.js-main-target-form').querySelector('.div-dropzone');
e.preventDefault();
e.stopPropagation();
Dropzone.forElement(target).removeAllFiles(true);
Dropzone.forElement($formDropzone.get(0)).removeAllFiles(true);
});
// If 'error' event is fired, we store a failed files,
......
/* eslint-disable func-names, space-before-function-paren, no-var, prefer-rest-params, wrap-iife, one-var, vars-on-top, one-var-declaration-per-line, comma-dangle, object-shorthand, no-else-return, prefer-template, quotes, prefer-arrow-callback, no-param-reassign, no-cond-assign, max-len */
/* eslint-disable func-names, space-before-function-paren, no-var, comma-dangle, object-shorthand, no-else-return, prefer-template, quotes, prefer-arrow-callback, max-len */
import Api from './api';
import './lib/utils/url_utility';
(function() {
window.NamespaceSelect = (function() {
function NamespaceSelect(opts) {
this.onSelectItem = this.onSelectItem.bind(this);
var fieldName, showAny;
this.dropdown = opts.dropdown;
showAny = true;
fieldName = 'namespace_id';
if (this.dropdown.attr('data-field-name')) {
fieldName = this.dropdown.data('fieldName');
}
if (this.dropdown.attr('data-show-any')) {
showAny = this.dropdown.data('showAny');
}
this.dropdown.glDropdown({
filterable: true,
selectable: true,
filterRemote: true,
search: {
fields: ['path']
},
fieldName: fieldName,
toggleLabel: function(selected) {
if (selected.id == null) {
return selected.text;
} else {
return selected.kind + ": " + selected.full_path;
}
},
data: function(term, dataCallback) {
return Api.namespaces(term, function(namespaces) {
var anyNamespace;
if (showAny) {
anyNamespace = {
text: 'Any namespace',
id: null
};
namespaces.unshift(anyNamespace);
namespaces.splice(1, 0, 'divider');
}
return dataCallback(namespaces);
});
},
text: function(namespace) {
if (namespace.id == null) {
return namespace.text;
} else {
return namespace.kind + ": " + namespace.full_path;
}
},
renderRow: this.renderRow,
clicked: this.onSelectItem
});
}
NamespaceSelect.prototype.onSelectItem = function(options) {
const { e } = options;
return e.preventDefault();
};
export default class NamespaceSelect {
constructor(opts) {
const isFilter = opts.dropdown.dataset.isFilter === 'true';
const fieldName = opts.dropdown.dataset.fieldName || 'namespace_id';
return NamespaceSelect;
})();
window.NamespaceSelects = (function() {
function NamespaceSelects(opts) {
var ref;
if (opts == null) {
opts = {};
}
this.$dropdowns = (ref = opts.$dropdowns) != null ? ref : $('.js-namespace-select');
this.$dropdowns.each(function(i, dropdown) {
var $dropdown;
$dropdown = $(dropdown);
return new window.NamespaceSelect({
dropdown: $dropdown
$(opts.dropdown).glDropdown({
filterable: true,
selectable: true,
filterRemote: true,
search: {
fields: ['path']
},
fieldName: fieldName,
toggleLabel: function(selected) {
if (selected.id == null) {
return selected.text;
} else {
return selected.kind + ": " + selected.full_path;
}
},
data: function(term, dataCallback) {
return Api.namespaces(term, function(namespaces) {
if (isFilter) {
const anyNamespace = {
text: 'Any namespace',
id: null
};
namespaces.unshift(anyNamespace);
namespaces.splice(1, 0, 'divider');
}
return dataCallback(namespaces);
});
});
}
return NamespaceSelects;
})();
}).call(window);
},
text: function(namespace) {
if (namespace.id == null) {
return namespace.text;
} else {
return namespace.kind + ": " + namespace.full_path;
}
},
renderRow: this.renderRow,
clicked(options) {
if (!isFilter) {
const { e } = options;
e.preventDefault();
}
},
url(namespace) {
return gl.utils.mergeUrlParams({ [fieldName]: namespace.id }, window.location.href);
},
});
}
}
......@@ -27,6 +27,8 @@ export default {
'changeFileContent',
]),
initMonaco() {
if (this.shouldHideEditor) return;
if (this.monacoInstance) {
this.monacoInstance.setModel(null);
}
......@@ -94,8 +96,12 @@ export default {
<template>
<div
id="ide"
v-if='!shouldHideEditor'
class="blob-viewer-container blob-editor-container"
>
<div
v-if="shouldHideEditor"
v-html="activeFile.html"
>
</div>
</div>
</template>
......@@ -4,9 +4,6 @@
.cred { color: $common-red; }
.cgreen { color: $common-green; }
.cdark { color: $common-gray-dark; }
.text-secondary {
color: $gl-text-color-secondary;
}
.underlined-link { text-decoration: underline; }
.hint { font-style: italic; color: $hint-color; }
......
......@@ -48,6 +48,10 @@ class MergeRequestDiff < ActiveRecord::Base
# Collect information about commits and diff from repository
# and save it to the database as serialized data
def save_git_content
MergeRequest
.where('id = ? AND COALESCE(latest_merge_request_diff_id, 0) < ?', self.merge_request_id, self.id)
.update_all(latest_merge_request_diff_id: self.id)
ensure_commit_shas
save_commits
save_diffs
......
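Note on the hunk above: the `COALESCE(latest_merge_request_diff_id, 0) < ?` guard means the pointer on `merge_requests` is only ever advanced, so saving an older diff cannot move it backwards. The point of the denormalised column is to avoid a `MAX(id)` lookup on `merge_request_diffs` when reading the latest diff; the read side is not part of this hunk, so the sketch below is hypothetical, using standard Rails associations.

```ruby
# Hypothetical read path for the denormalised column (not part of this MR's hunks).
class MergeRequest < ActiveRecord::Base
  has_many :merge_request_diffs
  belongs_to :latest_merge_request_diff, class_name: 'MergeRequestDiff'

  def latest_diff
    # Prefer the denormalised pointer; fall back to the newest diff when the
    # column has not been populated yet (e.g. before the post-deploy migration ran).
    latest_merge_request_diff || merge_request_diffs.order(id: :desc).first
  end
end
```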
......@@ -49,6 +49,7 @@ module MergeRequests
create_branch_change_note(merge_request, 'target',
merge_request.previous_changes['target_branch'].first,
merge_request.target_branch)
reset_approvals(merge_request)
end
......
......@@ -6,8 +6,7 @@ class MetricsService
Gitlab::HealthChecks::Redis::RedisCheck,
Gitlab::HealthChecks::Redis::CacheCheck,
Gitlab::HealthChecks::Redis::QueuesCheck,
Gitlab::HealthChecks::Redis::SharedStateCheck,
Gitlab::HealthChecks::FsShardsCheck
Gitlab::HealthChecks::Redis::SharedStateCheck
].freeze
def prometheus_metrics_text
......
......@@ -24,7 +24,7 @@
%td
= truncate(hook_log.url, length: 50)
%td.light
#{number_with_precision(hook_log.execution_duration, precision: 2)} ms
#{number_with_precision(hook_log.execution_duration, precision: 2)} sec
%td.light
= time_ago_with_tooltip(hook_log.created_at)
%td
......
......@@ -14,7 +14,7 @@
= hidden_field_tag :namespace_id, params[:namespace_id]
- namespace = Namespace.find(params[:namespace_id])
- toggle_text = "#{namespace.kind}: #{namespace.full_path}"
= dropdown_toggle(toggle_text, { toggle: 'dropdown' }, { toggle_class: 'js-namespace-select large' })
= dropdown_toggle(toggle_text, { toggle: 'dropdown', is_filter: 'true' }, { toggle_class: 'js-namespace-select large' })
.dropdown-menu.dropdown-select.dropdown-menu-align-right
= dropdown_title('Namespaces')
= dropdown_filter("Search for Namespace")
......
......@@ -117,7 +117,7 @@
= f.label :new_namespace_id, "Namespace", class: 'control-label'
.col-sm-10
.dropdown
= dropdown_toggle('Search for Namespace', { toggle: 'dropdown', field_name: 'new_namespace_id', show_any: 'false' }, { toggle_class: 'js-namespace-select large' })
= dropdown_toggle('Search for Namespace', { toggle: 'dropdown', field_name: 'new_namespace_id' }, { toggle_class: 'js-namespace-select large' })
.dropdown-menu.dropdown-select
= dropdown_title('Namespaces')
= dropdown_filter("Search for Namespace")
......
......@@ -8,7 +8,7 @@
%li.todos-pending{ class: active_when(params[:state].blank? || params[:state] == 'pending') }>
= link_to todos_filter_path(state: 'pending') do
%span
To do
Todos
%span.badge
= number_with_delimiter(todos_pending_count)
%li.todos-done{ class: active_when(params[:state] == 'done') }>
......
......@@ -24,7 +24,7 @@
%td
= truncate(hook_log.url, length: 50)
%td.light
#{number_with_precision(hook_log.execution_duration, precision: 2)} ms
#{number_with_precision(hook_log.execution_duration, precision: 2)} sec
%td.light
= time_ago_with_tooltip(hook_log.created_at)
%td
......
......@@ -11,7 +11,7 @@
= hook_log.trigger.singularize.titleize
%p
%strong Elapsed time:
#{number_with_precision(hook_log.execution_duration, precision: 2)} ms
#{number_with_precision(hook_log.execution_duration, precision: 2)} sec
%p
%strong Request time:
= time_ago_with_tooltip(hook_log.created_at)
......
---
title: Tighten up whitelisting of certain Geo routes
merge_request: 15082
author:
type: fixed
---
title: Add a latest_merge_request_diff_id column to merge_requests
merge_request: 15035
author:
type: performance
---
title: Todos spelled correctly on Todos list page
merge_request: 15015
author:
type: changed
---
title: Fix webhooks recent deliveries
merge_request: 15146
author: Alexander Randa (@randaalex)
type: fixed
---
title: Fix cancel button not working while uploading on the new issue page
merge_request: 15137
author:
type: fixed
---
title: Remove Filesystem check metrics that use too much CPU to handle requests
merge_request:
author:
type: performance
---
title: Make NamespaceSelect change URL when filtering
merge_request: 14888
author:
type: fixed
class AddLatestMergeRequestDiffIdToMergeRequests < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_column :merge_requests, :latest_merge_request_diff_id, :integer
add_concurrent_index :merge_requests, :latest_merge_request_diff_id
add_concurrent_foreign_key :merge_requests, :merge_request_diffs,
column: :latest_merge_request_diff_id,
on_delete: :nullify
end
def down
remove_foreign_key :merge_requests, column: :latest_merge_request_diff_id
if index_exists?(:merge_requests, :latest_merge_request_diff_id)
remove_concurrent_index :merge_requests, :latest_merge_request_diff_id
end
remove_column :merge_requests, :latest_merge_request_diff_id
end
end
class PopulateMergeRequestsLatestMergeRequestDiffId < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 1_000
class MergeRequest < ActiveRecord::Base
self.table_name = 'merge_requests'
include ::EachBatch
end
disable_ddl_transaction!
def up
update = '
latest_merge_request_diff_id = (
SELECT MAX(id)
FROM merge_request_diffs
WHERE merge_requests.id = merge_request_diffs.merge_request_id
)'.squish
MergeRequest.where(latest_merge_request_diff_id: nil).each_batch(of: BATCH_SIZE) do |relation|
relation.update_all(update)
end
end
end
......@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20171017145932) do
ActiveRecord::Schema.define(version: 20171026082505) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
......@@ -1240,6 +1240,7 @@ ActiveRecord::Schema.define(version: 20171017145932) do
t.boolean "ref_fetched"
t.string "merge_jid"
t.boolean "discussion_locked"
t.integer "latest_merge_request_diff_id"
end
add_index "merge_requests", ["assignee_id"], name: "index_merge_requests_on_assignee_id", using: :btree
......@@ -1248,6 +1249,7 @@ ActiveRecord::Schema.define(version: 20171017145932) do
add_index "merge_requests", ["deleted_at"], name: "index_merge_requests_on_deleted_at", using: :btree
add_index "merge_requests", ["description"], name: "index_merge_requests_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"}
add_index "merge_requests", ["head_pipeline_id"], name: "index_merge_requests_on_head_pipeline_id", using: :btree
add_index "merge_requests", ["latest_merge_request_diff_id"], name: "index_merge_requests_on_latest_merge_request_diff_id", using: :btree
add_index "merge_requests", ["milestone_id"], name: "index_merge_requests_on_milestone_id", using: :btree
add_index "merge_requests", ["source_branch"], name: "index_merge_requests_on_source_branch", using: :btree
add_index "merge_requests", ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_id_and_source_branch", using: :btree
......@@ -2279,6 +2281,7 @@ ActiveRecord::Schema.define(version: 20171017145932) do
add_foreign_key "merge_request_metrics", "ci_pipelines", column: "pipeline_id", on_delete: :cascade
add_foreign_key "merge_request_metrics", "merge_requests", on_delete: :cascade
add_foreign_key "merge_requests", "ci_pipelines", column: "head_pipeline_id", name: "fk_fd82eae0b9", on_delete: :nullify
add_foreign_key "merge_requests", "merge_request_diffs", column: "latest_merge_request_diff_id", name: "fk_06067f5644", on_delete: :nullify
add_foreign_key "merge_requests", "projects", column: "target_project_id", name: "fk_a6963e8447", on_delete: :cascade
add_foreign_key "merge_requests_closing_issues", "issues", on_delete: :cascade
add_foreign_key "merge_requests_closing_issues", "merge_requests", on_delete: :cascade
......
......@@ -28,7 +28,7 @@ The MemoryKiller is controlled using environment variables.
delayed shutdown is triggered. The default value for Omnibus packages is set
[in the omnibus-gitlab
repository](https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/attributes/default.rb).
- `SIDEKIQ_MEMORY_KILLER_GRACE_TIME`: defaults 900 seconds (15 minutes). When
- `SIDEKIQ_MEMORY_KILLER_GRACE_TIME`: defaults to 900 seconds (15 minutes). When
a shutdown is triggered, the Sidekiq process will keep working normally for
another 15 minutes.
- `SIDEKIQ_MEMORY_KILLER_SHUTDOWN_WAIT`: defaults to 30 seconds. When the grace
......@@ -36,5 +36,3 @@ The MemoryKiller is controlled using environment variables.
Existing jobs get 30 seconds to finish. After that, the MemoryKiller tells
Sidekiq to shut down, and an external supervision mechanism (e.g. Runit) must
restart Sidekiq.
- `SIDEKIQ_MEMORY_KILLER_SHUTDOWN_SIGNAL`: defaults to `SIGKILL`. The name of
the final signal sent to the Sidekiq process when we want it to shut down.
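For reference, the middleware reads these settings once at load time with the defaults described above. This is a sketch mirroring the constants in `lib/gitlab/sidekiq_middleware/memory_killer.rb` as changed later in this diff; the `MAX_RSS` default shown here is an assumption, not part of this hunk.

```ruby
# How the MemoryKiller middleware picks up its configuration (sketch).
MAX_RSS = (ENV['SIDEKIQ_MEMORY_KILLER_MAX_RSS'] || 0).to_s.to_i              # in kilobytes; 0 disables the check
GRACE_TIME = (ENV['SIDEKIQ_MEMORY_KILLER_GRACE_TIME'] || 15 * 60).to_s.to_i  # seconds before Sidekiq stops fetching new jobs
SHUTDOWN_WAIT = (ENV['SIDEKIQ_MEMORY_KILLER_SHUTDOWN_WAIT'] || 30).to_s.to_i # seconds given to already-fetched jobs
```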
......@@ -31,12 +31,12 @@ There are three methods to enable the use of `docker build` and `docker run` dur
The simplest approach is to install GitLab Runner in `shell` execution mode.
GitLab Runner then executes job scripts as the `gitlab-runner` user.
1. Install [GitLab Runner](https://gitlab.com/gitlab-org/gitlab-ci-multi-runner/#installation).
1. Install [GitLab Runner](https://gitlab.com/gitlab-org/gitlab-runner/#installation).
1. During GitLab Runner installation select `shell` as method of executing job scripts or use command:
```bash
sudo gitlab-ci-multi-runner register -n \
sudo gitlab-runner register -n \
--url https://gitlab.com/ \
--registration-token REGISTRATION_TOKEN \
--executor shell \
......@@ -93,7 +93,7 @@ In order to do that, follow the steps:
mode:
```bash
sudo gitlab-ci-multi-runner register -n \
sudo gitlab-runner register -n \
--url https://gitlab.com/ \
--registration-token REGISTRATION_TOKEN \
--executor docker \
......@@ -178,7 +178,7 @@ In order to do that, follow the steps:
1. Register GitLab Runner from the command line to use `docker` and share `/var/run/docker.sock`:
```bash
sudo gitlab-ci-multi-runner register -n \
sudo gitlab-runner register -n \
--url https://gitlab.com/ \
--registration-token REGISTRATION_TOKEN \
--executor docker \
......
......@@ -501,8 +501,8 @@ First start with creating a file named `build_script`:
```bash
cat <<EOF > build_script
git clone https://gitlab.com/gitlab-org/gitlab-ci-multi-runner.git /builds/gitlab-org/gitlab-ci-multi-runner
cd /builds/gitlab-org/gitlab-ci-multi-runner
git clone https://gitlab.com/gitlab-org/gitlab-runner.git /builds/gitlab-org/gitlab-runner
cd /builds/gitlab-org/gitlab-runner
make
EOF
```
......
......@@ -267,10 +267,10 @@ terminal execute:
```bash
# Check using docker executor
gitlab-ci-multi-runner exec docker test:app
gitlab-runner exec docker test:app
# Check using shell executor
gitlab-ci-multi-runner exec shell test:app
gitlab-runner exec shell test:app
```
## Example project
......
......@@ -64,7 +64,7 @@ To build this project you also need to have [GitLab Runner](https://docs.gitlab.
You can use public runners available on `gitlab.com`, but you can register your own:
```
gitlab-ci-multi-runner register \
gitlab-runner register \
--non-interactive \
--url "https://gitlab.com/" \
--registration-token "PROJECT_REGISTRATION_TOKEN" \
......
......@@ -61,7 +61,7 @@ correctly with your CI jobs:
1. First, make sure you have used [relative URLs](#configuring-the-gitmodules-file)
for the submodules located in the same GitLab server.
1. Next, if you are using `gitlab-ci-multi-runner` v1.10+, you can set the
1. Next, if you are using `gitlab-runner` v1.10+, you can set the
`GIT_SUBMODULE_STRATEGY` variable to either `normal` or `recursive` to tell
the runner to fetch your submodules before the job:
```yaml
......@@ -71,7 +71,7 @@ correctly with your CI jobs:
See the [`.gitlab-ci.yml` reference](yaml/README.md#git-submodule-strategy)
for more details about `GIT_SUBMODULE_STRATEGY`.
1. If you are using an older version of `gitlab-ci-multi-runner`, then use
1. If you are using an older version of `gitlab-runner`, then use
`git submodule sync/update` in `before_script`:
```yaml
......
......@@ -60,6 +60,35 @@ writing one](testing_levels.md#consider-not-writing-a-system-test)!
- It's ok to look for DOM elements but don't abuse it since it makes the tests
more brittle
#### Debugging Capybara
Sometimes you may need to debug Capybara tests by observing browser behavior.
You can pause Capybara and view the website in the browser by using the
`live_debug` method in your spec. The current page will be automatically opened
in your default browser.
You may need to sign in first (the current user's credentials are displayed in
the terminal).
To resume the test run, press any key.
For example:
```
$ bin/rspec spec/features/auto_deploy_spec.rb:34
Running via Spring preloader in process 8999
Run options: include {:locations=>{"./spec/features/auto_deploy_spec.rb"=>[34]}}
Current example is paused for live debugging
The current user credentials are: user2 / 12345678
Press any key to resume the execution of the example!
Back to the example!
.
Finished in 34.51 seconds (files took 0.76702 seconds to load)
1 example, 0 failures
```
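A minimal sketch of where the call goes inside a feature spec (hypothetical example; `live_debug` is the helper added in `spec/support/live_debugger.rb` by this merge request):

```ruby
# Hypothetical feature spec excerpt; `project` is assumed to be defined via `let`.
it 'shows the project page', :js do
  visit project_path(project)

  live_debug # pauses the example here and opens the current page in your browser

  expect(page).to have_content(project.name)
end
```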
### `let` variables
GitLab's RSpec suite has made extensive use of `let` variables to reduce
......
......@@ -126,7 +126,7 @@ always in-sync with the codebase.
[GitLab Workhorse]: https://gitlab.com/gitlab-org/gitlab-workhorse
[Gitaly]: https://gitlab.com/gitlab-org/gitaly
[GitLab Pages]: https://gitlab.com/gitlab-org/gitlab-pages
[GitLab Runner]: https://gitlab.com/gitlab-org/gitlab-ci-multi-runner
[GitLab Runner]: https://gitlab.com/gitlab-org/gitlab-runner
[GitLab Omnibus]: https://gitlab.com/gitlab-org/omnibus-gitlab
[GitLab QA]: https://gitlab.com/gitlab-org/gitlab-qa
[part of GitLab Rails]: https://gitlab.com/gitlab-org/gitlab-ce/tree/master/qa
......
......@@ -186,7 +186,7 @@ Runner.
We recommend using a separate machine for each GitLab Runner, if you plan to
use the CI features.
[security reasons]: https://gitlab.com/gitlab-org/gitlab-ci-multi-runner/blob/master/docs/security/index.md
[security reasons]: https://gitlab.com/gitlab-org/gitlab-runner/blob/master/docs/security/index.md
## Supported web browsers
......
......@@ -55,10 +55,10 @@ The curriculum is composed of GitLab videos, screencasts, presentations, project
#### 1.5. Migrating from other Source Control
1. [Migrating from BitBucket/Stash](https://docs.gitlab.com/ee/workflow/importing/import_projects_from_bitbucket.html)
1. [Migrating from GitHub](https://docs.gitlab.com/ee/workflow/importing/import_projects_from_github.html)
1. [Migrating from SVN](https://docs.gitlab.com/ee/workflow/importing/migrating_from_svn.html)
1. [Migrating from Fogbugz](https://docs.gitlab.com/ee/workflow/importing/import_projects_from_fogbugz.html)
1. [Migrating from BitBucket/Stash](https://docs.gitlab.com/ee/user/project/import/bitbucket.html)
1. [Migrating from GitHub](https://docs.gitlab.com/ee/user/project/import/github.html)
1. [Migrating from SVN](https://docs.gitlab.com/ee/user/project/import/svn.html)
1. [Migrating from Fogbugz](https://docs.gitlab.com/ee/user/project/import/fogbugz.html)
#### 1.6. GitLab Inc.
......@@ -80,13 +80,13 @@ The curriculum is composed of GitLab videos, screencasts, presentations, project
- Being part of our Great Community and Contributing to GitLab
1. [Getting Started with the GitLab Development Kit (GDK)](https://about.gitlab.com/2016/06/08/getting-started-with-gitlab-development-kit/)
1. [Contributing Technical Articles to the GitLab Blog](https://about.gitlab.com/2016/01/26/call-for-writers/)
1. [GitLab Training Workshops](https://about.gitlab.com/training)
1. [GitLab Training Workshops](https://docs.gitlab.com/ce/university/training/end-user/)
1. [GitLab Professional Services](https://about.gitlab.com/services/)
#### 1.8 GitLab Training Material
1. [Git and GitLab Terminology](glossary/README.md)
1. [Git and GitLab Workshop - Slides](https://docs.google.com/presentation/d/1JzTYD8ij9slejV2-TO-NzjCvlvj6mVn9BORePXNJoMI/edit?usp=drive_web)
1. [Git and GitLab Revision](https://gitlab.com/gitlab-org/gitlab-ce/tree/master/doc/university/training/end-user)
---
......
......@@ -460,7 +460,7 @@ A route table contains rules (called routes) that determine where network traffi
### Runners
Actual build machines/containers that [run and execute tests](https://gitlab.com/gitlab-org/gitlab-ci-multi-runner) you have specified to be run on GitLab CI.
Actual build machines/containers that [run and execute tests](https://gitlab.com/gitlab-org/gitlab-runner) you have specified to be run on GitLab CI.
### Sidekiq
......
......@@ -53,8 +53,8 @@ git log --since=1.month.ago --until=3.weeks.ago
```
cd ~/workspace
git clone git@gitlab.com:gitlab-org/gitlab-ci-multi-runner.git
cd gitlab-ci-multi-runner
git clone git@gitlab.com:gitlab-org/gitlab-runner.git
cd gitlab-runner
git log --author="Travis"
git log --since=1.month.ago --until=3.weeks.ago
git log --since=1.month.ago --until=1.day.ago --author="Travis"
......
......@@ -52,7 +52,8 @@ directly in the job artifacts browser without the need to download them.
>**Note:**
With [GitLab 10.1][ce-14399], HTML files in a public project can be previewed
directly in a new tab without the need to download them.
directly in a new tab without the need to download them when
[GitLab Pages](../../../administration/pages/index.md) is enabled
After a job finishes, if you visit the job's specific page, there are three
buttons. You can download the artifacts archive or browse its contents, whereas
......@@ -69,7 +70,8 @@ browse inside them.
Below you can see how browsing looks like. In this case we have browsed inside
the archive and at this point there is one directory, a couple files, and
one HTML file that you can view directly online (opens in a new tab).
one HTML file that you can view directly online when
[GitLab Pages](../../../administration/pages/index.md) is enabled (opens in a new tab).
![Job artifacts browser](img/job_artifacts_browser.png)
......
......@@ -73,7 +73,7 @@ module Gitlab
decorate(repo, commit) if commit
rescue Rugged::ReferenceError, Rugged::InvalidError, Rugged::ObjectError,
Gitlab::Git::CommandError, Gitlab::Git::Repository::NoRepository,
Rugged::OdbError, Rugged::TreeError
Rugged::OdbError, Rugged::TreeError, ArgumentError
nil
end
......
......@@ -758,13 +758,13 @@ module Gitlab
end
def ff_merge(user, source_sha, target_branch)
OperationService.new(user, self).with_branch(target_branch) do |our_commit|
raise ArgumentError, 'Invalid merge target' unless our_commit
source_sha
gitaly_migrate(:operation_user_ff_branch) do |is_enabled|
if is_enabled
gitaly_ff_merge(user, source_sha, target_branch)
else
rugged_ff_merge(user, source_sha, target_branch)
end
end
rescue Rugged::ReferenceError
raise ArgumentError, 'Invalid merge source'
end
def revert(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
......@@ -1177,10 +1177,10 @@ module Gitlab
Gitlab::GitalyClient.migrate(method, status: status, &block)
rescue GRPC::NotFound => e
raise NoRepository.new(e)
rescue GRPC::BadStatus => e
raise CommandError.new(e)
rescue GRPC::InvalidArgument => e
raise ArgumentError.new(e)
rescue GRPC::BadStatus => e
raise CommandError.new(e)
end
private
......@@ -1622,6 +1622,22 @@ module Gitlab
run_git(args, env: env)
end
def gitaly_ff_merge(user, source_sha, target_branch)
gitaly_operations_client.user_ff_branch(user, source_sha, target_branch)
rescue GRPC::FailedPrecondition => e
raise CommitError, e
end
def rugged_ff_merge(user, source_sha, target_branch)
OperationService.new(user, self).with_branch(target_branch) do |our_commit|
raise ArgumentError, 'Invalid merge target' unless our_commit
source_sha
end
rescue Rugged::ReferenceError
raise ArgumentError, 'Invalid merge source'
end
end
end
end
......@@ -34,10 +34,11 @@ module Gitlab
private_constant :MUTEX
class << self
attr_accessor :query_time
attr_accessor :query_time, :migrate_histogram
end
self.query_time = 0
self.migrate_histogram = Gitlab::Metrics.histogram(:gitaly_migrate_call_duration, "Gitaly migration call execution timings")
def self.stub(name, storage)
MUTEX.synchronize do
......@@ -171,8 +172,11 @@ module Gitlab
feature_stack = Thread.current[:gitaly_feature_stack] ||= []
feature_stack.unshift(feature)
begin
start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
yield is_enabled
ensure
total_time = Process.clock_gettime(Process::CLOCK_MONOTONIC) - start
migrate_histogram.observe({ gitaly_enabled: is_enabled, feature: feature }, total_time)
feature_stack.shift
Thread.current[:gitaly_feature_stack] = nil if feature_stack.empty?
end
......
......@@ -105,6 +105,23 @@ module Gitlab
ensure
request_enum.close
end
def user_ff_branch(user, source_sha, target_branch)
request = Gitaly::UserFFBranchRequest.new(
repository: @gitaly_repo,
user: Gitlab::Git::User.from_gitlab(user).to_gitaly,
commit_id: source_sha,
branch: GitalyClient.encode(target_branch)
)
branch_update = GitalyClient.call(
@repository.storage,
:operation_service,
:user_ff_branch,
request
).branch_update
Gitlab::Git::OperationService::BranchUpdate.from_gitaly(branch_update)
end
end
end
end
......@@ -119,6 +119,7 @@ excluded_attributes:
- :milestone_id
- :ref_fetched
- :merge_jid
- :latest_merge_request_diff_id
award_emoji:
- :awardable_id
statuses:
......
......@@ -12,6 +12,7 @@ module Gitlab
def call(env)
@env = env
@route_hash = nil
if disallowed_request? && Gitlab::Database.read_only?
Rails.logger.debug('GitLab ReadOnly: preventing possible non read-only operation')
......@@ -77,11 +78,11 @@ module Gitlab
end
def grack_route
request.path.end_with?('.git/git-upload-pack')
route_hash[:controller] == 'projects/git_http' && route_hash[:action] == 'git_upload_pack'
end
def lfs_route
request.path.end_with?('/info/lfs/objects/batch')
route_hash[:controller] == 'projects/lfs_api' && route_hash[:action] == 'batch'
end
end
end
......
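The `grack_route` and `lfs_route` whitelist checks above now go through `route_hash` (Rails route recognition) instead of matching raw path suffixes, so a project file path that merely ends in `.git/git-upload-pack` or `/info/lfs/objects/batch` is no longer whitelisted (see the new read-only middleware specs further down). A sketch of what such a helper looks like; the exact implementation lives in `lib/gitlab/middleware/read_only.rb` and is assumed here.

```ruby
# Sketch (assumed): resolve the request through the Rails router so the
# whitelist matches real controller actions, not just path suffixes.
def route_hash
  @route_hash ||= Rails.application.routes.recognize_path(request.url, method: request.request_method)
rescue ActionController::RoutingError
  {}
end
```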
......@@ -7,7 +7,6 @@ module Gitlab
GRACE_TIME = (ENV['SIDEKIQ_MEMORY_KILLER_GRACE_TIME'] || 15 * 60).to_s.to_i
# Wait 30 seconds for running jobs to finish during graceful shutdown
SHUTDOWN_WAIT = (ENV['SIDEKIQ_MEMORY_KILLER_SHUTDOWN_WAIT'] || 30).to_s.to_i
SHUTDOWN_SIGNAL = (ENV['SIDEKIQ_MEMORY_KILLER_SHUTDOWN_SIGNAL'] || 'SIGKILL').to_s
# Create a mutex used to ensure there will be only one thread waiting to
# shut Sidekiq down
......@@ -15,6 +14,7 @@ module Gitlab
def call(worker, job, queue)
yield
current_rss = get_rss
return unless MAX_RSS > 0 && current_rss > MAX_RSS
......@@ -23,32 +23,45 @@ module Gitlab
# Return if another thread is already waiting to shut Sidekiq down
return unless MUTEX.try_lock
Sidekiq.logger.warn "current RSS #{current_rss} exceeds maximum RSS "\
"#{MAX_RSS}"
Sidekiq.logger.warn "this thread will shut down PID #{Process.pid} - Worker #{worker.class} - JID-#{job['jid']} "\
"in #{GRACE_TIME} seconds"
sleep(GRACE_TIME)
Sidekiq.logger.warn "Sidekiq worker PID-#{pid} current RSS #{current_rss}"\
" exceeds maximum RSS #{MAX_RSS} after finishing job #{worker.class} JID-#{job['jid']}"
Sidekiq.logger.warn "Sidekiq worker PID-#{pid} will stop fetching new jobs in #{GRACE_TIME} seconds, and will be shut down #{SHUTDOWN_WAIT} seconds later"
Sidekiq.logger.warn "sending SIGTERM to PID #{Process.pid} - Worker #{worker.class} - JID-#{job['jid']}"
Process.kill('SIGTERM', Process.pid)
# Wait `GRACE_TIME` to give the memory intensive job time to finish.
# Then, tell Sidekiq to stop fetching new jobs.
wait_and_signal(GRACE_TIME, 'SIGSTP', 'stop fetching new jobs')
Sidekiq.logger.warn "waiting #{SHUTDOWN_WAIT} seconds before sending "\
"#{SHUTDOWN_SIGNAL} to PID #{Process.pid} - Worker #{worker.class} - JID-#{job['jid']}"
sleep(SHUTDOWN_WAIT)
# Wait `SHUTDOWN_WAIT` to give already fetched jobs time to finish.
# Then, tell Sidekiq to gracefully shut down by giving jobs a few more
# moments to finish, killing and requeuing them if they didn't, and
# then terminating itself.
wait_and_signal(SHUTDOWN_WAIT, 'SIGTERM', 'gracefully shut down')
Sidekiq.logger.warn "sending #{SHUTDOWN_SIGNAL} to PID #{Process.pid} - Worker #{worker.class} - JID-#{job['jid']}"
Process.kill(SHUTDOWN_SIGNAL, Process.pid)
# Wait for Sidekiq to shutdown gracefully, and kill it if it didn't.
wait_and_signal(Sidekiq.options[:timeout] + 2, 'SIGKILL', 'die')
end
end
private
def get_rss
output, status = Gitlab::Popen.popen(%W(ps -o rss= -p #{Process.pid}))
output, status = Gitlab::Popen.popen(%W(ps -o rss= -p #{pid}))
return 0 unless status.zero?
output.to_i
end
def wait_and_signal(time, signal, explanation)
Sidekiq.logger.warn "waiting #{time} seconds before sending Sidekiq worker PID-#{pid} #{signal} (#{explanation})"
sleep(time)
Sidekiq.logger.warn "sending Sidekiq worker PID-#{pid} #{signal} (#{explanation})"
Process.kill(signal, pid)
end
def pid
Process.pid
end
end
end
end
......@@ -59,17 +59,6 @@ describe MetricsController do
expect(response.body).to match(/^redis_shared_state_ping_latency_seconds [0-9\.]+$/)
end
it 'returns file system check metrics' do
get :index
expect(response.body).to match(/^filesystem_access_latency_seconds{shard="default"} [0-9\.]+$/)
expect(response.body).to match(/^filesystem_accessible{shard="default"} 1$/)
expect(response.body).to match(/^filesystem_write_latency_seconds{shard="default"} [0-9\.]+$/)
expect(response.body).to match(/^filesystem_writable{shard="default"} 1$/)
expect(response.body).to match(/^filesystem_read_latency_seconds{shard="default"} [0-9\.]+$/)
expect(response.body).to match(/^filesystem_readable{shard="default"} 1$/)
end
context 'prometheus metrics are disabled' do
before do
allow(Gitlab::Metrics).to receive(:prometheus_metrics_enabled?).and_return(false)
......
......@@ -52,7 +52,7 @@ feature 'Dashboard Todos' do
end
it 'updates todo count' do
expect(page).to have_content 'To do 0'
expect(page).to have_content 'Todos 0'
expect(page).to have_content 'Done 1'
end
......@@ -81,7 +81,7 @@ feature 'Dashboard Todos' do
end
it 'updates todo count' do
expect(page).to have_content 'To do 1'
expect(page).to have_content 'Todos 1'
expect(page).to have_content 'Done 0'
end
end
......@@ -200,7 +200,7 @@ feature 'Dashboard Todos' do
end
it 'updates todo count' do
expect(page).to have_content 'To do 1'
expect(page).to have_content 'Todos 1'
expect(page).to have_content 'Done 0'
end
end
......@@ -256,7 +256,7 @@ feature 'Dashboard Todos' do
end
it 'shows "All done" message!' do
expect(page).to have_content 'To do 0'
expect(page).to have_content 'Todos 0'
expect(page).to have_content "You're all done!"
expect(page).not_to have_selector('.gl-pagination')
end
......@@ -283,7 +283,7 @@ feature 'Dashboard Todos' do
it 'updates todo count' do
mark_all_and_undo
expect(page).to have_content 'To do 2'
expect(page).to have_content 'Todos 2'
expect(page).to have_content 'Done 0'
end
......
......@@ -27,9 +27,8 @@ describe 'Recent searches', :js do
input_filtered_search('foo', submit: true)
input_filtered_search('bar', submit: true)
items = all('.filtered-search-history-dropdown-item', visible: false)
items = all('.filtered-search-history-dropdown-item', visible: false, count: 2)
expect(items.count).to eq(2)
expect(items[0].text).to eq('bar')
expect(items[1].text).to eq('foo')
end
......@@ -38,9 +37,8 @@ describe 'Recent searches', :js do
visit project_issues_path(project_1, label_name: 'foo', search: 'bar')
visit project_issues_path(project_1, label_name: 'qux', search: 'garply')
items = all('.filtered-search-history-dropdown-item', visible: false)
items = all('.filtered-search-history-dropdown-item', visible: false, count: 2)
expect(items.count).to eq(2)
expect(items[0].text).to eq('label:~qux garply')
expect(items[1].text).to eq('label:~foo bar')
end
......@@ -50,9 +48,8 @@ describe 'Recent searches', :js do
visit project_issues_path(project_1, search: 'foo')
items = all('.filtered-search-history-dropdown-item', visible: false)
items = all('.filtered-search-history-dropdown-item', visible: false, count: 3)
expect(items.count).to eq(3)
expect(items[0].text).to eq('foo')
expect(items[1].text).to eq('saved1')
expect(items[2].text).to eq('saved2')
......@@ -69,9 +66,8 @@ describe 'Recent searches', :js do
input_filtered_search('more', submit: true)
input_filtered_search('things', submit: true)
items = all('.filtered-search-history-dropdown-item', visible: false)
items = all('.filtered-search-history-dropdown-item', visible: false, count: 2)
expect(items.count).to eq(2)
expect(items[0].text).to eq('things')
expect(items[1].text).to eq('more')
end
......
/* eslint-disable space-before-function-paren, one-var, one-var-declaration-per-line, no-use-before-define, comma-dangle, max-len */
import Issue from '~/issue';
import CloseReopenReportToggle from '~/close_reopen_report_toggle';
import '~/lib/utils/text_utility';
describe('Issue', function() {
......@@ -189,37 +188,4 @@ describe('Issue', function() {
});
});
});
describe('units', () => {
describe('class constructor', () => {
it('calls .initCloseReopenReport', () => {
spyOn(Issue.prototype, 'initCloseReopenReport');
new Issue(); // eslint-disable-line no-new
expect(Issue.prototype.initCloseReopenReport).toHaveBeenCalled();
});
});
describe('initCloseReopenReport', () => {
it('calls .initDroplab', () => {
const container = jasmine.createSpyObj('container', ['querySelector']);
const dropdownTrigger = {};
const dropdownList = {};
const button = {};
spyOn(document, 'querySelector').and.returnValue(container);
spyOn(CloseReopenReportToggle.prototype, 'initDroplab');
container.querySelector.and.returnValues(dropdownTrigger, dropdownList, button);
Issue.prototype.initCloseReopenReport();
expect(document.querySelector).toHaveBeenCalledWith('.js-issuable-close-dropdown');
expect(container.querySelector).toHaveBeenCalledWith('.js-issuable-close-toggle');
expect(container.querySelector).toHaveBeenCalledWith('.js-issuable-close-menu');
expect(container.querySelector).toHaveBeenCalledWith('.js-issuable-close-button');
expect(CloseReopenReportToggle.prototype.initDroplab).toHaveBeenCalled();
});
});
});
});
import NamespaceSelect from '~/namespace_select';
describe('NamespaceSelect', () => {
beforeEach(() => {
spyOn($.fn, 'glDropdown');
});
it('initializes glDropdown', () => {
const dropdown = document.createElement('div');
// eslint-disable-next-line no-new
new NamespaceSelect({ dropdown });
expect($.fn.glDropdown).toHaveBeenCalled();
});
describe('as input', () => {
let glDropdownOptions;
beforeEach(() => {
const dropdown = document.createElement('div');
// eslint-disable-next-line no-new
new NamespaceSelect({ dropdown });
glDropdownOptions = $.fn.glDropdown.calls.argsFor(0)[0];
});
it('prevents click events', () => {
const dummyEvent = new Event('dummy');
spyOn(dummyEvent, 'preventDefault');
glDropdownOptions.clicked({ e: dummyEvent });
expect(dummyEvent.preventDefault).toHaveBeenCalled();
});
});
describe('as filter', () => {
let glDropdownOptions;
beforeEach(() => {
const dropdown = document.createElement('div');
dropdown.dataset.isFilter = 'true';
// eslint-disable-next-line no-new
new NamespaceSelect({ dropdown });
glDropdownOptions = $.fn.glDropdown.calls.argsFor(0)[0];
});
it('does not prevent click events', () => {
const dummyEvent = new Event('dummy');
spyOn(dummyEvent, 'preventDefault');
glDropdownOptions.clicked({ e: dummyEvent });
expect(dummyEvent.preventDefault).not.toHaveBeenCalled();
});
it('sets URL of dropdown items', () => {
const dummyNamespace = { id: 'eal' };
const itemUrl = glDropdownOptions.url(dummyNamespace);
expect(itemUrl).toContain(`namespace_id=${dummyNamespace.id}`);
});
});
});
......@@ -17,6 +17,7 @@ describe('RepoEditor', () => {
f.active = true;
f.tempFile = true;
vm.$store.state.openFiles.push(f);
vm.$store.getters.activeFile.html = 'testing';
vm.monaco = true;
vm.$mount();
......@@ -31,18 +32,25 @@ describe('RepoEditor', () => {
it('renders an ide container', (done) => {
Vue.nextTick(() => {
expect(vm.shouldHideEditor).toBeFalsy();
expect(vm.$el.textContent.trim()).toBe('');
done();
});
});
describe('when open file is binary and not raw', () => {
it('does not render the IDE', (done) => {
beforeEach((done) => {
vm.$store.getters.activeFile.binary = true;
Vue.nextTick(() => {
expect(vm.shouldHideEditor).toBeTruthy();
done();
});
Vue.nextTick(done);
});
it('does not render the IDE', () => {
expect(vm.shouldHideEditor).toBeTruthy();
});
it('shows activeFile html', () => {
expect(vm.$el.textContent.trim()).toBe('testing');
});
});
});
......@@ -1632,38 +1632,56 @@ describe Gitlab::Git::Repository, seed_helper: true do
subject { repository.ff_merge(user, source_sha, target_branch) }
it 'performs a ff_merge' do
expect(subject.newrev).to eq(source_sha)
expect(subject.repo_created).to be(false)
expect(subject.branch_created).to be(false)
shared_examples '#ff_merge' do
it 'performs a ff_merge' do
expect(subject.newrev).to eq(source_sha)
expect(subject.repo_created).to be(false)
expect(subject.branch_created).to be(false)
expect(repository.commit(target_branch).id).to eq(source_sha)
end
expect(repository.commit(target_branch).id).to eq(source_sha)
end
context 'with a non-existing target branch' do
subject { repository.ff_merge(user, source_sha, 'this-isnt-real') }
context 'with a non-existing target branch' do
subject { repository.ff_merge(user, source_sha, 'this-isnt-real') }
it 'throws an ArgumentError' do
expect { subject }.to raise_error(ArgumentError)
it 'throws an ArgumentError' do
expect { subject }.to raise_error(ArgumentError)
end
end
end
context 'with a non-existing source commit' do
let(:source_sha) { 'f001' }
context 'with a non-existing source commit' do
let(:source_sha) { 'f001' }
it 'throws an ArgumentError' do
expect { subject }.to raise_error(ArgumentError)
it 'throws an ArgumentError' do
expect { subject }.to raise_error(ArgumentError)
end
end
end
context 'when the source sha is not a descendant of the branch head' do
let(:source_sha) { '1a0b36b3cdad1d2ee32457c102a8c0b7056fa863' }
context 'when the source sha is not a descendant of the branch head' do
let(:source_sha) { '1a0b36b3cdad1d2ee32457c102a8c0b7056fa863' }
it "doesn't perform the ff_merge" do
expect { subject }.to raise_error(Gitlab::Git::CommitError)
expect(repository.commit(target_branch).id).to eq(branch_head)
end
end
end
it "doesn't perform the ff_merge" do
expect { subject }.to raise_error(Gitlab::Git::CommitError)
context 'with gitaly' do
it "calls Gitaly's OperationService" do
expect_any_instance_of(Gitlab::GitalyClient::OperationService)
.to receive(:user_ff_branch).with(user, source_sha, target_branch)
.and_return(nil)
expect(repository.commit(target_branch).id).to eq(branch_head)
subject
end
it_behaves_like '#ff_merge'
end
context 'without gitaly', :skip_gitaly_mock do
it_behaves_like '#ff_merge'
end
end
......
......@@ -89,4 +89,38 @@ describe Gitlab::GitalyClient::OperationService do
end
end
end
describe '#user_ff_branch' do
let(:target_branch) { 'my-branch' }
let(:source_sha) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' }
let(:request) do
Gitaly::UserFFBranchRequest.new(
repository: repository.gitaly_repository,
branch: target_branch,
commit_id: source_sha,
user: gitaly_user
)
end
let(:branch_update) do
Gitaly::OperationBranchUpdate.new(
commit_id: source_sha,
repo_created: false,
branch_created: false
)
end
let(:response) { Gitaly::UserFFBranchResponse.new(branch_update: branch_update) }
subject { client.user_ff_branch(user, source_sha, target_branch) }
it 'sends a user_ff_branch message and returns a BranchUpdate object' do
expect_any_instance_of(Gitaly::OperationService::Stub)
.to receive(:user_ff_branch).with(request, kind_of(Hash))
.and_return(response)
expect(subject).to be_a(Gitlab::Git::OperationService::BranchUpdate)
expect(subject.newrev).to eq(source_sha)
expect(subject.repo_created).to be(false)
expect(subject.branch_created).to be(false)
end
end
end
......@@ -83,6 +83,13 @@ describe Gitlab::Middleware::ReadOnly do
expect(subject).to disallow_request
end
it 'expects POST of new file that looks like an LFS batch url to be disallowed' do
response = request.post('/root/gitlab-ce/new/master/app/info/lfs/objects/batch')
expect(response).to be_a_redirect
expect(subject).to disallow_request
end
context 'whitelisted requests' do
it 'expects DELETE request to logout to be allowed' do
response = request.delete('/users/sign_out')
......@@ -104,6 +111,25 @@ describe Gitlab::Middleware::ReadOnly do
expect(response).not_to be_a_redirect
expect(subject).not_to disallow_request
end
it 'expects a POST request to git-upload-pack URL to be allowed' do
response = request.post('/root/rouge.git/git-upload-pack')
expect(response).not_to be_a_redirect
expect(subject).not_to disallow_request
end
it 'expects requests to sidekiq admin to be allowed' do
response = request.post('/admin/sidekiq')
expect(response).not_to be_a_redirect
expect(subject).not_to disallow_request
response = request.get('/admin/sidekiq')
expect(response).not_to be_a_redirect
expect(subject).not_to disallow_request
end
end
end
......
require 'spec_helper'
describe Gitlab::SidekiqMiddleware::MemoryKiller do
subject { described_class.new }
let(:pid) { 999 }
let(:worker) { double(:worker, class: 'TestWorker') }
let(:job) { { 'jid' => 123 } }
let(:queue) { 'test_queue' }
def run
thread = subject.call(worker, job, queue) { nil }
thread&.join
end
before do
allow(subject).to receive(:get_rss).and_return(10.kilobytes)
allow(subject).to receive(:pid).and_return(pid)
end
context 'when MAX_RSS is set to 0' do
before do
stub_const("#{described_class}::MAX_RSS", 0)
end
it 'does nothing' do
expect(subject).not_to receive(:sleep)
run
end
end
context 'when MAX_RSS is exceeded' do
before do
stub_const("#{described_class}::MAX_RSS", 5.kilobytes)
end
it 'sends the STP, TERM and KILL signals at expected times' do
expect(subject).to receive(:sleep).with(15 * 60).ordered
expect(Process).to receive(:kill).with('SIGSTP', pid).ordered
expect(subject).to receive(:sleep).with(30).ordered
expect(Process).to receive(:kill).with('SIGTERM', pid).ordered
expect(subject).to receive(:sleep).with(10).ordered
expect(Process).to receive(:kill).with('SIGKILL', pid).ordered
run
end
end
context 'when MAX_RSS is not exceeded' do
before do
stub_const("#{described_class}::MAX_RSS", 15.kilobytes)
end
it 'does nothing' do
expect(subject).not_to receive(:sleep)
run
end
end
end
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20171026082505_populate_merge_requests_latest_merge_request_diff_id')
describe PopulateMergeRequestsLatestMergeRequestDiffId, :migration do
let(:projects_table) { table(:projects) }
let(:merge_requests_table) { table(:merge_requests) }
let(:merge_request_diffs_table) { table(:merge_request_diffs) }
let(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce') }
def create_mr!(name, diffs: 0)
merge_request =
merge_requests_table.create!(target_project_id: project.id,
target_branch: 'master',
source_project_id: project.id,
source_branch: name,
title: name)
diffs.times do
merge_request_diffs_table.create!(merge_request_id: merge_request.id)
end
merge_request
end
def diffs_for(merge_request)
merge_request_diffs_table.where(merge_request_id: merge_request.id)
end
describe '#up' do
it 'ignores MRs without diffs' do
merge_request_without_diff = create_mr!('without_diff')
expect(merge_request_without_diff.latest_merge_request_diff_id).to be_nil
expect { migrate! }
.not_to change { merge_request_without_diff.reload.latest_merge_request_diff_id }
end
it 'ignores MRs that have a diff ID already set' do
merge_request_with_multiple_diffs = create_mr!('with_multiple_diffs', diffs: 3)
diff_id = diffs_for(merge_request_with_multiple_diffs).minimum(:id)
merge_request_with_multiple_diffs.update!(latest_merge_request_diff_id: diff_id)
expect { migrate! }
.not_to change { merge_request_with_multiple_diffs.reload.latest_merge_request_diff_id }
end
it 'migrates multiple MR diffs to the correct values' do
merge_requests = Array.new(3).map.with_index { |_, i| create_mr!(i, diffs: 3) }
migrate!
merge_requests.each do |merge_request|
expect(merge_request.reload.latest_merge_request_diff_id)
.to eq(diffs_for(merge_request).maximum(:id))
end
end
end
end
......@@ -55,6 +55,7 @@ RSpec.configure do |config|
config.include InputHelper, :js
config.include InspectRequests, :js
config.include WaitForRequests, :js
config.include LiveDebugger, :js
config.include StubConfiguration
config.include EmailHelpers, :mailer, type: :mailer
config.include TestEnv
......
require 'io/console'
module LiveDebugger
def live_debug
puts
puts "Current example is paused for live debugging."
puts "Opening #{current_url} in your default browser..."
puts "The current user credentials are: #{@current_user.username} / #{@current_user.password}" if @current_user
puts "Press any key to resume the execution of the example!!"
`open #{current_url}`
loop until $stdin.getch
puts "Back to the example!"
end
end
......@@ -3,6 +3,21 @@ require_relative 'devise_helpers'
module LoginHelpers
include DeviseHelpers
# Overriding Devise::Test::IntegrationHelpers#sign_in to store @current_user
# since we may need it in LiveDebugger#live_debug.
def sign_in(resource, scope: nil)
super
@current_user = resource
end
# Overriding Devise::Test::IntegrationHelpers#sign_out to clear @current_user.
def sign_out(resource_or_scope)
super
@current_user = nil
end
# Internal: Log in as a specific user or a new user of a specific role
#
# user_or_role - User object, or a role to create (e.g., :admin, :user)
......@@ -28,7 +43,7 @@ module LoginHelpers
gitlab_sign_in_with(user, **kwargs)
user
@current_user = user
end
def gitlab_sign_in_via(provider, user, uid)
......@@ -41,6 +56,7 @@ module LoginHelpers
def gitlab_sign_out
find(".header-user-dropdown-toggle").click
click_link "Sign out"
@current_user = nil
expect(page).to have_button('Sign in')
end
......
......@@ -183,6 +183,8 @@ module TestEnv
return unless @gitaly_pid
Process.kill('KILL', @gitaly_pid)
rescue Errno::ESRCH
# The process can already be gone if the test run was INTerrupted.
end
def setup_factory_repo
......