Commit 5d88e736 authored by Phil Hughes

Merge branch 'master' into ph-axios-2

parents 7bd66940 078dac42
---
engines:
+  brakeman:
+    enabled: true
  bundler-audit:
    enabled: true
  duplication:
......
@@ -132,7 +132,7 @@ gem 'asciidoctor-plantuml', '0.0.7'
gem 'rouge', '~> 2.0'
gem 'truncato', '~> 0.7.9'
gem 'bootstrap_form', '~> 2.7.0'
-gem 'nokogiri', '~> 1.8.1'
+gem 'nokogiri', '~> 1.8.2'
# Diffs
gem 'diffy', '~> 3.1.0'
@@ -406,7 +406,7 @@ group :ed25519 do
end
# Gitaly GRPC client
-gem 'gitaly-proto', '~> 0.78.0', require: 'gitaly'
+gem 'gitaly-proto', '~> 0.83.0', require: 'gitaly'
gem 'toml-rb', '~> 0.3.15', require: false
......
@@ -285,7 +285,7 @@ GEM
      po_to_json (>= 1.0.0)
      rails (>= 3.2.0)
    gherkin-ruby (0.3.2)
-    gitaly-proto (0.78.0)
+    gitaly-proto (0.83.0)
      google-protobuf (~> 3.1)
      grpc (~> 1.0)
    github-linguist (4.7.6)
@@ -340,7 +340,7 @@ GEM
      mime-types (~> 3.0)
      representable (~> 3.0)
      retriable (>= 2.0, < 4.0)
-    google-protobuf (3.4.1.1)
+    google-protobuf (3.5.1.1-universal-darwin)
    googleapis-common-protos-types (1.0.1)
      google-protobuf (~> 3.0)
    googleauth (0.5.3)
@@ -369,7 +369,7 @@ GEM
      rake
    grape_logging (1.7.0)
      grape
-    grpc (1.8.3)
+    grpc (1.8.3-universal-darwin)
      google-protobuf (~> 3.1)
      googleapis-common-protos-types (~> 1.0.0)
      googleauth (>= 0.5.1, < 0.7)
@@ -513,7 +513,7 @@ GEM
    net-ldap (0.16.0)
    net-ssh (4.1.0)
    netrc (0.11.0)
-    nokogiri (1.8.1)
+    nokogiri (1.8.2)
      mini_portile2 (~> 2.3.0)
    numerizer (0.1.1)
    oauth (0.5.1)
@@ -1056,7 +1056,7 @@ DEPENDENCIES
  gettext (~> 3.2.2)
  gettext_i18n_rails (~> 1.8.0)
  gettext_i18n_rails_js (~> 1.2.0)
-  gitaly-proto (~> 0.78.0)
+  gitaly-proto (~> 0.83.0)
  github-linguist (~> 4.7.0)
  gitlab-flowdock-git-hook (~> 1.0.1)
  gitlab-markup (~> 1.6.2)
@@ -1100,7 +1100,7 @@ DEPENDENCIES
  mysql2 (~> 0.4.10)
  net-ldap
  net-ssh (~> 4.1.0)
-  nokogiri (~> 1.8.1)
+  nokogiri (~> 1.8.2)
  oauth2 (~> 1.4)
  octokit (~> 4.6.2)
  oj (~> 2.17.4)
......
-import $ from 'jquery';
+import _ from 'underscore';
import axios from './lib/utils/axios_utils';

const Api = {
  groupsPath: '/api/:version/groups.json',
-  groupPath: '/api/:version/groups/:id.json',
+  groupPath: '/api/:version/groups/:id',
  namespacesPath: '/api/:version/namespaces.json',
  groupProjectsPath: '/api/:version/groups/:id/projects.json',
  projectsPath: '/api/:version/projects.json',
@@ -23,42 +23,44 @@ const Api = {
  group(groupId, callback) {
    const url = Api.buildUrl(Api.groupPath)
      .replace(':id', groupId);
-    return $.ajax({
-      url,
-      dataType: 'json',
-    })
-      .done(group => callback(group));
+    return axios.get(url)
+      .then(({ data }) => {
+        callback(data);
+
+        return data;
+      });
  },

  // Return groups list. Filtered by query
  groups(query, options, callback) {
    const url = Api.buildUrl(Api.groupsPath);
-    return $.ajax({
-      url,
-      data: Object.assign({
+    return axios.get(url, {
+      params: Object.assign({
        search: query,
        per_page: 20,
      }, options),
-      dataType: 'json',
    })
-      .done(groups => callback(groups));
+      .then(({ data }) => {
+        callback(data);
+
+        return data;
+      });
  },

  // Return namespaces list. Filtered by query
  namespaces(query, callback) {
    const url = Api.buildUrl(Api.namespacesPath);
-    return $.ajax({
-      url,
-      data: {
+    return axios.get(url, {
+      params: {
        search: query,
        per_page: 20,
      },
-      dataType: 'json',
-    }).done(namespaces => callback(namespaces));
+    })
+      .then(({ data }) => callback(data));
  },
  // Return projects list. Filtered by query
-  projects(query, options, callback) {
+  projects(query, options, callback = _.noop) {
    const url = Api.buildUrl(Api.projectsPath);
    const defaults = {
      search: query,
@@ -70,12 +72,14 @@ const Api = {
      defaults.membership = true;
    }

-    return $.ajax({
-      url,
-      data: Object.assign(defaults, options),
-      dataType: 'json',
-    })
-      .done(projects => callback(projects));
+    return axios.get(url, {
+      params: Object.assign(defaults, options),
+    })
+      .then(({ data }) => {
+        callback(data);
+
+        return data;
+      });
  },

  // Return single project
@@ -97,41 +101,34 @@ const Api = {
      url = Api.buildUrl(Api.groupLabelsPath).replace(':namespace_path', namespacePath);
    }

-    return $.ajax({
-      url,
-      type: 'POST',
-      data: { label: data },
-      dataType: 'json',
-    })
-      .done(label => callback(label))
-      .fail(message => callback(message.responseJSON));
+    return axios.post(url, {
+      label: data,
+    })
+      .then(res => callback(res.data))
+      .catch(e => callback(e.response.data));
  },

  // Return group projects list. Filtered by query
  groupProjects(groupId, query, callback) {
    const url = Api.buildUrl(Api.groupProjectsPath)
      .replace(':id', groupId);
-    return $.ajax({
-      url,
-      data: {
+    return axios.get(url, {
+      params: {
        search: query,
        per_page: 20,
      },
-      dataType: 'json',
    })
-      .done(projects => callback(projects));
+      .then(({ data }) => callback(data));
  },

  commitMultiple(id, data) {
    // see https://docs.gitlab.com/ce/api/commits.html#create-a-commit-with-multiple-files-and-actions
    const url = Api.buildUrl(Api.commitPath)
      .replace(':id', encodeURIComponent(id));
-    return this.wrapAjaxCall({
-      url,
-      type: 'POST',
-      contentType: 'application/json; charset=utf-8',
-      data: JSON.stringify(data),
-      dataType: 'json',
-    });
+    return axios.post(url, JSON.stringify(data), {
+      headers: {
+        'Content-Type': 'application/json; charset=utf-8',
+      },
+    });
  },
@@ -140,40 +137,37 @@ const Api = {
      .replace(':id', encodeURIComponent(id))
      .replace(':branch', branch);

-    return this.wrapAjaxCall({
-      url,
-      type: 'GET',
-      contentType: 'application/json; charset=utf-8',
-      dataType: 'json',
-    });
+    return axios.get(url);
  },

  // Return text for a specific license
  licenseText(key, data, callback) {
    const url = Api.buildUrl(Api.licensePath)
      .replace(':key', key);
-    return $.ajax({
-      url,
-      data,
-    })
-      .done(license => callback(license));
+    return axios.get(url, {
+      params: data,
+    })
+      .then(res => callback(res.data));
  },

  gitignoreText(key, callback) {
    const url = Api.buildUrl(Api.gitignorePath)
      .replace(':key', key);
-    return $.get(url, gitignore => callback(gitignore));
+    return axios.get(url)
+      .then(({ data }) => callback(data));
  },

  gitlabCiYml(key, callback) {
    const url = Api.buildUrl(Api.gitlabCiYmlPath)
      .replace(':key', key);
-    return $.get(url, file => callback(file));
+    return axios.get(url)
+      .then(({ data }) => callback(data));
  },

  dockerfileYml(key, callback) {
    const url = Api.buildUrl(Api.dockerfilePath).replace(':key', key);
-    $.get(url, callback);
+    return axios.get(url)
+      .then(({ data }) => callback(data));
  },
  issueTemplate(namespacePath, projectPath, key, type, callback) {
@@ -182,23 +176,18 @@ const Api = {
      .replace(':type', type)
      .replace(':project_path', projectPath)
      .replace(':namespace_path', namespacePath);
-    $.ajax({
-      url,
-      dataType: 'json',
-    })
-      .done(file => callback(null, file))
-      .fail(callback);
+    return axios.get(url)
+      .then(({ data }) => callback(null, data))
+      .catch(callback);
  },

  users(query, options) {
    const url = Api.buildUrl(this.usersPath);
-    return Api.wrapAjaxCall({
-      url,
-      data: Object.assign({
+    return axios.get(url, {
+      params: Object.assign({
        search: query,
        per_page: 20,
      }, options),
-      dataType: 'json',
    });
  },

@@ -209,21 +198,6 @@ const Api = {
    }
    return urlRoot + url.replace(':version', gon.api_version);
  },
-
-  wrapAjaxCall(options) {
-    return new Promise((resolve, reject) => {
-      // jQuery 2 is not Promises/A+ compatible (missing catch)
-      $.ajax(options) // eslint-disable-line promise/catch-or-return
-        .then(data => resolve(data),
-          (jqXHR, textStatus, errorThrown) => {
-            const error = new Error(`${options.url}: ${errorThrown}`);
-            error.textStatus = textStatus;
-            if (jqXHR && jqXHR.responseJSON) error.responseJSON = jqXHR.responseJSON;
-            reject(error);
-          },
-        );
-    });
-  },
};

export default Api;
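A minimal usage sketch of the rewritten Api module (not part of the diff; the import path and IDs below are illustrative). Existing callers keep passing a callback, while new callers can rely on the returned axios promise:

import Api from '~/api';

// callback style, unchanged for existing callers
Api.group(1, (group) => {
  console.log(group.full_name);
});

// promise style, now possible because each method returns the axios chain;
// projects() defaults its callback to _.noop, so the callback can be omitted
Api.projects('gitlab', { membership: true })
  .then(projects => console.log(projects.length))
  .catch(() => console.error('request failed'));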
import Flash from '../../flash';
import { handleLocationHash } from '../../lib/utils/common_utils';
+import axios from '../../lib/utils/axios_utils';

export default class BlobViewer {
  constructor() {
@@ -127,25 +128,18 @@ export default class BlobViewer {
    const viewer = viewerParam;
    const url = viewer.getAttribute('data-url');

-    return new Promise((resolve, reject) => {
-      if (!url || viewer.getAttribute('data-loaded') || viewer.getAttribute('data-loading')) {
-        resolve(viewer);
-        return;
-      }
+    if (!url || viewer.getAttribute('data-loaded') || viewer.getAttribute('data-loading')) {
+      return Promise.resolve(viewer);
+    }

-      viewer.setAttribute('data-loading', 'true');
+    viewer.setAttribute('data-loading', 'true');

-      $.ajax({
-        url,
-        dataType: 'JSON',
-      })
-      .fail(reject)
-      .done((data) => {
-        viewer.innerHTML = data.html;
-        viewer.setAttribute('data-loaded', 'true');
+    return axios.get(url)
+      .then(({ data }) => {
+        viewer.innerHTML = data.html;
+        viewer.setAttribute('data-loaded', 'true');

-        resolve(viewer);
-      });
-    });
+        return viewer;
+      });
  }
}
@@ -5,6 +5,7 @@
import { pluralize } from './lib/utils/text_utility';
import { localTimeAgo } from './lib/utils/datetime_utility';
import Pager from './pager';
+import axios from './lib/utils/axios_utils';

export default (function () {
  const CommitsList = {};
@@ -43,29 +44,30 @@ export default (function () {
  CommitsList.filterResults = function () {
    const form = $('.commits-search-form');
    const search = CommitsList.searchField.val();
-    if (search === CommitsList.lastSearch) return;
+    if (search === CommitsList.lastSearch) return Promise.resolve();
    const commitsUrl = form.attr('action') + '?' + form.serialize();
    CommitsList.content.fadeTo('fast', 0.5);
-    return $.ajax({
-      type: 'GET',
-      url: form.attr('action'),
-      data: form.serialize(),
-      complete: function () {
-        return CommitsList.content.fadeTo('fast', 1.0);
-      },
-      success: function (data) {
-        CommitsList.lastSearch = search;
-        CommitsList.content.html(data.html);
-        return history.replaceState({
-          page: commitsUrl,
-        // Change url so if user reload a page - search results are saved
-        }, document.title, commitsUrl);
-      },
-      error: function () {
-        CommitsList.lastSearch = null;
-      },
-      dataType: 'json',
-    });
+    const params = form.serializeArray().reduce((acc, obj) => Object.assign(acc, {
+      [obj.name]: obj.value,
+    }), {});
+
+    return axios.get(form.attr('action'), {
+      params,
+    })
+      .then(({ data }) => {
+        CommitsList.lastSearch = search;
+        CommitsList.content.html(data.html);
+        CommitsList.content.fadeTo('fast', 1.0);
+
+        // Change url so if user reload a page - search results are saved
+        history.replaceState({
+          page: commitsUrl,
+        }, document.title, commitsUrl);
+      })
+      .catch(() => {
+        CommitsList.content.fadeTo('fast', 1.0);
+        CommitsList.lastSearch = null;
+      });
  };

  // Prepare loaded data.
......
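A small sketch of what the serializeArray()-to-params reduction in the commits search change above produces, with illustrative field names:

const serialized = [
  { name: 'search', value: 'fix' },
  { name: 'ref_name', value: 'master' },
];

const params = serialized.reduce((acc, obj) => Object.assign(acc, {
  [obj.name]: obj.value,
}), {});

// params is now { search: 'fix', ref_name: 'master' }, ready for axios.get(url, { params })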
@@ -12,6 +12,7 @@ export default class CreateItemDropdown {
    this.fieldName = options.fieldName;
    this.onSelect = options.onSelect || (() => {});
    this.getDataOption = options.getData;
+    this.createNewItemFromValueOption = options.createNewItemFromValue;
    this.$dropdown = options.$dropdown;
    this.$dropdownContainer = this.$dropdown.parent();
    this.$dropdownFooter = this.$dropdownContainer.find('.dropdown-footer');
@@ -30,15 +31,15 @@ export default class CreateItemDropdown {
      filterable: true,
      remote: false,
      search: {
-        fields: ['title'],
+        fields: ['text'],
      },
      selectable: true,
      toggleLabel(selected) {
-        return (selected && 'id' in selected) ? selected.title : this.defaultToggleLabel;
+        return (selected && 'id' in selected) ? _.escape(selected.title) : this.defaultToggleLabel;
      },
      fieldName: this.fieldName,
      text(item) {
-        return _.escape(item.title);
+        return _.escape(item.text);
      },
      id(item) {
        return _.escape(item.id);
@@ -51,6 +52,11 @@ export default class CreateItemDropdown {
    });
  }

+  clearDropdown() {
+    this.$dropdownContainer.find('.dropdown-content').html('');
+    this.$dropdownContainer.find('.dropdown-input-field').val('');
+  }
+
  bindEvents() {
    this.$createButton.on('click', this.onClickCreateWildcard.bind(this));
  }
@@ -58,9 +64,13 @@ export default class CreateItemDropdown {
  onClickCreateWildcard(e) {
    e.preventDefault();

+    this.refreshData();
+    this.$dropdown.data('glDropdown').selectRowAtIndex();
+  }
+
+  refreshData() {
    // Refresh the dropdown's data, which ends up calling `getData`
    this.$dropdown.data('glDropdown').remote.execute();
-    this.$dropdown.data('glDropdown').selectRowAtIndex();
  }

  getData(term, callback) {
@@ -79,20 +89,28 @@ export default class CreateItemDropdown {
    });
  }

-  toggleCreateNewButton(item) {
-    if (item) {
-      this.selectedItem = {
-        title: item,
-        id: item,
-        text: item,
-      };
+  createNewItemFromValue(newValue) {
+    if (this.createNewItemFromValueOption) {
+      return this.createNewItemFromValueOption(newValue);
+    }
+
+    return {
+      title: newValue,
+      id: newValue,
+      text: newValue,
+    };
+  }
+
+  toggleCreateNewButton(newValue) {
+    if (newValue) {
+      this.selectedItem = this.createNewItemFromValue(newValue);

      this.$dropdownContainer
        .find('.js-dropdown-create-new-item code')
-        .text(item);
+        .text(newValue);
    }

-    this.toggleFooter(!item);
+    this.toggleFooter(!newValue);
  }

  toggleFooter(toggleState) {
......
@@ -2,6 +2,7 @@
/* global fuzzaldrinPlus */
import _ from 'underscore';
import fuzzaldrinPlus from 'fuzzaldrin-plus';
+import axios from './lib/utils/axios_utils';
import { visitUrl } from './lib/utils/url_utility';
import { isObject } from './lib/utils/type_utility';
@@ -212,25 +213,17 @@ GitLabDropdownRemote = (function() {
  };

  GitLabDropdownRemote.prototype.fetchData = function() {
-    return $.ajax({
-      url: this.dataEndpoint,
-      dataType: this.options.dataType,
-      beforeSend: (function(_this) {
-        return function() {
-          if (_this.options.beforeSend) {
-            return _this.options.beforeSend();
-          }
-        };
-      })(this),
-      success: (function(_this) {
-        return function(data) {
-          if (_this.options.success) {
-            return _this.options.success(data);
-          }
-        };
-      })(this)
-    });
-    // Fetch the data through ajax if the data is a string
+    if (this.options.beforeSend) {
+      this.options.beforeSend();
+    }
+
+    // Fetch the data through ajax if the data is a string
+    return axios.get(this.dataEndpoint)
+      .then(({ data }) => {
+        if (this.options.success) {
+          return this.options.success(data);
+        }
+      });
  };

  return GitLabDropdownRemote;
......
+import flash from '../flash';
+import { __ } from '../locale';
+import axios from '../lib/utils/axios_utils';
import ContributorsStatGraph from './stat_graph_contributors';

document.addEventListener('DOMContentLoaded', () => {
-  $.ajax({
-    type: 'GET',
-    url: document.querySelector('.js-graphs-show').dataset.projectGraphPath,
-    dataType: 'json',
-    success(data) {
+  const url = document.querySelector('.js-graphs-show').dataset.projectGraphPath;
+
+  axios.get(url)
+    .then(({ data }) => {
      const graph = new ContributorsStatGraph();
      graph.init(data);
@@ -16,6 +18,6 @@ document.addEventListener('DOMContentLoaded', () => {
      $('.stat-graph').fadeIn();
      $('.loading-graph').hide();
-    },
-  });
+    })
+    .catch(() => flash(__('Error fetching contributors data.')));
});
+import axios from './lib/utils/axios_utils';
+import flash from './flash';
+import { __ } from './locale';

export default class GroupLabelSubscription {
  constructor(container) {
    const $container = $(container);
@@ -13,14 +17,12 @@ export default class GroupLabelSubscription {
    event.preventDefault();

    const url = this.$unsubscribeButtons.attr('data-url');
-    $.ajax({
-      type: 'POST',
-      url,
-    }).done(() => {
-      this.toggleSubscriptionButtons();
-      this.$unsubscribeButtons.removeAttr('data-url');
-    });
+    axios.post(url)
+      .then(() => {
+        this.toggleSubscriptionButtons();
+        this.$unsubscribeButtons.removeAttr('data-url');
+      })
+      .catch(() => flash(__('There was an error when unsubscribing from this label.')));
  }

  subscribe(event) {
@@ -31,12 +33,9 @@ export default class GroupLabelSubscription {
    this.$unsubscribeButtons.attr('data-url', url);

-    $.ajax({
-      type: 'POST',
-      url,
-    }).done(() => {
-      this.toggleSubscriptionButtons();
-    });
+    axios.post(url)
+      .then(() => this.toggleSubscriptionButtons())
+      .catch(() => flash(__('There was an error when subscribing to this label.')));
  }

  toggleSubscriptionButtons() {
......
@@ -71,7 +71,7 @@ export const setResizingStatus = ({ commit }, resizing) => {
export const checkCommitStatus = ({ state }) =>
  service
    .getBranchData(state.currentProjectId, state.currentBranchId)
-    .then((data) => {
+    .then(({ data }) => {
      const { id } = data.commit;
      const selectedBranch =
        state.projects[state.currentProjectId].branches[state.currentBranchId];
@@ -90,7 +90,7 @@ export const commitChanges = (
) =>
  service
    .commit(state.currentProjectId, payload)
-    .then((data) => {
+    .then(({ data }) => {
      const { branch } = payload;
      if (!data.short_id) {
        flash(data.message, 'alert', document, null, false, true);
@@ -147,8 +147,8 @@ export const commitChanges = (
    })
    .catch((err) => {
      let errMsg = 'Error committing changes. Please try again.';
-      if (err.responseJSON && err.responseJSON.message) {
-        errMsg += ` (${stripHtml(err.responseJSON.message)})`;
+      if (err.response.data && err.response.data.message) {
+        errMsg += ` (${stripHtml(err.response.data.message)})`;
      }
      flash(errMsg, 'alert', document, null, false, true);
      window.dispatchEvent(new Event('resize'));
......
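For reference, a hedged sketch of the error-shape change behind the `err.responseJSON` to `err.response.data` rewrite above (the endpoint and payload are hypothetical):

import axios from 'axios';

axios.post('/example/endpoint', { value: 1 })
  .catch((error) => {
    // jQuery exposed the parsed body as jqXHR.responseJSON; axios nests it under
    // error.response.data, and error.response is undefined when no response arrived
    const message = error.response && error.response.data && error.response.data.message;
    console.error(message || 'Request failed');
  });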
@@ -10,7 +10,7 @@ export const getBranchData = (
      !state.projects[`${projectId}`].branches[branchId])
      || force) {
      service.getBranchData(`${projectId}`, branchId)
-        .then((data) => {
+        .then(({ data }) => {
          const { id } = data.commit;
          commit(types.SET_BRANCH, { projectPath: `${projectId}`, branchName: branchId, branch: data });
          commit(types.SET_BRANCH_WORKING_REFERENCE, { projectId, branchId, reference: id });
......
-import Flash from '../flash';
+import axios from '../lib/utils/axios_utils';
+import flash from '../flash';

export default class IntegrationSettingsForm {
  constructor(formSelector) {
@@ -95,29 +96,26 @@ export default class IntegrationSettingsForm {
   */
  testSettings(formData) {
    this.toggleSubmitBtnState(true);
-    $.ajax({
-      type: 'PUT',
-      url: this.testEndPoint,
-      data: formData,
-    })
-      .done((res) => {
-        if (res.error) {
-          new Flash(`${res.message} ${res.service_response}`, 'alert', document, {
+
+    return axios.put(this.testEndPoint, formData)
+      .then(({ data }) => {
+        if (data.error) {
+          flash(`${data.message} ${data.service_response}`, 'alert', document, {
            title: 'Save anyway',
            clickHandler: (e) => {
              e.preventDefault();
              this.$form.submit();
            },
          });
        } else {
          this.$form.submit();
        }
+
+        this.toggleSubmitBtnState(false);
      })
-      .fail(() => {
-        new Flash('Something went wrong on our end.');
-      })
-      .always(() => {
-        this.toggleSubmitBtnState(false);
-      });
+      .catch(() => {
+        flash('Something went wrong on our end.');
+
+        this.toggleSubmitBtnState(false);
      });
  }
}
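jQuery's .always() has no direct counterpart in the axios chain above, which is why toggleSubmitBtnState(false) is repeated in both the .then() and .catch() branches. Where Promise.prototype.finally is available, a sketch of the alternative (endpoint and payload are illustrative):

import axios from 'axios';

axios.put('/example/endpoint', { active: true })
  .then(({ data }) => console.log(data))
  .catch(() => console.error('Something went wrong on our end.'))
  .finally(() => {
    // re-enable the submit button exactly once, on success or failure
  });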
/* eslint-disable comma-dangle, quotes, consistent-return, func-names, array-callback-return, space-before-function-paren, prefer-arrow-callback, max-len, no-unused-expressions, no-sequences, no-underscore-dangle, no-unused-vars, no-param-reassign */
import _ from 'underscore';
+import axios from './lib/utils/axios_utils';
import Flash from './flash';

export default {
@@ -22,15 +23,9 @@ export default {
  },

  submit() {
-    const _this = this;
-    const xhr = $.ajax({
-      url: this.form.attr('action'),
-      method: this.form.attr('method'),
-      dataType: 'JSON',
-      data: this.getFormDataAsObject()
-    });
-
-    xhr.done(() => window.location.reload());
-    xhr.fail(() => this.onFormSubmitFailure());
+    axios[this.form.attr('method')](this.form.attr('action'), this.getFormDataAsObject())
+      .then(() => window.location.reload())
+      .catch(() => this.onFormSubmitFailure());
  },

  onFormSubmitFailure() {
......
+import axios from './lib/utils/axios_utils';
+import flash from './flash';
+import { __ } from './locale';
import IssuableBulkUpdateSidebar from './issuable_bulk_update_sidebar';
import IssuableBulkUpdateActions from './issuable_bulk_update_actions';
@@ -20,23 +23,24 @@ export default class IssuableIndex {
  }

  static resetIncomingEmailToken() {
-    $('.incoming-email-token-reset').on('click', (e) => {
+    const $resetToken = $('.incoming-email-token-reset');
+
+    $resetToken.on('click', (e) => {
      e.preventDefault();

-      $.ajax({
-        type: 'PUT',
-        url: $('.incoming-email-token-reset').attr('href'),
-        dataType: 'json',
-        success(response) {
-          $('#issuable_email').val(response.new_address).focus();
-        },
-        beforeSend() {
-          $('.incoming-email-token-reset').text('resetting...');
-        },
-        complete() {
-          $('.incoming-email-token-reset').text('reset it');
-        },
-      });
+      $resetToken.text('resetting...');
+
+      axios.put($resetToken.attr('href'))
+        .then(({ data }) => {
+          $('#issuable_email').val(data.new_address).focus();
+
+          $resetToken.text('reset it');
+        })
+        .catch(() => {
+          flash(__('There was an error when reseting email token.'));
+
+          $resetToken.text('reset it');
+        });
    });
  }
}
import axios from './axios_utils';
import { getLocationHash } from './url_utility';

export const getPagePath = (index = 0) => $('body').attr('data-page').split(':')[index];
@@ -380,22 +381,16 @@ export const resetFavicon = () => {
  }
};

-export const setCiStatusFavicon = (pageUrl) => {
-  $.ajax({
-    url: pageUrl,
-    dataType: 'json',
-    success: (data) => {
-      if (data && data.favicon) {
-        setFavicon(data.favicon);
-      } else {
-        resetFavicon();
-      }
-    },
-    error: () => {
-      resetFavicon();
-    },
-  });
-};
+export const setCiStatusFavicon = pageUrl =>
+  axios.get(pageUrl)
+    .then(({ data }) => {
+      if (data && data.favicon) {
+        setFavicon(data.favicon);
+      } else {
+        resetFavicon();
+      }
+    })
+    .catch(resetFavicon);

export const spriteIcon = (icon, className = '') => {
  const classAttribute = className.length > 0 ? `class="${className}"` : '';
......
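Because the rewritten arrow function above has no braces, setCiStatusFavicon now returns the axios chain, so callers can wait for the favicon update if they need to (the URL below is illustrative):

setCiStatusFavicon('/group/project/pipelines/123/status.json')
  .then(() => {
    // favicon now reflects the CI status, or was reset on failure
  });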
@@ -8,16 +8,16 @@ class UsersCache extends Cache {
    }

    return Api.users('', { username })
-      .then((users) => {
-        if (!users.length) {
+      .then(({ data }) => {
+        if (!data.length) {
          throw new Error(`User "${username}" could not be found!`);
        }

-        if (users.length > 1) {
+        if (data.length > 1) {
          throw new Error(`Expected username "${username}" to be unique!`);
        }

-        const user = users[0];
+        const user = data[0];
        this.internalStorage[username] = user;
        return user;
      });
......
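UsersCache destructures { data } above because Api.users now resolves with the full axios response rather than the parsed array. A short sketch of what that gives callers (the query is illustrative; the header name follows GitLab's pagination convention):

Api.users('root', { per_page: 1 })
  .then((response) => {
    const users = response.data;                // parsed JSON body
    const total = response.headers['x-total'];  // pagination metadata also available
    console.log(users.length, total);
  });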
@@ -98,7 +98,7 @@ export default class ActivityCalendar {
      const secondLastColMonth = this.timestampsTmp[group - 2][0].date.getMonth();

      if (lastColMonth !== secondLastColMonth) {
-        extraWidthPadding = 3;
+        extraWidthPadding = 6;
      }

      return extraWidthPadding;
......
class Admin::GitalyServersController < Admin::ApplicationController
  def index
    @gitaly_servers = Gitaly::Server.all
  end
end
@@ -12,11 +12,9 @@ module IssuableCollections
  # rubocop:disable Gitlab/ModuleWithInstanceVariables
  def set_issuables_index
    @issuables = issuables_collection
-    @issuables = @issuables.page(params[:page])
-    @issuable_meta_data = issuable_meta_data(@issuables, collection_type)
-    @total_pages = issuable_page_count
+    set_pagination

    return if redirect_out_of_range(@total_pages)

    if params[:label_name].present?
@@ -35,14 +33,26 @@ module IssuableCollections
        @users.push(author) if author
      end
    end

+  def set_pagination
+    return if pagination_disabled?
+
+    @issuables = @issuables.page(params[:page])
+    @issuable_meta_data = issuable_meta_data(@issuables, collection_type)
+    @total_pages = issuable_page_count
+  end
  # rubocop:enable Gitlab/ModuleWithInstanceVariables

+  def pagination_disabled?
+    false
+  end
+
  def issuables_collection
    finder.execute.preload(preload_for_collection)
  end

  def redirect_out_of_range(total_pages)
-    return false if total_pages.zero?
+    return false if total_pages.nil? || total_pages.zero?

    out_of_range = @issuables.current_page > total_pages # rubocop:disable Gitlab/ModuleWithInstanceVariables
......
@@ -150,7 +150,6 @@ class GroupsController < Groups::ApplicationController
    @projects = GroupProjectsFinder.new(params: params, group: group, options: options, current_user: current_user)
                  .execute
                  .includes(:namespace)
-                  .page(params[:page])

    @events = EventCollection
      .new(@projects, offset: params[:offset].to_i, filter: event_filter)
......
@@ -37,6 +37,8 @@ module DiscussionOnDiff
  # Returns an array of at most 16 highlighted lines above a diff note
  def truncated_diff_lines(highlight: true)
+    return [] if diff_line.nil? && first_note.is_a?(LegacyDiffNote)
+
    lines = highlight ? highlighted_diff_lines : diff_lines
    initial_line_index = [diff_line.index - NUMBER_OF_TRUNCATED_DIFF_LINES + 1, 0].max
......
@@ -618,12 +618,12 @@ class MergeRequest < ActiveRecord::Base
    can_be_merged? && !should_be_rebased?
  end

-  def mergeable_state?(skip_ci_check: false)
+  def mergeable_state?(skip_ci_check: false, skip_discussions_check: false)
    return false unless open?
    return false if work_in_progress?
    return false if broken?
    return false unless skip_ci_check || mergeable_ci_state?
-    return false unless mergeable_discussions_state?
+    return false unless skip_discussions_check || mergeable_discussions_state?

    true
  end
......
@@ -124,6 +124,12 @@ class ProjectWiki
    update_project_activity
  end

+  def page_formatted_data(page)
+    page_title, page_dir = page_title_and_dir(page.title)
+
+    wiki.page_formatted_data(title: page_title, dir: page_dir, version: page.version)
+  end
+
  def page_title_and_dir(title)
    title_array = title.split("/")
    title = title_array.pop
......
@@ -255,6 +255,8 @@ class Repository
    # This will still fail if the file is corrupted (e.g. 0 bytes)
    raw_repository.write_ref(keep_around_ref_name(sha), sha, shell: false)
+  rescue Gitlab::Git::CommandError => ex
+    Rails.logger.error "Unable to create keep-around reference for repository #{path}: #{ex}"
  end

  def kept_around?(sha)
@@ -491,7 +493,7 @@ class Repository
      raw_repository.root_ref
    else
      # When the repo does not exist we raise this error so no data is cached.
-      raise Rugged::ReferenceError
+      raise Gitlab::Git::Repository::NoRepository
    end
  end
  cache_method :root_ref
@@ -525,11 +527,7 @@ class Repository
  def commit_count_for_ref(ref)
    return 0 unless exists?

-    begin
-      cache.fetch(:"commit_count_#{ref}") { raw_repository.commit_count(ref) }
-    rescue Rugged::ReferenceError
-      0
-    end
+    cache.fetch(:"commit_count_#{ref}") { raw_repository.commit_count(ref) }
  end

  delegate :branch_names, to: :raw_repository
@@ -653,26 +651,14 @@ class Repository
  end

  def last_commit_for_path(sha, path)
-    raw_repository.gitaly_migrate(:last_commit_for_path) do |is_enabled|
-      if is_enabled
-        last_commit_for_path_by_gitaly(sha, path)
-      else
-        last_commit_for_path_by_rugged(sha, path)
-      end
-    end
+    commit_by(oid: last_commit_id_for_path(sha, path))
  end

  def last_commit_id_for_path(sha, path)
    key = path.blank? ? "last_commit_id_for_path:#{sha}" : "last_commit_id_for_path:#{sha}:#{Digest::SHA1.hexdigest(path)}"

    cache.fetch(key) do
-      raw_repository.gitaly_migrate(:last_commit_for_path) do |is_enabled|
-        if is_enabled
-          last_commit_for_path_by_gitaly(sha, path).id
-        else
-          last_commit_id_for_path_by_shelling_out(sha, path)
-        end
-      end
+      raw_repository.last_commit_id_for_path(sha, path)
    end
  end
@@ -800,16 +786,6 @@ class Repository
    with_cache_hooks { raw.multi_action(user, **options) }
  end

-  def can_be_merged?(source_sha, target_branch)
-    raw_repository.gitaly_migrate(:can_be_merged) do |is_enabled|
-      if is_enabled
-        gitaly_can_be_merged?(source_sha, find_branch(target_branch).target)
-      else
-        rugged_can_be_merged?(source_sha, target_branch)
-      end
-    end
-  end
-
  def merge(user, source_sha, merge_request, message)
    with_cache_hooks do
      raw_repository.merge(user, source_sha, merge_request.target_branch, message) do |commit_id|
@@ -876,26 +852,18 @@ class Repository
    @root_ref_sha ||= commit(root_ref).sha
  end

-  delegate :merged_branch_names, to: :raw_repository
+  delegate :merged_branch_names, :can_be_merged?, to: :raw_repository

  def merge_base(first_commit_id, second_commit_id)
    first_commit_id = commit(first_commit_id).try(:id) || first_commit_id
    second_commit_id = commit(second_commit_id).try(:id) || second_commit_id
    raw_repository.merge_base(first_commit_id, second_commit_id)
-  rescue Rugged::ReferenceError
-    nil
  end

  def ancestor?(ancestor_id, descendant_id)
    return false if ancestor_id.nil? || descendant_id.nil?

-    Gitlab::GitalyClient.migrate(:is_ancestor) do |is_enabled|
-      if is_enabled
-        raw_repository.ancestor?(ancestor_id, descendant_id)
-      else
-        rugged_is_ancestor?(ancestor_id, descendant_id)
-      end
-    end
+    raw_repository.ancestor?(ancestor_id, descendant_id)
  end

  def fetch_as_mirror(url, forced: false, refmap: :all_refs, remote_name: nil)
@@ -983,7 +951,7 @@ class Repository
    end

    instance_variable_set(ivar, value)
-  rescue Rugged::ReferenceError, Gitlab::Git::Repository::NoRepository
+  rescue Gitlab::Git::Repository::NoRepository
    # Even if the above `#exists?` check passes these errors might still
    # occur (for example because of a non-existing HEAD). We want to
    # gracefully handle this and not cache anything
@@ -1077,30 +1045,7 @@ class Repository
    Gitlab::Metrics.add_event(event, { path: full_path }.merge(tags))
  end

-  def last_commit_for_path_by_gitaly(sha, path)
-    c = raw_repository.gitaly_commit_client.last_commit_for_path(sha, path)
-    commit_by(oid: c)
-  end
-
-  def last_commit_for_path_by_rugged(sha, path)
-    sha = last_commit_id_for_path_by_shelling_out(sha, path)
-    commit_by(oid: sha)
-  end
-
-  def last_commit_id_for_path_by_shelling_out(sha, path)
-    args = %W(rev-list --max-count=1 #{sha} -- #{path})
-
-    raw_repository.run_git_with_timeout(args, Gitlab::Git::Popen::FAST_GIT_PROCESS_TIMEOUT).first.strip
-  end
-
  def initialize_raw_repository
    Gitlab::Git::Repository.new(project.repository_storage, disk_path + '.git', Gitlab::GlRepository.gl_repository(project, is_wiki))
  end
-
-  def gitaly_can_be_merged?(their_commit, our_commit)
-    !raw_repository.gitaly_conflicts_client(our_commit, their_commit).conflicts?
-  end
-
-  def rugged_can_be_merged?(their_commit, our_commit)
-    !rugged.merge_commits(our_commit, their_commit).conflicts?
-  end
end
@@ -107,7 +107,10 @@ class WikiPage
  # The processed/formatted content of this page.
  def formatted_content
-    @attributes[:formatted_content] ||= @page&.formatted_data
+    # Assuming @page exists, nil formatted_data means we didn't load it
+    # before hand (i.e. page was fetched by Gitaly), so we fetch it separately.
+    # If the page was fetched by Gollum, formatted_data would've been a String.
+    @attributes[:formatted_content] ||= @page&.formatted_data || @wiki.page_formatted_data(@page)
  end

  # The markup format for the page.
......
@@ -48,7 +48,18 @@ class MergeRequestWidgetEntity < IssuableEntity
  expose :merge_ongoing?, as: :merge_ongoing
  expose :work_in_progress?, as: :work_in_progress
  expose :source_branch_exists?, as: :source_branch_exists
-  expose :mergeable_discussions_state?, as: :mergeable_discussions_state
+
+  expose :mergeable_discussions_state?, as: :mergeable_discussions_state do |merge_request|
+    # This avoids calling MergeRequest#mergeable_discussions_state without
+    # considering the state of the MR first. If a MR isn't mergeable, we can
+    # safely short-circuit it.
+    if merge_request.mergeable_state?(skip_ci_check: true, skip_discussions_check: true)
+      merge_request.mergeable_discussions_state?
+    else
+      false
+    end
+  end
+
  expose :branch_missing?, as: :branch_missing
  expose :commits_count
  expose :cannot_be_merged?, as: :has_conflicts
......
@@ -138,19 +138,11 @@
            GitLab API
            %span.pull-right
              = API::API::version
-          %p
-            Gitaly
-            %span.pull-right
-              = Gitlab::GitalyClient.expected_server_version
          - if Gitlab.config.pages.enabled
            %p
              GitLab Pages
              %span.pull-right
                = Gitlab::Pages::VERSION
-          %p
-            Git
-            %span.pull-right
-              = Gitlab::Git.version
          %p
            Ruby
            %span.pull-right
@@ -163,6 +155,8 @@
              = Gitlab::Database.adapter_name
              %span.pull-right
                = Gitlab::Database.version
+          %p
+            = link_to "Gitaly Servers", admin_gitaly_servers_path

  .row
    .col-md-4
      .info-well
......
- breadcrumb_title _("Gitaly Servers")

%h3.page-title= _("Gitaly Servers")

%hr

.gitaly_servers
  - if @gitaly_servers.any?
    .table-holder
      %table.table.responsive-table
        %thead.hidden-sm.hidden-xs
          %tr
            %th= _("Storage")
            %th= n_("Gitaly|Address")
            %th= _("Server version")
            %th= _("Git version")
            %th= _("Up to date")
        - @gitaly_servers.each do |server|
          %tr
            %td
              = server.storage
            %td
              = server.address
            %td
              = server.server_version
            %td
              = server.git_binary_version
            %td
              = boolean_to_icon(server.up_to_date?)
  - else
    .empty-state
      .text-center
        %h4= _("No connection could be made to a Gitaly Server, please check your logs!")
%li.header-new.dropdown
-  = link_to new_project_path, class: "header-new-dropdown-toggle has-tooltip", title: "New...", ref: 'tooltip', aria: { label: "New..." }, data: { toggle: 'dropdown', placement: 'bottom', container: 'body' } do
+  = link_to new_project_path, class: "header-new-dropdown-toggle has-tooltip qa-new-menu-toggle", title: "New...", ref: 'tooltip', aria: { label: "New..." }, data: { toggle: 'dropdown', placement: 'bottom', container: 'body' } do
    = sprite_icon('plus-square', size: 16)
    = sprite_icon('angle-down', css_class: 'caret-down')
  .dropdown-menu-nav.dropdown-menu-align-right
......
---
title: Login via OAuth now only marks new users as external
merge_request: 16672
author:
type: fixed
---
title: Reduce the number of Prometheus metrics
merge_request: 16443
author:
type: performance
---
title: Fix 500 error when loading a merge request with an invalid comment
merge_request: 16795
author:
type: fixed
---
title: Update nokogiri to 1.8.2
merge_request: 16807
author:
type: security
---
title: Contribution calendar label was cut off
merge_request:
author: Branka Martinovic
type: fixed
---
title: Stop checking if discussions are in a mergeable state if the MR isn't
merge_request:
author:
type: performance
---
title: Fix not all events being shown in group dashboard
merge_request:
author:
type: fixed
---
title: Remove N+1 queries with /projects/:project_id/{access_requests,members} API endpoints
merge_request:
author:
type: performance
---
title: Add Gitaly Servers admin dashboard
merge_request:
author:
type: added
raise "Vendored ActiveRecord 5 code! Delete #{__FILE__}!" if ActiveRecord::VERSION::MAJOR >= 5 raise "Vendored ActiveRecord 5 code! Delete #{__FILE__}!" if ActiveRecord::VERSION::MAJOR >= 5
require 'active_record/connection_adapters/postgresql_adapter' if Gitlab::Database.postgresql?
require 'active_record/connection_adapters/postgresql/schema_statements' require 'active_record/connection_adapters/postgresql_adapter'
require 'active_record/connection_adapters/postgresql/schema_statements'
#
# Monkey-patch the refused Rails 4.2 patch at https://github.com/rails/rails/pull/31330 #
# # Monkey-patch the refused Rails 4.2 patch at https://github.com/rails/rails/pull/31330
# Updates sequence logic to support PostgreSQL 10. #
# # Updates sequence logic to support PostgreSQL 10.
# rubocop:disable all #
module ActiveRecord # rubocop:disable all
module ConnectionAdapters module ActiveRecord
module ConnectionAdapters
# We need #postgresql_version to be public as in ActiveRecord 5 for seed_fu
# to work. In ActiveRecord 4, it is protected. # We need #postgresql_version to be public as in ActiveRecord 5 for seed_fu
# https://github.com/mbleigh/seed-fu/issues/123 # to work. In ActiveRecord 4, it is protected.
class PostgreSQLAdapter # https://github.com/mbleigh/seed-fu/issues/123
public :postgresql_version class PostgreSQLAdapter
end public :postgresql_version
end
module PostgreSQL module PostgreSQL
module SchemaStatements module SchemaStatements
# Resets the sequence of a table's primary key to the maximum value. # Resets the sequence of a table's primary key to the maximum value.
def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc: def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:
unless pk and sequence unless pk and sequence
default_pk, default_sequence = pk_and_sequence_for(table) default_pk, default_sequence = pk_and_sequence_for(table)
pk ||= default_pk pk ||= default_pk
sequence ||= default_sequence sequence ||= default_sequence
end end
if @logger && pk && !sequence if @logger && pk && !sequence
@logger.warn "#{table} has primary key #{pk} with no default sequence" @logger.warn "#{table} has primary key #{pk} with no default sequence"
end end
if pk && sequence if pk && sequence
quoted_sequence = quote_table_name(sequence) quoted_sequence = quote_table_name(sequence)
max_pk = select_value("SELECT MAX(#{quote_column_name pk}) FROM #{quote_table_name(table)}") max_pk = select_value("SELECT MAX(#{quote_column_name pk}) FROM #{quote_table_name(table)}")
if max_pk.nil? if max_pk.nil?
if postgresql_version >= 100000 if postgresql_version >= 100000
minvalue = select_value("SELECT seqmin FROM pg_sequence WHERE seqrelid = #{quote(quoted_sequence)}::regclass") minvalue = select_value("SELECT seqmin FROM pg_sequence WHERE seqrelid = #{quote(quoted_sequence)}::regclass")
else else
minvalue = select_value("SELECT min_value FROM #{quoted_sequence}") minvalue = select_value("SELECT min_value FROM #{quoted_sequence}")
end
end end
end
select_value <<-end_sql, 'SCHEMA' select_value <<-end_sql, 'SCHEMA'
SELECT setval(#{quote(quoted_sequence)}, #{max_pk ? max_pk : minvalue}, #{max_pk ? true : false}) SELECT setval(#{quote(quoted_sequence)}, #{max_pk ? max_pk : minvalue}, #{max_pk ? true : false})
end_sql end_sql
end
end end
end end
end end
end end
end end
# rubocop:enable all
end end
# rubocop:enable all
@@ -7,10 +7,12 @@ if Gitlab::Database.mysql?
  require 'peek-mysql2'
  PEEK_DB_CLIENT = ::Mysql2::Client
  PEEK_DB_VIEW = Peek::Views::Mysql2
-else
+elsif Gitlab::Database.postgresql?
  require 'peek-pg'
  PEEK_DB_CLIENT = ::PG::Connection
  PEEK_DB_VIEW = Peek::Views::PG
+else
+  raise "Unsupported database adapter for peek!"
end

Peek.into PEEK_DB_VIEW
......
@@ -24,6 +24,8 @@ namespace :admin do
    resource :impersonation, only: :destroy
    resources :abuse_reports, only: [:index, :destroy]
+    resources :gitaly_servers, only: [:index]
+
    resources :spam_logs, only: [:index, :destroy] do
      member do
        post :mark_as_ham
......
@@ -114,6 +114,7 @@ Parameters:
- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
- `sha` (optional) - The commit SHA to download. A tag, branch reference or sha can be used. This defaults to the tip of the default branch if not specified
+- `format` (optional) - The archive format. Default is `tar.gz`. Options are `tar.gz`, `tar.bz2`, `tbz`, `tbz2`, `tb2`, `bz2`, `tar`, `zip`

## Compare branches, tags or commits
......
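A hedged example of the new `format` parameter on the archive endpoint, written with axios to match the rest of this merge (project ID and API version prefix are illustrative):

import axios from 'axios';

axios.get('/api/v4/projects/42/repository/archive', {
  params: { format: 'zip' },
  responseType: 'arraybuffer',
}).then(({ data }) => {
  console.log(`downloaded ${data.byteLength} bytes`);
});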
@@ -24,7 +24,7 @@ module API
        access_requesters = AccessRequestsFinder.new(source).execute!(current_user)
        access_requesters = paginate(access_requesters.includes(:user))

-        present access_requesters.map(&:user), with: Entities::AccessRequester, source: source
+        present access_requesters, with: Entities::AccessRequester
      end

      desc "Requests access for the authenticated user to a #{source_type}." do
@@ -36,7 +36,7 @@ module API
        access_requester = source.request_access(current_user)

        if access_requester.persisted?
-          present access_requester.user, with: Entities::AccessRequester, access_requester: access_requester
+          present access_requester, with: Entities::AccessRequester
        else
          render_validation_error!(access_requester)
        end
@@ -56,7 +56,7 @@ module API
        member = ::Members::ApproveAccessRequestService.new(source, current_user, declared_params).execute

        status :created
-        present member.user, with: Entities::Member, member: member
+        present member, with: Entities::Member
      end

      desc 'Denies an access request for the given user.' do
......
@@ -205,22 +205,15 @@ module API
      expose :build_artifacts_size, as: :job_artifacts_size
    end

-    class Member < UserBasic
-      expose :access_level do |user, options|
-        member = options[:member] || options[:source].members.find_by(user_id: user.id)
-        member.access_level
-      end
-      expose :expires_at do |user, options|
-        member = options[:member] || options[:source].members.find_by(user_id: user.id)
-        member.expires_at
-      end
+    class Member < Grape::Entity
+      expose :user, merge: true, using: UserBasic
+      expose :access_level
+      expose :expires_at
    end

-    class AccessRequester < UserBasic
-      expose :requested_at do |user, options|
-        access_requester = options[:access_requester] || options[:source].requesters.find_by(user_id: user.id)
-        access_requester.requested_at
-      end
+    class AccessRequester < Grape::Entity
+      expose :user, merge: true, using: UserBasic
+      expose :requested_at
    end

    class Group < Grape::Entity
......
@@ -21,10 +21,11 @@ module API
      get ":id/members" do
        source = find_source(source_type, params[:id])

-        users = source.users
-        users = users.merge(User.search(params[:query])) if params[:query].present?
+        members = source.members.where.not(user_id: nil).includes(:user)
+        members = members.joins(:user).merge(User.search(params[:query])) if params[:query].present?
+        members = paginate(members)

-        present paginate(users), with: Entities::Member, source: source
+        present members, with: Entities::Member
      end

      desc 'Gets a member of a group or project.' do
@@ -39,7 +40,7 @@ module API
        members = source.members
        member = members.find_by!(user_id: params[:user_id])

-        present member.user, with: Entities::Member, member: member
+        present member, with: Entities::Member
      end

      desc 'Adds a member to a group or project.' do
@@ -62,7 +63,7 @@ module API
        if !member
          not_allowed! # This currently can only be reached in EE
        elsif member.persisted? && member.valid?
-          present member.user, with: Entities::Member, member: member
+          present member, with: Entities::Member
        else
          render_validation_error!(member)
        end
@@ -83,7 +84,7 @@ module API
        member = source.members.find_by!(user_id: params.delete(:user_id))

        if member.update_attributes(declared_params(include_missing: false))
-          present member.user, with: Entities::Member, member: member
+          present member, with: Entities::Member
        else
          render_validation_error!(member)
        end
......
...@@ -22,10 +22,11 @@ module API ...@@ -22,10 +22,11 @@ module API
get ":id/members" do get ":id/members" do
source = find_source(source_type, params[:id]) source = find_source(source_type, params[:id])
users = source.users members = source.members.where.not(user_id: nil).includes(:user)
users = users.merge(User.search(params[:query])) if params[:query].present? members = members.joins(:user).merge(User.search(params[:query])) if params[:query].present?
members = paginate(members)
present paginate(users), with: ::API::Entities::Member, source: source present members, with: ::API::Entities::Member
end end
desc 'Gets a member of a group or project.' do desc 'Gets a member of a group or project.' do
...@@ -40,7 +41,7 @@ module API ...@@ -40,7 +41,7 @@ module API
members = source.members members = source.members
member = members.find_by!(user_id: params[:user_id]) member = members.find_by!(user_id: params[:user_id])
present member.user, with: ::API::Entities::Member, member: member present member, with: ::API::Entities::Member
end end
desc 'Adds a member to a group or project.' do desc 'Adds a member to a group or project.' do
...@@ -69,7 +70,7 @@ module API ...@@ -69,7 +70,7 @@ module API
end end
if member.persisted? && member.valid? if member.persisted? && member.valid?
present member.user, with: ::API::Entities::Member, member: member present member, with: ::API::Entities::Member
else else
# This is to ensure back-compatibility but 400 behavior should be used # This is to ensure back-compatibility but 400 behavior should be used
# for all validation errors in 9.0! # for all validation errors in 9.0!
...@@ -93,7 +94,7 @@ module API ...@@ -93,7 +94,7 @@ module API
member = source.members.find_by!(user_id: params.delete(:user_id)) member = source.members.find_by!(user_id: params.delete(:user_id))
if member.update_attributes(declared_params(include_missing: false)) if member.update_attributes(declared_params(include_missing: false))
present member.user, with: ::API::Entities::Member, member: member present member, with: ::API::Entities::Member
else else
# This is to ensure back-compatibility but 400 behavior should be used # This is to ensure back-compatibility but 400 behavior should be used
# for all validation errors in 9.0! # for all validation errors in 9.0!
...@@ -125,7 +126,7 @@ module API ...@@ -125,7 +126,7 @@ module API
else else
::Members::DestroyService.new(source, current_user, declared_params).execute ::Members::DestroyService.new(source, current_user, declared_params).execute
present member.user, with: ::API::Entities::Member, member: member present member, with: ::API::Entities::Member
end end
end end
end end
......
module Gitaly
class Server
def self.all
Gitlab.config.repositories.storages.keys.map { |s| Gitaly::Server.new(s) }
end
attr_reader :storage
def initialize(storage)
@storage = storage
end
def server_version
info.server_version
end
def git_binary_version
info.git_version
end
def up_to_date?
server_version == Gitlab::GitalyClient.expected_server_version
end
def address
Gitlab::GitalyClient.address(@storage)
rescue RuntimeError => e
"Error getting the address: #{e.message}"
end
private
def info
@info ||=
begin
Gitlab::GitalyClient::ServerService.new(@storage).info
rescue GRPC::Unavailable, GRPC::DeadlineExceeded
# This will show the server as being out of date
Gitaly::ServerInfoResponse.new(git_version: '', server_version: '')
end
end
end
end
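As a rough illustration of how this new `Gitaly::Server` wrapper could be exercised, the sketch below iterates over all configured storages and prints their versions; it assumes a loaded GitLab Rails environment (for example a Rails console) and is not part of this commit.

```
# Hypothetical Rails console snippet; assumes the GitLab application environment is loaded.
Gitaly::Server.all.each do |server|
  puts "#{server.storage} (#{server.address})"
  puts "  server version: #{server.server_version}"
  puts "  git version:    #{server.git_binary_version}"
  puts "  up to date:     #{server.up_to_date?}"
end
```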
...@@ -5,7 +5,7 @@ module Gitlab ...@@ -5,7 +5,7 @@ module Gitlab
DEFAULT_CE_PROJECT_URL = 'https://gitlab.com/gitlab-org/gitlab-ce'.freeze DEFAULT_CE_PROJECT_URL = 'https://gitlab.com/gitlab-org/gitlab-ce'.freeze
EE_REPO_URL = 'https://gitlab.com/gitlab-org/gitlab-ee.git'.freeze EE_REPO_URL = 'https://gitlab.com/gitlab-org/gitlab-ee.git'.freeze
CHECK_DIR = Rails.root.join('ee_compat_check') CHECK_DIR = Rails.root.join('ee_compat_check')
IGNORED_FILES_REGEX = /(VERSION|CHANGELOG\.md:\d+)/.freeze IGNORED_FILES_REGEX = %r{VERSION|CHANGELOG\.md|db/schema\.rb}i.freeze
PLEASE_READ_THIS_BANNER = %Q{ PLEASE_READ_THIS_BANNER = %Q{
============================================================ ============================================================
===================== PLEASE READ THIS ===================== ===================== PLEASE READ THIS =====================
......
...@@ -44,7 +44,7 @@ module Gitlab ...@@ -44,7 +44,7 @@ module Gitlab
# branch1...branch2) From the git documentation: # branch1...branch2) From the git documentation:
# "git diff A...B" is equivalent to "git diff # "git diff A...B" is equivalent to "git diff
# $(git-merge-base A B) B" # $(git-merge-base A B) B"
repo.merge_base_commit(head, base) repo.merge_base(head, base)
end end
options ||= {} options ||= {}
......
...@@ -131,7 +131,10 @@ module Gitlab ...@@ -131,7 +131,10 @@ module Gitlab
oldrev = branch.target oldrev = branch.target
if oldrev == repository.merge_base(newrev, branch.target) merge_base = repository.merge_base(newrev, branch.target)
raise Gitlab::Git::Repository::InvalidRef unless merge_base
if oldrev == merge_base
oldrev oldrev
else else
raise Gitlab::Git::CommitError.new('Branch diverged') raise Gitlab::Git::CommitError.new('Branch diverged')
......
...@@ -551,29 +551,34 @@ module Gitlab ...@@ -551,29 +551,34 @@ module Gitlab
end end
# Returns the SHA of the most recent common ancestor of +from+ and +to+ # Returns the SHA of the most recent common ancestor of +from+ and +to+
def merge_base_commit(from, to) def merge_base(from, to)
gitaly_migrate(:merge_base) do |is_enabled| gitaly_migrate(:merge_base) do |is_enabled|
if is_enabled if is_enabled
gitaly_repository_client.find_merge_base(from, to) gitaly_repository_client.find_merge_base(from, to)
else else
rugged.merge_base(from, to) rugged_merge_base(from, to)
end end
end end
end end
alias_method :merge_base, :merge_base_commit
# Gitaly note: JV: check gitlab-ee before removing this method. # Gitaly note: JV: check gitlab-ee before removing this method.
def rugged_is_ancestor?(ancestor_id, descendant_id) def rugged_is_ancestor?(ancestor_id, descendant_id)
return false if ancestor_id.nil? || descendant_id.nil? return false if ancestor_id.nil? || descendant_id.nil?
merge_base_commit(ancestor_id, descendant_id) == ancestor_id rugged_merge_base(ancestor_id, descendant_id) == ancestor_id
rescue Rugged::OdbError rescue Rugged::OdbError
false false
end end
# Returns true if +from+ is a direct ancestor of +to+, otherwise false # Returns true if +from+ is a direct ancestor of +to+, otherwise false
def ancestor?(from, to) def ancestor?(from, to)
gitaly_commit_client.ancestor?(from, to) Gitlab::GitalyClient.migrate(:is_ancestor) do |is_enabled|
if is_enabled
gitaly_commit_client.ancestor?(from, to)
else
rugged_is_ancestor?(from, to)
end
end
end end
def merged_branch_names(branch_names = []) def merged_branch_names(branch_names = [])
...@@ -680,11 +685,7 @@ module Gitlab ...@@ -680,11 +685,7 @@ module Gitlab
if is_enabled if is_enabled
gitaly_commit_client.commit_count(ref) gitaly_commit_client.commit_count(ref)
else else
walker = Rugged::Walker.new(rugged) rugged_commit_count(ref)
walker.sorting(Rugged::SORT_TOPO | Rugged::SORT_REVERSE)
oid = rugged.rev_parse_oid(ref)
walker.push(oid)
walker.count
end end
end end
end end
...@@ -887,16 +888,12 @@ module Gitlab ...@@ -887,16 +888,12 @@ module Gitlab
end end
def delete_refs(*ref_names) def delete_refs(*ref_names)
instructions = ref_names.map do |ref| gitaly_migrate(:delete_refs) do |is_enabled|
"delete #{ref}\x00\x00" if is_enabled
end gitaly_delete_refs(*ref_names)
else
message, status = run_git(%w[update-ref --stdin -z]) do |stdin| git_delete_refs(*ref_names)
stdin.write(instructions.join) end
end
unless status.zero?
raise GitError.new("Could not delete refs #{ref_names}: #{message}")
end end
end end
...@@ -1105,10 +1102,14 @@ module Gitlab ...@@ -1105,10 +1102,14 @@ module Gitlab
end end
def write_ref(ref_path, ref, old_ref: nil, shell: true) def write_ref(ref_path, ref, old_ref: nil, shell: true)
if shell ref_path = "#{Gitlab::Git::BRANCH_REF_PREFIX}#{ref_path}" unless ref_path.start_with?("refs/") || ref_path == "HEAD"
shell_write_ref(ref_path, ref, old_ref)
else gitaly_migrate(:write_ref) do |is_enabled|
rugged_write_ref(ref_path, ref) if is_enabled
gitaly_repository_client.write_ref(ref_path, ref, old_ref, shell)
else
local_write_ref(ref_path, ref, old_ref: old_ref, shell: shell)
end
end end
end end
...@@ -1130,13 +1131,6 @@ module Gitlab ...@@ -1130,13 +1131,6 @@ module Gitlab
target_ref target_ref
end end
# Refactoring aid; allows us to copy code from app/models/repository.rb
def run_git_with_timeout(args, timeout, env: {})
circuit_breaker.perform do
popen_with_timeout([Gitlab.config.git.bin_path, *args], timeout, path, env)
end
end
# Refactoring aid; allows us to copy code from app/models/repository.rb # Refactoring aid; allows us to copy code from app/models/repository.rb
def commit(ref = 'HEAD') def commit(ref = 'HEAD')
Gitlab::Git::Commit.find(self, ref) Gitlab::Git::Commit.find(self, ref)
...@@ -1392,6 +1386,16 @@ module Gitlab ...@@ -1392,6 +1386,16 @@ module Gitlab
run_git(args).first.scrub.split(/^--$/) run_git(args).first.scrub.split(/^--$/)
end end
def can_be_merged?(source_sha, target_branch)
gitaly_migrate(:can_be_merged) do |is_enabled|
if is_enabled
gitaly_can_be_merged?(source_sha, find_branch(target_branch, true).target)
else
rugged_can_be_merged?(source_sha, target_branch)
end
end
end
def search_files_by_name(query, ref) def search_files_by_name(query, ref)
safe_query = Regexp.escape(query.sub(/^\/*/, "")) safe_query = Regexp.escape(query.sub(/^\/*/, ""))
...@@ -1417,8 +1421,36 @@ module Gitlab ...@@ -1417,8 +1421,36 @@ module Gitlab
output output
end end
def can_be_merged?(source_sha, target_branch)
gitaly_migrate(:can_be_merged) do |is_enabled|
if is_enabled
gitaly_can_be_merged?(source_sha, find_branch(target_branch).target)
else
rugged_can_be_merged?(source_sha, target_branch)
end
end
end
def last_commit_id_for_path(sha, path)
gitaly_migrate(:last_commit_for_path) do |is_enabled|
if is_enabled
last_commit_for_path_by_gitaly(sha, path).id
else
last_commit_id_for_path_by_shelling_out(sha, path)
end
end
end
private private
def local_write_ref(ref_path, ref, old_ref: nil, shell: true)
if shell
shell_write_ref(ref_path, ref, old_ref)
else
rugged_write_ref(ref_path, ref)
end
end
def shell_write_ref(ref_path, ref, old_ref) def shell_write_ref(ref_path, ref, old_ref)
raise ArgumentError, "invalid ref_path #{ref_path.inspect}" if ref_path.include?(' ') raise ArgumentError, "invalid ref_path #{ref_path.inspect}" if ref_path.include?(' ')
raise ArgumentError, "invalid ref #{ref.inspect}" if ref.include?("\x00") raise ArgumentError, "invalid ref #{ref.inspect}" if ref.include?("\x00")
...@@ -1460,6 +1492,12 @@ module Gitlab ...@@ -1460,6 +1492,12 @@ module Gitlab
output output
end end
def run_git_with_timeout(args, timeout, env: {})
circuit_breaker.perform do
popen_with_timeout([Gitlab.config.git.bin_path, *args], timeout, path, env)
end
end
def fresh_worktree?(path) def fresh_worktree?(path)
File.exist?(path) && !clean_stuck_worktree(path) File.exist?(path) && !clean_stuck_worktree(path)
end end
...@@ -2160,7 +2198,7 @@ module Gitlab ...@@ -2160,7 +2198,7 @@ module Gitlab
source_sha source_sha
end end
rescue Rugged::ReferenceError rescue Rugged::ReferenceError, InvalidRef
raise ArgumentError, 'Invalid merge source' raise ArgumentError, 'Invalid merge source'
end end
...@@ -2172,6 +2210,24 @@ module Gitlab ...@@ -2172,6 +2210,24 @@ module Gitlab
remote_update(remote_name, url: url) remote_update(remote_name, url: url)
end end
def git_delete_refs(*ref_names)
instructions = ref_names.map do |ref|
"delete #{ref}\x00\x00"
end
message, status = run_git(%w[update-ref --stdin -z]) do |stdin|
stdin.write(instructions.join)
end
unless status.zero?
raise GitError.new("Could not delete refs #{ref_names}: #{message}")
end
end
def gitaly_delete_refs(*ref_names)
gitaly_ref_client.delete_refs(refs: ref_names)
end
def rugged_remove_remote(remote_name) def rugged_remove_remote(remote_name)
# When a remote is deleted all its remote refs are deleted too, but in # When a remote is deleted all its remote refs are deleted too, but in
# the case of mirrors we map its refs (that would usually go under # the case of mirrors we map its refs (that would usually go under
...@@ -2234,6 +2290,14 @@ module Gitlab ...@@ -2234,6 +2290,14 @@ module Gitlab
run_git(['fetch', remote_name], env: env).last.zero? run_git(['fetch', remote_name], env: env).last.zero?
end end
def gitaly_can_be_merged?(their_commit, our_commit)
!gitaly_conflicts_client(our_commit, their_commit).conflicts?
end
def rugged_can_be_merged?(their_commit, our_commit)
!rugged.merge_commits(our_commit, their_commit).conflicts?
end
def gitlab_projects_error def gitlab_projects_error
raise CommandError, @gitlab_projects.output raise CommandError, @gitlab_projects.output
end end
...@@ -2257,6 +2321,39 @@ module Gitlab ...@@ -2257,6 +2321,39 @@ module Gitlab
.commits_by_message(query, revision: ref, path: path, limit: limit, offset: offset) .commits_by_message(query, revision: ref, path: path, limit: limit, offset: offset)
.map { |c| commit(c) } .map { |c| commit(c) }
end end
def gitaly_can_be_merged?(their_commit, our_commit)
!gitaly_conflicts_client(our_commit, their_commit).conflicts?
end
def rugged_can_be_merged?(their_commit, our_commit)
!rugged.merge_commits(our_commit, their_commit).conflicts?
end
def last_commit_for_path_by_gitaly(sha, path)
gitaly_commit_client.last_commit_for_path(sha, path)
end
def last_commit_id_for_path_by_shelling_out(sha, path)
args = %W(rev-list --max-count=1 #{sha} -- #{path})
run_git_with_timeout(args, Gitlab::Git::Popen::FAST_GIT_PROCESS_TIMEOUT).first.strip
end
def rugged_merge_base(from, to)
rugged.merge_base(from, to)
rescue Rugged::ReferenceError
nil
end
def rugged_commit_count(ref)
walker = Rugged::Walker.new(rugged)
walker.sorting(Rugged::SORT_TOPO | Rugged::SORT_REVERSE)
oid = rugged.rev_parse_oid(ref)
walker.push(oid)
walker.count
rescue Rugged::ReferenceError
0
end
end end
end end
end end
...@@ -83,6 +83,8 @@ module Gitlab ...@@ -83,6 +83,8 @@ module Gitlab
commit_id: sha commit_id: sha
) )
end end
rescue Rugged::ReferenceError
[]
end end
end end
......
...@@ -117,6 +117,20 @@ module Gitlab ...@@ -117,6 +117,20 @@ module Gitlab
page.url_path page.url_path
end end
def page_formatted_data(title:, dir: nil, version: nil)
version = version&.id
@repository.gitaly_migrate(:wiki_page_formatted_data) do |is_enabled|
if is_enabled
gitaly_wiki_client.get_formatted_data(title: title, dir: dir, version: version)
else
# We don't use #page because if wiki_find_page feature is enabled, we would
# get a page without formatted_data.
gollum_find_page(title: title, dir: dir, version: version)&.formatted_data
end
end
end
private private
# options: # options:
......
...@@ -6,6 +6,7 @@ require 'grpc/health/v1/health_services_pb' ...@@ -6,6 +6,7 @@ require 'grpc/health/v1/health_services_pb'
module Gitlab module Gitlab
module GitalyClient module GitalyClient
include Gitlab::Metrics::Methods
module MigrationStatus module MigrationStatus
DISABLED = 1 DISABLED = 1
OPT_IN = 2 OPT_IN = 2
...@@ -33,8 +34,6 @@ module Gitlab ...@@ -33,8 +34,6 @@ module Gitlab
CLIENT_NAME = (Sidekiq.server? ? 'gitlab-sidekiq' : 'gitlab-web').freeze CLIENT_NAME = (Sidekiq.server? ? 'gitlab-sidekiq' : 'gitlab-web').freeze
MUTEX = Mutex.new MUTEX = Mutex.new
METRICS_MUTEX = Mutex.new
private_constant :MUTEX, :METRICS_MUTEX
class << self class << self
attr_accessor :query_time attr_accessor :query_time
...@@ -42,28 +41,14 @@ module Gitlab ...@@ -42,28 +41,14 @@ module Gitlab
self.query_time = 0 self.query_time = 0
def self.migrate_histogram define_histogram :gitaly_migrate_call_duration_seconds do
@migrate_histogram ||= docstring "Gitaly migration call execution timings"
METRICS_MUTEX.synchronize do base_labels gitaly_enabled: nil, feature: nil
# If a thread was blocked on the mutex, the value was set already
return @migrate_histogram if @migrate_histogram
Gitlab::Metrics.histogram(:gitaly_migrate_call_duration_seconds,
"Gitaly migration call execution timings",
gitaly_enabled: nil, feature: nil)
end
end end
def self.gitaly_call_histogram define_histogram :gitaly_controller_action_duration_seconds do
@gitaly_call_histogram ||= docstring "Gitaly endpoint histogram by controller and action combination"
METRICS_MUTEX.synchronize do base_labels Gitlab::Metrics::Transaction::BASE_LABELS.merge(gitaly_service: nil, rpc: nil)
# If a thread was blocked on the mutex, the value was set already
return @gitaly_call_histogram if @gitaly_call_histogram
Gitlab::Metrics.histogram(:gitaly_controller_action_duration_seconds,
"Gitaly endpoint histogram by controller and action combination",
Gitlab::Metrics::Transaction::BASE_LABELS.merge(gitaly_service: nil, rpc: nil))
end
end end
def self.stub(name, storage) def self.stub(name, storage)
...@@ -145,7 +130,7 @@ module Gitlab ...@@ -145,7 +130,7 @@ module Gitlab
# Keep track, separately, for the performance bar # Keep track, separately, for the performance bar
self.query_time += duration self.query_time += duration
gitaly_call_histogram.observe( gitaly_controller_action_duration_seconds.observe(
current_transaction_labels.merge(gitaly_service: service.to_s, rpc: rpc.to_s), current_transaction_labels.merge(gitaly_service: service.to_s, rpc: rpc.to_s),
duration) duration)
end end
...@@ -247,7 +232,7 @@ module Gitlab ...@@ -247,7 +232,7 @@ module Gitlab
yield is_enabled yield is_enabled
ensure ensure
total_time = Gitlab::Metrics::System.monotonic_time - start total_time = Gitlab::Metrics::System.monotonic_time - start
migrate_histogram.observe({ gitaly_enabled: is_enabled, feature: feature }, total_time) gitaly_migrate_call_duration_seconds.observe({ gitaly_enabled: is_enabled, feature: feature }, total_time)
feature_stack.shift feature_stack.shift
Thread.current[:gitaly_feature_stack] = nil if feature_stack.empty? Thread.current[:gitaly_feature_stack] = nil if feature_stack.empty?
end end
......
...@@ -103,7 +103,13 @@ module Gitlab ...@@ -103,7 +103,13 @@ module Gitlab
request_enum.push(Gitaly::UserMergeBranchRequest.new(apply: true)) request_enum.push(Gitaly::UserMergeBranchRequest.new(apply: true))
branch_update = response_enum.next.branch_update second_response = response_enum.next
if second_response.pre_receive_error.present?
raise Gitlab::Git::HooksService::PreReceiveError, second_response.pre_receive_error
end
branch_update = second_response.branch_update
return if branch_update.nil? return if branch_update.nil?
raise Gitlab::Git::CommitError.new('failed to apply merge to branch') unless branch_update.commit_id.present? raise Gitlab::Git::CommitError.new('failed to apply merge to branch') unless branch_update.commit_id.present?
......
...@@ -133,13 +133,16 @@ module Gitlab ...@@ -133,13 +133,16 @@ module Gitlab
GitalyClient.call(@repository.storage, :ref_service, :delete_branch, request) GitalyClient.call(@repository.storage, :ref_service, :delete_branch, request)
end end
def delete_refs(except_with_prefixes:) def delete_refs(refs: [], except_with_prefixes: [])
request = Gitaly::DeleteRefsRequest.new( request = Gitaly::DeleteRefsRequest.new(
repository: @gitaly_repo, repository: @gitaly_repo,
except_with_prefix: except_with_prefixes refs: refs.map { |r| encode_binary(r) },
except_with_prefix: except_with_prefixes.map { |r| encode_binary(r) }
) )
GitalyClient.call(@repository.storage, :ref_service, :delete_refs, request) response = GitalyClient.call(@repository.storage, :ref_service, :delete_refs, request)
raise Gitlab::Git::Repository::GitError, response.git_error if response.git_error.present?
end end
private private
......
...@@ -203,6 +203,22 @@ module Gitlab ...@@ -203,6 +203,22 @@ module Gitlab
timeout: GitalyClient.default_timeout timeout: GitalyClient.default_timeout
) )
end end
def write_ref(ref_path, ref, old_ref, shell)
request = Gitaly::WriteRefRequest.new(
repository: @gitaly_repo,
ref: ref_path.b,
revision: ref.b,
shell: shell
)
request.old_revision = old_ref.b unless old_ref.nil?
response = GitalyClient.call(@storage, :repository_service, :write_ref, request)
raise Gitlab::Git::CommandError, encode!(response.error) if response.error.present?
true
end
end end
end end
end end
module Gitlab
module GitalyClient
# Meant for extraction of server data, and perhaps later to perform miscellaneous tasks
#
# Not meant for connection logic; for that, look in Gitlab::GitalyClient
class ServerService
def initialize(storage)
@storage = storage
end
def info
GitalyClient.call(@storage, :server_service, :server_info, Gitaly::ServerInfoRequest.new)
end
end
end
end
...@@ -127,6 +127,18 @@ module Gitlab ...@@ -127,6 +127,18 @@ module Gitlab
wiki_file wiki_file
end end
def get_formatted_data(title:, dir: nil, version: nil)
request = Gitaly::WikiGetFormattedDataRequest.new(
repository: @gitaly_repo,
title: encode_binary(title),
revision: encode_binary(version),
directory: encode_binary(dir)
)
response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_get_formatted_data, request)
response.reduce("") { |memo, msg| memo << msg.data }
end
private private
# If a block is given and the yielded value is true, iteration will be # If a block is given and the yielded value is true, iteration will be
......
module Gitlab module Gitlab
module Metrics module Metrics
extend Gitlab::Metrics::InfluxDb include Gitlab::Metrics::InfluxDb
extend Gitlab::Metrics::Prometheus include Gitlab::Metrics::Prometheus
def self.enabled? def self.enabled?
influx_metrics_enabled? || prometheus_metrics_enabled? influx_metrics_enabled? || prometheus_metrics_enabled?
......
...@@ -4,26 +4,15 @@ module Gitlab ...@@ -4,26 +4,15 @@ module Gitlab
module Metrics module Metrics
# Class for tracking timing information about method calls # Class for tracking timing information about method calls
class MethodCall class MethodCall
@@measurement_enabled_cache = Concurrent::AtomicBoolean.new(false) include Gitlab::Metrics::Methods
@@measurement_enabled_cache_expires_at = Concurrent::AtomicReference.new(Time.now.to_i)
MUTEX = Mutex.new
BASE_LABELS = { module: nil, method: nil }.freeze BASE_LABELS = { module: nil, method: nil }.freeze
attr_reader :real_time, :cpu_time, :call_count, :labels attr_reader :real_time, :cpu_time, :call_count, :labels
def self.call_duration_histogram define_histogram :gitlab_method_call_duration_seconds do
return @call_duration_histogram if @call_duration_histogram docstring 'Method calls real duration'
base_labels Transaction::BASE_LABELS.merge(BASE_LABELS)
MUTEX.synchronize do buckets [0.01, 0.05, 0.1, 0.5, 1]
@call_duration_histogram ||= Gitlab::Metrics.histogram( with_feature :prometheus_metrics_method_instrumentation
:gitlab_method_call_duration_seconds,
'Method calls real duration',
Transaction::BASE_LABELS.merge(BASE_LABELS),
[0.01, 0.05, 0.1, 0.5, 1])
end
end
def self.measurement_enabled_cache_expires_at
@@measurement_enabled_cache_expires_at
end end
# name - The full name of the method (including namespace) such as # name - The full name of the method (including namespace) such as
...@@ -53,8 +42,8 @@ module Gitlab ...@@ -53,8 +42,8 @@ module Gitlab
@cpu_time += cpu_time @cpu_time += cpu_time
@call_count += 1 @call_count += 1
if call_measurement_enabled? && above_threshold? if above_threshold?
self.class.call_duration_histogram.observe(@transaction.labels.merge(labels), real_time) self.class.gitlab_method_call_duration_seconds.observe(@transaction.labels.merge(labels), real_time)
end end
retval retval
...@@ -78,17 +67,6 @@ module Gitlab ...@@ -78,17 +67,6 @@ module Gitlab
def above_threshold? def above_threshold?
real_time.in_milliseconds >= Metrics.method_call_threshold real_time.in_milliseconds >= Metrics.method_call_threshold
end end
def call_measurement_enabled?
expires_at = @@measurement_enabled_cache_expires_at.value
if expires_at < Time.now.to_i
if @@measurement_enabled_cache_expires_at.compare_and_set(expires_at, 1.minute.from_now.to_i)
@@measurement_enabled_cache.value = Feature.get(:prometheus_metrics_method_instrumentation).enabled?
end
end
@@measurement_enabled_cache.value
end
end end
end end
end end
# rubocop:disable Style/ClassVars
module Gitlab
module Metrics
module Methods
extend ActiveSupport::Concern
included do
@@_metric_provider_mutex ||= Mutex.new
@@_metrics_provider_cache = {}
end
class_methods do
def reload_metric!(name)
@@_metrics_provider_cache.delete(name)
end
private
def define_metric(type, name, opts = {}, &block)
if respond_to?(name)
raise ArgumentError, "method #{name} already exists"
end
define_singleton_method(name) do
# inlining fetch_metric method to avoid method call overhead when instrumenting hot spots
@@_metrics_provider_cache[name] || init_metric(type, name, opts, &block)
end
end
def fetch_metric(type, name, opts = {}, &block)
@@_metrics_provider_cache[name] || init_metric(type, name, opts, &block)
end
def init_metric(type, name, opts = {}, &block)
options = MetricOptions.new(opts)
options.evaluate(&block)
if disabled_by_feature(options)
synchronized_cache_fill(name) { NullMetric.instance }
else
synchronized_cache_fill(name) { build_metric!(type, name, options) }
end
end
def synchronized_cache_fill(key)
@@_metric_provider_mutex.synchronize do
@@_metrics_provider_cache[key] ||= yield
end
end
def disabled_by_feature(options)
options.with_feature && !Feature.get(options.with_feature).enabled?
end
def build_metric!(type, name, options)
case type
when :gauge
Gitlab::Metrics.gauge(name, options.docstring, options.base_labels, options.multiprocess_mode)
when :counter
Gitlab::Metrics.counter(name, options.docstring, options.base_labels)
when :histogram
Gitlab::Metrics.histogram(name, options.docstring, options.base_labels, options.buckets)
when :summary
raise NotImplementedError, "summary metrics are not currently supported"
else
raise ArgumentError, "uknown metric type #{type}"
end
end
# Fetch and/or initialize counter metric
# @param [Symbol] name
# @param [Hash] opts
def fetch_counter(name, opts = {}, &block)
fetch_metric(:counter, name, opts, &block)
end
# Fetch and/or initialize gauge metric
# @param [Symbol] name
# @param [Hash] opts
def fetch_gauge(name, opts = {}, &block)
fetch_metric(:gauge, name, opts, &block)
end
# Fetch and/or initialize histogram metric
# @param [Symbol] name
# @param [Hash] opts
def fetch_histogram(name, opts = {}, &block)
fetch_metric(:histogram, name, opts, &block)
end
# Fetch and/or initialize summary metric
# @param [Symbol] name
# @param [Hash] opts
def fetch_summary(name, opts = {}, &block)
fetch_metric(:summary, name, opts, &block)
end
# Define metric accessor method for a Counter
# @param [Symbol] name
# @param [Hash] opts
def define_counter(name, opts = {}, &block)
define_metric(:counter, name, opts, &block)
end
# Define metric accessor method for a Gauge
# @param [Symbol] name
# @param [Hash] opts
def define_gauge(name, opts = {}, &block)
define_metric(:gauge, name, opts, &block)
end
# Define metric accessor method for a Histogram
# @param [Symbol] name
# @param [Hash] opts
def define_histogram(name, opts = {}, &block)
define_metric(:histogram, name, opts, &block)
end
# Define metric accessor method for a Summary
# @param [Symbol] name
# @param [Hash] opts
def define_summary(name, opts = {}, &block)
define_metric(:summary, name, opts, &block)
end
end
end
end
end
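To make the new DSL concrete, here is a minimal sketch of a class that includes `Gitlab::Metrics::Methods` and defines a histogram with the options shown above; the class name and metric name are illustrative only and do not appear anywhere in this commit.

```
# Illustrative only; ExampleWorker and its metric name are hypothetical.
class ExampleWorker
  include Gitlab::Metrics::Methods

  define_histogram :example_worker_duration_seconds do
    docstring 'Example worker execution timings'
    base_labels queue: nil
    buckets [0.01, 0.1, 1, 10]
  end

  def perform(queue)
    start = Gitlab::Metrics::System.monotonic_time
    # ... do the actual work here ...
  ensure
    duration = Gitlab::Metrics::System.monotonic_time - start
    # define_histogram defines a class-level accessor, hence self.class
    self.class.example_worker_duration_seconds.observe({ queue: queue }, duration)
  end
end
```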
module Gitlab
module Metrics
module Methods
class MetricOptions
SMALL_NETWORK_BUCKETS = [0.005, 0.01, 0.1, 1, 10].freeze
def initialize(options = {})
@multiprocess_mode = options[:multiprocess_mode] || :all
@buckets = options[:buckets] || SMALL_NETWORK_BUCKETS
@base_labels = options[:base_labels] || {}
@docstring = options[:docstring]
@with_feature = options[:with_feature]
end
# Documentation describing metric in metrics endpoint '/-/metrics'
def docstring(docstring = nil)
@docstring = docstring unless docstring.nil?
@docstring
end
# Gauge aggregation mode for multiprocess metrics
# - :all (default) returns each gauge for every process
# - :livesum all processes' gauges summed up
# - :max maximum value of per process gauges
# - :min minimum value of per process gauges
def multiprocess_mode(mode = nil)
@multiprocess_mode = mode unless mode.nil?
@multiprocess_mode
end
# Measurement buckets for histograms
def buckets(buckets = nil)
@buckets = buckets unless buckets.nil?
@buckets
end
# Base labels are merged with per metric labels
def base_labels(base_labels = nil)
@base_labels = base_labels unless base_labels.nil?
@base_labels
end
# Use feature toggle to control whether certain metric is enabled/disabled
def with_feature(name = nil)
@with_feature = name unless name.nil?
@with_feature
end
def evaluate(&block)
instance_eval(&block) if block_given?
self
end
end
end
end
end
...@@ -2,6 +2,8 @@ module Gitlab ...@@ -2,6 +2,8 @@ module Gitlab
module Metrics module Metrics
# Mocks ::Prometheus::Client::Metric and all derived metrics # Mocks ::Prometheus::Client::Metric and all derived metrics
class NullMetric class NullMetric
include Singleton
def method_missing(name, *args, &block) def method_missing(name, *args, &block)
nil nil
end end
......
...@@ -3,73 +3,77 @@ require 'prometheus/client' ...@@ -3,73 +3,77 @@ require 'prometheus/client'
module Gitlab module Gitlab
module Metrics module Metrics
module Prometheus module Prometheus
include Gitlab::CurrentSettings extend ActiveSupport::Concern
include Gitlab::Utils::StrongMemoize
REGISTRY_MUTEX = Mutex.new REGISTRY_MUTEX = Mutex.new
PROVIDER_MUTEX = Mutex.new PROVIDER_MUTEX = Mutex.new
def metrics_folder_present? class_methods do
multiprocess_files_dir = ::Prometheus::Client.configuration.multiprocess_files_dir include Gitlab::Utils::StrongMemoize
multiprocess_files_dir && def metrics_folder_present?
::Dir.exist?(multiprocess_files_dir) && multiprocess_files_dir = ::Prometheus::Client.configuration.multiprocess_files_dir
::File.writable?(multiprocess_files_dir)
end
def prometheus_metrics_enabled? multiprocess_files_dir &&
strong_memoize(:prometheus_metrics_enabled) do ::Dir.exist?(multiprocess_files_dir) &&
prometheus_metrics_enabled_unmemoized ::File.writable?(multiprocess_files_dir)
end
def prometheus_metrics_enabled?
strong_memoize(:prometheus_metrics_enabled) do
prometheus_metrics_enabled_unmemoized
end
end end
end
def registry def registry
strong_memoize(:registry) do strong_memoize(:registry) do
REGISTRY_MUTEX.synchronize do REGISTRY_MUTEX.synchronize do
strong_memoize(:registry) do strong_memoize(:registry) do
::Prometheus::Client.registry ::Prometheus::Client.registry
end
end end
end end
end end
end
def counter(name, docstring, base_labels = {}) def counter(name, docstring, base_labels = {})
safe_provide_metric(:counter, name, docstring, base_labels) safe_provide_metric(:counter, name, docstring, base_labels)
end end
def summary(name, docstring, base_labels = {}) def summary(name, docstring, base_labels = {})
safe_provide_metric(:summary, name, docstring, base_labels) safe_provide_metric(:summary, name, docstring, base_labels)
end end
def gauge(name, docstring, base_labels = {}, multiprocess_mode = :all) def gauge(name, docstring, base_labels = {}, multiprocess_mode = :all)
safe_provide_metric(:gauge, name, docstring, base_labels, multiprocess_mode) safe_provide_metric(:gauge, name, docstring, base_labels, multiprocess_mode)
end end
def histogram(name, docstring, base_labels = {}, buckets = ::Prometheus::Client::Histogram::DEFAULT_BUCKETS) def histogram(name, docstring, base_labels = {}, buckets = ::Prometheus::Client::Histogram::DEFAULT_BUCKETS)
safe_provide_metric(:histogram, name, docstring, base_labels, buckets) safe_provide_metric(:histogram, name, docstring, base_labels, buckets)
end end
private private
def safe_provide_metric(method, name, *args) def safe_provide_metric(method, name, *args)
metric = provide_metric(name) metric = provide_metric(name)
return metric if metric return metric if metric
PROVIDER_MUTEX.synchronize do PROVIDER_MUTEX.synchronize do
provide_metric(name) || registry.method(method).call(name, *args) provide_metric(name) || registry.method(method).call(name, *args)
end
end end
end
def provide_metric(name) def provide_metric(name)
if prometheus_metrics_enabled? if prometheus_metrics_enabled?
registry.get(name) registry.get(name)
else else
NullMetric.new NullMetric.instance
end
end end
end
def prometheus_metrics_enabled_unmemoized def prometheus_metrics_enabled_unmemoized
metrics_folder_present? && current_application_settings[:prometheus_metrics_enabled] || false metrics_folder_present? &&
Gitlab::CurrentSettings.current_application_settings[:prometheus_metrics_enabled] || false
end
end end
end end
end end
......
...@@ -3,6 +3,14 @@ module Gitlab ...@@ -3,6 +3,14 @@ module Gitlab
module Subscribers module Subscribers
# Class for tracking the rendering timings of views. # Class for tracking the rendering timings of views.
class ActionView < ActiveSupport::Subscriber class ActionView < ActiveSupport::Subscriber
include Gitlab::Metrics::Methods
define_histogram :gitlab_view_rendering_duration_seconds do
docstring 'View rendering time'
base_labels Transaction::BASE_LABELS.merge({ path: nil })
buckets [0.001, 0.01, 0.1, 1, 10.0]
with_feature :prometheus_metrics_view_instrumentation
end
attach_to :action_view attach_to :action_view
SERIES = 'views'.freeze SERIES = 'views'.freeze
...@@ -15,23 +23,11 @@ module Gitlab ...@@ -15,23 +23,11 @@ module Gitlab
private private
def metric_view_rendering_duration_seconds
@metric_view_rendering_duration_seconds ||= Gitlab::Metrics.histogram(
:gitlab_view_rendering_duration_seconds,
'View rendering time',
Transaction::BASE_LABELS.merge({ path: nil }),
[0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.500, 2.0, 10.0]
)
end
def track(event) def track(event)
values = values_for(event) values = values_for(event)
tags = tags_for(event) tags = tags_for(event)
metric_view_rendering_duration_seconds.observe( self.class.gitlab_view_rendering_duration_seconds.observe(current_transaction.labels.merge(tags), event.duration)
current_transaction.labels.merge(tags),
event.duration
)
current_transaction.increment(:view_duration, event.duration) current_transaction.increment(:view_duration, event.duration)
current_transaction.add_metric(SERIES, values, tags) current_transaction.add_metric(SERIES, values, tags)
......
...@@ -3,12 +3,13 @@ module Gitlab ...@@ -3,12 +3,13 @@ module Gitlab
module Subscribers module Subscribers
# Class for tracking the total query duration of a transaction. # Class for tracking the total query duration of a transaction.
class ActiveRecord < ActiveSupport::Subscriber class ActiveRecord < ActiveSupport::Subscriber
include Gitlab::Metrics::Methods
attach_to :active_record attach_to :active_record
def sql(event) def sql(event)
return unless current_transaction return unless current_transaction
metric_sql_duration_seconds.observe(current_transaction.labels, event.duration / 1000.0) self.class.gitlab_sql_duration_seconds.observe(current_transaction.labels, event.duration / 1000.0)
current_transaction.increment(:sql_duration, event.duration, false) current_transaction.increment(:sql_duration, event.duration, false)
current_transaction.increment(:sql_count, 1, false) current_transaction.increment(:sql_count, 1, false)
...@@ -16,17 +17,14 @@ module Gitlab ...@@ -16,17 +17,14 @@ module Gitlab
private private
def current_transaction define_histogram :gitlab_sql_duration_seconds do
Transaction.current docstring 'SQL time'
base_labels Transaction::BASE_LABELS
buckets [0.001, 0.01, 0.1, 1.0, 10.0]
end end
def metric_sql_duration_seconds def current_transaction
@metric_sql_duration_seconds ||= Gitlab::Metrics.histogram( Transaction.current
:gitlab_sql_duration_seconds,
'SQL time',
Transaction::BASE_LABELS,
[0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.500, 2.0, 10.0]
)
end end
end end
end end
......
...@@ -2,11 +2,12 @@ module Gitlab ...@@ -2,11 +2,12 @@ module Gitlab
module Metrics module Metrics
# Class for storing metrics information of a single transaction. # Class for storing metrics information of a single transaction.
class Transaction class Transaction
include Gitlab::Metrics::Methods
# base labels shared among all transactions # base labels shared among all transactions
BASE_LABELS = { controller: nil, action: nil }.freeze BASE_LABELS = { controller: nil, action: nil }.freeze
THREAD_KEY = :_gitlab_metrics_transaction THREAD_KEY = :_gitlab_metrics_transaction
METRICS_MUTEX = Mutex.new
# The series to store events (e.g. Git pushes) in. # The series to store events (e.g. Git pushes) in.
EVENT_SERIES = 'events'.freeze EVENT_SERIES = 'events'.freeze
...@@ -54,8 +55,8 @@ module Gitlab ...@@ -54,8 +55,8 @@ module Gitlab
@memory_after = System.memory_usage @memory_after = System.memory_usage
@finished_at = System.monotonic_time @finished_at = System.monotonic_time
self.class.metric_transaction_duration_seconds.observe(labels, duration) self.class.gitlab_transaction_duration_seconds.observe(labels, duration)
self.class.metric_transaction_allocated_memory_bytes.observe(labels, allocated_memory * 1024.0) self.class.gitlab_transaction_allocated_memory_bytes.observe(labels, allocated_memory * 1024.0)
Thread.current[THREAD_KEY] = nil Thread.current[THREAD_KEY] = nil
end end
...@@ -72,7 +73,7 @@ module Gitlab ...@@ -72,7 +73,7 @@ module Gitlab
# event_name - The name of the event (e.g. "git_push"). # event_name - The name of the event (e.g. "git_push").
# tags - A set of tags to attach to the event. # tags - A set of tags to attach to the event.
def add_event(event_name, tags = {}) def add_event(event_name, tags = {})
self.class.metric_event_counter(event_name, tags).increment(tags.merge(labels)) self.class.transaction_metric(event_name, :counter, prefix: 'event_', tags: tags).increment(tags.merge(labels))
@metrics << Metric.new(EVENT_SERIES, { count: 1 }, tags.merge(event: event_name), :event) @metrics << Metric.new(EVENT_SERIES, { count: 1 }, tags.merge(event: event_name), :event)
end end
...@@ -86,12 +87,12 @@ module Gitlab ...@@ -86,12 +87,12 @@ module Gitlab
end end
def increment(name, value, use_prometheus = true) def increment(name, value, use_prometheus = true)
self.class.metric_transaction_counter(name).increment(labels, value) if use_prometheus self.class.transaction_metric(name, :counter).increment(labels, value) if use_prometheus
@values[name] += value @values[name] += value
end end
def set(name, value, use_prometheus = true) def set(name, value, use_prometheus = true)
self.class.metric_transaction_gauge(name).set(labels, value) if use_prometheus self.class.transaction_metric(name, :gauge).set(labels, value) if use_prometheus
@values[name] = value @values[name] = value
end end
...@@ -136,64 +137,28 @@ module Gitlab ...@@ -136,64 +137,28 @@ module Gitlab
"#{labels[:controller]}##{labels[:action]}" if labels && !labels.empty? "#{labels[:controller]}##{labels[:action]}" if labels && !labels.empty?
end end
def self.metric_transaction_duration_seconds define_histogram :gitlab_transaction_duration_seconds do
return @metric_transaction_duration_seconds if @metric_transaction_duration_seconds docstring 'Transaction duration'
base_labels BASE_LABELS
METRICS_MUTEX.synchronize do buckets [0.001, 0.01, 0.1, 1.0, 10.0]
@metric_transaction_duration_seconds ||= Gitlab::Metrics.histogram(
:gitlab_transaction_duration_seconds,
'Transaction duration',
BASE_LABELS,
[0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.500, 2.0, 10.0]
)
end
end
def self.metric_transaction_allocated_memory_bytes
return @metric_transaction_allocated_memory_bytes if @metric_transaction_allocated_memory_bytes
METRICS_MUTEX.synchronize do
@metric_transaction_allocated_memory_bytes ||= Gitlab::Metrics.histogram(
:gitlab_transaction_allocated_memory_bytes,
'Transaction allocated memory bytes',
BASE_LABELS,
[1000, 10000, 20000, 500000, 1000000, 2000000, 5000000, 10000000, 20000000, 100000000]
)
end
end end
def self.metric_event_counter(event_name, tags) define_histogram :gitlab_transaction_allocated_memory_bytes do
return @metric_event_counters[event_name] if @metric_event_counters&.has_key?(event_name) docstring 'Transaction allocated memory bytes'
base_labels BASE_LABELS
METRICS_MUTEX.synchronize do buckets [100, 1000, 10000, 100000, 1000000, 10000000]
@metric_event_counters ||= {} with_feature :prometheus_metrics_transaction_allocated_memory
@metric_event_counters[event_name] ||= Gitlab::Metrics.counter(
"gitlab_transaction_event_#{event_name}_total".to_sym,
"Transaction event #{event_name} counter",
tags.merge(BASE_LABELS)
)
end
end
def self.metric_transaction_counter(name)
return @metric_transaction_counters[name] if @metric_transaction_counters&.has_key?(name)
METRICS_MUTEX.synchronize do
@metric_transaction_counters ||= {}
@metric_transaction_counters[name] ||= Gitlab::Metrics.counter(
"gitlab_transaction_#{name}_total".to_sym, "Transaction #{name} counter", BASE_LABELS
)
end
end end
def self.metric_transaction_gauge(name) def self.transaction_metric(name, type, prefix: nil, tags: {})
return @metric_transaction_gauges[name] if @metric_transaction_gauges&.has_key?(name) metric_name = "gitlab_transaction_#{prefix}#{name}_total".to_sym
fetch_metric(type, metric_name) do
docstring "Transaction #{prefix}#{name} #{type}"
base_labels tags.merge(BASE_LABELS)
METRICS_MUTEX.synchronize do if type == :gauge
@metric_transaction_gauges ||= {} multiprocess_mode :livesum
@metric_transaction_gauges[name] ||= Gitlab::Metrics.gauge( end
"gitlab_transaction_#{name}".to_sym, "Transaction gauge #{name}", BASE_LABELS, :livesum
)
end end
end end
end end
......
...@@ -55,7 +55,7 @@ module Gitlab ...@@ -55,7 +55,7 @@ module Gitlab
user ||= find_or_build_ldap_user if auto_link_ldap_user? user ||= find_or_build_ldap_user if auto_link_ldap_user?
user ||= build_new_user if signup_enabled? user ||= build_new_user if signup_enabled?
user.external = true if external_provider? && user user.external = true if external_provider? && user&.new_record?
user user
end end
......
...@@ -34,6 +34,9 @@ You can use GitLab QA to exercise tests on any live instance! For example, the ...@@ -34,6 +34,9 @@ You can use GitLab QA to exercise tests on any live instance! For example, the
following call would log in to a local [GDK] instance and run all specs in following call would log in to a local [GDK] instance and run all specs in
`qa/specs/features`: `qa/specs/features`:
First, `cd` into the `$gdk/gitlab/qa` directory.
The `bin/qa` script expects you to be in the `qa` folder of the app.
``` ```
bin/qa Test::Instance http://localhost:3000 bin/qa Test::Instance http://localhost:3000
``` ```
......
...@@ -27,6 +27,7 @@ module QA ...@@ -27,6 +27,7 @@ module QA
module Resource module Resource
autoload :Sandbox, 'qa/factory/resource/sandbox' autoload :Sandbox, 'qa/factory/resource/sandbox'
autoload :Group, 'qa/factory/resource/group' autoload :Group, 'qa/factory/resource/group'
autoload :Issue, 'qa/factory/resource/issue'
autoload :Project, 'qa/factory/resource/project' autoload :Project, 'qa/factory/resource/project'
autoload :MergeRequest, 'qa/factory/resource/merge_request' autoload :MergeRequest, 'qa/factory/resource/merge_request'
autoload :DeployKey, 'qa/factory/resource/deploy_key' autoload :DeployKey, 'qa/factory/resource/deploy_key'
...@@ -125,6 +126,12 @@ module QA ...@@ -125,6 +126,12 @@ module QA
autoload :SecretVariables, 'qa/page/project/settings/secret_variables' autoload :SecretVariables, 'qa/page/project/settings/secret_variables'
autoload :Runners, 'qa/page/project/settings/runners' autoload :Runners, 'qa/page/project/settings/runners'
end end
module Issue
autoload :New, 'qa/page/project/issue/new'
autoload :Show, 'qa/page/project/issue/show'
autoload :Index, 'qa/page/project/issue/index'
end
end end
module Profile module Profile
...@@ -143,6 +150,13 @@ module QA ...@@ -143,6 +150,13 @@ module QA
autoload :Main, 'qa/page/mattermost/main' autoload :Main, 'qa/page/mattermost/main'
autoload :Login, 'qa/page/mattermost/login' autoload :Login, 'qa/page/mattermost/login'
end end
##
# Classes describing components that are used by several pages.
#
module Component
autoload :Dropzone, 'qa/page/component/dropzone'
end
end end
## ##
......
require 'securerandom'
module QA
module Factory
module Resource
class Issue < Factory::Base
attr_writer :title, :description, :project
dependency Factory::Resource::Project, as: :project do |project|
project.name = 'project-for-issues'
project.description = 'project for adding issues'
end
product :title do
Page::Project::Issue::Show.act { issue_title }
end
def fabricate!
project.visit!
Page::Project::Show.act do
go_to_new_issue
end
Page::Project::Issue::New.perform do |page|
page.add_title(@title)
page.add_description(@description)
page.create_new_issue
end
end
end
end
end
end
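For context, a QA spec would typically drive this new factory roughly as sketched below; the exact `fabricate!` block signature is an assumption based on how the other factories under `qa/` are used, and the title and description values are illustrative.

```
# Hypothetical usage in a QA spec; assumes Factory::Base.fabricate! yields the factory instance.
Factory::Resource::Issue.fabricate! do |issue|
  issue.title = 'My first issue'
  issue.description = 'Issue created through the QA factory'
end
```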
...@@ -2,12 +2,13 @@ module QA ...@@ -2,12 +2,13 @@ module QA
module Page module Page
module Admin module Admin
class Settings < Page::Base class Settings < Page::Base
## view 'app/views/admin/application_settings/_form.html.haml' do
# TODO, define all selectors required by this page object element :form_actions, '.form-actions'
# element :submit, "submit 'Save'"
# See gitlab-org/gitlab-qa#154 element :repository_storage, '%legend Repository Storage'
# element :hashed_storage,
view 'app/views/admin/application_settings/show.html.haml' 'Create new projects using hashed storage paths'
end
def enable_hashed_storage def enable_hashed_storage
scroll_to 'legend', text: 'Repository Storage' scroll_to 'legend', text: 'Repository Storage'
......
...@@ -42,6 +42,23 @@ module QA ...@@ -42,6 +42,23 @@ module QA
page.within(selector) { yield } if block_given? page.within(selector) { yield } if block_given?
end end
# Returns true if successfully GETs the given URL
# Useful because `page.status_code` is unsupported by our driver, and
# we don't have access to the `response` to use `have_http_status`.
def asset_exists?(url)
page.execute_script <<~JS
xhr = new XMLHttpRequest();
xhr.open('GET', '#{url}', true);
xhr.send();
JS
return false unless wait(time: 0.5, max: 60, reload: false) do
page.evaluate_script('xhr.readyState == XMLHttpRequest.DONE')
end
page.evaluate_script('xhr.status') == 200
end
def find_element(name) def find_element(name)
find(element_selector_css(name)) find(element_selector_css(name))
end end
......
module QA
module Page
module Component
class Dropzone
attr_reader :page, :container
def initialize(page, container)
@page = page
@container = container
end
# Not tested and not expected to work with multiple dropzones
# instantiated on one page because there is no distinguishing
# attribute per dropzone file field.
def attach_file(attachment)
filename = File.basename(attachment)
field_style = { visibility: 'visible', height: '', width: '' }
page.attach_file(attachment, class: 'dz-hidden-input', make_visible: field_style)
# Wait for link to be appended to dropzone text
page.wait(reload: false) do
page.find("#{container} textarea").value.match(filename)
end
end
end
end
end
end
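The dropzone helper above is only expected to be driven from other page objects (the `Page::Project::Issue::Show#comment` method later in this commit does exactly that). As a standalone sketch, assuming a Capybara `page` and a `.new-note` container, a call would look like:

```
# Hypothetical call site; the fixture path is illustrative.
QA::Page::Component::Dropzone
  .new(page, '.new-note')
  .attach_file(File.expand_path('../fixtures/logo.png', __dir__))
```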
...@@ -2,12 +2,20 @@ module QA ...@@ -2,12 +2,20 @@ module QA
module Page module Page
module Group module Group
class Show < Page::Base class Show < Page::Base
## view 'app/views/groups/show.html.haml' do
# TODO, define all selectors required by this page object element :new_project_or_subgroup_dropdown, '.new-project-subgroup'
# element :new_project_or_subgroup_dropdown_toggle, '.dropdown-toggle'
# See gitlab-org/gitlab-qa#154 element :new_project_option, /%li.*data:.*value: "new-project"/
# element :new_project_button, /%input.*data:.*action: "new-project"/
view 'app/views/groups/show.html.haml' element :new_subgroup_option, /%li.*data:.*value: "new-subgroup"/
# data-value and data-action get modified by JS for subgroup
element :new_subgroup_button, /%input.*\.js-new-group-child/
end
view 'app/assets/javascripts/groups/constants.js' do
element :no_result_text, 'Sorry, no groups or projects matched your search'
end
def go_to_subgroup(name) def go_to_subgroup(name)
click_link name click_link name
...@@ -20,35 +28,40 @@ module QA ...@@ -20,35 +28,40 @@ module QA
def has_subgroup?(name) def has_subgroup?(name)
filter_by_name(name) filter_by_name(name)
page.has_link?(name) wait(reload: false) do
return false if page.has_content?('Sorry, no groups or projects matched your search')
page.has_link?(name)
end
end end
def go_to_new_subgroup def go_to_new_subgroup
within '.new-project-subgroup' do click_new('subgroup')
# May need to click again because it is possible to click the button quicker than the JS is bound
wait(reload: false) do
find('.dropdown-toggle').click
page.has_css?("li[data-value='new-subgroup']")
end
find("li[data-value='new-subgroup']").click
end
find("input[data-action='new-subgroup']").click find("input[data-action='new-subgroup']").click
end end
def go_to_new_project def go_to_new_project
click_new('project')
find("input[data-action='new-project']").click
end
private
def click_new(kind)
within '.new-project-subgroup' do within '.new-project-subgroup' do
css = "li[data-value='new-#{kind}']"
# May need to click again because it is possible to click the button quicker than the JS is bound # May need to click again because it is possible to click the button quicker than the JS is bound
wait(reload: false) do wait(reload: false) do
find('.dropdown-toggle').click find('.dropdown-toggle').click
page.has_css?("li[data-value='new-project']") page.has_css?(css)
end end
find("li[data-value='new-project']").click
end
find("input[data-action='new-project']").click find(css).click
end
end end
end end
end end
......
...@@ -2,15 +2,8 @@ module QA ...@@ -2,15 +2,8 @@ module QA
module Page module Page
module Menu module Menu
class Admin < Page::Base class Admin < Page::Base
## view 'app/views/layouts/nav/sidebar/_admin.html.haml' do
# TODO, define all selectors required by this page object element :settings, "_('Settings')"
#
# See gitlab-org/gitlab-qa#154
#
view 'app/views/admin/dashboard/index.html.haml'
def go_to_license
click_link 'License'
end end
def go_to_settings def go_to_settings
......
...@@ -7,6 +7,8 @@ module QA ...@@ -7,6 +7,8 @@ module QA
element :settings_link, 'link_to edit_project_path' element :settings_link, 'link_to edit_project_path'
element :repository_link, "title: 'Repository'" element :repository_link, "title: 'Repository'"
element :pipelines_settings_link, "title: 'CI / CD'" element :pipelines_settings_link, "title: 'CI / CD'"
element :issues_link, %r{link_to.*shortcuts-issues}
element :issues_link_text, "Issues"
element :top_level_items, '.sidebar-top-level-items' element :top_level_items, '.sidebar-top-level-items'
element :activity_link, "title: 'Activity'" element :activity_link, "title: 'Activity'"
end end
...@@ -43,6 +45,12 @@ module QA ...@@ -43,6 +45,12 @@ module QA
end end
end end
def click_issues
within_sidebar do
click_link('Issues')
end
end
private private
def hover_settings def hover_settings
......
module QA
module Page
module Project
module Issue
class Index < Page::Base
view 'app/views/projects/issues/_issue.html.haml' do
element :issue_link, 'link_to issue.title'
end
def go_to_issue(title)
click_link(title)
end
end
end
end
end
end
module QA
module Page
module Project
module Issue
class New < Page::Base
view 'app/views/shared/issuable/_form.html.haml' do
element :submit_issue_button, 'form.submit "Submit'
end
view 'app/views/shared/issuable/form/_title.html.haml' do
element :issue_title_textbox, 'form.text_field :title'
end
view 'app/views/shared/form_elements/_description.html.haml' do
element :issue_description_textarea, "render 'projects/zen', f: form, attr: :description"
end
def add_title(title)
fill_in 'issue_title', with: title
end
def add_description(description)
fill_in 'issue_description', with: description
end
def create_new_issue
click_on 'Submit issue'
end
end
end
end
end
end
module QA
module Page
module Project
module Issue
class Show < Page::Base
view 'app/views/projects/issues/show.html.haml' do
element :issue_details, '.issue-details'
element :title, '.title'
end
view 'app/views/shared/notes/_form.html.haml' do
element :new_note_form, 'new-note'
element :new_note_form, 'attr: :note'
end
view 'app/views/shared/notes/_comment_button.html.haml' do
element :comment_button, '%strong Comment'
end
def issue_title
find('.issue-details .title').text
end
# Adds a comment to an issue
# attachment option should be an absolute path
def comment(text, attachment: nil)
fill_in(with: text, name: 'note[note]')
unless attachment.nil?
QA::Page::Component::Dropzone.new(page, '.new-note')
.attach_file(attachment)
end
click_on 'Comment'
end
end
end
end
end
end
...@@ -17,6 +17,11 @@ module QA ...@@ -17,6 +17,11 @@ module QA
element :project_name element :project_name
end end
view 'app/views/layouts/header/_new_dropdown.haml' do
element :new_menu_toggle
element :new_issue_link, "link_to 'New issue', new_project_issue_path(@project)"
end
def choose_repository_clone_http def choose_repository_clone_http
wait(reload: false) do wait(reload: false) do
click_element :clone_dropdown click_element :clone_dropdown
...@@ -46,6 +51,12 @@ module QA ...@@ -46,6 +51,12 @@ module QA
sleep 5 sleep 5
refresh refresh
end end
def go_to_new_issue
click_element :new_menu_toggle
click_link 'New issue'
end
end end
end end
end end
......
...@@ -21,6 +21,7 @@ ALLOWED = [ ...@@ -21,6 +21,7 @@ ALLOWED = [
].freeze ].freeze
rugged_lines = IO.popen(%w[git grep -i -n rugged -- app config lib], &:read).lines rugged_lines = IO.popen(%w[git grep -i -n rugged -- app config lib], &:read).lines
rugged_lines = rugged_lines.select { |l| /^[^:]*\.rb:/ =~ l }
rugged_lines = rugged_lines.reject { |l| l.start_with?(*ALLOWED) } rugged_lines = rugged_lines.reject { |l| l.start_with?(*ALLOWED) }
rugged_lines = rugged_lines.reject do |line| rugged_lines = rugged_lines.reject do |line|
code, _comment = line.split('# ', 2) code, _comment = line.split('# ', 2)
......
...@@ -35,7 +35,6 @@ tasks = [ ...@@ -35,7 +35,6 @@ tasks = [
%w[bundle exec rubocop --parallel], %w[bundle exec rubocop --parallel],
%w[bundle exec rake gettext:lint], %w[bundle exec rake gettext:lint],
%w[bundle exec rake lint:static_verification], %w[bundle exec rake lint:static_verification],
%w[scripts/lint-changelog-yaml],
%w[scripts/lint-conflicts.sh], %w[scripts/lint-conflicts.sh],
%w[scripts/lint-rugged] %w[scripts/lint-rugged]
] ]
......
require 'spec_helper'
describe Admin::GitalyServersController do
describe '#index' do
before do
sign_in(create(:admin))
end
it 'shows the gitaly servers page' do
get :index
expect(response).to have_gitlab_http_status(200)
end
end
end
...@@ -85,6 +85,30 @@ describe GroupsController do ...@@ -85,6 +85,30 @@ describe GroupsController do
end end
end end
describe 'GET #activity' do
render_views
before do
sign_in(user)
project
end
context 'as json' do
it 'includes all projects in event feed' do
3.times do
project = create(:project, group: group)
create(:event, project: project)
end
get :activity, id: group.to_param, format: :json
expect(response).to have_gitlab_http_status(200)
expect(json_response['count']).to eq(3)
expect(assigns(:projects).limit_value).to be_nil
end
end
end
describe 'POST #create' do describe 'POST #create' do
context 'when creating subgroups', :nested_groups do context 'when creating subgroups', :nested_groups do
[true, false].each do |can_create_group_status| [true, false].each do |can_create_group_status|
......
...@@ -102,6 +102,18 @@ describe Projects::IssuesController do ...@@ -102,6 +102,18 @@ describe Projects::IssuesController do
expect(response).to redirect_to(namespace_project_issues_path(page: last_page, state: controller.params[:state], scope: controller.params[:scope])) expect(response).to redirect_to(namespace_project_issues_path(page: last_page, state: controller.params[:state], scope: controller.params[:scope]))
end end
it 'does not use pagination if disabled' do
allow(controller).to receive(:pagination_disabled?).and_return(true)
get :index,
namespace_id: project.namespace.to_param,
project_id: project,
page: (last_page + 1).to_param
expect(response).to have_gitlab_http_status(200)
expect(assigns(:issues).size).to eq(2)
end
end end
end end
......
...@@ -122,7 +122,7 @@ feature 'Project > Members > Share with Group', :js do ...@@ -122,7 +122,7 @@ feature 'Project > Members > Share with Group', :js do
select2 group.id, from: '#link_group_id' select2 group.id, from: '#link_group_id'
fill_in 'expires_at_groups', with: (Time.now + 4.5.days).strftime('%Y-%m-%d') fill_in 'expires_at_groups', with: (Time.now + 4.5.days).strftime('%Y-%m-%d')
page.find('body').click click_on 'share-with-group-tab'
find('.btn-create').click find('.btn-create').click
end end
......
/* eslint-disable no-new */ /* eslint-disable no-new */
import MockAdapter from 'axios-mock-adapter';
import BlobViewer from '~/blob/viewer/index'; import BlobViewer from '~/blob/viewer/index';
import axios from '~/lib/utils/axios_utils';
describe('Blob viewer', () => { describe('Blob viewer', () => {
let blob; let blob;
let mock;
preloadFixtures('snippets/show.html.raw'); preloadFixtures('snippets/show.html.raw');
beforeEach(() => { beforeEach(() => {
mock = new MockAdapter(axios);
loadFixtures('snippets/show.html.raw'); loadFixtures('snippets/show.html.raw');
$('#modal-upload-blob').remove(); $('#modal-upload-blob').remove();
blob = new BlobViewer(); blob = new BlobViewer();
spyOn($, 'ajax').and.callFake(() => { mock.onGet('http://test.host/snippets/1.json?viewer=rich').reply(200, {
const d = $.Deferred(); html: '<div>testing</div>',
});
d.resolve({
html: '<div>testing</div>',
});
return d.promise(); mock.onGet('http://test.host/snippets/1.json?viewer=simple').reply(200, {
html: '<div>testing</div>',
}); });
spyOn(axios, 'get').and.callThrough();
}); });
afterEach(() => { afterEach(() => {
mock.restore();
location.hash = ''; location.hash = '';
}); });
...@@ -30,7 +37,6 @@ describe('Blob viewer', () => { ...@@ -30,7 +37,6 @@ describe('Blob viewer', () => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click(); document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setTimeout(() => { setTimeout(() => {
expect($.ajax).toHaveBeenCalled();
expect( expect(
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]') document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]')
.classList.contains('hidden'), .classList.contains('hidden'),
...@@ -46,7 +52,6 @@ describe('Blob viewer', () => { ...@@ -46,7 +52,6 @@ describe('Blob viewer', () => {
new BlobViewer(); new BlobViewer();
setTimeout(() => { setTimeout(() => {
expect($.ajax).toHaveBeenCalled();
expect( expect(
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]') document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]')
.classList.contains('hidden'), .classList.contains('hidden'),
...@@ -64,12 +69,8 @@ describe('Blob viewer', () => { ...@@ -64,12 +69,8 @@ describe('Blob viewer', () => {
}); });
asyncClick() asyncClick()
.then(() => asyncClick())
.then(() => { .then(() => {
expect($.ajax).toHaveBeenCalled();
return asyncClick();
})
.then(() => {
expect($.ajax.calls.count()).toBe(1);
expect( expect(
document.querySelector('.blob-viewer[data-type="simple"]').getAttribute('data-loaded'), document.querySelector('.blob-viewer[data-type="simple"]').getAttribute('data-loaded'),
).toBe('true'); ).toBe('true');
...@@ -122,7 +123,6 @@ describe('Blob viewer', () => { ...@@ -122,7 +123,6 @@ describe('Blob viewer', () => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click(); document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setTimeout(() => { setTimeout(() => {
expect($.ajax).toHaveBeenCalled();
expect( expect(
copyButton.classList.contains('disabled'), copyButton.classList.contains('disabled'),
).toBeFalsy(); ).toBeFalsy();
...@@ -135,8 +135,6 @@ describe('Blob viewer', () => { ...@@ -135,8 +135,6 @@ describe('Blob viewer', () => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click(); document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setTimeout(() => { setTimeout(() => {
expect($.ajax).toHaveBeenCalled();
expect( expect(
copyButton.getAttribute('data-original-title'), copyButton.getAttribute('data-original-title'),
).toBe('Copy source to clipboard'); ).toBe('Copy source to clipboard');
...@@ -171,14 +169,14 @@ describe('Blob viewer', () => { ...@@ -171,14 +169,14 @@ describe('Blob viewer', () => {
it('sends AJAX request when switching to simple view', () => { it('sends AJAX request when switching to simple view', () => {
blob.switchToViewer('simple'); blob.switchToViewer('simple');
expect($.ajax).toHaveBeenCalled(); expect(axios.get).toHaveBeenCalled();
}); });
it('does not send AJAX request when switching to rich view', () => { it('does not send AJAX request when switching to rich view', () => {
blob.switchToViewer('simple'); blob.switchToViewer('simple');
blob.switchToViewer('rich'); blob.switchToViewer('rich');
expect($.ajax.calls.count()).toBe(1); expect(axios.get.calls.count()).toBe(1);
}); });
}); });
}); });
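Note: the spec migrations in this commit share one pattern — stub HTTP at the adapter level with axios-mock-adapter, spy on axios.get with callThrough so call counts can still be asserted, and restore the mock in afterEach. A minimal standalone sketch of that pattern follows; the URL and response body are placeholders, not taken from this diff.

```javascript
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';

describe('axios mock pattern', () => {
  let mock;

  beforeEach(() => {
    // Intercept requests made through the shared axios instance.
    mock = new MockAdapter(axios);
    mock.onGet('/example.json').reply(200, { html: '<div>stubbed</div>' });

    // callThrough lets the request reach the mock adapter while
    // still allowing specs to assert how often axios.get was used.
    spyOn(axios, 'get').and.callThrough();
  });

  afterEach(() => {
    // Detach the adapter so later specs see unmocked axios behaviour.
    mock.restore();
  });

  it('resolves with the stubbed payload', (done) => {
    axios.get('/example.json')
      .then(({ data }) => {
        expect(axios.get.calls.count()).toBe(1);
        expect(data.html).toBe('<div>stubbed</div>');
        done();
      })
      .catch(done.fail);
  });
});
```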
import 'vendor/jquery.endless-scroll'; import 'vendor/jquery.endless-scroll';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import CommitsList from '~/commits'; import CommitsList from '~/commits';
describe('Commits List', () => { describe('Commits List', () => {
...@@ -43,30 +45,47 @@ describe('Commits List', () => { ...@@ -43,30 +45,47 @@ describe('Commits List', () => {
describe('on entering input', () => { describe('on entering input', () => {
let ajaxSpy; let ajaxSpy;
let mock;
beforeEach(() => { beforeEach(() => {
CommitsList.init(25); CommitsList.init(25);
CommitsList.searchField.val(''); CommitsList.searchField.val('');
spyOn(history, 'replaceState').and.stub(); spyOn(history, 'replaceState').and.stub();
ajaxSpy = spyOn(jQuery, 'ajax').and.callFake((req) => { mock = new MockAdapter(axios);
req.success({
data: '<li>Result</li>', mock.onGet('/h5bp/html5-boilerplate/commits/master').reply(200, {
}); html: '<li>Result</li>',
}); });
ajaxSpy = spyOn(axios, 'get').and.callThrough();
});
afterEach(() => {
mock.restore();
}); });
it('should save the last search string', () => { it('should save the last search string', (done) => {
CommitsList.searchField.val('GitLab'); CommitsList.searchField.val('GitLab');
CommitsList.filterResults(); CommitsList.filterResults()
expect(ajaxSpy).toHaveBeenCalled(); .then(() => {
expect(CommitsList.lastSearch).toEqual('GitLab'); expect(ajaxSpy).toHaveBeenCalled();
expect(CommitsList.lastSearch).toEqual('GitLab');
done();
})
.catch(done.fail);
}); });
it('should not make ajax call if the input does not change', () => { it('should not make ajax call if the input does not change', (done) => {
CommitsList.filterResults(); CommitsList.filterResults()
expect(ajaxSpy).not.toHaveBeenCalled(); .then(() => {
expect(CommitsList.lastSearch).toEqual(''); expect(ajaxSpy).not.toHaveBeenCalled();
expect(CommitsList.lastSearch).toEqual('');
done();
})
.catch(done.fail);
}); });
}); });
}); });
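The CommitsList specs above can only chain on filterResults because the method now hands back its axios promise. A hedged sketch of that shape on the implementation side — the method body, default endpoint, and state handling here are assumptions for illustration, not code from this diff:

```javascript
import axios from '~/lib/utils/axios_utils';

// Illustrative only: a filter method that returns its axios promise so callers
// (and specs) can run assertions after the request settles.
const CommitsList = {
  lastSearch: '',

  filterResults(search = '', endpoint = '/h5bp/html5-boilerplate/commits/master') {
    if (search === this.lastSearch) {
      // Input unchanged: resolve immediately without hitting the network.
      return Promise.resolve();
    }

    return axios.get(endpoint, { params: { search } })
      .then(({ data }) => {
        this.lastSearch = search;
        return data;
      });
  },
};

export default CommitsList;
```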
...@@ -18,54 +18,67 @@ describe('CreateItemDropdown', () => { ...@@ -18,54 +18,67 @@ describe('CreateItemDropdown', () => {
preloadFixtures('static/create_item_dropdown.html.raw'); preloadFixtures('static/create_item_dropdown.html.raw');
let $wrapperEl; let $wrapperEl;
let createItemDropdown;
function createItemAndClearInput(text) {
// Filter for the new item
$wrapperEl.find('.dropdown-input-field')
.val(text)
.trigger('input');
// Create the new item
const $createButton = $wrapperEl.find('.js-dropdown-create-new-item');
$createButton.click();
// Clear out the filter
$wrapperEl.find('.dropdown-input-field')
.val('')
.trigger('input');
}
beforeEach(() => { beforeEach(() => {
loadFixtures('static/create_item_dropdown.html.raw'); loadFixtures('static/create_item_dropdown.html.raw');
$wrapperEl = $('.js-create-item-dropdown-fixture-root'); $wrapperEl = $('.js-create-item-dropdown-fixture-root');
// eslint-disable-next-line no-new
new CreateItemDropdown({
$dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'),
defaultToggleLabel: 'All variables',
fieldName: 'variable[environment]',
getData: (term, callback) => {
callback(DROPDOWN_ITEM_DATA);
},
});
}); });
afterEach(() => { afterEach(() => {
$wrapperEl.remove(); $wrapperEl.remove();
}); });
it('should have a dropdown item for each piece of data', () => { describe('items', () => {
// Get the data in the dropdown beforeEach(() => {
$('.js-dropdown-menu-toggle').click(); createItemDropdown = new CreateItemDropdown({
$dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'),
defaultToggleLabel: 'All variables',
fieldName: 'variable[environment]',
getData: (term, callback) => {
callback(DROPDOWN_ITEM_DATA);
},
});
});
it('should have a dropdown item for each piece of data', () => {
// Get the data in the dropdown
$('.js-dropdown-menu-toggle').click();
const $itemEls = $wrapperEl.find('.js-dropdown-content a'); const $itemEls = $wrapperEl.find('.js-dropdown-content a');
expect($itemEls.length).toEqual(DROPDOWN_ITEM_DATA.length); expect($itemEls.length).toEqual(DROPDOWN_ITEM_DATA.length);
});
}); });
describe('created items', () => { describe('created items', () => {
const NEW_ITEM_TEXT = 'foobarbaz'; const NEW_ITEM_TEXT = 'foobarbaz';
function createItemAndClearInput(text) {
// Filter for the new item
$wrapperEl.find('.dropdown-input-field')
.val(text)
.trigger('input');
// Create the new item
const $createButton = $wrapperEl.find('.js-dropdown-create-new-item');
$createButton.click();
// Clear out the filter
$wrapperEl.find('.dropdown-input-field')
.val('')
.trigger('input');
}
beforeEach(() => { beforeEach(() => {
createItemDropdown = new CreateItemDropdown({
$dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'),
defaultToggleLabel: 'All variables',
fieldName: 'variable[environment]',
getData: (term, callback) => {
callback(DROPDOWN_ITEM_DATA);
},
});
// Open the dropdown // Open the dropdown
$('.js-dropdown-menu-toggle').click(); $('.js-dropdown-menu-toggle').click();
...@@ -103,4 +116,68 @@ describe('CreateItemDropdown', () => { ...@@ -103,4 +116,68 @@ describe('CreateItemDropdown', () => {
expect($itemEls.length).toEqual(DROPDOWN_ITEM_DATA.length); expect($itemEls.length).toEqual(DROPDOWN_ITEM_DATA.length);
}); });
}); });
describe('clearDropdown()', () => {
beforeEach(() => {
createItemDropdown = new CreateItemDropdown({
$dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'),
defaultToggleLabel: 'All variables',
fieldName: 'variable[environment]',
getData: (term, callback) => {
callback(DROPDOWN_ITEM_DATA);
},
});
});
it('should clear all data and filter input', () => {
const filterInput = $wrapperEl.find('.dropdown-input-field');
// Get the data in the dropdown
$('.js-dropdown-menu-toggle').click();
// Filter for an item
filterInput
.val('one')
.trigger('input');
const $itemElsAfterFilter = $wrapperEl.find('.js-dropdown-content a');
expect($itemElsAfterFilter.length).toEqual(1);
createItemDropdown.clearDropdown();
const $itemElsAfterClear = $wrapperEl.find('.js-dropdown-content a');
expect($itemElsAfterClear.length).toEqual(0);
expect(filterInput.val()).toEqual('');
});
});
describe('createNewItemFromValue option', () => {
beforeEach(() => {
createItemDropdown = new CreateItemDropdown({
$dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'),
defaultToggleLabel: 'All variables',
fieldName: 'variable[environment]',
getData: (term, callback) => {
callback(DROPDOWN_ITEM_DATA);
},
createNewItemFromValue: newValue => ({
title: `${newValue}-title`,
id: `${newValue}-id`,
text: `${newValue}-text`,
}),
});
});
it('all items go through createNewItemFromValue', () => {
// Get the data in the dropdown
$('.js-dropdown-menu-toggle').click();
createItemAndClearInput('new-item');
const $itemEls = $wrapperEl.find('.js-dropdown-content a');
expect($itemEls.length).toEqual(1 + DROPDOWN_ITEM_DATA.length);
expect($($itemEls[3]).text()).toEqual('new-item-text');
expect($wrapperEl.find('.dropdown-toggle-text').text()).toEqual('new-item-title');
});
});
}); });
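For reference, a condensed sketch of wiring up the createNewItemFromValue hook exercised in the spec above; only the option names come from this diff — the import path and the static data are assumptions.

```javascript
import $ from 'jquery';
// Import path assumed; the spec above loads the class through its fixture setup.
import CreateItemDropdown from '~/create_item_dropdown';

// eslint-disable-next-line no-new
new CreateItemDropdown({
  $dropdown: $('.js-dropdown-menu-toggle'),
  defaultToggleLabel: 'All variables',
  fieldName: 'variable[environment]',
  // Static stand-in data; in production this callback would return server results.
  getData: (term, callback) => callback([
    { title: 'production', id: 'production', text: 'production' },
  ]),
  // Free-text entries typed by the user are normalised through this hook
  // before being added to the list and selected.
  createNewItemFromValue: newValue => ({
    title: `${newValue}-title`,
    id: `${newValue}-id`,
    text: `${newValue}-text`,
  }),
});
```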