Commit 51d455d0 authored by Clement Ho

Merge branch 'master' into bootstrap4

parents ee16cf15 6c51e220
@@ -307,6 +307,8 @@ cloud-native-image:
  before_script: []
  stage: build
  allow_failure: true
variables:
GIT_DEPTH: "1"
  cache: {}
  before_script:
    - gem install gitlab --no-rdoc --no-ri
...
@@ -2,6 +2,21 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 10.8.1 (2018-05-23)
### Fixed (9 changes)
- Allow CommitStatus class to use presentable methods. !18979
- Fix corrupted environment pages with unauthorized proxy url. !18989
- Fixes deploy token variables on Ci::Build. !19047
- Fix project mirror database inconsistencies when upgrading from EE to CE. !19109
- Render 404 when prometheus adapter is disabled in Prometheus metrics controller. !19110
- Fix error when deleting an empty list of refs.
- Fixed U2F login when used with LDAP.
- Bump prometheus-client-mmap to 0.9.3 to fix nil exception error.
- Fix system hook not firing for blocked users when LDAP sign-in is used.
## 10.8.0 (2018-05-22)

### Security (3 changes, 1 of them is from the community)
...
@@ -23,6 +23,8 @@ const Api = {
  commitPath: '/api/:version/projects/:id/repository/commits',
  branchSinglePath: '/api/:version/projects/:id/repository/branches/:branch',
  createBranchPath: '/api/:version/projects/:id/repository/branches',
pipelinesPath: '/api/:version/projects/:id/pipelines',
pipelineJobsPath: '/api/:version/projects/:id/pipelines/:pipeline_id/jobs',
  group(groupId, callback) {
    const url = Api.buildUrl(Api.groupPath).replace(':id', groupId);
@@ -222,6 +224,20 @@ const Api = {
    });
  },
pipelines(projectPath, params = {}) {
const url = Api.buildUrl(this.pipelinesPath).replace(':id', encodeURIComponent(projectPath));
return axios.get(url, { params });
},
pipelineJobs(projectPath, pipelineId, params = {}) {
const url = Api.buildUrl(this.pipelineJobsPath)
.replace(':id', encodeURIComponent(projectPath))
.replace(':pipeline_id', pipelineId);
return axios.get(url, { params });
},
  buildUrl(url) {
    let urlRoot = '';
    if (gon.relative_url_root != null) {
...
@@ -39,12 +39,10 @@ export default {
        return this.allBlobs.slice(0, MAX_FILE_FINDER_RESULTS);
      }

-      return fuzzaldrinPlus
-        .filter(this.allBlobs, searchText, {
+      return fuzzaldrinPlus.filter(this.allBlobs, searchText, {
          key: 'path',
          maxResults: MAX_FILE_FINDER_RESULTS,
-        })
-        .sort((a, b) => b.lastOpenedAt - a.lastOpenedAt);
+      });
    },
    filteredBlobsLength() {
      return this.filteredBlobs.length;
...
@@ -52,7 +52,10 @@ export default {
  methods: {
    ...mapActions(['toggleFileFinder']),
    mousetrapStopCallback(e, el, combo) {
-      if (combo === 't' && el.classList.contains('dropdown-input-field')) {
+      if (
        (combo === 't' && el.classList.contains('dropdown-input-field')) ||
        el.classList.contains('inputarea')
      ) {
        return true;
      } else if (combo === 'command+p' || combo === 'ctrl+p') {
        return false;
...
@@ -43,9 +43,13 @@ export default {
    },
  },
  watch: {
-    file(oldVal, newVal) {
+    file(newVal, oldVal) {
      if (oldVal.pending) {
        this.removePendingTab(oldVal);
      }
      // Compare key to allow for files opened in review mode to be cached differently
-      if (newVal.key !== this.file.key) {
+      if (oldVal.key !== this.file.key) {
        this.initMonaco();
        if (this.currentActivityView !== activityBarViews.edit) {
@@ -99,6 +103,7 @@ export default {
      'setFileViewMode',
      'setFileEOL',
      'updateViewer',
      'removePendingTab',
    ]),
    initMonaco() {
      if (this.shouldHideEditor) return;
...
@@ -41,7 +41,7 @@ const router = new VueRouter({
      component: EmptyRouterComponent,
      children: [
        {
-          path: ':targetmode(edit|tree|blob)/:branch/*',
+          path: ':targetmode(edit|tree|blob)/*',
          component: EmptyRouterComponent,
        },
        {
@@ -63,23 +63,27 @@ router.beforeEach((to, from, next) => {
      .then(() => {
        const fullProjectId = `${to.params.namespace}/${to.params.project}`;
-        if (to.params.branch) {
-          store.dispatch('setCurrentBranchId', to.params.branch);
+        const baseSplit = to.params[0].split('/-/');
+        const branchId = baseSplit[0].slice(-1) === '/' ? baseSplit[0].slice(0, -1) : baseSplit[0];
        if (branchId) {
          const basePath = baseSplit.length > 1 ? baseSplit[1] : '';
          store.dispatch('setCurrentBranchId', branchId);
          store.dispatch('getBranchData', {
            projectId: fullProjectId,
-            branchId: to.params.branch,
+            branchId,
          });
          store
            .dispatch('getFiles', {
              projectId: fullProjectId,
-              branchId: to.params.branch,
+              branchId,
            })
            .then(() => {
-              if (to.params[0]) {
-                const path =
-                  to.params[0].slice(-1) === '/' ? to.params[0].slice(0, -1) : to.params[0];
+              if (basePath) {
+                const path = basePath.slice(-1) === '/' ? basePath.slice(0, -1) : basePath;
                const treeEntryKey = Object.keys(store.state.entries).find(
                  key => key === path && !store.state.entries[key].pending,
                );
...
@@ -63,7 +63,9 @@ export const getFileData = ({ state, commit, dispatch }, { path, makeFileActive
  const file = state.entries[path];
  commit(types.TOGGLE_LOADING, { entry: file });
  return service
-    .getFileData(`${gon.relative_url_root ? gon.relative_url_root : ''}${file.url}`)
+    .getFileData(
      `${gon.relative_url_root ? gon.relative_url_root : ''}${file.url.replace('/-/', '/')}`,
    )
    .then(res => {
      const pageTitle = decodeURI(normalizeHeaders(res.headers)['PAGE-TITLE']);
      setPageTitle(pageTitle);
...
@@ -5,6 +5,7 @@ import * as actions from './actions';
import * as getters from './getters';
import mutations from './mutations';
import commitModule from './modules/commit';
import pipelines from './modules/pipelines';

Vue.use(Vuex);
@@ -15,5 +16,6 @@ export default new Vuex.Store({
  getters,
  modules: {
    commit: commitModule,
    pipelines,
  },
});
@@ -204,17 +204,23 @@ export const commitChanges = ({ commit, state, getters, dispatch, rootState, roo
        dispatch('updateViewer', 'editor', { root: true });
        router.push(
-          `/project/${rootState.currentProjectId}/blob/${getters.branchName}/${
+          `/project/${rootState.currentProjectId}/blob/${getters.branchName}/-/${
            rootGetters.activeFile.path
          }`,
        );
      }
    })
    .then(() => dispatch('updateCommitAction', consts.COMMIT_TO_CURRENT_BRANCH))
-    .then(() => dispatch('refreshLastCommitData', {
+    .then(() =>
      dispatch(
        'refreshLastCommitData',
        {
          projectId: rootState.currentProjectId,
          branchId: rootState.currentBranchId,
-    }, { root: true }));
+        },
        { root: true },
      ),
    );
  })
  .catch(err => {
    let errMsg = __('Error committing changes. Please try again.');
...
import { __ } from '../../../../locale';
import Api from '../../../../api';
import flash from '../../../../flash';
import * as types from './mutation_types';
export const requestLatestPipeline = ({ commit }) => commit(types.REQUEST_LATEST_PIPELINE);
export const receiveLatestPipelineError = ({ commit }) => {
flash(__('There was an error loading latest pipeline'));
commit(types.RECEIVE_LASTEST_PIPELINE_ERROR);
};
export const receiveLatestPipelineSuccess = ({ commit }, pipeline) =>
commit(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, pipeline);
export const fetchLatestPipeline = ({ dispatch, rootState }, sha) => {
dispatch('requestLatestPipeline');
return Api.pipelines(rootState.currentProjectId, { sha, per_page: '1' })
.then(({ data }) => {
dispatch('receiveLatestPipelineSuccess', data.pop());
})
.catch(() => dispatch('receiveLatestPipelineError'));
};
export const requestJobs = ({ commit }) => commit(types.REQUEST_JOBS);
export const receiveJobsError = ({ commit }) => {
flash(__('There was an error loading jobs'));
commit(types.RECEIVE_JOBS_ERROR);
};
export const receiveJobsSuccess = ({ commit }, data) => commit(types.RECEIVE_JOBS_SUCCESS, data);
export const fetchJobs = ({ dispatch, state, rootState }, page = '1') => {
dispatch('requestJobs');
Api.pipelineJobs(rootState.currentProjectId, state.latestPipeline.id, {
page,
})
.then(({ data, headers }) => {
const nextPage = headers && headers['x-next-page'];
dispatch('receiveJobsSuccess', data);
if (nextPage) {
dispatch('fetchJobs', nextPage);
}
})
.catch(() => dispatch('receiveJobsError'));
};
export default () => {};
export const hasLatestPipeline = state => !state.isLoadingPipeline && !!state.latestPipeline;
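// Collect the failed jobs from every stage into a single flat list.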
export const failedJobs = state =>
state.stages.reduce(
(acc, stage) => acc.concat(stage.jobs.filter(job => job.status === 'failed')),
[],
);
import state from './state';
import * as actions from './actions';
import mutations from './mutations';
import * as getters from './getters';
export default {
namespaced: true,
state: state(),
actions,
mutations,
getters,
};
export const REQUEST_LATEST_PIPELINE = 'REQUEST_LATEST_PIPELINE';
export const RECEIVE_LASTEST_PIPELINE_ERROR = 'RECEIVE_LASTEST_PIPELINE_ERROR';
export const RECEIVE_LASTEST_PIPELINE_SUCCESS = 'RECEIVE_LASTEST_PIPELINE_SUCCESS';
export const REQUEST_JOBS = 'REQUEST_JOBS';
export const RECEIVE_JOBS_ERROR = 'RECEIVE_JOBS_ERROR';
export const RECEIVE_JOBS_SUCCESS = 'RECEIVE_JOBS_SUCCESS';
/* eslint-disable no-param-reassign */
import * as types from './mutation_types';
export default {
[types.REQUEST_LATEST_PIPELINE](state) {
state.isLoadingPipeline = true;
},
[types.RECEIVE_LASTEST_PIPELINE_ERROR](state) {
state.isLoadingPipeline = false;
},
[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](state, pipeline) {
state.isLoadingPipeline = false;
if (pipeline) {
state.latestPipeline = {
id: pipeline.id,
status: pipeline.status,
};
}
},
[types.REQUEST_JOBS](state) {
state.isLoadingJobs = true;
},
[types.RECEIVE_JOBS_ERROR](state) {
state.isLoadingJobs = false;
},
[types.RECEIVE_JOBS_SUCCESS](state, jobs) {
state.isLoadingJobs = false;
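    // Group the flat job list returned by the API into stages, keyed by each job's stage name.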
state.stages = jobs.reduce((acc, job) => {
let stage = acc.find(s => s.title === job.stage);
if (!stage) {
stage = {
title: job.stage,
jobs: [],
};
acc.push(stage);
}
stage.jobs = stage.jobs.concat({
id: job.id,
name: job.name,
status: job.status,
stage: job.stage,
duration: job.duration,
});
return acc;
}, state.stages);
},
};
export default () => ({
isLoadingPipeline: false,
isLoadingJobs: false,
latestPipeline: null,
stages: [],
});
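A minimal usage sketch for the new store module above, assuming it is registered under the `pipelines` namespace as in `ide/stores/index.js`; the component shape and the `latestSha` property are illustrative only and not part of this commit.

```javascript
import { mapActions, mapGetters } from 'vuex';

export default {
  computed: {
    // Getters defined in the pipelines module above.
    ...mapGetters('pipelines', ['hasLatestPipeline', 'failedJobs']),
  },
  methods: {
    ...mapActions('pipelines', ['fetchLatestPipeline', 'fetchJobs']),
  },
  mounted() {
    // Fetch the latest pipeline for a commit SHA, then page through its jobs.
    this.fetchLatestPipeline(this.latestSha).then(() => this.fetchJobs());
  },
};
```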
@@ -26,7 +26,7 @@ self.addEventListener('message', e => {
          id: folderPath,
          name: folderName,
          path: folderPath,
-          url: `/${projectId}/tree/${branchId}/${folderPath}/`,
+          url: `/${projectId}/tree/${branchId}/-/${folderPath}/`,
          type: 'tree',
          parentTreeUrl: parentFolder ? parentFolder.url : `/${projectId}/tree/${branchId}/`,
          tempFile,
@@ -64,7 +64,7 @@ self.addEventListener('message', e => {
        id: path,
        name: blobName,
        path,
-        url: `/${projectId}/blob/${branchId}/${path}`,
+        url: `/${projectId}/blob/${branchId}/-/${path}`,
        type: 'blob',
        parentTreeUrl: fileFolder ? fileFolder.url : `/${projectId}/blob/${branchId}`,
        tempFile,
...
import initU2F from '../../../shared/sessions/u2f';
document.addEventListener('DOMContentLoaded', initU2F);
@@ -121,6 +121,14 @@ label {
    @include box-shadow(none);
    border-radius: 2px;
    padding: $gl-vert-padding $gl-input-padding;
&.input-short {
width: $input-short-width;
@media (min-width: $screen-md-min) {
width: $input-short-md-width;
}
}
  }

  .select-wrapper {
...
@@ -128,14 +128,6 @@
    /* Large devices (large desktops, 1200px and up) */
    @include media-breakpoint-up(lg) { width: 250px; }

-    &.input-short {
-      /* Medium devices (desktops, 992px and up) */
-      @include media-breakpoint-up(md) { width: 170px; }
-      /* Large devices (large desktops, 1200px and up) */
-      @include media-breakpoint-up(lg) { width: 210px; }
-    }
  }

  @include media-breakpoint-down(xs) {
@@ -164,10 +156,6 @@
    }
  }

-  .input-short {
-    width: 100%;
-  }

  .icon-label {
    display: inline-block;
  }
...
@@ -559,8 +559,8 @@ $input-danger-border: $red-400;
$input-group-addon-bg: #f7f8fa;
$gl-field-focus-shadow: rgba(0, 0, 0, 0.075);
$gl-field-focus-shadow-error: rgba($red-500, 0.6);
-$input-disabled-bg: $gray-lighter;
-$input-border-color: $border-color;
+$input-short-width: 200px;
+$input-short-md-width: 280px;

/*
 * Help
...
@@ -99,16 +99,6 @@
    @include media-breakpoint-up(sm) {
      width: 250px;
    }

-    &.input-short {
-      @include media-breakpoint-up(md) {
-        width: 170px;
-      }
-      @include media-breakpoint-up(lg) {
-        width: 210px;
-      }
-    }
  }
}
...
@@ -873,12 +873,6 @@ pre.light-well {
    margin: 0;
  }

-  .commits-search-form {
-    .input-short {
-      min-width: 200px;
-    }
-  }

  .git-clone-holder {
    width: 380px;
...
@@ -52,7 +52,7 @@ class Admin::ApplicationSettingsController < Admin::ApplicationController
  private

  def set_application_setting
-    @application_setting = ApplicationSetting.current
+    @application_setting = ApplicationSetting.current_without_cache
  end

  def application_setting_params
...
@@ -2,19 +2,24 @@ class Groups::BoardsController < Groups::ApplicationController
  include BoardsResponses

  before_action :assign_endpoint_vars
  before_action :boards, only: :index

  def index
-    @boards = Boards::ListService.new(group, current_user).execute
    respond_with_boards
  end

  def show
-    @board = group.boards.find(params[:id])
+    @board = boards.find(params[:id])
    respond_with_board
  end

  private

  def boards
    @boards ||= Boards::ListService.new(group, current_user).execute
  end

  def assign_endpoint_vars
    @boards_endpoint = group_boards_url(group)
    @namespace_path = group.to_param
...
@@ -4,22 +4,25 @@ class Projects::BoardsController < Projects::ApplicationController
  before_action :check_issues_available!
  before_action :authorize_read_board!, only: [:index, :show]
  before_action :boards, only: :index
  before_action :assign_endpoint_vars

  def index
-    @boards = Boards::ListService.new(project, current_user).execute
    respond_with_boards
  end

  def show
-    @board = project.boards.find(params[:id])
+    @board = boards.find(params[:id])
    respond_with_board
  end

  private

  def boards
    @boards ||= Boards::ListService.new(project, current_user).execute
  end

  def assign_endpoint_vars
    @boards_endpoint = project_boards_path(project)
    @bulk_issues_path = bulk_update_project_issues_path(project)
...
@@ -25,7 +25,7 @@ module Projects
      end

      def require_prometheus_metrics!
-        render_404 unless prometheus_adapter.can_query?
+        render_404 unless prometheus_adapter&.can_query?
      end
    end
  end
...
@@ -2,6 +2,11 @@ require 'digest/md5'
require 'uri'

module ApplicationHelper
# See https://docs.gitlab.com/ee/development/ee_features.html#code-in-app-views
def render_if_exists(partial, locals = {})
render(partial, locals) if lookup_context.exists?(partial, [], true)
end
  # Check if a particular controller is the current one
  #
  # args - One or more controller names to check
...
@@ -17,7 +17,9 @@ module BlobHelper
  end

  def ide_edit_path(project = @project, ref = @ref, path = @path, options = {})
-    "#{ide_path}/project#{url_for([project, "edit", "blob", id: [ref, path], script_name: "/"])}"
+    segments = [ide_path, 'project', project.full_path, 'edit', ref]
    segments.concat(['-', path]) if path.present?
    File.join(segments)
  end

  def edit_blob_button(project = @project, ref = @ref, path = @path, options = {})
@@ -331,7 +333,6 @@ module BlobHelper
    if !on_top_of_branch?(project, ref)
      edit_disabled_button_tag(text, common_classes)
    # This condition only applies to users who are logged in
-    # Web IDE (Beta) requires the user to have this feature enabled
    elsif !current_user || (current_user && can_modify_blob?(blob, project, ref))
      edit_link_tag(text, edit_path, common_classes)
    elsif can?(current_user, :fork_project, project) && can?(current_user, :create_merge_request_in, project)
...
class Appearance < ActiveRecord::Base
  include CacheableAttributes
  include CacheMarkdownField
-  include AfterCommitQueue
  include ObjectStorage::BackgroundMove
  include WithUploads
@@ -15,16 +15,9 @@ class Appearance < ActiveRecord::Base
  mount_uploader :logo, AttachmentUploader
  mount_uploader :header_logo, AttachmentUploader

-  CACHE_KEY = "current_appearance:#{Gitlab::VERSION}".freeze
-  after_commit :flush_redis_cache
-  def self.current
-    Rails.cache.fetch(CACHE_KEY) { first }
-  end
-  def flush_redis_cache
-    Rails.cache.delete(CACHE_KEY)
+  # Overrides CacheableAttributes.current_without_cache
  def self.current_without_cache
+    first
  end

  def single_appearance_row
...
class ApplicationSetting < ActiveRecord::Base
  include CacheableAttributes
  include CacheMarkdownField
  include TokenAuthenticatable

  add_authentication_token_field :runners_registration_token
  add_authentication_token_field :health_check_access_token

-  CACHE_KEY = 'application_setting.last'.freeze

  DOMAIN_LIST_SEPARATOR = %r{\s*[,;]\s* # comma or semicolon, optionally surrounded by whitespace
                          | # or
                          \s # any whitespace character
@@ -229,40 +229,6 @@ class ApplicationSetting < ActiveRecord::Base
  after_commit do
    reset_memoized_terms
-    Rails.cache.write(CACHE_KEY, self)
-  end
-  def self.current
-    ensure_cache_setup
-    Rails.cache.fetch(CACHE_KEY) do
-      ApplicationSetting.last.tap do |settings|
-        # do not cache nils
-        raise 'missing settings' unless settings
-      end
-    end
-  rescue
-    # Fall back to an uncached value if there are any problems (e.g. redis down)
-    ApplicationSetting.last
-  end
-  def self.expire
-    Rails.cache.delete(CACHE_KEY)
-  rescue
-    # Gracefully handle when Redis is not available. For example,
-    # omnibus may fail here during gitlab:assets:compile.
-  end
-  def self.cached
-    value = Rails.cache.read(CACHE_KEY)
-    ensure_cache_setup if value.present?
-    value
-  end
-  def self.ensure_cache_setup
-    # This is a workaround for a Rails bug that causes attribute methods not
-    # to be loaded when read from cache: https://github.com/rails/rails/issues/27348
-    ApplicationSetting.define_attribute_methods
  end

  def self.defaults
...
module CacheableAttributes
extend ActiveSupport::Concern
included do
after_commit { self.class.expire }
end
class_methods do
    # Can be overridden
def current_without_cache
last
end
def cache_key
"#{name}:#{Gitlab::VERSION}:#{Gitlab.migrations_hash}:json".freeze
end
def defaults
{}
end
def build_from_defaults(attributes = {})
new(defaults.merge(attributes))
end
def cached
json_attributes = Rails.cache.read(cache_key)
return nil unless json_attributes.present?
build_from_defaults(JSON.parse(json_attributes))
end
def current
cached_record = cached
return cached_record if cached_record.present?
current_without_cache.tap { |current_record| current_record&.cache! }
rescue
# Fall back to an uncached value if there are any problems (e.g. Redis down)
current_without_cache
end
def expire
Rails.cache.delete(cache_key)
rescue
# Gracefully handle when Redis is not available. For example,
# omnibus may fail here during gitlab:assets:compile.
end
end
def cache!
Rails.cache.write(self.class.cache_key, attributes.to_json)
end
end
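A brief sketch of how the new `CacheableAttributes` concern is consumed, based on the `ApplicationSetting` and `Appearance` changes in this commit; the override shown is optional and illustrative:

```ruby
class Appearance < ActiveRecord::Base
  include CacheableAttributes

  # Optional override; the concern defaults `current_without_cache` to `last`
  # and `defaults` to `{}`.
  def self.current_without_cache
    first
  end
end

# The first call loads the record and caches its attributes as JSON under
# "Appearance:<GitLab version>:<migrations hash>:json"; later calls read the
# cache, and any commit on the record expires it via the after_commit hook.
Appearance.current
```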
@@ -3,6 +3,7 @@ require "gemnasium/gitlab_service"
class GemnasiumService < Service
  prop_accessor :token, :api_key
  validates :token, :api_key, presence: true, if: :activated?
  validate :deprecation_validation

  def title
    'Gemnasium'
@@ -27,6 +28,18 @@ class GemnasiumService < Service
    %w(push)
  end
def deprecated?
true
end
def deprecation_message
"Gemnasium has been acquired by GitLab in January 2018. Since May 15, 2018, the service provided by Gemnasium is no longer available."
end
def deprecation_validation
errors[:base] << deprecation_message
end
  def execute(data)
    return unless supported_events.include?(data[:object_kind])
...
@@ -253,7 +253,6 @@ class Service < ActiveRecord::Base
      emails_on_push
      external_wiki
      flowdock
-      gemnasium
      hipchat
      irker
      jira
...
@@ -10,11 +10,15 @@ module Boards
      end

      def execute
-        create_issue(params.merge(label_ids: [list.label_id]))
+        create_issue(params.merge(issue_params))
      end

      private

      def issue_params
        { label_ids: [list.label_id] }
      end

      def board
        @board ||= parent.boards.find(params.delete(:board_id))
      end
...
@@ -103,6 +103,7 @@ module ObjectStorage
    end

    included do
      include AfterCommitQueue

      after_save on: [:create, :update] do
        background_upload(changed_mounts)
      end
...
@@ -7,7 +7,7 @@
    .form-check
      = f.label :auto_devops_enabled do
        = f.check_box :auto_devops_enabled
-        Enabled Auto DevOps (Beta) for projects by default
+        Enabled Auto DevOps for projects by default
      .form-text.text-muted
        It will automatically build, test, and deploy applications based on a predefined CI/CD configuration
        = link_to icon('question-circle'), help_page_path('topics/autodevops/index.md')
...
@@ -2,6 +2,8 @@
- breadcrumb_title "Dashboard"

%div{ class: container_class }
  = render_if_exists "admin/licenses/breakdown", license: @license

  .admin-dashboard.prepend-top-default
    .row
      .col-sm-4
@@ -20,6 +22,7 @@
          %h3.text-center
            Users:
            = approximate_count_with_delimiters(User)
          = render_if_exists 'users_statistics'
          %hr
          = link_to 'New user', new_admin_user_path, class: "btn btn-new"
      .col-sm-4
@@ -97,6 +100,9 @@
            = reply_email
          %span.light.float-right
            = boolean_to_icon Gitlab::IncomingEmail.enabled?

        = render_if_exists 'elastic_and_geo'

        - container_reg = "Container Registry"
        %p{ "aria-label" => "#{container_reg}: status " + (Gitlab.config.registry.enabled ? "on" : "off") }
          = container_reg
@@ -144,6 +150,9 @@
            GitLab Pages
          %span.float-right
            = Gitlab::Pages::VERSION

        = render_if_exists 'geo'

        %p
          Ruby
          %span.float-right
...
@@ -45,7 +45,7 @@
  .float-left
    = form_tag admin_runners_path, id: 'runners-search', class: 'form-inline', method: :get do
      .form-group
-        = search_field_tag :search, params[:search], class: 'form-control', placeholder: 'Runner description or token', spellcheck: false
+        = search_field_tag :search, params[:search], class: 'form-control input-short', placeholder: 'Runner description or token', spellcheck: false
      = submit_tag 'Search', class: 'btn'
  .float-right.light
...
@@ -22,7 +22,7 @@
  %hr
  %p
-    - link_to_auto_devops_settings = link_to(s_('AutoDevOps|enable Auto DevOps (Beta)'), project_settings_ci_cd_path(@project, anchor: 'autodevops-settings'))
+    - link_to_auto_devops_settings = link_to(s_('AutoDevOps|enable Auto DevOps'), project_settings_ci_cd_path(@project, anchor: 'autodevops-settings'))
    - link_to_add_kubernetes_cluster = link_to(s_('AutoDevOps|add a Kubernetes cluster'), new_project_cluster_path(@project))
    = s_('AutoDevOps|You can automatically build and test your application if you %{link_to_auto_devops_settings} for this project. You can automatically deploy it as well, if you %{link_to_add_kubernetes_cluster}.').html_safe % { link_to_auto_devops_settings: link_to_auto_devops_settings, link_to_add_kubernetes_cluster: link_to_add_kubernetes_cluster }
...
@@ -19,7 +19,7 @@
%section.settings#autodevops-settings.no-animate{ class: ('expanded' if expanded) }
  .settings-header
    %h4
-      = s_('CICD|Auto DevOps (Beta)')
+      = s_('CICD|Auto DevOps')
    %button.btn.btn-default.js-settings-toggle{ type: 'button' }
      = expanded ? _('Collapse') : _('Expand')
    %p
...
@@ -82,7 +82,7 @@
        - if can_collaborate
          = succeed " " do
-            = link_to ide_edit_path(@project, @id, ""), class: 'btn btn-default' do
+            = link_to ide_edit_path(@project, @ref, @path), class: 'btn btn-default' do
              = _('Web IDE')

      = render 'projects/buttons/download', project: @project, ref: @ref
@@ -3,7 +3,7 @@
    = custom_icon('icon_autodevops')

  .banner-body.prepend-left-10.append-bottom-10
-    %h5.banner-title= s_('AutoDevOps|Auto DevOps (Beta)')
+    %h5.banner-title= s_('AutoDevOps|Auto DevOps')
    %p= s_('AutoDevOps|It will automatically build, test, and deploy your application based on a predefined CI/CD configuration.')
    %p
      - link = link_to(s_('AutoDevOps|Auto DevOps documentation'), help_page_path('topics/autodevops/index.md'), target: '_blank', rel: 'noopener noreferrer')
...
---
title: 'Fixes: Runners search input placeholder is cut off'
merge_request: 19015
author: Jacopo Beschi @jacopo-beschi
type: fixed
---
title: Allow CommitStatus class to use presentable methods
merge_request: 18979
author:
type: fixed
---
title: Deprecate Gemnasium project service
merge_request: 18954
author:
type: deprecated
---
title: Fixes deploy token variables on Ci::Build
merge_request: 19047
author:
type: fixed
---
title: Removed "(Beta)" from "Auto DevOps" messages
merge_request: 18759
author:
type: changed
---
title: Fix corrupted environment pages with unauthorized proxy url
merge_request: 18989
author:
type: fixed
---
title: Don't trim incoming emails that create new issues
merge_request:
author: Cameron Crockett
type: fixed
---
title: Bump prometheus-client-mmap to 0.9.3 to fix nil exception error
merge_request:
author:
type: fixed
---
title: Fix system hook not firing for blocked users when LDAP sign-in is used
merge_request:
author:
type: fixed
@@ -470,6 +470,3 @@ if Rails.env.test?
  Settings.gitlab['default_can_create_group'] = true
  Settings.gitlab['default_can_create_team'] = false
end
-
-# Force a refresh of application settings at startup
-ApplicationSetting.expire
class Kubeclient::Client
# We need to monkey patch this method until
# https://github.com/abonas/kubeclient/pull/323 is merged
def proxy_url(kind, name, port, namespace = '')
discover unless @discovered
entity_name_plural =
if %w[services pods nodes].include?(kind.to_s)
kind.to_s
else
@entities[kind.to_s].resource_name
end
ns_prefix = build_namespace_prefix(namespace)
rest_client["#{ns_prefix}#{entity_name_plural}/#{name}:#{port}/proxy"].url
end
end
@@ -9,6 +9,7 @@ const CompressionPlugin = require('compression-webpack-plugin');
const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin;

const ROOT_PATH = path.resolve(__dirname, '..');
const CACHE_PATH = path.join(ROOT_PATH, 'tmp/cache');
const IS_PRODUCTION = process.env.NODE_ENV === 'production';
const IS_DEV_SERVER = process.argv.join(' ').indexOf('webpack-dev-server') !== -1;
const DEV_SERVER_HOST = process.env.DEV_SERVER_HOST || 'localhost';
@@ -17,6 +18,9 @@ const DEV_SERVER_LIVERELOAD = IS_DEV_SERVER && process.env.DEV_SERVER_LIVERELOAD
const WEBPACK_REPORT = process.env.WEBPACK_REPORT;
const NO_COMPRESSION = process.env.NO_COMPRESSION;

const VUE_VERSION = require('vue/package.json').version;
const VUE_LOADER_VERSION = require('vue-loader/package.json').version;

let autoEntriesCount = 0;
let watchAutoEntries = [];
const defaultEntries = ['./main'];
@@ -99,12 +103,21 @@ module.exports = {
        exclude: path => /node_modules|vendor[\\/]assets/.test(path) && !/\.vue\.js/.test(path),
        loader: 'babel-loader',
        options: {
-          cacheDirectory: path.join(ROOT_PATH, 'tmp/cache/babel-loader'),
+          cacheDirectory: path.join(CACHE_PATH, 'babel-loader'),
        },
      },
      {
        test: /\.vue$/,
        loader: 'vue-loader',
options: {
cacheDirectory: path.join(CACHE_PATH, 'vue-loader'),
cacheIdentifier: [
process.env.NODE_ENV || 'development',
webpack.version,
VUE_VERSION,
VUE_LOADER_VERSION,
].join('|'),
},
      },
      {
        test: /\.svg$/,
...
class EnsureMissingColumnsToProjectMirrorData < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
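  # These columns may already exist on databases that were previously used with GitLab EE
  # (see "Fix project mirror database inconsistencies when upgrading from EE to CE" in the
  # changelog), so add each one only when it is missing.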
def up
add_column :project_mirror_data, :status, :string unless column_exists?(:project_mirror_data, :status)
add_column :project_mirror_data, :jid, :string unless column_exists?(:project_mirror_data, :jid)
add_column :project_mirror_data, :last_error, :text unless column_exists?(:project_mirror_data, :last_error)
end
def down
# db/migrate/20180502122856_create_project_mirror_data.rb will remove the table
end
end
class RemoveGemnasiumService < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
disable_statement_timeout
execute("DELETE FROM services WHERE type='GemnasiumService';")
end
def down
# noop
end
end
@@ -405,6 +405,13 @@ GET /projects/:id/services/flowdock

Gemnasium monitors your project dependencies and alerts you about updates and security vulnerabilities.
CAUTION: **Warning:**
Gemnasium service integration has been deprecated in GitLab 11.0. Gemnasium was
[acquired by GitLab](https://about.gitlab.com/press/releases/2018-01-30-gemnasium-acquisition.html)
in January 2018, and since May 15, 2018, the service provided by Gemnasium is no longer available.
You can [migrate from Gemnasium to GitLab](https://docs.gitlab.com/ee/user/project/import/gemnasium.html)
to keep monitoring your dependencies.
### Create/Edit Gemnasium service

Set Gemnasium service for a project.
...
@@ -53,6 +53,9 @@ stub_licensed_features(variable_environment_scope: true)

EE-specific comments should not be backported to CE.
**Note:** This is only meant as a workaround; we should follow up and
resolve this soon.
### Detection of EE-only files

For each commit (except on `master`), the `ee-files-location-check` CI job tries
@@ -105,11 +108,14 @@ is applied not only to models. Here's a list of other examples:

- `ee/app/services/foo/create_service.rb`
- `ee/app/validators/foo_attr_validator.rb`
- `ee/app/workers/foo_worker.rb`
- `ee/app/views/foo.html.haml`
- `ee/app/views/foo/_bar.html.haml`
This works because for every path that is present in CE's eager-load/auto-load
paths, we add the same `ee/`-prepended path in [`config/application.rb`].
This also applies to views.

-[`config/application.rb`]: https://gitlab.com/gitlab-org/gitlab-ee/blob/d278b76d6600a0e27d8019a0be27971ba23ab640/config/application.rb#L41-51
+[`config/application.rb`]: https://gitlab.com/gitlab-org/gitlab-ee/blob/925d3d4ebc7a2c72964ce97623ae41b8af12538d/config/application.rb#L42-52

### EE features based on CE features
@@ -359,9 +365,37 @@ Blocks of code that are EE-specific should be moved to partials. This
avoids conflicts with big chunks of HAML code that are not fun to
resolve when you add the indentation to the equation.

-EE-specific views should be placed in `ee/app/views/ee/`, using extra
+EE-specific views should be placed in `ee/app/views/`, using extra
sub-directories if appropriate.
Instead of using regular `render`, we should use `render_if_exists`, which
does not render anything if it cannot find the specific partial. We use this
so that we can put `render_if_exists` in CE, keeping the code the same between
CE and EE.

It should search for the EE partial first, then the CE partial, and render
nothing if neither is found (see the example below).

This has two uses:

- CE renders nothing, and EE renders its EE partial.
- CE renders its CE partial, and EE renders its EE partial, while the view
  file stays the same.

The advantages of this:

- Minimal code difference between CE and EE.
- Very clear hints about where we're extending EE views while reading CE code.
- Whenever we want to show something different in CE, we can just add a CE
  partial. The same applies the other way around. If we just use
  `render_if_exists`, it is very easy to change the content in EE.

The disadvantages of this:

- Slightly more work while developing EE features, because now we need to
  port `render_if_exists` to CE.
- If we have a typo in the partial name, it will be silently ignored.
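For example, the admin dashboard in this merge request calls the helper for a
partial that only ships with EE, so CE simply renders nothing at that spot (the
EE partial path in the comment is inferred, not part of this diff):

```haml
-# app/views/admin/dashboard/index.html.haml (CE)
-# EE provides an admin/licenses/_breakdown partial; CE has no such partial.
= render_if_exists "admin/licenses/breakdown", license: @license
```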
### Code in `lib/`

Place EE-specific logic in the top-level `EE` module namespace. Namespace the
...
# Auto DevOps

-DANGER: Auto DevOps is currently in **Beta** and _not recommended for production use_.

> [Introduced][ce-37115] in GitLab 10.0.

Auto DevOps automatically detects, builds, tests, deploys, and monitors your
@@ -496,7 +494,16 @@ also be customized, and you can easily use a [custom buildpack](#custom-buildpac
| `POSTGRES_DB` | The PostgreSQL database name; defaults to the value of [`$CI_ENVIRONMENT_SLUG`](../../ci/variables/README.md#predefined-variables-environment-variables). Set it to use a custom database name. |
| `BUILDPACK_URL` | The buildpack's full URL. It can point to either Git repositories or a tarball URL. For Git repositories, it is possible to point to a specific `ref`, for example `https://github.com/heroku/heroku-buildpack-ruby.git#v142` |
| `STAGING_ENABLED` | From GitLab 10.8, this variable can be used to define a [deploy policy for staging and production environments](#deploy-policy-for-staging-and-production-environments). |
| `CANARY_ENABLED` | From GitLab 11.0, this variable can be used to define a [deploy policy for canary environments](#deploy-policy-for-canary-environments). |
| `INCREMENTAL_ROLLOUT_ENABLED`| From GitLab 10.8, this variable can be used to enable an [incremental rollout](#incremental-rollout-to-production) of your application for the production environment. |
| `TEST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `test` job. If the variable is present, the job will not be created. |
| `CODEQUALITY_DISABLED` | From GitLab 11.0, this variable can be used to disable the `codequality` job. If the variable is present, the job will not be created. |
| `SAST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `sast` job. If the variable is present, the job will not be created. |
| `DEPENDENCY_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `dependency_scanning` job. If the variable is present, the job will not be created. |
| `CONTAINER_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `sast:container` job. If the variable is present, the job will not be created. |
| `REVIEW_DISABLED` | From GitLab 11.0, this variable can be used to disable the `review` and the manual `review:stop` job. If the variable is present, these jobs will not be created. |
| `DAST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `dast` job. If the variable is present, the job will not be created. |
| `PERFORMANCE_DISABLED` | From GitLab 11.0, this variable can be used to disable the `performance` job. If the variable is present, the job will not be created. |
TIP: **Tip:**
Set up the replica variables using a
@@ -579,6 +586,21 @@ If `STAGING_ENABLED` is defined in your project (e.g., set `STAGING_ENABLED` to
to a `staging` environment, and a `production_manual` job will be created for
you when you're ready to manually deploy to production.
#### Deploy policy for canary environments **[PREMIUM]**
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ci-yml/merge_requests/171)
in GitLab 11.0.
A [canary environment](https://docs.gitlab.com/ee/user/project/canary_deployments.html) can be used
before any changes are deployed to production.
If `CANARY_ENABLED` is defined in your project (e.g., set `CANARY_ENABLED` to
`1` as a secret variable) then two manual jobs will be created:
- `canary` which will deploy the application to the canary environment
- `production_manual` which is to be used by you when you're ready to manually
deploy to production.
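A hedged sketch of how such a gate can be expressed with the `only: variables:`
syntax; this illustrates the mechanism only and is not the literal Auto DevOps
template (the job name, stage, script, and environment are placeholders):

```yaml
canary:
  stage: production
  script:
    - echo "deploy the application to the canary environment here"
  environment:
    name: production
  when: manual
  only:
    variables:
      - $CANARY_ENABLED
```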
#### Incremental rollout to production **[PREMIUM]**

> [Introduced](https://gitlab.com/gitlab-org/gitlab-ee/issues/5415) in GitLab 10.8.
...
# Auto DevOps: quick start guide

-DANGER: Auto DevOps is currently in **Beta** and _not recommended for production use_.

> [Introduced][ce-37115] in GitLab 10.0.

This is a step-by-step guide to deploying a project hosted on GitLab.com to
...
@@ -34,7 +34,7 @@ Click on the service links to see further configuration instructions and details
| [Emails on push](emails_on_push.md) | Email the commits and diff of each push to a list of recipients |
| External Wiki | Replaces the link to the internal wiki with a link to an external wiki |
| Flowdock | Flowdock is a collaboration web app for technical teams |
-| Gemnasium | Gemnasium monitors your project dependencies and alerts you about updates and security vulnerabilities |
+| Gemnasium _(Has been deprecated in GitLab 11.0)_ | Gemnasium monitors your project dependencies and alerts you about updates and security vulnerabilities |
| [HipChat](hipchat.md) | Private group chat and IM |
| [Irker (IRC gateway)](irker.md) | Send IRC messages, on update, to a list of recipients through an Irker gateway |
| [JIRA](jira.md) | JIRA issue tracker |
...
@@ -11,7 +11,7 @@ You can see GitLab's keyboard shortcuts by using 'shift + ?'
| <kbd>f</kbd> | Focus filter |
| <kbd>p</kbd> + <kbd>b</kbd> | Show/hide the Performance Bar |
| <kbd>?</kbd> | Show/hide this dialog |
-| <kbd>⌘</kbd> + <kbd>shift</kbd> + <kbd>p</kbd> | Toggle markdown preview |
+| <kbd>Cmd</kbd>/<kbd>Ctrl</kbd> + <kbd>Shift</kbd> + <kbd>p</kbd> | Toggle markdown preview |
| <kbd>↑</kbd> | Edit last comment (when focused on an empty textarea) |

## Project Files Browsing
@@ -70,8 +70,8 @@ You can see GitLab's keyboard shortcuts by using 'shift + ?'
| <kbd>→</kbd> or <kbd>l</kbd> | Scroll right |
| <kbd>↑</kbd> or <kbd>k</kbd> | Scroll up |
| <kbd>↓</kbd> or <kbd>j</kbd> | Scroll down |
-| <kbd>shift</kbd> + <kbd>↑</kbd> or <kbd>shift</kbd> + <kbd>k</kbd> | Scroll to top |
-| <kbd>shift</kbd> + <kbd>↓</kbd> or <kbd>shift</kbd> + <kbd>j</kbd> | Scroll to bottom |
+| <kbd>Shift</kbd> + <kbd>↑</kbd> or <kbd>Shift</kbd> + <kbd>k</kbd> | Scroll to top |
+| <kbd>Shift</kbd> + <kbd>↓</kbd> or <kbd>Shift</kbd> + <kbd>j</kbd> | Scroll to bottom |

## Issues and Merge Requests
...
@@ -5,7 +5,7 @@ module API
    helpers do
      def current_settings
        @current_setting ||=
-          (ApplicationSetting.current || ApplicationSetting.create_from_defaults)
+          (ApplicationSetting.current_without_cache || ApplicationSetting.create_from_defaults)
      end
    end
...
@@ -6,7 +6,7 @@ module API
    helpers do
      def current_settings
        @current_setting ||=
-          (ApplicationSetting.current || ApplicationSetting.create_from_defaults)
+          (ApplicationSetting.current_without_cache || ApplicationSetting.create_from_defaults)
      end
    end
...
@@ -23,7 +23,7 @@ module Banzai
      private

      def settings
-        ApplicationSetting.current || ApplicationSetting.create_from_defaults
+        Gitlab::CurrentSettings.current_application_settings
      end

      def plantuml_setup
...
@@ -9,6 +9,10 @@ module Gitlab
    Settings
  end
def self.migrations_hash
@_migrations_hash ||= Digest::MD5.hexdigest(ActiveRecord::Migrator.get_all_versions.to_s)
end
  COM_URL = 'https://gitlab.com'.freeze
  APP_DIRS_PATTERN = %r{^/?(app|config|ee|lib|spec|\(\w*\))}
  SUBDOMAIN_REGEX = %r{\Ahttps://[a-z0-9]+\.gitlab\.com\z}
...
@@ -9,8 +9,8 @@ module Gitlab
        end
      end

-      def fake_application_settings(defaults = ::ApplicationSetting.defaults)
-        Gitlab::FakeApplicationSettings.new(defaults)
+      def fake_application_settings(attributes = {})
+        Gitlab::FakeApplicationSettings.new(::ApplicationSetting.defaults.merge(attributes || {}))
      end

      def method_missing(name, *args, &block)
@@ -25,43 +25,35 @@ module Gitlab
      def ensure_application_settings!
        return in_memory_application_settings if ENV['IN_MEMORY_APPLICATION_SETTINGS'] == 'true'

-        cached_application_settings || uncached_application_settings
-      end
-      def cached_application_settings
-        begin
-          ::ApplicationSetting.cached
-        rescue ::Redis::BaseError, ::Errno::ENOENT, ::Errno::EADDRNOTAVAIL
-          # In case Redis isn't running or the Redis UNIX socket file is not available
-        end
-      end
-      def uncached_application_settings
        return fake_application_settings unless connect_to_db?

-        db_settings = ::ApplicationSetting.current
+        current_settings = ::ApplicationSetting.current

        # If there are pending migrations, it's possible there are columns that
        # need to be added to the application settings. To prevent Rake tasks
        # and other callers from failing, use any loaded settings and return
        # defaults for missing columns.
        if ActiveRecord::Migrator.needs_migration?
-          defaults = ::ApplicationSetting.defaults
-          defaults.merge!(db_settings.attributes.symbolize_keys) if db_settings.present?
-          return fake_application_settings(defaults)
+          return fake_application_settings(current_settings&.attributes)
        end

-        return db_settings if db_settings.present?
+        return current_settings if current_settings.present?

        with_fallback_to_fake_application_settings do
          ::ApplicationSetting.create_from_defaults || in_memory_application_settings
        end
      end

      def in_memory_application_settings
-        @in_memory_application_settings ||= ::ApplicationSetting.new(::ApplicationSetting.defaults) # rubocop:disable Gitlab/ModuleWithInstanceVariables
-      rescue ActiveRecord::StatementInvalid, ActiveRecord::UnknownAttributeError
-        # In case migrations the application_settings table is not created yet,
-        # we fallback to a simple OpenStruct
+        with_fallback_to_fake_application_settings do
+          @in_memory_application_settings ||= ::ApplicationSetting.build_from_defaults # rubocop:disable Gitlab/ModuleWithInstanceVariables
+        end
+      end

      def with_fallback_to_fake_application_settings(&block)
        yield
      rescue
        # In case the application_settings table is not created yet, or if a new
        # ApplicationSetting column is not yet migrated we fallback to a simple OpenStruct
        fake_application_settings
      end
...
@@ -47,7 +47,7 @@ module Gitlab
            project,
            author,
            title: mail.subject,
-            description: message
+            description: message_including_reply
          ).execute
        end
      end
...
@@ -16,8 +16,12 @@ module Gitlab
        @message ||= process_message
      end

-      def process_message
-        message = ReplyParser.new(mail).execute.strip
+      def message_including_reply
+        @message_with_reply ||= process_message(trim_reply: false)
      end

      def process_message(**kwargs)
        message = ReplyParser.new(mail, **kwargs).execute.strip
        add_attachments(message)
      end
...
@@ -4,8 +4,9 @@ module Gitlab
     class ReplyParser
       attr_accessor :message

-      def initialize(message)
+      def initialize(message, trim_reply: true)
         @message = message
+        @trim_reply = trim_reply
       end

       def execute
@@ -13,7 +14,9 @@ module Gitlab
         encoding = body.encoding

-        body = EmailReplyTrimmer.trim(body)
+        if @trim_reply
+          body = EmailReplyTrimmer.trim(body)
+        end

         return '' unless body
...
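A hedged usage sketch of the new trim_reply keyword; the Mail::Message construction mirrors the ReplyParser spec further down, the raw_email_string variable is a placeholder:

# Illustrative sketch only, based on the constructor change above.
mail = Mail::Message.new(raw_email_string) # raw_email_string is a placeholder

Gitlab::Email::ReplyParser.new(mail).execute                    # default: quoted reply text is trimmed
Gitlab::Email::ReplyParser.new(mail, trim_reply: false).execute # keeps "> ..." quoted lines intact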
@@ -53,7 +53,7 @@ module Gitlab
       # Import project via git clone --bare
       # URL must be publicly cloneable
       def import_project(source, timeout)
-        Gitlab::GitalyClient.migrate(:import_repository) do |is_enabled|
+        Gitlab::GitalyClient.migrate(:import_repository, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
           if is_enabled
             gitaly_import_repository(source)
           else
...
@@ -1048,7 +1048,7 @@ module Gitlab
       return @info_attributes if @info_attributes

       content =
-        gitaly_migrate(:get_info_attributes) do |is_enabled|
+        gitaly_migrate(:get_info_attributes, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
           if is_enabled
             gitaly_repository_client.info_attributes
           else
@@ -1334,7 +1334,7 @@ module Gitlab
     end

     def squash_in_progress?(squash_id)
-      gitaly_migrate(:squash_in_progress) do |is_enabled|
+      gitaly_migrate(:squash_in_progress, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
         if is_enabled
           gitaly_repository_client.squash_in_progress?(squash_id)
         else
...
@@ -131,7 +131,7 @@ module Gitlab
     def page_formatted_data(title:, dir: nil, version: nil)
       version = version&.id

-      @repository.gitaly_migrate(:wiki_page_formatted_data) do |is_enabled|
+      @repository.gitaly_migrate(:wiki_page_formatted_data, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
         if is_enabled
           gitaly_wiki_client.get_formatted_data(title: title, dir: dir, version: version)
         else
...
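The three hunks above flip these Gitaly migrations to opt-out. A sketch of the call-site shape, with placeholder names; only the status: keyword and the MigrationStatus::OPT_OUT constant come from the diff:

# Illustrative shape only; :some_rpc, gitaly_path and rugged_path are placeholders.
gitaly_migrate(:some_rpc, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
  if is_enabled
    gitaly_path # the Gitaly RPC, now the default unless explicitly disabled
  else
    rugged_path # the legacy Rugged implementation
  end
end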
@@ -12,6 +12,7 @@ describe Projects::Prometheus::MetricsController do
   end

   describe 'GET #active_common' do
+    context 'when prometheus_adapter can query' do
       before do
         allow(controller).to receive(:prometheus_adapter).and_return(prometheus_adapter)
       end
@@ -54,6 +55,28 @@ describe Projects::Prometheus::MetricsController do
         end
       end
+    end
+
+    context 'when prometheus_adapter cannot query' do
+      it 'renders 404' do
+        prometheus_adapter = double('prometheus_adapter', can_query?: false)
+
+        allow(controller).to receive(:prometheus_adapter).and_return(prometheus_adapter)
+        allow(prometheus_adapter).to receive(:query).with(:matched_metrics).and_return({})
+
+        get :active_common, project_params(format: :json)
+
+        expect(response).to have_gitlab_http_status(404)
+      end
+    end
+
+    context 'when prometheus_adapter is disabled' do
+      it 'renders 404' do
+        get :active_common, project_params(format: :json)
+
+        expect(response).to have_gitlab_http_status(404)
+      end
+    end
   end

   describe '#prometheus_adapter' do
     before do
       allow(controller).to receive(:project).and_return(project)
...
@@ -152,7 +152,7 @@ feature 'Admin updates settings' do
   scenario 'Change CI/CD settings' do
     page.within('.as-ci-cd') do
-      check 'Enabled Auto DevOps (Beta) for projects by default'
+      check 'Enabled Auto DevOps for projects by default'
       fill_in 'Auto devops domain', with: 'domain.com'
       click_button 'Save changes'
     end
...
Return-Path: <jake@adventuretime.ooo>
Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400
Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700
Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
Date: Thu, 13 Jun 2013 17:03:48 -0400
From: Jake the Dog <jake@adventuretime.ooo>
To: incoming+gitlabhq/gitlabhq+auth_token@appmail.adventuretime.ooo
Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
Subject: New Issue by email
Mime-Version: 1.0
Content-Type: text/plain;
charset=ISO-8859-1
Content-Transfer-Encoding: 7bit
X-Sieve: CMU Sieve 2.2
X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu,
13 Jun 2013 14:03:48 -0700 (PDT)
X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1
The reply by email functionality should be extended to allow creating a new issue by email.
even when the email is forwarded to the project which may include lines that begin with ">"
there should be a quote below this line:
> this is a quote
\ No newline at end of file
...
@@ -258,13 +258,19 @@ describe BlobHelper do
     it 'returns full IDE path' do
       Rails.application.routes.default_url_options[:script_name] = nil

-      expect(helper.ide_edit_path(project, "master", "")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master/")
+      expect(helper.ide_edit_path(project, "master", "")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master")
+    end
+
+    it 'returns full IDE path with second -' do
+      Rails.application.routes.default_url_options[:script_name] = nil
+
+      expect(helper.ide_edit_path(project, "testing/slashes", "readme.md")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/testing/slashes/-/readme.md")
     end

     it 'returns IDE path without relative_url_root' do
       Rails.application.routes.default_url_options[:script_name] = "/gitlab"

-      expect(helper.ide_edit_path(project, "master", "")).to eq("/gitlab/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master/")
+      expect(helper.ide_edit_path(project, "master", "")).to eq("/gitlab/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master")
     end
   end
 end
...
@@ -55,7 +55,7 @@ export default (action, payload, state, expectedMutations, expectedActions, done
   };

   // call the action with mocked store and arguments
-  action({ commit, state, dispatch }, payload);
+  action({ commit, state, dispatch, rootState: state }, payload);

   // check if no mutations should have been dispatched
   if (expectedMutations.length === 0) {
...
@@ -107,5 +107,11 @@ describe('ide component', () => {
         vm.mousetrapStopCallback(null, vm.$el.querySelector('.dropdown-input-field'), 't'),
       ).toBe(true);
     });
+
+    it('stops callback in monaco editor', () => {
+      setFixtures('<div class="inputarea"></div>');
+
+      expect(vm.mousetrapStopCallback(null, document.querySelector('.inputarea'), 't')).toBe(true);
+    });
   });
 });
...
@@ -346,4 +346,24 @@ describe('RepoEditor', () => {
       });
     });
   });
+
+  it('calls removePendingTab when old file is pending', done => {
+    spyOnProperty(vm, 'shouldHideEditor').and.returnValue(true);
+    spyOn(vm, 'removePendingTab');
+
+    vm.file.pending = true;
+
+    vm
+      .$nextTick()
+      .then(() => {
+        vm.file = file('testing');
+
+        return vm.$nextTick();
+      })
+      .then(() => {
+        expect(vm.removePendingTab).toHaveBeenCalled();
+      })
+      .then(done)
+      .catch(done.fail);
+  });
 });
-// eslint-disable-next-line import/prefer-default-export
 export const projectData = {
   id: 1,
   name: 'abcproject',
@@ -14,3 +13,49 @@ export const projectData = {
   mergeRequests: {},
   merge_requests_enabled: true,
 };
export const pipelines = [
{
id: 1,
ref: 'master',
sha: '123',
status: 'failed',
},
{
id: 2,
ref: 'master',
sha: '213',
status: 'success',
},
];
export const jobs = [
{
id: 1,
name: 'test',
status: 'failed',
stage: 'test',
duration: 1,
},
{
id: 2,
name: 'test 2',
status: 'failed',
stage: 'test',
duration: 1,
},
{
id: 3,
name: 'test 3',
status: 'failed',
stage: 'test',
duration: 1,
},
{
id: 4,
name: 'test 3',
status: 'failed',
stage: 'build',
duration: 1,
},
];
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import actions, {
requestLatestPipeline,
receiveLatestPipelineError,
receiveLatestPipelineSuccess,
fetchLatestPipeline,
requestJobs,
receiveJobsError,
receiveJobsSuccess,
fetchJobs,
} from '~/ide/stores/modules/pipelines/actions';
import state from '~/ide/stores/modules/pipelines/state';
import * as types from '~/ide/stores/modules/pipelines/mutation_types';
import testAction from '../../../../helpers/vuex_action_helper';
import { pipelines, jobs } from '../../../mock_data';
describe('IDE pipelines actions', () => {
let mockedState;
let mock;
beforeEach(() => {
mockedState = state();
mock = new MockAdapter(axios);
gon.api_version = 'v4';
mockedState.currentProjectId = 'test/project';
});
afterEach(() => {
mock.restore();
});
describe('requestLatestPipeline', () => {
it('commits request', done => {
testAction(
requestLatestPipeline,
null,
mockedState,
[{ type: types.REQUEST_LATEST_PIPELINE }],
[],
done,
);
});
});
describe('receiveLatestPipelineError', () => {
it('commits error', done => {
testAction(
receiveLatestPipelineError,
null,
mockedState,
[{ type: types.RECEIVE_LASTEST_PIPELINE_ERROR }],
[],
done,
);
});
it('creates flash message', () => {
const flashSpy = spyOnDependency(actions, 'flash');
receiveLatestPipelineError({ commit() {} });
expect(flashSpy).toHaveBeenCalled();
});
});
describe('receiveLatestPipelineSuccess', () => {
it('commits pipeline', done => {
testAction(
receiveLatestPipelineSuccess,
pipelines[0],
mockedState,
[{ type: types.RECEIVE_LASTEST_PIPELINE_SUCCESS, payload: pipelines[0] }],
[],
done,
);
});
});
describe('fetchLatestPipeline', () => {
describe('success', () => {
beforeEach(() => {
mock.onGet(/\/api\/v4\/projects\/(.*)\/pipelines(.*)/).replyOnce(200, pipelines);
});
it('dispatches request', done => {
testAction(
fetchLatestPipeline,
'123',
mockedState,
[],
[{ type: 'requestLatestPipeline' }, { type: 'receiveLatestPipelineSuccess' }],
done,
);
});
it('dispatches success with latest pipeline', done => {
testAction(
fetchLatestPipeline,
'123',
mockedState,
[],
[
{ type: 'requestLatestPipeline' },
{ type: 'receiveLatestPipelineSuccess', payload: pipelines[0] },
],
done,
);
});
it('calls axios with correct params', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
fetchLatestPipeline({ dispatch() {}, rootState: state }, '123');
expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
params: {
sha: '123',
per_page: '1',
},
});
});
});
describe('error', () => {
beforeEach(() => {
mock.onGet(/\/api\/v4\/projects\/(.*)\/pipelines(.*)/).replyOnce(500);
});
it('dispatches error', done => {
testAction(
fetchLatestPipeline,
'123',
mockedState,
[],
[{ type: 'requestLatestPipeline' }, { type: 'receiveLatestPipelineError' }],
done,
);
});
});
});
describe('requestJobs', () => {
it('commits request', done => {
testAction(requestJobs, null, mockedState, [{ type: types.REQUEST_JOBS }], [], done);
});
});
describe('receiveJobsError', () => {
it('commits error', done => {
testAction(
receiveJobsError,
null,
mockedState,
[{ type: types.RECEIVE_JOBS_ERROR }],
[],
done,
);
});
it('creates flash message', () => {
const flashSpy = spyOnDependency(actions, 'flash');
receiveJobsError({ commit() {} });
expect(flashSpy).toHaveBeenCalled();
});
});
describe('receiveJobsSuccess', () => {
it('commits jobs', done => {
testAction(
receiveJobsSuccess,
jobs,
mockedState,
[{ type: types.RECEIVE_JOBS_SUCCESS, payload: jobs }],
[],
done,
);
});
});
describe('fetchJobs', () => {
let page = '';
beforeEach(() => {
mockedState.latestPipeline = pipelines[0];
});
describe('success', () => {
beforeEach(() => {
mock.onGet(/\/api\/v4\/projects\/(.*)\/pipelines\/(.*)\/jobs/).replyOnce(() => [
200,
jobs,
{
'x-next-page': page,
},
]);
});
it('dispatches request', done => {
testAction(
fetchJobs,
null,
mockedState,
[],
[{ type: 'requestJobs' }, { type: 'receiveJobsSuccess' }],
done,
);
});
it('dispatches success with latest pipeline', done => {
testAction(
fetchJobs,
null,
mockedState,
[],
[{ type: 'requestJobs' }, { type: 'receiveJobsSuccess', payload: jobs }],
done,
);
});
it('dispatches twice for both pages', done => {
page = '2';
testAction(
fetchJobs,
null,
mockedState,
[],
[
{ type: 'requestJobs' },
{ type: 'receiveJobsSuccess', payload: jobs },
{ type: 'fetchJobs', payload: '2' },
{ type: 'requestJobs' },
{ type: 'receiveJobsSuccess', payload: jobs },
],
done,
);
});
it('calls axios with correct URL', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
fetchJobs({ dispatch() {}, state: mockedState, rootState: mockedState });
expect(apiSpy).toHaveBeenCalledWith('/api/v4/projects/test%2Fproject/pipelines/1/jobs', {
params: { page: '1' },
});
});
it('calls axios with the next page', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
fetchJobs({ dispatch() {}, state: mockedState, rootState: mockedState });
expect(apiSpy).toHaveBeenCalledWith('/api/v4/projects/test%2Fproject/pipelines/1/jobs', {
params: { page: '1' },
});
page = '2';
fetchJobs({ dispatch() {}, state: mockedState, rootState: mockedState }, page);
expect(apiSpy).toHaveBeenCalledWith('/api/v4/projects/test%2Fproject/pipelines/1/jobs', {
params: { page: '2' },
});
});
});
describe('error', () => {
beforeEach(() => {
mock.onGet(/\/api\/v4\/projects\/(.*)\/pipelines(.*)/).replyOnce(500);
});
it('dispatches error', done => {
testAction(
fetchJobs,
null,
mockedState,
[],
[{ type: 'requestJobs' }, { type: 'receiveJobsError' }],
done,
);
});
});
});
});
import * as getters from '~/ide/stores/modules/pipelines/getters';
import state from '~/ide/stores/modules/pipelines/state';
describe('IDE pipeline getters', () => {
let mockedState;
beforeEach(() => {
mockedState = state();
});
describe('hasLatestPipeline', () => {
it('returns false when loading is true', () => {
mockedState.isLoadingPipeline = true;
expect(getters.hasLatestPipeline(mockedState)).toBe(false);
});
it('returns false when pipelines is null', () => {
mockedState.latestPipeline = null;
expect(getters.hasLatestPipeline(mockedState)).toBe(false);
});
it('returns false when loading is true & pipelines is null', () => {
mockedState.latestPipeline = null;
mockedState.isLoadingPipeline = true;
expect(getters.hasLatestPipeline(mockedState)).toBe(false);
});
it('returns true when loading is false & pipelines is an object', () => {
mockedState.latestPipeline = {
id: 1,
};
mockedState.isLoadingPipeline = false;
expect(getters.hasLatestPipeline(mockedState)).toBe(true);
});
});
describe('failedJobs', () => {
it('returns array of failed jobs', () => {
mockedState.stages = [
{
title: 'test',
jobs: [{ id: 1, status: 'failed' }, { id: 2, status: 'success' }],
},
{
title: 'build',
jobs: [{ id: 3, status: 'failed' }, { id: 4, status: 'failed' }],
},
];
expect(getters.failedJobs(mockedState).length).toBe(3);
expect(getters.failedJobs(mockedState)).toEqual([
{
id: 1,
status: jasmine.anything(),
},
{
id: 3,
status: jasmine.anything(),
},
{
id: 4,
status: jasmine.anything(),
},
]);
});
});
});
import mutations from '~/ide/stores/modules/pipelines/mutations';
import state from '~/ide/stores/modules/pipelines/state';
import * as types from '~/ide/stores/modules/pipelines/mutation_types';
import { pipelines, jobs } from '../../../mock_data';
describe('IDE pipelines mutations', () => {
let mockedState;
beforeEach(() => {
mockedState = state();
});
describe(types.REQUEST_LATEST_PIPELINE, () => {
it('sets loading to true', () => {
mutations[types.REQUEST_LATEST_PIPELINE](mockedState);
expect(mockedState.isLoadingPipeline).toBe(true);
});
});
describe(types.RECEIVE_LASTEST_PIPELINE_ERROR, () => {
it('sets loading to false', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_ERROR](mockedState);
expect(mockedState.isLoadingPipeline).toBe(false);
});
});
describe(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, () => {
it('sets loading to false on success', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](mockedState, pipelines[0]);
expect(mockedState.isLoadingPipeline).toBe(false);
});
it('sets latestPipeline', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](mockedState, pipelines[0]);
expect(mockedState.latestPipeline).toEqual({
id: pipelines[0].id,
status: pipelines[0].status,
});
});
it('does not set latest pipeline if pipeline is null', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](mockedState, null);
expect(mockedState.latestPipeline).toEqual(null);
});
});
describe(types.REQUEST_JOBS, () => {
it('sets jobs loading to true', () => {
mutations[types.REQUEST_JOBS](mockedState);
expect(mockedState.isLoadingJobs).toBe(true);
});
});
describe(types.RECEIVE_JOBS_ERROR, () => {
it('sets jobs loading to false', () => {
mutations[types.RECEIVE_JOBS_ERROR](mockedState);
expect(mockedState.isLoadingJobs).toBe(false);
});
});
describe(types.RECEIVE_JOBS_SUCCESS, () => {
it('sets jobs loading to false on success', () => {
mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, jobs);
expect(mockedState.isLoadingJobs).toBe(false);
});
it('sets stages', () => {
mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, jobs);
expect(mockedState.stages.length).toBe(2);
expect(mockedState.stages).toEqual([
{
title: 'test',
jobs: jasmine.anything(),
},
{
title: 'build',
jobs: jasmine.anything(),
},
]);
});
it('sets jobs in stages', () => {
mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, jobs);
expect(mockedState.stages[0].jobs.length).toBe(3);
expect(mockedState.stages[1].jobs.length).toBe(1);
expect(mockedState.stages).toEqual([
{
title: jasmine.anything(),
jobs: jobs.filter(job => job.stage === 'test').map(job => ({
id: job.id,
name: job.name,
status: job.status,
stage: job.stage,
duration: job.duration,
})),
},
{
title: jasmine.anything(),
jobs: jobs.filter(job => job.stage === 'build').map(job => ({
id: job.id,
name: job.name,
status: job.status,
stage: job.stage,
duration: job.duration,
})),
},
]);
});
});
});
 require 'spec_helper'

 describe Gitlab::CurrentSettings do
-  include StubENV
-
   before do
     stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
   end

-  describe '#current_application_settings' do
+  describe '#current_application_settings', :use_clean_rails_memory_store_caching do
     it 'allows keys to be called directly' do
       db_settings = create(:application_setting,
         home_page_url: 'http://mydomain.com',
@@ -19,46 +17,54 @@ describe Gitlab::CurrentSettings do
       expect(described_class.metrics_sample_interval).to be(15)
     end

-    context 'with DB available' do
+    context 'when ENV["IN_MEMORY_APPLICATION_SETTINGS"] is true' do
       before do
-        # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(true)` causes issues
-        # during the initialization phase of the test suite, so instead let's mock the internals of it
-        allow(ActiveRecord::Base.connection).to receive(:active?).and_return(true)
-        allow(ActiveRecord::Base.connection).to receive(:table_exists?).and_call_original
-        allow(ActiveRecord::Base.connection).to receive(:table_exists?).with('application_settings').and_return(true)
+        stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'true')
       end

-      it 'attempts to use cached values first' do
-        expect(ApplicationSetting).to receive(:cached)
+      it 'returns an in-memory ApplicationSetting object' do
+        expect(ApplicationSetting).not_to receive(:current)

         expect(described_class.current_application_settings).to be_a(ApplicationSetting)
+        expect(described_class.current_application_settings).not_to be_persisted
       end
+    end

-      it 'falls back to DB if Redis returns an empty value' do
-        expect(ApplicationSetting).to receive(:cached).and_return(nil)
-        expect(ApplicationSetting).to receive(:last).and_call_original.twice
+    context 'with DB unavailable' do
+      before do
+        # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(false)` causes issues
+        # during the initialization phase of the test suite, so instead let's mock the internals of it
+        allow(ActiveRecord::Base.connection).to receive(:active?).and_return(false)
+      end

-        expect(described_class.current_application_settings).to be_a(ApplicationSetting)
-      end
+      it 'returns an in-memory ApplicationSetting object' do
+        expect(ApplicationSetting).not_to receive(:current)

-      it 'falls back to DB if Redis fails' do
-        db_settings = ApplicationSetting.create!(ApplicationSetting.defaults)
-
-        expect(ApplicationSetting).to receive(:cached).and_raise(::Redis::BaseError)
-        expect(Rails.cache).to receive(:fetch).with(ApplicationSetting::CACHE_KEY).and_raise(Redis::BaseError)
-
-        expect(described_class.current_application_settings).to eq(db_settings)
+        expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
       end
+    end

-      it 'creates default ApplicationSettings if none are present' do
-        expect(ApplicationSetting).to receive(:cached).and_raise(::Redis::BaseError)
-        expect(Rails.cache).to receive(:fetch).with(ApplicationSetting::CACHE_KEY).and_raise(Redis::BaseError)
+    context 'with DB available' do
+      # This method returns the ::ApplicationSetting.defaults hash
+      # but with respect of custom attribute accessors of ApplicationSetting model
+      def settings_from_defaults
+        ar_wrapped_defaults = ::ApplicationSetting.build_from_defaults.attributes
+        ar_wrapped_defaults.slice(*::ApplicationSetting.defaults.keys)
+      end
+
+      before do
+        # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(true)` causes issues
+        # during the initialization phase of the test suite, so instead let's mock the internals of it
+        allow(ActiveRecord::Base.connection).to receive(:active?).and_return(true)
+        allow(ActiveRecord::Base.connection).to receive(:cached_table_exists?).with('application_settings').and_return(true)
+      end

+      it 'creates default ApplicationSettings if none are present' do
         settings = described_class.current_application_settings

         expect(settings).to be_a(ApplicationSetting)
         expect(settings).to be_persisted
-        expect(settings).to have_attributes(ApplicationSetting.defaults)
+        expect(settings).to have_attributes(settings_from_defaults)
       end

       context 'with migrations pending' do
@@ -69,7 +75,7 @@ describe Gitlab::CurrentSettings do
         it 'returns an in-memory ApplicationSetting object' do
           settings = described_class.current_application_settings

-          expect(settings).to be_a(OpenStruct)
+          expect(settings).to be_a(Gitlab::FakeApplicationSettings)
           expect(settings.sign_in_enabled?).to eq(settings.sign_in_enabled)
           expect(settings.sign_up_enabled?).to eq(settings.sign_up_enabled)
         end
@@ -81,7 +87,7 @@ describe Gitlab::CurrentSettings do
           settings = described_class.current_application_settings
           app_defaults = ApplicationSetting.last

-          expect(settings).to be_a(OpenStruct)
+          expect(settings).to be_a(Gitlab::FakeApplicationSettings)
           expect(settings.home_page_url).to eq(db_settings.home_page_url)
           expect(settings.signup_enabled?).to be_falsey
           expect(settings.signup_enabled).to be_falsey
@@ -91,34 +97,29 @@ describe Gitlab::CurrentSettings do
           settings.each { |key, _| expect(settings[key]).to eq(app_defaults[key]) }
         end
       end
-    end

-    context 'with DB unavailable' do
-      before do
-        # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(false)` causes issues
-        # during the initialization phase of the test suite, so instead let's mock the internals of it
-        allow(ActiveRecord::Base.connection).to receive(:active?).and_return(false)
-      end
+      context 'when ApplicationSettings.current is present' do
+        it 'returns the existing application settings' do
+          expect(ApplicationSetting).to receive(:current).and_return(:current_settings)

-      it 'returns an in-memory ApplicationSetting object' do
-        expect(ApplicationSetting).not_to receive(:current)
-        expect(ApplicationSetting).not_to receive(:last)
-
-        expect(described_class.current_application_settings).to be_a(OpenStruct)
+          expect(described_class.current_application_settings).to eq(:current_settings)
+        end
       end
-    end

-    context 'when ENV["IN_MEMORY_APPLICATION_SETTINGS"] is true' do
-      before do
-        stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'true')
-      end
+      context 'when the application_settings table does not exists' do
+        it 'returns an in-memory ApplicationSetting object' do
+          expect(ApplicationSetting).to receive(:create_from_defaults).and_raise(ActiveRecord::StatementInvalid)

-      it 'returns an in-memory ApplicationSetting object' do
-        expect(ApplicationSetting).not_to receive(:current)
-        expect(ApplicationSetting).not_to receive(:last)
+          expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+        end
+      end

-        expect(described_class.current_application_settings).to be_a(ApplicationSetting)
-        expect(described_class.current_application_settings).not_to be_persisted
+      context 'when the application_settings table is not fully migrated' do
+        it 'returns an in-memory ApplicationSetting object' do
+          expect(ApplicationSetting).to receive(:create_from_defaults).and_raise(ActiveRecord::UnknownAttributeError)
+
+          expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+        end
       end
     end
   end
 end
...
@@ -46,6 +46,20 @@ describe Gitlab::Email::Handler::CreateIssueHandler do
         expect(issue.description).to eq('')
       end
     end
+
+    context "when there are quotes in email" do
+      let(:email_raw) { fixture_file("emails/valid_new_issue_with_quote.eml") }
+
+      it "creates a new issue" do
+        expect { receiver.execute }.to change { project.issues.count }.by(1)
+        issue = project.issues.last
+
+        expect(issue.author).to eq(user)
+        expect(issue.title).to eq('New Issue by email')
+        expect(issue.description).to include('reply by email')
+        expect(issue.description).to include('> this is a quote')
+      end
+    end
   end

   context "something is wrong" do
...
@@ -3,8 +3,8 @@ require "spec_helper"
 # Inspired in great part by Discourse's Email::Receiver
 describe Gitlab::Email::ReplyParser do
   describe '#execute' do
-    def test_parse_body(mail_string)
-      described_class.new(Mail::Message.new(mail_string)).execute
+    def test_parse_body(mail_string, params = {})
+      described_class.new(Mail::Message.new(mail_string), params).execute
     end

     it "returns an empty string if the message is blank" do
@@ -212,5 +212,19 @@ describe Gitlab::Email::ReplyParser do
     it "does not wrap links with no href in unnecessary brackets" do
       expect(test_parse_body(fixture_file("emails/html_empty_link.eml"))).to eq("no brackets!")
     end
+
+    it "does not trim reply if trim_reply option is false" do
+      expect(test_parse_body(fixture_file("emails/valid_new_issue_with_quote.eml"), { trim_reply: false }))
+        .to eq(
+          <<-BODY.strip_heredoc.chomp
+          The reply by email functionality should be extended to allow creating a new issue by email.
+          even when the email is forwarded to the project which may include lines that begin with ">"
+          there should be a quote below this line:
+          > this is a quote
+          BODY
+        )
+    end
   end
 end
...
@@ -6759,26 +6759,6 @@
             "default": false,
             "wiki_page_events": true
           },
-          {
-            "id": 92,
-            "title": "Gemnasium",
-            "project_id": 5,
-            "created_at": "2016-06-14T15:01:51.202Z",
-            "updated_at": "2016-06-14T15:01:51.202Z",
-            "active": false,
-            "properties": {},
-            "template": false,
-            "push_events": true,
-            "issues_events": true,
-            "merge_requests_events": true,
-            "tag_push_events": true,
-            "note_events": true,
-            "job_events": true,
-            "type": "GemnasiumService",
-            "category": "common",
-            "default": false,
-            "wiki_page_events": true
-          },
           {
             "id": 91,
             "title": "Flowdock",
...
@@ -3,34 +3,11 @@ require 'rails_helper'
 describe Appearance do
   subject { build(:appearance) }

-  it { is_expected.to be_valid }
+  it { include(CacheableAttributes) }
+  it { expect(described_class.current_without_cache).to eq(described_class.first) }

   it { is_expected.to have_many(:uploads) }

-  describe '.current', :use_clean_rails_memory_store_caching do
-    let!(:appearance) { create(:appearance) }
-
-    it 'returns the current appearance row' do
-      expect(described_class.current).to eq(appearance)
-    end
-
-    it 'caches the result' do
-      expect(described_class).to receive(:first).once
-
-      2.times { described_class.current }
-    end
-  end
-
-  describe '#flush_redis_cache' do
-    it 'flushes the cache in Redis' do
-      appearance = create(:appearance)
-      expect(Rails.cache).to receive(:delete).with(described_class::CACHE_KEY)
-
-      appearance.flush_redis_cache
-    end
-  end
-
   describe '#single_appearance_row' do
     it 'adds an error when more than 1 row exists' do
       create(:appearance)
...
@@ -3,6 +3,9 @@ require 'spec_helper'
 describe ApplicationSetting do
   let(:setting) { described_class.create_from_defaults }

+  it { include(CacheableAttributes) }
+  it { expect(described_class.current_without_cache).to eq(described_class.last) }
+
   it { expect(setting).to be_valid }
   it { expect(setting.uuid).to be_present }
   it { expect(setting).to have_db_column(:auto_devops_enabled) }
@@ -318,33 +321,6 @@ describe ApplicationSetting do
     end
   end

-  describe '.current' do
-    context 'redis unavailable' do
-      it 'returns an ApplicationSetting' do
-        allow(Rails.cache).to receive(:fetch).and_call_original
-        allow(described_class).to receive(:last).and_return(:last)
-        expect(Rails.cache).to receive(:fetch).with(ApplicationSetting::CACHE_KEY).and_raise(ArgumentError)
-
-        expect(described_class.current).to eq(:last)
-      end
-    end
-
-    context 'when an ApplicationSetting is not yet present' do
-      it 'does not cache nil object' do
-        # when missing settings a nil object is returned, but not cached
-        allow(described_class).to receive(:last).and_return(nil).twice
-        expect(described_class.current).to be_nil
-
-        # when the settings are set the method returns a valid object
-        allow(described_class).to receive(:last).and_return(:last)
-        expect(described_class.current).to eq(:last)
-
-        # subsequent calls get everything from cache
-        expect(described_class.current).to eq(:last)
-      end
-    end
-  end
-
   context 'restrict creating duplicates' do
     before do
       described_class.create_from_defaults
...
@@ -79,7 +79,7 @@ describe Clusters::Applications::Prometheus do
     end

     it 'creates proper url' do
-      expect(subject.prometheus_client.url).to eq('http://example.com/api/v1/proxy/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80')
+      expect(subject.prometheus_client.url).to eq('http://example.com/api/v1/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80/proxy')
     end

     it 'copies options and headers from kube client to proxy client' do
...
require 'spec_helper'
describe CacheableAttributes do
let(:minimal_test_class) do
Class.new do
include ActiveModel::Model
extend ActiveModel::Callbacks
define_model_callbacks :commit
include CacheableAttributes
def self.name
'TestClass'
end
def self.first
@_first ||= new('foo' => 'a')
end
def self.last
@_last ||= new('foo' => 'a', 'bar' => 'b')
end
attr_accessor :attributes
def initialize(attrs = {})
@attributes = attrs
end
end
end
shared_context 'with defaults' do
before do
minimal_test_class.define_singleton_method(:defaults) do
{ foo: 'a', bar: 'b', baz: 'c' }
end
end
end
describe '.current_without_cache' do
it 'defaults to last' do
expect(minimal_test_class.current_without_cache).to eq(minimal_test_class.last)
end
it 'can be overriden' do
minimal_test_class.define_singleton_method(:current_without_cache) do
first
end
expect(minimal_test_class.current_without_cache).to eq(minimal_test_class.first)
end
end
describe '.cache_key' do
it 'excludes cache attributes' do
expect(minimal_test_class.cache_key).to eq("TestClass:#{Gitlab::VERSION}:#{Gitlab.migrations_hash}:json")
end
end
describe '.defaults' do
it 'defaults to {}' do
expect(minimal_test_class.defaults).to eq({})
end
context 'with defaults defined' do
include_context 'with defaults'
it 'can be overriden' do
expect(minimal_test_class.defaults).to eq({ foo: 'a', bar: 'b', baz: 'c' })
end
end
end
describe '.build_from_defaults' do
include_context 'with defaults'
context 'without any attributes given' do
it 'initializes a new object with the defaults' do
expect(minimal_test_class.build_from_defaults).not_to be_persisted
end
end
context 'with attributes given' do
it 'initializes a new object with the given attributes merged into the defaults' do
expect(minimal_test_class.build_from_defaults(foo: 'd').attributes[:foo]).to eq('d')
end
end
end
describe '.current', :use_clean_rails_memory_store_caching do
context 'redis unavailable' do
it 'returns an uncached record' do
allow(minimal_test_class).to receive(:last).and_return(:last)
expect(Rails.cache).to receive(:read).and_raise(Redis::BaseError)
expect(minimal_test_class.current).to eq(:last)
end
end
context 'when a record is not yet present' do
it 'does not cache nil object' do
# when missing settings a nil object is returned, but not cached
allow(minimal_test_class).to receive(:last).twice.and_return(nil)
expect(minimal_test_class.current).to be_nil
expect(Rails.cache.exist?(minimal_test_class.cache_key)).to be(false)
end
it 'caches a non-nil object' do
# when the settings are set the method returns a valid object
allow(minimal_test_class).to receive(:last).and_call_original
expect(minimal_test_class.current).to eq(minimal_test_class.last)
expect(Rails.cache.exist?(minimal_test_class.cache_key)).to be(true)
# subsequent calls retrieve the record from the cache
last_record = minimal_test_class.last
expect(minimal_test_class).not_to receive(:last)
expect(minimal_test_class.current.attributes).to eq(last_record.attributes)
end
end
end
describe '.cached', :use_clean_rails_memory_store_caching do
context 'when cache is cold' do
it 'returns nil' do
expect(minimal_test_class.cached).to be_nil
end
end
context 'when cached settings do not include the latest defaults' do
before do
Rails.cache.write(minimal_test_class.cache_key, { bar: 'b', baz: 'c' }.to_json)
minimal_test_class.define_singleton_method(:defaults) do
{ foo: 'a', bar: 'b', baz: 'c' }
end
end
it 'includes attributes from defaults' do
expect(minimal_test_class.cached.attributes[:foo]).to eq(minimal_test_class.defaults[:foo])
end
end
end
describe '#cache!', :use_clean_rails_memory_store_caching do
let(:appearance_record) { create(:appearance) }
it 'caches the attributes' do
appearance_record.cache!
expect(Rails.cache.read(Appearance.cache_key)).to eq(appearance_record.attributes.to_json)
end
end
end
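For context on the spec above, a minimal sketch of how a model is expected to opt into CacheableAttributes; the Widget class and its defaults are invented for illustration, and only the class methods exercised by the spec are assumed to exist:

# Illustrative sketch only; Widget is a made-up model.
class Widget < ActiveRecord::Base
  include CacheableAttributes

  # Consumed by .build_from_defaults and merged into stale cached attributes.
  def self.defaults
    { title: 'Untitled' }
  end
end

Widget.current               # cached read; falls back to .current_without_cache (defaults to .last)
Widget.current_without_cache # direct, uncached read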
...
@@ -26,24 +26,49 @@ describe GemnasiumService do
     end
   end

+  describe "deprecated?" do
+    let(:project) { create(:project, :repository) }
+    let(:gemnasium_service) { described_class.new }
+
+    before do
+      allow(gemnasium_service).to receive_messages(
+        project_id: project.id,
+        project: project,
+        service_hook: true,
+        token: 'verySecret',
+        api_key: 'GemnasiumUserApiKey'
+      )
+    end
+
+    it "is true" do
+      expect(gemnasium_service.deprecated?).to be true
+    end
+
+    it "can't create a new service" do
+      expect(gemnasium_service.save).to be false
+      expect(gemnasium_service.errors[:base].first)
+        .to eq('Gemnasium has been acquired by GitLab in January 2018. Since May 15, 2018, the service provided by Gemnasium is no longer available.')
+    end
+  end
+
   describe "Execute" do
     let(:user) { create(:user) }
     let(:project) { create(:project, :repository) }
+    let(:gemnasium_service) { described_class.new }
+    let(:sample_data) { Gitlab::DataBuilder::Push.build_sample(project, user) }

     before do
-      @gemnasium_service = described_class.new
-      allow(@gemnasium_service).to receive_messages(
+      allow(gemnasium_service).to receive_messages(
         project_id: project.id,
         project: project,
         service_hook: true,
         token: 'verySecret',
         api_key: 'GemnasiumUserApiKey'
       )
-      @sample_data = Gitlab::DataBuilder::Push.build_sample(project, user)
     end

     it "calls Gemnasium service" do
       expect(Gemnasium::GitlabService).to receive(:execute).with(an_instance_of(Hash)).once
-      @gemnasium_service.execute(@sample_data)
+      gemnasium_service.execute(sample_data)
     end
   end
 end
...
@@ -1481,6 +1481,15 @@ cache-base@^1.0.1:
     union-value "^1.0.0"
     unset-value "^1.0.0"

+cache-loader@^1.2.2:
+  version "1.2.2"
+  resolved "https://registry.yarnpkg.com/cache-loader/-/cache-loader-1.2.2.tgz#6d5c38ded959a09cc5d58190ab5af6f73bd353f5"
+  dependencies:
+    loader-utils "^1.1.0"
+    mkdirp "^0.5.1"
+    neo-async "^2.5.0"
+    schema-utils "^0.4.2"
+
 cacheable-request@^2.1.1:
   version "2.1.4"
   resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-2.1.4.tgz#0d808801b6342ad33c91df9d0b44dc09b91e5c3d"
@@ -7693,7 +7702,7 @@ sax@~1.2.1:
   version "1.2.2"
   resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.2.tgz#fd8631a23bc7826bef5d871bdb87378c95647828"

-schema-utils@^0.4.0, schema-utils@^0.4.3, schema-utils@^0.4.4, schema-utils@^0.4.5:
+schema-utils@^0.4.0, schema-utils@^0.4.2, schema-utils@^0.4.3, schema-utils@^0.4.4, schema-utils@^0.4.5:
   version "0.4.5"
   resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.4.5.tgz#21836f0608aac17b78f9e3e24daff14a5ca13a3e"
   dependencies:
@@ -8965,9 +8974,9 @@ vue-hot-reload-api@^2.3.0:
   version "2.3.0"
   resolved "https://registry.yarnpkg.com/vue-hot-reload-api/-/vue-hot-reload-api-2.3.0.tgz#97976142405d13d8efae154749e88c4e358cf926"

-vue-loader@^15.0.12:
-  version "15.0.12"
-  resolved "https://registry.yarnpkg.com/vue-loader/-/vue-loader-15.0.12.tgz#9221e88f1c4f7657d425e40c676cd25671d5d294"
+vue-loader@^15.2.0:
+  version "15.2.0"
+  resolved "https://registry.yarnpkg.com/vue-loader/-/vue-loader-15.2.0.tgz#5a8138e490a1040942d2f10ae68fa72b5a923364"
   dependencies:
     "@vue/component-compiler-utils" "^1.2.1"
     hash-sum "^1.0.2"
...