Commit 40be8977 authored by Nick Thomas's avatar Nick Thomas

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2017-10-05

parents d3cc857c 1d5b5df0
...@@ -23,7 +23,7 @@ gem 'faraday', '~> 0.12' ...@@ -23,7 +23,7 @@ gem 'faraday', '~> 0.12'
# Authentication libraries # Authentication libraries
gem 'devise', '~> 4.2' gem 'devise', '~> 4.2'
gem 'doorkeeper', '~> 4.2.0' gem 'doorkeeper', '~> 4.2.0'
gem 'doorkeeper-openid_connect', '~> 1.1.0' gem 'doorkeeper-openid_connect', '~> 1.2.0'
gem 'omniauth', '~> 1.4.2' gem 'omniauth', '~> 1.4.2'
gem 'omniauth-auth0', '~> 1.4.1' gem 'omniauth-auth0', '~> 1.4.1'
gem 'omniauth-azure-oauth2', '~> 0.0.9' gem 'omniauth-azure-oauth2', '~> 0.0.9'
......
...@@ -88,7 +88,7 @@ GEM ...@@ -88,7 +88,7 @@ GEM
coderay (>= 1.0.0) coderay (>= 1.0.0)
erubis (>= 2.6.6) erubis (>= 2.6.6)
rack (>= 0.9.0) rack (>= 0.9.0)
bindata (2.3.5) bindata (2.4.1)
binding_of_caller (0.7.2) binding_of_caller (0.7.2)
debug_inspector (>= 0.0.1) debug_inspector (>= 0.0.1)
bootstrap-sass (3.3.6) bootstrap-sass (3.3.6)
...@@ -174,9 +174,9 @@ GEM ...@@ -174,9 +174,9 @@ GEM
docile (1.1.5) docile (1.1.5)
domain_name (0.5.20161021) domain_name (0.5.20161021)
unf (>= 0.0.5, < 1.0.0) unf (>= 0.0.5, < 1.0.0)
doorkeeper (4.2.0) doorkeeper (4.2.6)
railties (>= 4.2) railties (>= 4.2)
doorkeeper-openid_connect (1.1.2) doorkeeper-openid_connect (1.2.0)
doorkeeper (~> 4.0) doorkeeper (~> 4.0)
json-jwt (~> 1.6) json-jwt (~> 1.6)
dropzonejs-rails (0.7.2) dropzonejs-rails (0.7.2)
...@@ -438,7 +438,7 @@ GEM ...@@ -438,7 +438,7 @@ GEM
railties (>= 4.2.0) railties (>= 4.2.0)
thor (>= 0.14, < 2.0) thor (>= 0.14, < 2.0)
json (1.8.6) json (1.8.6)
json-jwt (1.7.1) json-jwt (1.7.2)
activesupport activesupport
bindata bindata
multi_json (>= 1.3) multi_json (>= 1.3)
...@@ -710,7 +710,7 @@ GEM ...@@ -710,7 +710,7 @@ GEM
rainbow (2.2.2) rainbow (2.2.2)
rake rake
raindrops (0.18.0) raindrops (0.18.0)
rake (12.0.0) rake (12.1.0)
rblineprof (0.3.6) rblineprof (0.3.6)
debugger-ruby_core_source (~> 1.3) debugger-ruby_core_source (~> 1.3)
rbnacl (4.0.2) rbnacl (4.0.2)
...@@ -1033,7 +1033,7 @@ DEPENDENCIES ...@@ -1033,7 +1033,7 @@ DEPENDENCIES
devise-two-factor (~> 3.0.0) devise-two-factor (~> 3.0.0)
diffy (~> 3.1.0) diffy (~> 3.1.0)
doorkeeper (~> 4.2.0) doorkeeper (~> 4.2.0)
doorkeeper-openid_connect (~> 1.1.0) doorkeeper-openid_connect (~> 1.2.0)
dropzonejs-rails (~> 0.7.1) dropzonejs-rails (~> 0.7.1)
elasticsearch-api (= 5.0.3) elasticsearch-api (= 5.0.3)
elasticsearch-model (~> 0.1.9) elasticsearch-model (~> 0.1.9)
......
...@@ -8,7 +8,9 @@ ...@@ -8,7 +8,9 @@
* - Button Actions. * - Button Actions.
* [Mockup](https://gitlab.com/gitlab-org/gitlab-ce/uploads/2f655655c0eadf655d0ae7467b53002a/environments__deploy-graphic.png) * [Mockup](https://gitlab.com/gitlab-org/gitlab-ce/uploads/2f655655c0eadf655d0ae7467b53002a/environments__deploy-graphic.png)
*/ */
import _ from 'underscore';
import deployBoardSvg from 'empty_states/icons/_deploy_board.svg'; import deployBoardSvg from 'empty_states/icons/_deploy_board.svg';
import { n__ } from '../../locale';
import instanceComponent from './deploy_board_instance_component.vue'; import instanceComponent from './deploy_board_instance_component.vue';
import loadingIcon from '../../vue_shared/components/loading_icon.vue'; import loadingIcon from '../../vue_shared/components/loading_icon.vue';
...@@ -26,40 +28,27 @@ ...@@ -26,40 +28,27 @@
type: Boolean, type: Boolean,
required: true, required: true,
}, },
hasError: { isEmpty: {
type: Boolean, type: Boolean,
required: true, required: true,
}, },
}, },
data() {
return {
deployBoardSvg,
};
},
computed: { computed: {
canRenderDeployBoard() { canRenderDeployBoard() {
return !this.isLoading && !this.hasError && this.deployBoardData.valid; return !this.isLoading && !this.isEmpty && !_.isEmpty(this.deployBoardData);
}, },
canRenderEmptyState() { canRenderEmptyState() {
return !this.isLoading && !this.hasError && !this.deployBoardData.valid; return !this.isLoading && this.isEmpty;
},
canRenderErrorState() {
return !this.isLoading && this.hasError;
}, },
instanceTitle() { instanceTitle() {
let title; return n__('Instance', 'Instances', this.deployBoardData.instances.length);
if (this.deployBoardData.instances.length === 1) {
title = 'Instance';
} else {
title = 'Instances';
}
return title;
}, },
projectName() { projectName() {
return '<projectname>'; return '<projectname>';
}, },
deployBoardSvg() {
return deployBoardSvg;
},
}, },
}; };
</script> </script>
...@@ -128,11 +117,5 @@ ...@@ -128,11 +117,5 @@
</span> </span>
</section> </section>
</div> </div>
<div
v-if="canRenderErrorState"
class="deploy-board-error-message">
We can't fetch the data right now. Please try again later.
</div>
</div> </div>
</script> </script>
...@@ -127,16 +127,10 @@ export default { ...@@ -127,16 +127,10 @@ export default {
/** /**
* Toggles the visibility of the deploy boards of the clicked environment. * Toggles the visibility of the deploy boards of the clicked environment.
*
* @param {Object} model * @param {Object} model
* @return {Object}
*/ */
toggleDeployBoard(model) { toggleDeployBoard(model) {
this.store.toggleDeployBoard(model.id); this.store.toggleDeployBoard(model.id);
if (!model.isDeployboardVisible) {
this.fetchDeployBoard(model, true);
}
}, },
toggleFolder(folder) { toggleFolder(folder) {
...@@ -203,11 +197,6 @@ export default { ...@@ -203,11 +197,6 @@ export default {
if (openFolders.length) { if (openFolders.length) {
openFolders.forEach(folder => this.fetchChildEnvironments(folder)); openFolders.forEach(folder => this.fetchChildEnvironments(folder));
} }
const openDeployBoards = this.store.getOpenDeployBoards();
if (openDeployBoards.length) {
openDeployBoards.forEach(env => this.fetchDeployBoard(env));
}
}, },
errorCallback() { errorCallback() {
...@@ -215,23 +204,6 @@ export default { ...@@ -215,23 +204,6 @@ export default {
// eslint-disable-next-line no-new // eslint-disable-next-line no-new
new Flash('An error occurred while fetching the environments.'); new Flash('An error occurred while fetching the environments.');
}, },
fetchDeployBoard(environment, showLoader = false) {
this.store.updateEnvironmentProp(environment, 'isLoadingDeployBoard', showLoader);
this.service.getDeployBoard(environment.rollout_status_path)
.then(resp => resp.json())
.then((data) => {
this.store.storeDeployBoard(environment.id, data);
this.store.updateEnvironmentProp(environment, 'isLoadingDeployBoard', false);
})
.catch(() => {
this.store.updateEnvironmentProp(environment, 'isLoadingDeployBoard', false);
this.store.updateEnvironmentProp(environment, 'hasErrorDeployBoard', true);
// eslint-disable-next-line no-new
new Flash('An error occurred while fetching the deploy board.');
});
},
}, },
}; };
</script> </script>
......
...@@ -74,7 +74,7 @@ export default { ...@@ -74,7 +74,7 @@ export default {
<deploy-board <deploy-board
:deploy-board-data="model.deployBoardData" :deploy-board-data="model.deployBoardData"
:is-loading="model.isLoadingDeployBoard" :is-loading="model.isLoadingDeployBoard"
:has-error="model.hasErrorDeployBoard" :is-empty="model.isEmptyDeployBoard"
/> />
</div> </div>
</div> </div>
......
...@@ -30,7 +30,7 @@ export default class EnvironmentsStore { ...@@ -30,7 +30,7 @@ export default class EnvironmentsStore {
* If the `size` is bigger than 1, it means it should be rendered as a folder. * If the `size` is bigger than 1, it means it should be rendered as a folder.
* In those cases we add `isFolder` key in order to render it properly. * In those cases we add `isFolder` key in order to render it properly.
* *
* Top level environments - when the size is 1 - with `rollout_status_path` * Top level environments - when the size is 1 - with `rollout_status`
* can render a deploy board. We add `isDeployBoardVisible` and `deployBoardData` * can render a deploy board. We add `isDeployBoardVisible` and `deployBoardData`
* keys to those environments. * keys to those environments.
* The first key will let's us know if we should or not render the deploy board. * The first key will let's us know if we should or not render the deploy board.
...@@ -65,13 +65,15 @@ export default class EnvironmentsStore { ...@@ -65,13 +65,15 @@ export default class EnvironmentsStore {
filtered = Object.assign(filtered, env); filtered = Object.assign(filtered, env);
} }
if (filtered.size === 1 && filtered.rollout_status_path) { if (filtered.size === 1 && filtered.rollout_status) {
filtered = Object.assign({}, filtered, { filtered = Object.assign({}, filtered, {
hasDeployBoard: true, hasDeployBoard: true,
isDeployBoardVisible: oldEnvironmentState.isDeployBoardVisible || false, isDeployBoardVisible: oldEnvironmentState.isDeployBoardVisible === false ?
deployBoardData: oldEnvironmentState.deployBoardData || {}, oldEnvironmentState.isDeployBoardVisible :
isLoadingDeployBoard: oldEnvironmentState.isLoadingDeployBoard || false, true,
hasErrorDeployBoard: oldEnvironmentState.hasErrorDeployBoard || false, deployBoardData: filtered.rollout_status.status === 'found' ? filtered.rollout_status : {},
isLoadingDeployBoard: filtered.rollout_status.status === 'loading',
isEmptyDeployBoard: filtered.rollout_status.status === 'not_found',
}); });
} }
return filtered; return filtered;
......
...@@ -3,18 +3,23 @@ ...@@ -3,18 +3,23 @@
# Automatically sets the layout and ensures an administrator is logged in # Automatically sets the layout and ensures an administrator is logged in
class Admin::ApplicationController < ApplicationController class Admin::ApplicationController < ApplicationController
before_action :authenticate_admin! before_action :authenticate_admin!
before_action :display_geo_information before_action :display_read_only_information
layout 'admin' layout 'admin'
def authenticate_admin! def authenticate_admin!
render_404 unless current_user.admin? render_404 unless current_user.admin?
end end
def display_geo_information def display_read_only_information
return unless Gitlab::Geo.secondary? return unless Gitlab::Database.read_only?
return unless Gitlab::Geo.primary_node_configured?
primary_node = view_context.link_to('primary node', Gitlab::Geo.primary_node.url) flash.now[:notice] = read_only_message
flash.now[:notice] = "You are on a secondary (read-only) Geo node. If you want to make any changes, you must visit the #{primary_node}.".html_safe end
private
# Overridden in EE
def read_only_message
_('You are on a read-only GitLab instance.')
end end
end end
...@@ -12,7 +12,7 @@ module Boards ...@@ -12,7 +12,7 @@ module Boards
def index def index
issues = Boards::Issues::ListService.new(board_parent, current_user, filter_params).execute issues = Boards::Issues::ListService.new(board_parent, current_user, filter_params).execute
issues = issues.page(params[:page]).per(params[:per] || 20) issues = issues.page(params[:page]).per(params[:per] || 20)
make_sure_position_is_set(issues) unless Gitlab::Geo.secondary? make_sure_position_is_set(issues) if Gitlab::Database.read_write?
issues = issues.preload(:project, issues = issues.preload(:project,
:milestone, :milestone,
:assignees, :assignees,
......
class Projects::EnvironmentsController < Projects::ApplicationController class Projects::EnvironmentsController < Projects::ApplicationController
layout 'project' layout 'project'
before_action :authorize_read_environment! before_action :authorize_read_environment!
before_action :authorize_read_deploy_board!, only: :status
before_action :authorize_create_environment!, only: [:new, :create] before_action :authorize_create_environment!, only: [:new, :create]
before_action :authorize_create_deployment!, only: [:stop] before_action :authorize_create_deployment!, only: [:stop]
before_action :authorize_update_environment!, only: [:edit, :update] before_action :authorize_update_environment!, only: [:edit, :update]
before_action :authorize_admin_environment!, only: [:terminal, :terminal_websocket_authorize] before_action :authorize_admin_environment!, only: [:terminal, :terminal_websocket_authorize]
before_action :environment, only: [:show, :edit, :update, :stop, :terminal, :terminal_websocket_authorize, :metrics, :status] before_action :environment, only: [:show, :edit, :update, :stop, :terminal, :terminal_websocket_authorize, :metrics]
before_action :verify_api_request!, only: :terminal_websocket_authorize before_action :verify_api_request!, only: :terminal_websocket_authorize
before_action :expire_etag_cache, only: [:index]
def index def index
@environments = project.environments @environments = project.environments
...@@ -132,25 +132,6 @@ class Projects::EnvironmentsController < Projects::ApplicationController ...@@ -132,25 +132,6 @@ class Projects::EnvironmentsController < Projects::ApplicationController
end end
end end
# The rollout status of an enviroment
def status
unless @environment.deployment_service_ready?
render text: 'Not found', status: 404
return
end
rollout_status = @environment.rollout_status
Gitlab::PollingInterval.set_header(response, interval: 3000) unless rollout_status.try(:complete?)
if rollout_status.nil?
render body: nil, status: 204 # no result yet
else
serializer = RolloutStatusSerializer.new(project: @project, current_user: @current_user)
render json: serializer.represent(rollout_status)
end
end
def additional_metrics def additional_metrics
respond_to do |format| respond_to do |format|
format.json do format.json do
...@@ -167,6 +148,15 @@ class Projects::EnvironmentsController < Projects::ApplicationController ...@@ -167,6 +148,15 @@ class Projects::EnvironmentsController < Projects::ApplicationController
Gitlab::Workhorse.verify_api_request!(request.headers) Gitlab::Workhorse.verify_api_request!(request.headers)
end end
def expire_etag_cache
return if request.format.json?
# this forces to reload json content
Gitlab::EtagCaching::Store.new.tap do |store|
store.touch(project_environments_path(project, format: :json))
end
end
def environment_params def environment_params
params.require(:environment).permit(:name, :external_url) params.require(:environment).permit(:name, :external_url)
end end
......
...@@ -4,6 +4,8 @@ class Projects::LfsApiController < Projects::GitHttpClientController ...@@ -4,6 +4,8 @@ class Projects::LfsApiController < Projects::GitHttpClientController
include GitlabRoutingHelper include GitlabRoutingHelper
include LfsRequest include LfsRequest
prepend ::EE::Projects::LfsApiController
skip_before_action :lfs_check_access!, only: [:deprecated] skip_before_action :lfs_check_access!, only: [:deprecated]
before_action :lfs_check_batch_operation!, only: [:batch] before_action :lfs_check_batch_operation!, only: [:batch]
...@@ -96,14 +98,19 @@ class Projects::LfsApiController < Projects::GitHttpClientController ...@@ -96,14 +98,19 @@ class Projects::LfsApiController < Projects::GitHttpClientController
end end
def lfs_check_batch_operation! def lfs_check_batch_operation!
if upload_request? && Gitlab::Geo.secondary? if upload_request? && Gitlab::Database.read_only?
render( render(
json: { json: {
message: "You cannot write to a secondary GitLab Geo instance. Please use #{geo_primary_default_url_to_repo(project)} instead." message: lfs_read_only_message
}, },
content_type: "application/vnd.git-lfs+json", content_type: 'application/vnd.git-lfs+json',
status: 403 status: 403
) )
end end
end end
# Overridden in EE
def lfs_read_only_message
_('You cannot write to this read-only GitLab instance.')
end
end end
...@@ -15,7 +15,7 @@ class Projects::MergeRequests::ApplicationController < Projects::ApplicationCont ...@@ -15,7 +15,7 @@ class Projects::MergeRequests::ApplicationController < Projects::ApplicationCont
# Make sure merge requests created before 8.0 # Make sure merge requests created before 8.0
# have head file in refs/merge-requests/ # have head file in refs/merge-requests/
def ensure_ref_fetched def ensure_ref_fetched
@merge_request.ensure_ref_fetched @merge_request.ensure_ref_fetched if Gitlab::Database.read_write?
end end
def merge_request_params def merge_request_params
......
...@@ -9,9 +9,7 @@ class SessionsController < Devise::SessionsController ...@@ -9,9 +9,7 @@ class SessionsController < Devise::SessionsController
prepend_before_action :check_initial_setup, only: [:new] prepend_before_action :check_initial_setup, only: [:new]
prepend_before_action :authenticate_with_two_factor, prepend_before_action :authenticate_with_two_factor,
if: :two_factor_enabled?, only: [:create] if: :two_factor_enabled?, only: [:create]
prepend_before_action :store_redirect_path, only: [:new] prepend_before_action :store_redirect_uri, only: [:new]
before_action :gitlab_geo_login, only: [:new]
before_action :gitlab_geo_logout, only: [:destroy]
before_action :auto_sign_in_with_provider, only: [:new] before_action :auto_sign_in_with_provider, only: [:new]
before_action :load_recaptcha before_action :load_recaptcha
...@@ -88,7 +86,11 @@ class SessionsController < Devise::SessionsController ...@@ -88,7 +86,11 @@ class SessionsController < Devise::SessionsController
end end
end end
def store_redirect_path def stored_redirect_uri
@redirect_to ||= stored_location_for(:redirect)
end
def store_redirect_uri
redirect_uri = redirect_uri =
if request.referer.present? && (params['redirect_to_referer'] == 'yes') if request.referer.present? && (params['redirect_to_referer'] == 'yes')
URI(request.referer) URI(request.referer)
...@@ -98,40 +100,22 @@ class SessionsController < Devise::SessionsController ...@@ -98,40 +100,22 @@ class SessionsController < Devise::SessionsController
# Prevent a 'you are already signed in' message directly after signing: # Prevent a 'you are already signed in' message directly after signing:
# we should never redirect to '/users/sign_in' after signing in successfully. # we should never redirect to '/users/sign_in' after signing in successfully.
if redirect_uri.path == new_user_session_path return true if redirect_uri.path == new_user_session_path
return true
elsif redirect_uri.host == Gitlab.config.gitlab.host && redirect_uri.port == Gitlab.config.gitlab.port redirect_to = redirect_uri.to_s if redirect_allowed_to?(redirect_uri)
redirect_to = redirect_uri.to_s
elsif Gitlab::Geo.geo_node?(host: redirect_uri.host, port: redirect_uri.port)
redirect_to = redirect_uri.to_s
end
@redirect_to = redirect_to @redirect_to = redirect_to
store_location_for(:redirect, redirect_to) store_location_for(:redirect, redirect_to)
end end
def two_factor_enabled? # Overridden in EE
find_user.try(:two_factor_enabled?) def redirect_allowed_to?(uri)
end uri.host == Gitlab.config.gitlab.host &&
uri.port == Gitlab.config.gitlab.port
def gitlab_geo_login
return unless Gitlab::Geo.secondary?
return if signed_in?
oauth = Gitlab::Geo::OauthSession.new
# share full url with primary node by oauth state
user_return_to = URI.join(root_url, session[:user_return_to].to_s).to_s
oauth.return_to = @redirect_to || user_return_to
redirect_to oauth_geo_auth_url(state: oauth.generate_oauth_state)
end end
def gitlab_geo_logout def two_factor_enabled?
return unless Gitlab::Geo.secondary? find_user&.two_factor_enabled?
oauth = Gitlab::Geo::OauthSession.new(access_token: session[:access_token])
@geo_logout_state = oauth.generate_logout_state
end end
def auto_sign_in_with_provider def auto_sign_in_with_provider
......
...@@ -59,7 +59,7 @@ module CacheMarkdownField ...@@ -59,7 +59,7 @@ module CacheMarkdownField
# Update every column in a row if any one is invalidated, as we only store # Update every column in a row if any one is invalidated, as we only store
# one version per row # one version per row
def refresh_markdown_cache!(do_update: false) def refresh_markdown_cache
options = { skip_project_check: skip_project_check? } options = { skip_project_check: skip_project_check? }
updates = cached_markdown_fields.markdown_fields.map do |markdown_field| updates = cached_markdown_fields.markdown_fields.map do |markdown_field|
...@@ -71,8 +71,14 @@ module CacheMarkdownField ...@@ -71,8 +71,14 @@ module CacheMarkdownField
updates['cached_markdown_version'] = CacheMarkdownField::CACHE_VERSION updates['cached_markdown_version'] = CacheMarkdownField::CACHE_VERSION
updates.each {|html_field, data| write_attribute(html_field, data) } updates.each {|html_field, data| write_attribute(html_field, data) }
end
def refresh_markdown_cache!
updates = refresh_markdown_cache
return unless persisted? && Gitlab::Database.read_write?
update_columns(updates) if persisted? && do_update update_columns(updates)
end end
def cached_html_up_to_date?(markdown_field) def cached_html_up_to_date?(markdown_field)
...@@ -124,8 +130,8 @@ module CacheMarkdownField ...@@ -124,8 +130,8 @@ module CacheMarkdownField
end end
# Using before_update here conflicts with elasticsearch-model somehow # Using before_update here conflicts with elasticsearch-model somehow
before_create :refresh_markdown_cache!, if: :invalidated_markdown_cache? before_create :refresh_markdown_cache, if: :invalidated_markdown_cache?
before_update :refresh_markdown_cache!, if: :invalidated_markdown_cache? before_update :refresh_markdown_cache, if: :invalidated_markdown_cache?
end end
class_methods do class_methods do
......
...@@ -59,6 +59,9 @@ module ReactiveCaching ...@@ -59,6 +59,9 @@ module ReactiveCaching
raise NotImplementedError raise NotImplementedError
end end
def reactive_cache_updated(*args)
end
def with_reactive_cache(*args, &blk) def with_reactive_cache(*args, &blk)
within_reactive_cache_lifetime(*args) do within_reactive_cache_lifetime(*args) do
data = Rails.cache.read(full_reactive_cache_key(*args)) data = Rails.cache.read(full_reactive_cache_key(*args))
...@@ -77,8 +80,11 @@ module ReactiveCaching ...@@ -77,8 +80,11 @@ module ReactiveCaching
locking_reactive_cache(*args) do locking_reactive_cache(*args) do
within_reactive_cache_lifetime(*args) do within_reactive_cache_lifetime(*args) do
enqueuing_update(*args) do enqueuing_update(*args) do
value = calculate_reactive_cache(*args) key = full_reactive_cache_key(*args)
Rails.cache.write(full_reactive_cache_key(*args), value) new_value = calculate_reactive_cache(*args)
old_value = Rails.cache.read(key)
Rails.cache.write(key, new_value)
reactive_cache_updated(*args) if new_value != old_value
end end
end end
end end
......
...@@ -156,7 +156,7 @@ module Routable ...@@ -156,7 +156,7 @@ module Routable
end end
def update_route def update_route
return if Gitlab::Geo.secondary? return if Gitlab::Database.read_only?
prepare_route prepare_route
route.save route.save
......
...@@ -43,15 +43,17 @@ module TokenAuthenticatable ...@@ -43,15 +43,17 @@ module TokenAuthenticatable
write_attribute(token_field, token) if token write_attribute(token_field, token) if token
end end
# Returns a token, but only saves when the database is in read & write mode
define_method("ensure_#{token_field}!") do define_method("ensure_#{token_field}!") do
send("reset_#{token_field}!") if read_attribute(token_field).blank? # rubocop:disable GitlabSecurity/PublicSend send("reset_#{token_field}!") if read_attribute(token_field).blank? # rubocop:disable GitlabSecurity/PublicSend
read_attribute(token_field) read_attribute(token_field)
end end
# Resets the token, but only saves when the database is in read & write mode
define_method("reset_#{token_field}!") do define_method("reset_#{token_field}!") do
write_new_token(token_field) write_new_token(token_field)
save! save! if Gitlab::Database.read_write?
end end
end end
end end
......
...@@ -501,7 +501,7 @@ class MergeRequest < ActiveRecord::Base ...@@ -501,7 +501,7 @@ class MergeRequest < ActiveRecord::Base
end end
def check_if_can_be_merged def check_if_can_be_merged
return unless unchecked? && !Gitlab::Geo.secondary? return unless unchecked? && Gitlab::Database.read_write?
can_be_merged = can_be_merged =
!broken? && project.repository.can_be_merged?(diff_head_sha, target_branch) !broken? && project.repository.can_be_merged?(diff_head_sha, target_branch)
......
...@@ -812,7 +812,7 @@ class Project < ActiveRecord::Base ...@@ -812,7 +812,7 @@ class Project < ActiveRecord::Base
end end
def cache_has_external_issue_tracker def cache_has_external_issue_tracker
update_column(:has_external_issue_tracker, services.external_issue_trackers.any?) update_column(:has_external_issue_tracker, services.external_issue_trackers.any?) if Gitlab::Database.read_write?
end end
def has_wiki? def has_wiki?
...@@ -832,7 +832,7 @@ class Project < ActiveRecord::Base ...@@ -832,7 +832,7 @@ class Project < ActiveRecord::Base
end end
def cache_has_external_wiki def cache_has_external_wiki
update_column(:has_external_wiki, services.external_wikis.any?) update_column(:has_external_wiki, services.external_wikis.any?) if Gitlab::Database.read_write?
end end
def find_or_initialize_services(exceptions: []) def find_or_initialize_services(exceptions: [])
......
...@@ -17,12 +17,14 @@ class MockDeploymentService < DeploymentService ...@@ -17,12 +17,14 @@ class MockDeploymentService < DeploymentService
end end
def rollout_status(environment) def rollout_status(environment)
OpenStruct.new( case environment.name
instances: rollout_status_instances, when 'staging'
completion: 80, Gitlab::Kubernetes::RolloutStatus.new([], status: :not_found)
valid?: true, when 'test'
complete?: true Gitlab::Kubernetes::RolloutStatus.new([], status: :loading)
) else
Gitlab::Kubernetes::RolloutStatus.new(rollout_status_deployments)
end
end end
private private
...@@ -31,4 +33,8 @@ class MockDeploymentService < DeploymentService ...@@ -31,4 +33,8 @@ class MockDeploymentService < DeploymentService
data = File.read(Rails.root.join('spec', 'fixtures', 'rollout_status_instances.json')) data = File.read(Rails.root.join('spec', 'fixtures', 'rollout_status_instances.json'))
JSON.parse(data) JSON.parse(data)
end end
def rollout_status_deployments
[OpenStruct.new(instances: rollout_status_instances)]
end
end end
...@@ -478,6 +478,14 @@ class User < ActiveRecord::Base ...@@ -478,6 +478,14 @@ class User < ActiveRecord::Base
reset_password_sent_at.present? && reset_password_sent_at >= 1.minute.ago reset_password_sent_at.present? && reset_password_sent_at >= 1.minute.ago
end end
def remember_me!
super if ::Gitlab::Database.read_write?
end
def forget_me!
super if ::Gitlab::Database.read_write?
end
def disable_two_factor! def disable_two_factor!
transaction do transaction do
update_attributes( update_attributes(
......
...@@ -9,7 +9,9 @@ class EnvironmentEntity < Grape::Entity ...@@ -9,7 +9,9 @@ class EnvironmentEntity < Grape::Entity
expose :last_deployment, using: DeploymentEntity expose :last_deployment, using: DeploymentEntity
expose :stop_action? expose :stop_action?
expose :metrics_path, if: -> (environment, _) { environment.has_metrics? } do |environment| expose :rollout_status, if: -> (*) { can_read_deploy_board? }, using: RolloutStatusEntity
expose :metrics_path, if: -> (*) { environment.has_metrics? } do |environment|
metrics_project_environment_path(environment.project, environment) metrics_project_environment_path(environment.project, environment)
end end
...@@ -21,19 +23,26 @@ class EnvironmentEntity < Grape::Entity ...@@ -21,19 +23,26 @@ class EnvironmentEntity < Grape::Entity
stop_project_environment_path(environment.project, environment) stop_project_environment_path(environment.project, environment)
end end
expose :terminal_path, if: ->(environment, _) { environment.deployment_service_ready? } do |environment| expose :terminal_path, if: ->(*) { environment.deployment_service_ready? } do |environment|
can?(request.current_user, :admin_environment, environment.project) && can?(request.current_user, :admin_environment, environment.project) &&
terminal_project_environment_path(environment.project, environment) terminal_project_environment_path(environment.project, environment)
end end
expose :rollout_status_path, if: ->(environment, _) { environment.deployment_service_ready? } do |environment|
can?(request.current_user, :read_deploy_board, environment.project) &&
status_project_environment_path(environment.project, environment, format: :json)
end
expose :folder_path do |environment| expose :folder_path do |environment|
folder_project_environments_path(environment.project, environment.folder_name) folder_project_environments_path(environment.project, environment.folder_name)
end end
expose :created_at, :updated_at expose :created_at, :updated_at
private
alias_method :environment, :object
def current_user
request.current_user
end
def can_read_deploy_board?
can?(current_user, :read_deploy_board, environment.project)
end
end end
class RolloutStatusEntity < Grape::Entity class RolloutStatusEntity < Grape::Entity
include RequestAwareEntity include RequestAwareEntity
expose :instances expose :status, as: :status
expose :completion
expose :valid?, as: :valid
expose :is_completed do |rollout_status| expose :instances, if: -> (rollout_status, _) { rollout_status.found? }
rollout_status.complete? expose :completion, if: -> (rollout_status, _) { rollout_status.found? }
end expose :complete?, as: :is_completed, if: -> (rollout_status, _) { rollout_status.found? }
end end
class RolloutStatusSerializer < BaseSerializer
entity RolloutStatusEntity
end
module Keys module Keys
class LastUsedService class LastUsedService
prepend ::EE::Keys::LastUsedService
TIMEOUT = 1.day.to_i TIMEOUT = 1.day.to_i
attr_reader :key attr_reader :key
...@@ -18,6 +16,8 @@ module Keys ...@@ -18,6 +16,8 @@ module Keys
end end
def update? def update?
return false if ::Gitlab::Database.read_only?
last_used = key.last_used_at last_used = key.last_used_at
return false if last_used && (Time.zone.now - last_used) <= TIMEOUT return false if last_used && (Time.zone.now - last_used) <= TIMEOUT
......
...@@ -14,7 +14,7 @@ module Users ...@@ -14,7 +14,7 @@ module Users
private private
def record_activity def record_activity
Gitlab::UserActivities.record(@author.id) unless Gitlab::Geo.secondary? Gitlab::UserActivities.record(@author.id) if Gitlab::Database.read_write?
Rails.logger.debug("Recorded activity: #{@activity} for User ID: #{@author.id} (username: #{@author.username})") Rails.logger.debug("Recorded activity: #{@activity} for User ID: #{@author.id} (username: #{@author.username})")
end end
......
...@@ -84,7 +84,7 @@ ...@@ -84,7 +84,7 @@
%p %p
.js-health .js-health
- unless Gitlab::Geo.secondary? - if Gitlab::Database.read_write?
.node-actions .node-actions
- if Gitlab::Geo.license_allows? - if Gitlab::Geo.license_allows?
- if node.missing_oauth_application? - if node.missing_oauth_application?
......
module ProjectStartImport
def start(project)
if project.import_started? && project.import_jid == self.jid
return true
end
project.import_start
end
end
...@@ -4,6 +4,7 @@ class RepositoryForkWorker ...@@ -4,6 +4,7 @@ class RepositoryForkWorker
include Sidekiq::Worker include Sidekiq::Worker
include Gitlab::ShellAdapter include Gitlab::ShellAdapter
include DedicatedSidekiqQueue include DedicatedSidekiqQueue
include ProjectStartImport
sidekiq_options status_expiration: StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION sidekiq_options status_expiration: StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION
...@@ -37,7 +38,7 @@ class RepositoryForkWorker ...@@ -37,7 +38,7 @@ class RepositoryForkWorker
private private
def start_fork(project) def start_fork(project)
return true if project.import_start return true if start(project)
Rails.logger.info("Project #{project.full_path} was in inconsistent state (#{project.import_status}) while forking.") Rails.logger.info("Project #{project.full_path} was in inconsistent state (#{project.import_status}) while forking.")
false false
......
...@@ -4,6 +4,7 @@ class RepositoryImportWorker ...@@ -4,6 +4,7 @@ class RepositoryImportWorker
include Sidekiq::Worker include Sidekiq::Worker
include DedicatedSidekiqQueue include DedicatedSidekiqQueue
include ExceptionBacktrace include ExceptionBacktrace
include ProjectStartImport
sidekiq_options status_expiration: StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION sidekiq_options status_expiration: StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION
...@@ -38,7 +39,7 @@ class RepositoryImportWorker ...@@ -38,7 +39,7 @@ class RepositoryImportWorker
private private
def start_import(project) def start_import(project)
return true if project.import_start return true if start(project)
Rails.logger.info("Project #{project.full_path} was in inconsistent state (#{project.import_status}) while importing.") Rails.logger.info("Project #{project.full_path} was in inconsistent state (#{project.import_status}) while importing.")
false false
......
...@@ -4,6 +4,7 @@ class RepositoryUpdateMirrorWorker ...@@ -4,6 +4,7 @@ class RepositoryUpdateMirrorWorker
include Sidekiq::Worker include Sidekiq::Worker
include Gitlab::ShellAdapter include Gitlab::ShellAdapter
include DedicatedSidekiqQueue include DedicatedSidekiqQueue
include ProjectStartImport
LEASE_KEY = 'repository_update_mirror_worker_start_scheduler'.freeze LEASE_KEY = 'repository_update_mirror_worker_start_scheduler'.freeze
LEASE_TIMEOUT = 2.seconds LEASE_TIMEOUT = 2.seconds
...@@ -45,7 +46,7 @@ class RepositoryUpdateMirrorWorker ...@@ -45,7 +46,7 @@ class RepositoryUpdateMirrorWorker
end end
def start_mirror(project) def start_mirror(project)
if project.import_start if start(project)
Rails.logger.info("Mirror update for #{project.full_path} started. Waiting duration: #{project.mirror_waiting_duration}") Rails.logger.info("Mirror update for #{project.full_path} started. Waiting duration: #{project.mirror_waiting_duration}")
Gitlab::Metrics.add_event_with_values( Gitlab::Metrics.add_event_with_values(
:mirrors_running, :mirrors_running,
......
---
title: Schedule repository synchronization when processing events on a Geo secondary
node
merge_request: 2838
author:
type: changed
---
title: Improves visibility of deploy boards
merge_request:
author:
type: changed
---
title: Create idea of read-only database and add method to check for it
merge_request: 2954
author:
type: changed
---
title: Upgrade doorkeeper-openid_connect
merge_request: 14372
author: Markus Koller
type: other
...@@ -175,8 +175,8 @@ module Gitlab ...@@ -175,8 +175,8 @@ module Gitlab
ENV['GITLAB_PATH_OUTSIDE_HOOK'] = ENV['PATH'] ENV['GITLAB_PATH_OUTSIDE_HOOK'] = ENV['PATH']
ENV['GIT_TERMINAL_PROMPT'] = '0' ENV['GIT_TERMINAL_PROMPT'] = '0'
# Gitlab Geo Middleware support # Gitlab Read-only middleware support
config.middleware.insert_after ActionDispatch::Flash, 'Gitlab::Middleware::ReadonlyGeo' config.middleware.insert_after ActionDispatch::Flash, 'Gitlab::Middleware::ReadOnly'
config.generators do |g| config.generators do |g|
g.factory_girl false g.factory_girl false
......
Doorkeeper::OpenidConnect.configure do Doorkeeper::OpenidConnect.configure do
issuer Gitlab.config.gitlab.url issuer Gitlab.config.gitlab.url
jws_private_key Rails.application.secrets.jws_private_key signing_key Rails.application.secrets.openid_connect_signing_key
resource_owner_from_access_token do |access_token| resource_owner_from_access_token do |access_token|
User.active.find_by(id: access_token.resource_owner_id) User.active.find_by(id: access_token.resource_owner_id)
......
...@@ -25,7 +25,7 @@ def create_tokens ...@@ -25,7 +25,7 @@ def create_tokens
secret_key_base: file_secret_key || generate_new_secure_token, secret_key_base: file_secret_key || generate_new_secure_token,
otp_key_base: env_secret_key || file_secret_key || generate_new_secure_token, otp_key_base: env_secret_key || file_secret_key || generate_new_secure_token,
db_key_base: generate_new_secure_token, db_key_base: generate_new_secure_token,
jws_private_key: generate_new_rsa_private_key openid_connect_signing_key: generate_new_rsa_private_key
} }
missing_secrets = set_missing_keys(defaults) missing_secrets = set_missing_keys(defaults)
......
...@@ -224,7 +224,6 @@ constraints(ProjectUrlConstrainer.new) do ...@@ -224,7 +224,6 @@ constraints(ProjectUrlConstrainer.new) do
get :terminal get :terminal
get :metrics get :metrics
get :additional_metrics get :additional_metrics
get :status, constraints: { format: :json }
get '/terminal.ws/authorize', to: 'environments#terminal_websocket_authorize', constraints: { format: nil } get '/terminal.ws/authorize', to: 'environments#terminal_websocket_authorize', constraints: { format: nil }
end end
......
...@@ -395,13 +395,12 @@ your installation compares before proceeding. ...@@ -395,13 +395,12 @@ your installation compares before proceeding.
There are two encryption methods, `simple_tls` and `start_tls`. There are two encryption methods, `simple_tls` and `start_tls`.
For either encryption method, if setting `validate_certificates: false`, TLS For either encryption method, if setting `verify_certificates: false`, TLS
encryption is established with the LDAP server before any LDAP-protocol data is encryption is established with the LDAP server before any LDAP-protocol data is
exchanged but no validation of the LDAP server's SSL certificate is performed. exchanged but no validation of the LDAP server's SSL certificate is performed.
>**Note**: Before GitLab 9.5, `validate_certificates: false` is the default if >**Note**: Before GitLab 9.5, `verify_certificates: false` is the default if
unspecified. unspecified.
>>>>>>> upstream/master
## Limitations ## Limitations
......
...@@ -78,19 +78,20 @@ to see if there are changes since the last time the log was checked ...@@ -78,19 +78,20 @@ to see if there are changes since the last time the log was checked
and will handle repository updates, deletes, changes & renames. and will handle repository updates, deletes, changes & renames.
## Readonly ## Read-only
All **Secondary** nodes are read-only. All **Secondary** nodes are read-only.
We have a Rails Middleware that filters any potentially writing operations The general principle of a [read-only database](verifying_database_capabilities.md#read-only-database)
and prevent user from trying to update the database and getting a 500 error applies to all Geo secondary nodes. So `Gitlab::Database.read_only?`
(see `Gitlab::Middleware::ReadonlyGeo`). will always return `true` on a secondary node.
Database will already be read-only in a replicated setup, so we don't need to When some write actions are not allowed, because the node is a
take any extra step for that. secondary, consider the `Gitlab::Database.read_only?` or `Gitlab::Database.read_write?`
guard, instead of `Gitlab::Geo.secondary?`.
We do use our feature toggle `.secondary?` to coordinate Git operations and do Database itself will already be read-only in a replicated setup, so we
the correct authorization (denying writing on any secondary node). don't need to take any extra step for that.
## File Transfers ## File Transfers
......
...@@ -24,3 +24,15 @@ else ...@@ -24,3 +24,15 @@ else
run_query run_query
end end
``` ```
# Read-only database
The database can be used in read-only mode. In this case we have to
make sure all GET requests don't attempt any write operations to the
database. If one of those requests wants to write to the database, it needs
to be wrapped in a `Gitlab::Database.read_only?` or `Gitlab::Database.read_write?`
guard, to make sure it doesn't for read-only databases.
We have a Rails Middleware that filters any potentially writing
operations (the CUD operations of CRUD) and prevent the user from trying
to update the database and getting a 500 error (see `Gitlab::Middleware::ReadOnly`).
...@@ -48,6 +48,13 @@ chown git:git /var/opt/gitlab/gitlab-rails/working ...@@ -48,6 +48,13 @@ chown git:git /var/opt/gitlab/gitlab-rails/working
You may delete `/var/opt/gitlab/gitlab-rails/working.old` any time. You may delete `/var/opt/gitlab/gitlab-rails/working.old` any time.
Once this is done, we advise restarting GitLab on the secondary nodes for the
new working directory to be used:
```
sudo gitlab-ctl restart
```
## Upgrading from GitLab 9.3 or older ## Upgrading from GitLab 9.3 or older
If you started running Geo on GitLab 9.3 or older, we recommend that you If you started running Geo on GitLab 9.3 or older, we recommend that you
......
module EE
module Admin
module ApplicationController
def read_only_message
raise NotImplementedError unless defined?(super)
return super unless Gitlab::Geo.secondary_with_primary?
link_to_primary_node = view_context.link_to('primary node', Gitlab::Geo.primary_node.url)
(_('You are on a read-only GitLab instance. If you want to make any changes, you must visit the %{link_to_primary_node}.') % { link_to_primary_node: link_to_primary_node }).html_safe
end
end
end
end
module EE
module Projects
module LfsApiController
def lfs_read_only_message
raise NotImplementedError unless defined?(super)
return super unless ::Gitlab::Geo.secondary_with_primary?
(_('You cannot write to a read-only secondary GitLab Geo instance. Please use %{link_to_primary_node} instead.') % { link_to_primary_node: geo_primary_default_url_to_repo(project) }).html_safe
end
end
end
end
...@@ -2,13 +2,45 @@ module EE ...@@ -2,13 +2,45 @@ module EE
module SessionsController module SessionsController
extend ActiveSupport::Concern extend ActiveSupport::Concern
prepended do
before_action :gitlab_geo_login, only: [:new]
before_action :gitlab_geo_logout, only: [:destroy]
end
private private
def gitlab_geo_login
return unless ::Gitlab::Geo.secondary?
return if signed_in?
oauth = ::Gitlab::Geo::OauthSession.new
# share full url with primary node by oauth state
user_return_to = URI.join(root_url, session[:user_return_to].to_s).to_s
oauth.return_to = stored_redirect_uri || user_return_to
redirect_to oauth_geo_auth_url(state: oauth.generate_oauth_state)
end
def gitlab_geo_logout
return unless ::Gitlab::Geo.secondary?
oauth = ::Gitlab::Geo::OauthSession.new(access_token: session[:access_token])
@geo_logout_state = oauth.generate_logout_state
end
def log_failed_login def log_failed_login
::AuditEventService.new(request.filtered_parameters['user']['login'], nil, ip_address: request.remote_ip) ::AuditEventService.new(request.filtered_parameters['user']['login'], nil, ip_address: request.remote_ip)
.for_failed_login.unauth_security_event .for_failed_login.unauth_security_event
super super
end end
def redirect_allowed_to?(uri)
raise NotImplementedError unless defined?(super)
# Redirect is not only allowed to current host, but also to other Geo nodes
super || ::Gitlab::Geo.geo_node?(host: uri.host, port: uri.port)
end
end end
end end
module EE module EE
module KubernetesService module KubernetesService
def rollout_status(environment) def rollout_status(environment)
with_reactive_cache do |data| result = with_reactive_cache do |data|
specs = filter_by_label(data[:deployments], app: environment.slug) specs = filter_by_label(data[:deployments], app: environment.slug)
::Gitlab::Kubernetes::RolloutStatus.from_specs(*specs) ::Gitlab::Kubernetes::RolloutStatus.from_specs(*specs)
end end
result || ::Gitlab::Kubernetes::RolloutStatus.loading
end end
def calculate_reactive_cache def calculate_reactive_cache
...@@ -15,6 +16,15 @@ module EE ...@@ -15,6 +16,15 @@ module EE
result result
end end
def reactive_cache_updated
super
::Gitlab::EtagCaching::Store.new.tap do |store|
store.touch(
::Gitlab::Routing.url_helpers.project_environments_path(project, format: :json))
end
end
def read_deployments def read_deployments
kubeclient = build_kubeclient!(api_path: 'apis/extensions', api_version: 'v1beta1') kubeclient = build_kubeclient!(api_path: 'apis/extensions', api_version: 'v1beta1')
......
...@@ -273,14 +273,6 @@ module EE ...@@ -273,14 +273,6 @@ module EE
.order(order % quoted_values) # `order` cannot escape for us! .order(order % quoted_values) # `order` cannot escape for us!
end end
def cache_has_external_issue_tracker
super unless ::Gitlab::Geo.secondary?
end
def cache_has_external_wiki
super unless ::Gitlab::Geo.secondary?
end
def execute_hooks(data, hooks_scope = :push_hooks) def execute_hooks(data, hooks_scope = :push_hooks)
super super
......
...@@ -84,16 +84,6 @@ module EE ...@@ -84,16 +84,6 @@ module EE
super || auditor? super || auditor?
end end
def remember_me!
return if ::Gitlab::Geo.secondary?
super
end
def forget_me!
return if ::Gitlab::Geo.secondary?
super
end
def email_opted_in_source def email_opted_in_source
email_opted_in_source_id == EMAIL_OPT_IN_SOURCE_ID_GITLAB_COM ? 'GitLab.com' : '' email_opted_in_source_id == EMAIL_OPT_IN_SOURCE_ID_GITLAB_COM ? 'GitLab.com' : ''
end end
......
module EE module EE
module Keys module Gitlab
module LastUsedService module Database
def update? def self.read_only?
raise NotImplementedError unless defined?(super) raise NotImplementedError unless defined?(super)
!::Gitlab::Geo.secondary? && super Gitlab::Geo.secondary? || super
end end
end end
end end
......
...@@ -40,7 +40,7 @@ module Banzai ...@@ -40,7 +40,7 @@ module Banzai
return cacheless_render_field(object, field) return cacheless_render_field(object, field)
end end
object.refresh_markdown_cache!(do_update: update_object?(object)) unless object.cached_html_up_to_date?(field) object.refresh_markdown_cache! unless object.cached_html_up_to_date?(field)
object.cached_html_for(field) object.cached_html_for(field)
end end
...@@ -162,10 +162,5 @@ module Banzai ...@@ -162,10 +162,5 @@ module Banzai
return unless cache_key return unless cache_key
Rails.cache.__send__(:expanded_key, full_cache_key(cache_key, pipeline_name)) # rubocop:disable GitlabSecurity/PublicSend Rails.cache.__send__(:expanded_key, full_cache_key(cache_key, pipeline_name)) # rubocop:disable GitlabSecurity/PublicSend
end end
# GitLab EE needs to disable updates on GET requests in Geo
def self.update_object?(object)
!Gitlab::Geo.secondary?
end
end end
end end
module Gitlab module Gitlab
module Database module Database
extend ::EE::Gitlab::Database
# The max value of INTEGER type is the same between MySQL and PostgreSQL: # The max value of INTEGER type is the same between MySQL and PostgreSQL:
# https://www.postgresql.org/docs/9.2/static/datatype-numeric.html # https://www.postgresql.org/docs/9.2/static/datatype-numeric.html
# http://dev.mysql.com/doc/refman/5.7/en/integer-types.html # http://dev.mysql.com/doc/refman/5.7/en/integer-types.html
...@@ -29,6 +31,15 @@ module Gitlab ...@@ -29,6 +31,15 @@ module Gitlab
adapter_name.casecmp('postgresql').zero? adapter_name.casecmp('postgresql').zero?
end end
# Overridden in EE
def self.read_only?
false
end
def self.read_write?
!self.read_only?
end
def self.version def self.version
database_version.match(/\A(?:PostgreSQL |)([^\s]+).*\z/)[1] database_version.match(/\A(?:PostgreSQL |)([^\s]+).*\z/)[1]
end end
......
...@@ -54,6 +54,10 @@ module Gitlab ...@@ -54,6 +54,10 @@ module Gitlab
Gitlab::Geo.primary_node.present? Gitlab::Geo.primary_node.present?
end end
def self.secondary_with_primary?
self.secondary? && self.primary_node_configured?
end
def self.license_allows? def self.license_allows?
::License.feature_available?(:geo) ::License.feature_available?(:geo)
end end
......
...@@ -65,15 +65,15 @@ module Gitlab ...@@ -65,15 +65,15 @@ module Gitlab
next unless can_replay?(event_log) next unless can_replay?(event_log)
if event_log.repository_updated_event if event_log.repository_updated_event
handle_repository_update(event_log) handle_repository_updated(event_log)
elsif event_log.repository_created_event elsif event_log.repository_created_event
handle_repository_created(event_log) handle_repository_created(event_log)
elsif event_log.repository_deleted_event elsif event_log.repository_deleted_event
handle_repository_delete(event_log) handle_repository_deleted(event_log)
elsif event_log.repositories_changed_event elsif event_log.repositories_changed_event
handle_repositories_changed(event_log.repositories_changed_event) handle_repositories_changed(event_log.repositories_changed_event)
elsif event_log.repository_renamed_event elsif event_log.repository_renamed_event
handle_repository_rename(event_log) handle_repository_renamed(event_log)
end end
end end
end end
...@@ -107,95 +107,98 @@ module Gitlab ...@@ -107,95 +107,98 @@ module Gitlab
end end
def handle_repository_created(event_log) def handle_repository_created(event_log)
created_event = event_log.repository_created_event event = event_log.repository_created_event
registry = ::Geo::ProjectRegistry.find_or_initialize_by(project_id: created_event.project_id) registry = find_or_initialize_registry(event.project_id, resync_repository: true, resync_wiki: event.wiki_path.present?)
registry.resync_repository = true
registry.resync_wiki = created_event.wiki_path.present?
log_event_info( log_event_info(
event_log.created_at, event_log.created_at,
message: 'Repository created', message: 'Repository created',
project_id: created_event.project_id, project_id: event.project_id,
repo_path: created_event.repo_path, repo_path: event.repo_path,
wiki_path: created_event.wiki_path, wiki_path: event.wiki_path,
resync_repository: registry.resync_repository, resync_repository: registry.resync_repository,
resync_wiki: registry.resync_wiki) resync_wiki: registry.resync_wiki)
registry.save! registry.save!
end
def handle_repository_update(event)
updated_event = event.repository_updated_event
registry = ::Geo::ProjectRegistry.find_or_initialize_by(project_id: updated_event.project_id)
case updated_event.source ::Geo::ProjectSyncWorker.perform_async(event.project_id, Time.now)
when 'repository'
registry.resync_repository = true
when 'wiki'
registry.resync_wiki = true
end end
def handle_repository_updated(event_log)
event = event_log.repository_updated_event
registry = find_or_initialize_registry(event.project_id, "resync_#{event.source}" => true)
log_event_info( log_event_info(
event.created_at, event_log.created_at,
message: "Repository update", message: 'Repository update',
project_id: updated_event.project_id, project_id: event.project_id,
source: updated_event.source, source: event.source,
resync_repository: registry.resync_repository, resync_repository: registry.resync_repository,
resync_wiki: registry.resync_wiki) resync_wiki: registry.resync_wiki)
registry.save! registry.save!
::Geo::ProjectSyncWorker.perform_async(event.project_id, Time.now)
end end
def handle_repository_delete(event) def handle_repository_deleted(event_log)
deleted_event = event.repository_deleted_event event = event_log.repository_deleted_event
full_path = File.join(deleted_event.repository_storage_path,
deleted_event.deleted_path) disk_path = File.join(event.repository_storage_path, event.deleted_path)
job_id = ::Geo::RepositoryDestroyService job_id = ::Geo::RepositoryDestroyService
.new(deleted_event.project_id, .new(event.project_id, event.deleted_project_name, disk_path, event.repository_storage_name)
deleted_event.deleted_project_name,
full_path,
deleted_event.repository_storage_name)
.async_execute .async_execute
log_event_info(event.created_at,
message: "Deleted project", log_event_info(
project_id: deleted_event.project_id, event_log.created_at,
full_path: full_path, message: 'Deleted project',
project_id: event.project_id,
disk_path: disk_path,
job_id: job_id) job_id: job_id)
# No need to create a project entry if it doesn't exist # No need to create a project entry if it doesn't exist
::Geo::ProjectRegistry.where(project_id: deleted_event.project_id).delete_all ::Geo::ProjectRegistry.where(project_id: event.project_id).delete_all
end end
def handle_repositories_changed(changed_event) def handle_repositories_changed(event)
return unless Gitlab::Geo.current_node.id == changed_event.geo_node_id return unless Gitlab::Geo.current_node.id == event.geo_node_id
job_id = ::Geo::RepositoriesCleanUpWorker.perform_in(1.hour, changed_event.geo_node_id) job_id = ::Geo::RepositoriesCleanUpWorker.perform_in(1.hour, event.geo_node_id)
if job_id if job_id
log_info('Scheduled repositories clean up for Geo node', geo_node_id: changed_event.geo_node_id, job_id: job_id) log_info('Scheduled repositories clean up for Geo node', geo_node_id: event.geo_node_id, job_id: job_id)
else else
log_error('Could not schedule repositories clean up for Geo node', geo_node_id: changed_event.geo_node_id) log_error('Could not schedule repositories clean up for Geo node', geo_node_id: event.geo_node_id)
end end
end end
def handle_repository_rename(event) def handle_repository_renamed(event_log)
renamed_event = event.repository_renamed_event event = event_log.repository_renamed_event
return unless renamed_event.project_id return unless event.project_id
old_path = renamed_event.old_path_with_namespace old_path = event.old_path_with_namespace
new_path = renamed_event.new_path_with_namespace new_path = event.new_path_with_namespace
job_id = ::Geo::MoveRepositoryService job_id = ::Geo::MoveRepositoryService
.new(renamed_event.project_id, "", old_path, new_path) .new(event.project_id, '', old_path, new_path)
.async_execute .async_execute
log_event_info(event.created_at, log_event_info(
message: "Renaming project", event_log.created_at,
project_id: renamed_event.project_id, message: 'Renaming project',
project_id: event.project_id,
old_path: old_path, old_path: old_path,
new_path: new_path, new_path: new_path,
job_id: job_id) job_id: job_id)
end end
def find_or_initialize_registry(project_id, attrs)
registry = ::Geo::ProjectRegistry.find_or_initialize_by(project_id: project_id)
registry.assign_attributes(attrs)
registry
end
def cursor_delay(created_at) def cursor_delay(created_at)
(Time.now - created_at).to_f.round(3) (Time.now - created_at).to_f.round(3)
end end
......
...@@ -58,7 +58,7 @@ module Gitlab ...@@ -58,7 +58,7 @@ module Gitlab
return false unless access_token return false unless access_token
api = OAuth2::AccessToken.from_hash(oauth_client, access_token: access_token) api = OAuth2::AccessToken.from_hash(oauth_client, access_token: access_token)
api.get('/api/v3/user').parsed api.get('/api/v4/user').parsed
end end
private private
......
...@@ -19,8 +19,8 @@ module Gitlab ...@@ -19,8 +19,8 @@ module Gitlab
command_not_allowed: "The command you're trying to execute is not allowed.", command_not_allowed: "The command you're trying to execute is not allowed.",
upload_pack_disabled_over_http: 'Pulling over HTTP is not allowed.', upload_pack_disabled_over_http: 'Pulling over HTTP is not allowed.',
receive_pack_disabled_over_http: 'Pushing over HTTP is not allowed.', receive_pack_disabled_over_http: 'Pushing over HTTP is not allowed.',
readonly: 'The repository is temporarily read-only. Please try again later.', read_only: 'The repository is temporarily read-only. Please try again later.',
cannot_push_to_secondary_geo: "You can't push code to a secondary GitLab Geo node." cannot_push_to_read_only: "You can't push code to a read-only GitLab instance."
}.freeze }.freeze
DOWNLOAD_COMMANDS = %w{ git-upload-pack git-upload-archive }.freeze DOWNLOAD_COMMANDS = %w{ git-upload-pack git-upload-archive }.freeze
...@@ -173,11 +173,11 @@ module Gitlab ...@@ -173,11 +173,11 @@ module Gitlab
# TODO: please clean this up # TODO: please clean this up
def check_push_access!(changes) def check_push_access!(changes)
if project.repository_read_only? if project.repository_read_only?
raise UnauthorizedError, ERROR_MESSAGES[:readonly] raise UnauthorizedError, ERROR_MESSAGES[:read_only]
end end
if Gitlab::Geo.secondary? if Gitlab::Database.read_only?
raise UnauthorizedError, ERROR_MESSAGES[:cannot_push_to_secondary_geo] raise UnauthorizedError, ERROR_MESSAGES[:cannot_push_to_read_only]
end end
if deploy_key if deploy_key
......
module Gitlab module Gitlab
class GitAccessWiki < GitAccess class GitAccessWiki < GitAccess
ERROR_MESSAGES = { ERROR_MESSAGES = {
geo: "You can't push code to a secondary GitLab Geo node.", read_only: "You can't push code to a read-only GitLab instance.",
write_to_wiki: "You are not allowed to write to this project's wiki." write_to_wiki: "You are not allowed to write to this project's wiki."
}.freeze }.freeze
...@@ -18,8 +18,8 @@ module Gitlab ...@@ -18,8 +18,8 @@ module Gitlab
raise UnauthorizedError, ERROR_MESSAGES[:write_to_wiki] raise UnauthorizedError, ERROR_MESSAGES[:write_to_wiki]
end end
if Gitlab::Geo.enabled? && Gitlab::Geo.secondary? if Gitlab::Database.read_only?
raise UnauthorizedError, ERROR_MESSAGES[:geo] raise UnauthorizedError, ERROR_MESSAGES[:read_only]
end end
true true
......
...@@ -6,26 +6,38 @@ module Gitlab ...@@ -6,26 +6,38 @@ module Gitlab
# other resources, unified by an `app=` label. The rollout status sums the # other resources, unified by an `app=` label. The rollout status sums the
# Kubernetes deployments together. # Kubernetes deployments together.
class RolloutStatus class RolloutStatus
attr_reader :deployments, :instances, :completion attr_reader :deployments, :instances, :completion, :status
def complete? def complete?
completion == 100 completion == 100
end end
def valid? def loading?
@valid @status == :loading
end
def not_found?
@status == :not_found
end
def found?
@status == :found
end end
def self.from_specs(*specs) def self.from_specs(*specs)
return new([], valid: false) if specs.empty? return new([], status: :not_found) if specs.empty?
deployments = specs.map { |spec| ::Gitlab::Kubernetes::Deployment.new(spec) } deployments = specs.map { |spec| ::Gitlab::Kubernetes::Deployment.new(spec) }
deployments.sort_by!(&:order) deployments.sort_by!(&:order)
new(deployments) new(deployments)
end end
def initialize(deployments, valid: true) def self.loading
@valid = valid new([], status: :loading)
end
def initialize(deployments, status: :found)
@status = status
@deployments = deployments @deployments = deployments
@instances = deployments.flat_map(&:instances) @instances = deployments.flat_map(&:instances)
......
module Gitlab module Gitlab
module Middleware module Middleware
class ReadonlyGeo class ReadOnly
DISALLOWED_METHODS = %w(POST PATCH PUT DELETE).freeze DISALLOWED_METHODS = %w(POST PATCH PUT DELETE).freeze
APPLICATION_JSON = 'application/json'.freeze APPLICATION_JSON = 'application/json'.freeze
API_VERSIONS = (3..4) API_VERSIONS = (3..4)
...@@ -13,9 +13,9 @@ module Gitlab ...@@ -13,9 +13,9 @@ module Gitlab
def call(env) def call(env)
@env = env @env = env
if disallowed_request? && Gitlab::Geo.secondary? if disallowed_request? && Gitlab::Database.read_only?
Rails.logger.debug('GitLab Geo: preventing possible non readonly operation') Rails.logger.debug('GitLab ReadOnly: preventing possible non read-only operation')
error_message = 'You cannot do writing operations on a secondary GitLab Geo instance' error_message = 'You cannot do writing operations on a read-only GitLab instance'
if json_request? if json_request?
return [403, { 'Content-Type' => 'application/json' }, [{ 'message' => error_message }.to_json]] return [403, { 'Content-Type' => 'application/json' }, [{ 'message' => error_message }.to_json]]
......
require Rails.root.join('ee/lib/ee/gitlab/database')
require Rails.root.join('lib/gitlab/database') require Rails.root.join('lib/gitlab/database')
require Rails.root.join('lib/gitlab/database/migration_helpers') require Rails.root.join('lib/gitlab/database/migration_helpers')
require Rails.root.join('db/migrate/20151007120511_namespaces_projects_path_lower_indexes') require Rails.root.join('db/migrate/20151007120511_namespaces_projects_path_lower_indexes')
......
require 'spec_helper' require 'spec_helper'
describe Projects::EnvironmentsController do describe Projects::EnvironmentsController do
include KubernetesHelpers
set(:user) { create(:user) } set(:user) { create(:user) }
set(:project) { create(:project) } set(:project) { create(:project) }
...@@ -21,11 +23,19 @@ describe Projects::EnvironmentsController do ...@@ -21,11 +23,19 @@ describe Projects::EnvironmentsController do
expect(response).to have_http_status(:ok) expect(response).to have_http_status(:ok)
end end
it 'expires etag cache to force reload environments list' do
expect_any_instance_of(Gitlab::EtagCaching::Store)
.to receive(:touch).with(project_environments_path(project, format: :json))
get :index, environment_params
end
end end
context 'when requesting JSON response for folders' do context 'when requesting JSON response for folders' do
before do before do
allow_any_instance_of(Environment).to receive(:deployment_service_ready?).and_return(true) allow_any_instance_of(Environment).to receive(:deployment_service_ready?).and_return(true)
allow_any_instance_of(Environment).to receive(:rollout_status).and_return(kube_deployment_rollout_status)
create(:environment, project: project, create(:environment, project: project,
name: 'staging/review-1', name: 'staging/review-1',
...@@ -49,14 +59,18 @@ describe Projects::EnvironmentsController do ...@@ -49,14 +59,18 @@ describe Projects::EnvironmentsController do
get :index, environment_params(format: :json, scope: :available) get :index, environment_params(format: :json, scope: :available)
end end
it 'responds with matching schema' do
expect(response).to match_response_schema('environments')
end
it 'responds with a payload describing available environments' do it 'responds with a payload describing available environments' do
expect(environments.count).to eq 2 expect(environments.count).to eq 2
expect(environments.first['name']).to eq 'production' expect(environments.first['name']).to eq 'production'
expect(environments.first['latest']['rollout_status_path']).to be_present expect(environments.first['latest']['rollout_status']).to be_present
expect(environments.second['name']).to eq 'staging' expect(environments.second['name']).to eq 'staging'
expect(environments.second['size']).to eq 2 expect(environments.second['size']).to eq 2
expect(environments.second['latest']['name']).to eq 'staging/review-2' expect(environments.second['latest']['name']).to eq 'staging/review-2'
expect(environments.second['latest']['rollout_status_path']).to be_present expect(environments.second['latest']['rollout_status']).to be_present
end end
it 'contains values describing environment scopes sizes' do it 'contains values describing environment scopes sizes' do
...@@ -96,8 +110,8 @@ describe Projects::EnvironmentsController do ...@@ -96,8 +110,8 @@ describe Projects::EnvironmentsController do
end end
it 'does not return the rollout_status_path attribute' do it 'does not return the rollout_status_path attribute' do
expect(environments.first['latest']['rollout_status_path']).to be_blank expect(environments.first['latest']['rollout_status']).not_to be_present
expect(environments.second['latest']['rollout_status_path']).to be_blank expect(environments.second['latest']['rollout_status']).not_to be_present
end end
end end
end end
...@@ -289,59 +303,6 @@ describe Projects::EnvironmentsController do ...@@ -289,59 +303,6 @@ describe Projects::EnvironmentsController do
end end
end end
describe 'GET #status' do
context 'without deployment service' do
it 'returns 404' do
get :status, environment_params
expect(response.status).to eq(404)
end
end
context 'with deployment service' do
let(:project) { create(:kubernetes_project) }
let(:environment) { create(:environment, name: 'production', project: project) }
before do
stub_licensed_features(deploy_board: true)
allow_any_instance_of(Environment).to receive(:deployment_service_ready?).and_return(true)
end
it 'returns 204 until the rollout status is present' do
expect_any_instance_of(Environment)
.to receive(:rollout_status)
.and_return(nil)
get :status, environment_params
expect(response.status).to eq(204)
expect(response.headers['Poll-Interval']).to eq("3000")
end
it 'returns the rollout status when present' do
expect_any_instance_of(Environment)
.to receive(:rollout_status)
.and_return(::Gitlab::Kubernetes::RolloutStatus.new([]))
get :status, environment_params
expect(response.status).to eq(200)
end
end
context 'when license does not has the GitLab_DeployBoard add-on' do
before do
stub_licensed_features(deploy_board: false)
end
it 'does not return any data' do
get :status, environment_params
expect(response).to have_http_status(:not_found)
end
end
end
describe 'GET #metrics' do describe 'GET #metrics' do
before do before do
allow(controller).to receive(:environment).and_return(environment) allow(controller).to receive(:environment).and_return(environment)
......
...@@ -83,8 +83,8 @@ describe EE::User do ...@@ -83,8 +83,8 @@ describe EE::User do
expect(subject.reload.remember_created_at).to be_nil expect(subject.reload.remember_created_at).to be_nil
end end
it 'does not clear remember_created_at when in a Geo secondary node' do it 'does not clear remember_created_at when in a GitLab read-only instance' do
allow(Gitlab::Geo).to receive(:secondary?) { true } allow(Gitlab::Database).to receive(:read_only?) { true }
expect { subject.forget_me! }.not_to change(subject, :remember_created_at) expect { subject.forget_me! }.not_to change(subject, :remember_created_at)
end end
...@@ -99,8 +99,8 @@ describe EE::User do ...@@ -99,8 +99,8 @@ describe EE::User do
expect(subject.reload.remember_created_at).not_to be_nil expect(subject.reload.remember_created_at).not_to be_nil
end end
it 'does not update remember_created_at when in a Geo secondary node' do it 'does not update remember_created_at when in a Geo read-only instance' do
allow(Gitlab::Geo).to receive(:secondary?) { true } allow(Gitlab::Database).to receive(:read_only?) { true }
expect { subject.remember_me! }.not_to change(subject, :remember_created_at) expect { subject.remember_me! }.not_to change(subject, :remember_created_at)
end end
......
...@@ -9,6 +9,7 @@ describe KubernetesService, models: true, use_clean_rails_memory_store_caching: ...@@ -9,6 +9,7 @@ describe KubernetesService, models: true, use_clean_rails_memory_store_caching:
describe '#rollout_status' do describe '#rollout_status' do
let(:environment) { build(:environment, project: project, name: "env", slug: "env-000000") } let(:environment) { build(:environment, project: project, name: "env", slug: "env-000000") }
subject(:rollout_status) { service.rollout_status(environment) } subject(:rollout_status) { service.rollout_status(environment) }
context 'with valid deployments' do context 'with valid deployments' do
...@@ -24,5 +25,30 @@ describe KubernetesService, models: true, use_clean_rails_memory_store_caching: ...@@ -24,5 +25,30 @@ describe KubernetesService, models: true, use_clean_rails_memory_store_caching:
expect(rollout_status.deployments.map(&:labels)).to eq([{ 'app' => 'env-000000' }]) expect(rollout_status.deployments.map(&:labels)).to eq([{ 'app' => 'env-000000' }])
end end
end end
context 'with empty list of deployments' do
before do
stub_reactive_cache(
service,
deployments: []
)
end
it 'creates a matching RolloutStatus' do
expect(rollout_status).to be_kind_of(::Gitlab::Kubernetes::RolloutStatus)
expect(rollout_status).to be_not_found
end
end
context 'not yet loaded deployments' do
before do
stub_reactive_cache
end
it 'creates a matching RolloutStatus' do
expect(rollout_status).to be_kind_of(::Gitlab::Kubernetes::RolloutStatus)
expect(rollout_status).to be_loading
end
end
end end
end end
require 'spec_helper' require 'spec_helper'
describe Keys::LastUsedService do describe Keys::LastUsedService do
it 'does not run on Geo secondaries', :clean_gitlab_redis_shared_state do it 'does not run on read-only GitLab instances', :clean_gitlab_redis_shared_state do
key = create(:key, last_used_at: 1.year.ago) key = create(:key, last_used_at: 1.year.ago)
original_time = key.last_used_at original_time = key.last_used_at
allow(::Gitlab::Geo).to receive(:secondary?).and_return(true) allow(::Gitlab::Database).to receive(:read_only?).and_return(true)
described_class.new(key).execute described_class.new(key).execute
expect(key.reload.last_used_at).to be_like_time(original_time) expect(key.reload.last_used_at).to be_like_time(original_time)
......
...@@ -174,7 +174,7 @@ FactoryGirl.define do ...@@ -174,7 +174,7 @@ FactoryGirl.define do
end end
end end
trait :readonly do trait :read_only do
repository_read_only true repository_read_only true
end end
......
{
"additionalProperties": false,
"properties": {
"created_at": {
"type": "string"
},
"id": {
"type": "integer"
},
"iid": {
"type": "integer"
},
"last?": {
"type": "boolean"
},
"ref": {
"additionalProperties": false,
"properties": {
"name": {
"type": "string"
}
},
"required": [
"name"
],
"type": "object"
},
"sha": {
"type": "string"
},
"tag": {
"type": "boolean"
}
},
"required": [
"sha",
"created_at",
"iid",
"tag",
"last?",
"ref",
"id"
],
"type": "object"
}
...@@ -3,49 +3,7 @@ ...@@ -3,49 +3,7 @@
"properties": { "properties": {
"deployments": { "deployments": {
"items": { "items": {
"additionalProperties": false, "$ref": "deployment.json"
"properties": {
"created_at": {
"type": "string"
},
"id": {
"type": "integer"
},
"iid": {
"type": "integer"
},
"last?": {
"type": "boolean"
},
"ref": {
"additionalProperties": false,
"properties": {
"name": {
"type": "string"
}
},
"required": [
"name"
],
"type": "object"
},
"sha": {
"type": "string"
},
"tag": {
"type": "boolean"
}
},
"required": [
"sha",
"created_at",
"iid",
"tag",
"last?",
"ref",
"id"
],
"type": "object"
}, },
"minItems": 1, "minItems": 1,
"type": "array" "type": "array"
......
{
"type": "object",
"additionalProperties": false,
"required": [
"id",
"name",
"state",
"last_deployment",
"environment_path",
"created_at",
"updated_at"
],
"properties": {
"id": {
"type": "integer"
},
"name": {
"type": "string"
},
"state": {
"type": "string"
},
"external_url": {
"type": "string"
},
"environment_type": {
"type": [
"string",
"null"
]
},
"last_deployment": {
"oneOf": [
{
"$ref": "deployment.json"
},
{
"type": ["null"]
}
]
},
"stop_action?": {
"type": "boolean"
},
"rollout_status": {
"$ref": "rollout_status.json"
},
"environment_path": {
"type": "string"
},
"stop_path": {
"type": "string"
},
"terminal_path": {
"type": "string"
},
"folder_path": {
"type": "string"
},
"created_at": {
"type": "string"
},
"updated_at": {
"type": "string"
}
}
}
{
"additionalProperties": false,
"properties": {
"environments": {
"items": {
"$ref": "environments_group.json"
},
"minItems": 1,
"type": "array"
},
"available_count": {
"type": "integer"
},
"stopped_count": {
"type": "integer"
}
},
"required": [
"environments",
"available_count",
"stopped_count"
],
"type": "object"
}
{
"type": "object",
"required": [
"name",
"size",
"latest"
],
"additionalProperties": false,
"properties": {
"name": {
"type": "string"
},
"size": {
"type": "integer"
},
"latest": {
"$ref": "environment.json"
}
}
}
{
"type": "object",
"additionalProperties": false,
"required": [
"status"
],
"properties": {
"status": {
"type": "string"
},
"completion": {
"type": "integer"
},
"is_completed": {
"type": "boolean"
},
"instances": {
"type": "array",
"items": {
"additionalProperties": false,
"type": "object",
"required": [
"status",
"tooltip",
"track",
"stable"
],
"properties": {
"status": {
"type": "string"
},
"tooltip": {
"type": "string"
},
"track": {
"type": "string"
},
"stable": {
"type": "boolean"
}
}
}
}
}
}
...@@ -36,10 +36,10 @@ describe 'create_tokens' do ...@@ -36,10 +36,10 @@ describe 'create_tokens' do
expect(keys).to all(match(HEX_KEY)) expect(keys).to all(match(HEX_KEY))
end end
it 'generates an RSA key for jws_private_key' do it 'generates an RSA key for openid_connect_signing_key' do
create_tokens create_tokens
keys = secrets.values_at(:jws_private_key) keys = secrets.values_at(:openid_connect_signing_key)
expect(keys.uniq).to eq(keys) expect(keys.uniq).to eq(keys)
expect(keys).to all(match(RSA_KEY)) expect(keys).to all(match(RSA_KEY))
...@@ -49,7 +49,7 @@ describe 'create_tokens' do ...@@ -49,7 +49,7 @@ describe 'create_tokens' do
expect(self).to receive(:warn_missing_secret).with('secret_key_base') expect(self).to receive(:warn_missing_secret).with('secret_key_base')
expect(self).to receive(:warn_missing_secret).with('otp_key_base') expect(self).to receive(:warn_missing_secret).with('otp_key_base')
expect(self).to receive(:warn_missing_secret).with('db_key_base') expect(self).to receive(:warn_missing_secret).with('db_key_base')
expect(self).to receive(:warn_missing_secret).with('jws_private_key') expect(self).to receive(:warn_missing_secret).with('openid_connect_signing_key')
create_tokens create_tokens
end end
...@@ -61,7 +61,7 @@ describe 'create_tokens' do ...@@ -61,7 +61,7 @@ describe 'create_tokens' do
expect(new_secrets['secret_key_base']).to eq(secrets.secret_key_base) expect(new_secrets['secret_key_base']).to eq(secrets.secret_key_base)
expect(new_secrets['otp_key_base']).to eq(secrets.otp_key_base) expect(new_secrets['otp_key_base']).to eq(secrets.otp_key_base)
expect(new_secrets['db_key_base']).to eq(secrets.db_key_base) expect(new_secrets['db_key_base']).to eq(secrets.db_key_base)
expect(new_secrets['jws_private_key']).to eq(secrets.jws_private_key) expect(new_secrets['openid_connect_signing_key']).to eq(secrets.openid_connect_signing_key)
end end
create_tokens create_tokens
...@@ -77,7 +77,7 @@ describe 'create_tokens' do ...@@ -77,7 +77,7 @@ describe 'create_tokens' do
context 'when the other secrets all exist' do context 'when the other secrets all exist' do
before do before do
secrets.db_key_base = 'db_key_base' secrets.db_key_base = 'db_key_base'
secrets.jws_private_key = 'jws_private_key' secrets.openid_connect_signing_key = 'openid_connect_signing_key'
allow(File).to receive(:exist?).with('.secret').and_return(true) allow(File).to receive(:exist?).with('.secret').and_return(true)
allow(File).to receive(:read).with('.secret').and_return('file_key') allow(File).to receive(:read).with('.secret').and_return('file_key')
...@@ -88,7 +88,7 @@ describe 'create_tokens' do ...@@ -88,7 +88,7 @@ describe 'create_tokens' do
stub_env('SECRET_KEY_BASE', 'env_key') stub_env('SECRET_KEY_BASE', 'env_key')
secrets.secret_key_base = 'secret_key_base' secrets.secret_key_base = 'secret_key_base'
secrets.otp_key_base = 'otp_key_base' secrets.otp_key_base = 'otp_key_base'
secrets.jws_private_key = 'jws_private_key' secrets.openid_connect_signing_key = 'openid_connect_signing_key'
end end
it 'does not issue a warning' do it 'does not issue a warning' do
...@@ -114,7 +114,7 @@ describe 'create_tokens' do ...@@ -114,7 +114,7 @@ describe 'create_tokens' do
before do before do
secrets.secret_key_base = 'secret_key_base' secrets.secret_key_base = 'secret_key_base'
secrets.otp_key_base = 'otp_key_base' secrets.otp_key_base = 'otp_key_base'
secrets.jws_private_key = 'jws_private_key' secrets.openid_connect_signing_key = 'openid_connect_signing_key'
end end
it 'does not write any files' do it 'does not write any files' do
...@@ -129,7 +129,7 @@ describe 'create_tokens' do ...@@ -129,7 +129,7 @@ describe 'create_tokens' do
expect(secrets.secret_key_base).to eq('secret_key_base') expect(secrets.secret_key_base).to eq('secret_key_base')
expect(secrets.otp_key_base).to eq('otp_key_base') expect(secrets.otp_key_base).to eq('otp_key_base')
expect(secrets.db_key_base).to eq('db_key_base') expect(secrets.db_key_base).to eq('db_key_base')
expect(secrets.jws_private_key).to eq('jws_private_key') expect(secrets.openid_connect_signing_key).to eq('openid_connect_signing_key')
end end
it 'deletes the .secret file' do it 'deletes the .secret file' do
...@@ -153,7 +153,7 @@ describe 'create_tokens' do ...@@ -153,7 +153,7 @@ describe 'create_tokens' do
expect(new_secrets['secret_key_base']).to eq('file_key') expect(new_secrets['secret_key_base']).to eq('file_key')
expect(new_secrets['otp_key_base']).to eq('file_key') expect(new_secrets['otp_key_base']).to eq('file_key')
expect(new_secrets['db_key_base']).to eq('db_key_base') expect(new_secrets['db_key_base']).to eq('db_key_base')
expect(new_secrets['jws_private_key']).to eq('jws_private_key') expect(new_secrets['openid_connect_signing_key']).to eq('openid_connect_signing_key')
end end
create_tokens create_tokens
......
import Vue from 'vue'; import Vue from 'vue';
import DeployBoard from '~/environments/components/deploy_board_component.vue'; import DeployBoard from '~/environments/components/deploy_board_component.vue';
import { deployBoardMockData, invalidDeployBoardMockData } from './mock_data'; import { deployBoardMockData } from './mock_data';
describe('Deploy Board', () => { describe('Deploy Board', () => {
let DeployBoardComponent; let DeployBoardComponent;
...@@ -17,7 +17,7 @@ describe('Deploy Board', () => { ...@@ -17,7 +17,7 @@ describe('Deploy Board', () => {
propsData: { propsData: {
deployBoardData: deployBoardMockData, deployBoardData: deployBoardMockData,
isLoading: false, isLoading: false,
hasError: false, isEmpty: false,
}, },
}).$mount(); }).$mount();
}); });
...@@ -46,15 +46,15 @@ describe('Deploy Board', () => { ...@@ -46,15 +46,15 @@ describe('Deploy Board', () => {
}); });
}); });
describe('without valid data', () => { describe('with empty state', () => {
let component; let component;
beforeEach(() => { beforeEach(() => {
component = new DeployBoardComponent({ component = new DeployBoardComponent({
propsData: { propsData: {
deployBoardData: invalidDeployBoardMockData, deployBoardData: {},
isLoading: false, isLoading: false,
hasError: false, isEmpty: true,
}, },
}).$mount(); }).$mount();
}); });
...@@ -65,21 +65,21 @@ describe('Deploy Board', () => { ...@@ -65,21 +65,21 @@ describe('Deploy Board', () => {
}); });
}); });
describe('with error', () => { describe('with loading state', () => {
let component; let component;
beforeEach(() => { beforeEach(() => {
component = new DeployBoardComponent({ component = new DeployBoardComponent({
propsData: { propsData: {
deployBoardData: {}, deployBoardData: {},
isLoading: false, isLoading: true,
hasError: true, isEmpty: false,
}, },
}).$mount(); }).$mount();
}); });
it('should render empty state', () => { it('should render loading spinner', () => {
expect(component.$el.children.length).toEqual(1); expect(component.$el.querySelector('.fa-spin')).toBeDefined();
}); });
}); });
}); });
...@@ -37,12 +37,11 @@ describe('Environment item', () => { ...@@ -37,12 +37,11 @@ describe('Environment item', () => {
size: 1, size: 1,
environment_path: 'url', environment_path: 'url',
id: 1, id: 1,
rollout_status_path: 'url',
hasDeployBoard: true, hasDeployBoard: true,
deployBoardData: deployBoardMockData, deployBoardData: deployBoardMockData,
isDeployBoardVisible: true, isDeployBoardVisible: true,
isLoadingDeployBoard: false, isLoadingDeployBoard: false,
hasErrorDeployBoard: false, isEmptyDeployBoard: false,
}; };
const component = new EnvironmentTable({ const component = new EnvironmentTable({
...@@ -66,7 +65,6 @@ describe('Environment item', () => { ...@@ -66,7 +65,6 @@ describe('Environment item', () => {
size: 1, size: 1,
environment_path: 'url', environment_path: 'url',
id: 1, id: 1,
rollout_status_path: 'url',
hasDeployBoard: true, hasDeployBoard: true,
deployBoardData: { deployBoardData: {
instances: [ instances: [
......
...@@ -29,12 +29,12 @@ describe('Store', () => { ...@@ -29,12 +29,12 @@ describe('Store', () => {
stop_path: '/root/review-app/environments/7/stop', stop_path: '/root/review-app/environments/7/stop',
created_at: '2017-01-31T10:53:46.894Z', created_at: '2017-01-31T10:53:46.894Z',
updated_at: '2017-01-31T10:53:46.894Z', updated_at: '2017-01-31T10:53:46.894Z',
rollout_status_path: '/path', rollout_status: {},
hasDeployBoard: true, hasDeployBoard: true,
isDeployBoardVisible: false, isDeployBoardVisible: true,
deployBoardData: {}, deployBoardData: {},
isLoadingDeployBoard: false, isLoadingDeployBoard: false,
hasErrorDeployBoard: false, isEmptyDeployBoard: false,
}; };
store.storeEnvironments(serverData); store.storeEnvironments(serverData);
...@@ -58,20 +58,20 @@ describe('Store', () => { ...@@ -58,20 +58,20 @@ describe('Store', () => {
expect(store.state.environments.length).toEqual(serverData.length); expect(store.state.environments.length).toEqual(serverData.length);
}); });
it('should store a non folder environment with deploy board if rollout_status_path key is provided', () => { it('should store a non folder environment with deploy board if rollout_status key is provided', () => {
const environment = { const environment = {
name: 'foo', name: 'foo',
size: 1, size: 1,
latest: { latest: {
id: 1, id: 1,
rollout_status_path: 'url', rollout_status: deployBoardMockData,
}, },
}; };
store.storeEnvironments([environment]); store.storeEnvironments([environment]);
expect(store.state.environments[0].hasDeployBoard).toEqual(true); expect(store.state.environments[0].hasDeployBoard).toEqual(true);
expect(store.state.environments[0].isDeployBoardVisible).toEqual(false); expect(store.state.environments[0].isDeployBoardVisible).toEqual(true);
expect(store.state.environments[0].deployBoardData).toEqual({}); expect(store.state.environments[0].deployBoardData).toEqual(deployBoardMockData);
}); });
it('should add folder keys when environment is a folder', () => { it('should add folder keys when environment is a folder', () => {
...@@ -192,7 +192,7 @@ describe('Store', () => { ...@@ -192,7 +192,7 @@ describe('Store', () => {
latest: { latest: {
id: 1, id: 1,
}, },
rollout_status_path: 'path', rollout_status: deployBoardMockData,
}; };
store.storeEnvironments([environment]); store.storeEnvironments([environment]);
...@@ -201,16 +201,10 @@ describe('Store', () => { ...@@ -201,16 +201,10 @@ describe('Store', () => {
it('should toggle deploy board property for given environment id', () => { it('should toggle deploy board property for given environment id', () => {
store.toggleDeployBoard(1); store.toggleDeployBoard(1);
expect(store.state.environments[0].isDeployBoardVisible).toEqual(true); expect(store.state.environments[0].isDeployBoardVisible).toEqual(false);
});
it('should store deploy board data for given environment id', () => {
store.storeDeployBoard(1, deployBoardMockData);
expect(store.state.environments[0].deployBoardData).toEqual(deployBoardMockData);
}); });
it('should keep deploy board data when updating environments', () => { it('should keep deploy board data when updating environments', () => {
store.storeDeployBoard(1, deployBoardMockData);
expect(store.state.environments[0].deployBoardData).toEqual(deployBoardMockData); expect(store.state.environments[0].deployBoardData).toEqual(deployBoardMockData);
const environment = { const environment = {
...@@ -219,7 +213,7 @@ describe('Store', () => { ...@@ -219,7 +213,7 @@ describe('Store', () => {
latest: { latest: {
id: 1, id: 1,
}, },
rollout_status_path: 'path', rollout_status: deployBoardMockData,
}; };
store.storeEnvironments([environment]); store.storeEnvironments([environment]);
expect(store.state.environments[0].deployBoardData).toEqual(deployBoardMockData); expect(store.state.environments[0].deployBoardData).toEqual(deployBoardMockData);
...@@ -243,12 +237,12 @@ describe('Store', () => { ...@@ -243,12 +237,12 @@ describe('Store', () => {
latest: { latest: {
id: 1, id: 1,
}, },
rollout_status_path: 'path', rollout_status: deployBoardMockData,
}; };
store.storeEnvironments([environment]); store.storeEnvironments([environment]);
expect(store.getOpenDeployBoards().length).toEqual(0); expect(store.getOpenDeployBoards().length).toEqual(1);
}); });
}); });
}); });
...@@ -12,7 +12,7 @@ export const environmentsList = [ ...@@ -12,7 +12,7 @@ export const environmentsList = [
stop_path: '/root/review-app/environments/7/stop', stop_path: '/root/review-app/environments/7/stop',
created_at: '2017-01-31T10:53:46.894Z', created_at: '2017-01-31T10:53:46.894Z',
updated_at: '2017-01-31T10:53:46.894Z', updated_at: '2017-01-31T10:53:46.894Z',
rollout_status_path: '/path', rollout_status: {},
}, },
{ {
folderName: 'build', folderName: 'build',
...@@ -28,7 +28,7 @@ export const environmentsList = [ ...@@ -28,7 +28,7 @@ export const environmentsList = [
stop_path: '/root/review-app/environments/12/stop', stop_path: '/root/review-app/environments/12/stop',
created_at: '2017-02-01T19:42:18.400Z', created_at: '2017-02-01T19:42:18.400Z',
updated_at: '2017-02-01T19:42:18.400Z', updated_at: '2017-02-01T19:42:18.400Z',
rollout_status_path: '/path', rollout_status: {},
}, },
]; ];
...@@ -48,7 +48,7 @@ export const serverData = [ ...@@ -48,7 +48,7 @@ export const serverData = [
stop_path: '/root/review-app/environments/7/stop', stop_path: '/root/review-app/environments/7/stop',
created_at: '2017-01-31T10:53:46.894Z', created_at: '2017-01-31T10:53:46.894Z',
updated_at: '2017-01-31T10:53:46.894Z', updated_at: '2017-01-31T10:53:46.894Z',
rollout_status_path: '/path', rollout_status: {},
}, },
}, },
{ {
...@@ -100,7 +100,7 @@ export const environment = { ...@@ -100,7 +100,7 @@ export const environment = {
stop_path: '/root/review-app/environments/7/stop', stop_path: '/root/review-app/environments/7/stop',
created_at: '2017-01-31T10:53:46.894Z', created_at: '2017-01-31T10:53:46.894Z',
updated_at: '2017-01-31T10:53:46.894Z', updated_at: '2017-01-31T10:53:46.894Z',
rollout_status_path: '/path', rollout_status: {},
}; };
export const deployBoardMockData = { export const deployBoardMockData = {
...@@ -136,15 +136,7 @@ export const deployBoardMockData = { ...@@ -136,15 +136,7 @@ export const deployBoardMockData = {
abort_url: 'url', abort_url: 'url',
rollback_url: 'url', rollback_url: 'url',
completion: 100, completion: 100,
valid: true, status: 'found',
};
export const invalidDeployBoardMockData = {
instances: [],
abort_url: 'url',
rollback_url: 'url',
completion: 100,
valid: false,
}; };
export const folder = { export const folder = {
......
...@@ -31,14 +31,14 @@ describe Banzai::Renderer do ...@@ -31,14 +31,14 @@ describe Banzai::Renderer do
let(:object) { fake_object(fresh: false) } let(:object) { fake_object(fresh: false) }
it 'caches and returns the result' do it 'caches and returns the result' do
expect(object).to receive(:refresh_markdown_cache!).with(do_update: true) expect(object).to receive(:refresh_markdown_cache!)
is_expected.to eq('field_html') is_expected.to eq('field_html')
end end
it "skips database caching on a Geo secondary" do it "skips database caching on a GitLab read-only instance" do
allow(Gitlab::Geo).to receive(:secondary?).and_return(true) allow(Gitlab::Database).to receive(:read_only?).and_return(true)
expect(object).to receive(:refresh_markdown_cache!).with(do_update: false) expect(object).to receive(:refresh_markdown_cache!)
is_expected.to eq('field_html') is_expected.to eq('field_html')
end end
......
...@@ -65,7 +65,9 @@ describe Gitlab::Geo::DatabaseTasks do ...@@ -65,7 +65,9 @@ describe Gitlab::Geo::DatabaseTasks do
describe described_class::Migrate do describe described_class::Migrate do
describe '.up' do describe '.up' do
it 'requires ENV["VERSION"] to be set' do it 'requires ENV["VERSION"] to be set' do
expect { subject.up }.to raise_error(String) stub_env('VERSION', nil)
expect { subject.up }.to raise_error(/VERSION is required/)
end end
it 'calls ActiveRecord::Migrator.run' do it 'calls ActiveRecord::Migrator.run' do
...@@ -78,7 +80,9 @@ describe Gitlab::Geo::DatabaseTasks do ...@@ -78,7 +80,9 @@ describe Gitlab::Geo::DatabaseTasks do
describe '.down' do describe '.down' do
it 'requires ENV["VERSION"] to be set' do it 'requires ENV["VERSION"] to be set' do
expect { subject.down }.to raise_error(String) stub_env('VERSION', nil)
expect { subject.down }.to raise_error(/VERSION is required/)
end end
it 'calls ActiveRecord::Migrator.run' do it 'calls ActiveRecord::Migrator.run' do
......
...@@ -4,7 +4,7 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql do ...@@ -4,7 +4,7 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql do
include ::EE::GeoHelpers include ::EE::GeoHelpers
describe '#run!' do describe '#run!' do
set(:geo_node) { create(:geo_node) } set(:geo_node) { create(:geo_node, :primary) }
before do before do
stub_current_geo_node(geo_node) stub_current_geo_node(geo_node)
...@@ -30,7 +30,8 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql do ...@@ -30,7 +30,8 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql do
end end
context 'when replaying a repository created event' do context 'when replaying a repository created event' do
let(:repository_created_event) { create(:geo_repository_created_event) } let(:project) { create(:project) }
let(:repository_created_event) { create(:geo_repository_created_event, project: project) }
let(:event_log) { create(:geo_event_log, repository_created_event: repository_created_event) } let(:event_log) { create(:geo_event_log, repository_created_event: repository_created_event) }
let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) } let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
...@@ -47,7 +48,7 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql do ...@@ -47,7 +48,7 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql do
registry = Geo::ProjectRegistry.last registry = Geo::ProjectRegistry.last
expect(registry).to have_attributes(resync_repository: true, resync_wiki: true) expect(registry).to have_attributes(project_id: project.id, resync_repository: true, resync_wiki: true)
end end
it 'sets resync_wiki to false if wiki_path is nil' do it 'sets resync_wiki to false if wiki_path is nil' do
...@@ -57,14 +58,22 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql do ...@@ -57,14 +58,22 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql do
registry = Geo::ProjectRegistry.last registry = Geo::ProjectRegistry.last
expect(registry).to have_attributes(resync_repository: true, resync_wiki: false) expect(registry).to have_attributes(project_id: project.id, resync_repository: true, resync_wiki: false)
end
it 'performs Geo::ProjectSyncWorker' do
expect(Geo::ProjectSyncWorker).to receive(:perform_async)
.with(project.id, anything).once
subject.run!
end end
end end
context 'when replaying a repository updated event' do context 'when replaying a repository updated event' do
let(:event_log) { create(:geo_event_log, :updated_event) } let(:project) { create(:project) }
let(:repository_updated_event) { create(:geo_repository_updated_event, project: project) }
let(:event_log) { create(:geo_event_log, repository_updated_event: repository_updated_event) }
let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) } let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
let(:repository_updated_event) { event_log.repository_updated_event }
before do before do
allow(subject).to receive(:exit?).and_return(false, true) allow(subject).to receive(:exit?).and_return(false, true)
...@@ -91,6 +100,13 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql do ...@@ -91,6 +100,13 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql do
expect(registry.reload.resync_wiki).to be true expect(registry.reload.resync_wiki).to be true
end end
it 'performs Geo::ProjectSyncWorker' do
expect(Geo::ProjectSyncWorker).to receive(:perform_async)
.with(project.id, anything).once
subject.run!
end
end end
context 'when replaying a repository deleted event' do context 'when replaying a repository deleted event' do
...@@ -155,6 +171,7 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql do ...@@ -155,6 +171,7 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql do
before do before do
allow(subject).to receive(:exit?).and_return(false, true) allow(subject).to receive(:exit?).and_return(false, true)
allow(Geo::ProjectSyncWorker).to receive(:perform_async)
end end
it 'replays events for projects that belong to selected namespaces to replicate' do it 'replays events for projects that belong to selected namespaces to replicate' do
......
...@@ -110,7 +110,7 @@ describe Gitlab::Geo do ...@@ -110,7 +110,7 @@ describe Gitlab::Geo do
end end
end end
describe 'readonly?' do describe 'secondary?' do
context 'when current node is secondary' do context 'when current node is secondary' do
it 'returns true' do it 'returns true' do
stub_current_geo_node(secondary_node) stub_current_geo_node(secondary_node)
......
...@@ -744,11 +744,10 @@ describe Gitlab::GitAccess do ...@@ -744,11 +744,10 @@ describe Gitlab::GitAccess do
run_permission_checks(admin: matrix) run_permission_checks(admin: matrix)
end end
context "when in a secondary gitlab geo node" do context "when in a read-only GitLab instance" do
before do before do
create(:protected_branch, name: 'feature', project: project) create(:protected_branch, name: 'feature', project: project)
allow(Gitlab::Geo).to receive(:enabled?) { true } allow(Gitlab::Database).to receive(:read_only?) { true }
allow(Gitlab::Geo).to receive(:secondary?) { true }
end end
# Only check admin; if an admin can't do it, other roles can't either # Only check admin; if an admin can't do it, other roles can't either
...@@ -944,7 +943,7 @@ describe Gitlab::GitAccess do ...@@ -944,7 +943,7 @@ describe Gitlab::GitAccess do
end end
context 'when the repository is read only' do context 'when the repository is read only' do
let(:project) { create(:project, :repository, :readonly) } let(:project) { create(:project, :repository, :read_only) }
it 'denies push access' do it 'denies push access' do
project.add_master(user) project.add_master(user)
......
...@@ -25,15 +25,13 @@ describe Gitlab::GitAccessWiki do ...@@ -25,15 +25,13 @@ describe Gitlab::GitAccessWiki do
it { expect { subject }.not_to raise_error } it { expect { subject }.not_to raise_error }
context 'when in a secondary gitlab geo node' do context 'when in a read-only GitLab instance' do
before do before do
allow(Gitlab::Geo).to receive(:enabled?) { true } allow(Gitlab::Database).to receive(:read_only?) { true }
allow(Gitlab::Geo).to receive(:secondary?) { true }
allow(Gitlab::Geo).to receive(:license_allows?) { true }
end end
it 'does not give access to upload wiki code' do it 'does not give access to upload wiki code' do
expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "You can't push code to a secondary GitLab Geo node.") expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "You can't push code to a read-only GitLab instance.")
end end
end end
end end
......
...@@ -94,15 +94,33 @@ describe Gitlab::Kubernetes::RolloutStatus do ...@@ -94,15 +94,33 @@ describe Gitlab::Kubernetes::RolloutStatus do
end end
end end
describe '#valid?' do describe '#not_found?' do
context 'when the specs are passed' do context 'when the specs are passed' do
it { is_expected.to be_valid } it { is_expected.not_to be_not_found }
end end
context 'when no specs are passed' do context 'when list of specs is empty' do
let(:specs) { specs_none } let(:specs) { specs_none }
it { is_expected.not_to be_valid } it { is_expected.to be_not_found }
end end
end end
describe '#found?' do
context 'when the specs are passed' do
it { is_expected.to be_found }
end
context 'when list of specs is empty' do
let(:specs) { specs_none }
it { is_expected.not_to be_found }
end
end
describe '.loading' do
subject { described_class.loading }
it { is_expected.to be_loading }
end
end end
require 'spec_helper' require 'spec_helper'
describe Gitlab::Middleware::ReadonlyGeo do describe Gitlab::Middleware::ReadOnly do
include Rack::Test::Methods include Rack::Test::Methods
RSpec::Matchers.define :be_a_redirect do RSpec::Matchers.define :be_a_redirect do
...@@ -38,11 +38,11 @@ describe Gitlab::Middleware::ReadonlyGeo do ...@@ -38,11 +38,11 @@ describe Gitlab::Middleware::ReadonlyGeo do
let(:request) { Rack::MockRequest.new(rack_stack) } let(:request) { Rack::MockRequest.new(rack_stack) }
context 'normal requests to a secondary Gitlab Geo' do context 'normal requests to a read-only Gitlab instance' do
let(:fake_app) { lambda { |env| [200, { 'Content-Type' => 'text/plain' }, ['OK']] } } let(:fake_app) { lambda { |env| [200, { 'Content-Type' => 'text/plain' }, ['OK']] } }
before do before do
allow(Gitlab::Geo).to receive(:secondary?) { true } allow(Gitlab::Database).to receive(:read_only?) { true }
end end
it 'expects PATCH requests to be disallowed' do it 'expects PATCH requests to be disallowed' do
...@@ -98,13 +98,6 @@ describe Gitlab::Middleware::ReadonlyGeo do ...@@ -98,13 +98,6 @@ describe Gitlab::Middleware::ReadonlyGeo do
expect(subject).not_to disallow_request expect(subject).not_to disallow_request
end end
it 'expects a GET status request to be allowed' do
response = request.get("/api/#{API::API.version}/geo/status")
expect(response).not_to be_a_redirect
expect(subject).not_to disallow_request
end
it 'expects a POST LFS request to batch URL to be allowed' do it 'expects a POST LFS request to batch URL to be allowed' do
response = request.post('/root/rouge.git/info/lfs/objects/batch') response = request.post('/root/rouge.git/info/lfs/objects/batch')
...@@ -114,12 +107,12 @@ describe Gitlab::Middleware::ReadonlyGeo do ...@@ -114,12 +107,12 @@ describe Gitlab::Middleware::ReadonlyGeo do
end end
end end
context 'json requests to a secondary Geo node' do context 'json requests to a read-only GitLab instance' do
let(:fake_app) { lambda { |env| [200, { 'Content-Type' => 'application/json' }, ['OK']] } } let(:fake_app) { lambda { |env| [200, { 'Content-Type' => 'application/json' }, ['OK']] } }
let(:content_json) { { 'CONTENT_TYPE' => 'application/json' } } let(:content_json) { { 'CONTENT_TYPE' => 'application/json' } }
before do before do
allow(Gitlab::Geo).to receive(:secondary?) { true } allow(Gitlab::Database).to receive(:read_only?) { true }
end end
it 'expects PATCH requests to be disallowed' do it 'expects PATCH requests to be disallowed' do
......
...@@ -45,6 +45,7 @@ describe RspecFlaky::Listener, :aggregate_failures do ...@@ -45,6 +45,7 @@ describe RspecFlaky::Listener, :aggregate_failures do
# Stub these env variables otherwise specs don't behave the same on the CI # Stub these env variables otherwise specs don't behave the same on the CI
stub_env('CI_PROJECT_URL', nil) stub_env('CI_PROJECT_URL', nil)
stub_env('CI_JOB_ID', nil) stub_env('CI_JOB_ID', nil)
stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', nil)
end end
describe '#initialize' do describe '#initialize' do
......
...@@ -178,14 +178,13 @@ describe CacheMarkdownField do ...@@ -178,14 +178,13 @@ describe CacheMarkdownField do
end end
end end
describe '#refresh_markdown_cache!' do describe '#refresh_markdown_cache' do
before do before do
thing.foo = updated_markdown thing.foo = updated_markdown
end end
context 'do_update: false' do
it 'fills all html fields' do it 'fills all html fields' do
thing.refresh_markdown_cache! thing.refresh_markdown_cache
expect(thing.foo_html).to eq(updated_html) expect(thing.foo_html).to eq(updated_html)
expect(thing.foo_html_changed?).to be_truthy expect(thing.foo_html_changed?).to be_truthy
...@@ -195,20 +194,24 @@ describe CacheMarkdownField do ...@@ -195,20 +194,24 @@ describe CacheMarkdownField do
it 'does not save the result' do it 'does not save the result' do
expect(thing).not_to receive(:update_columns) expect(thing).not_to receive(:update_columns)
thing.refresh_markdown_cache! thing.refresh_markdown_cache
end end
it 'updates the markdown cache version' do it 'updates the markdown cache version' do
thing.cached_markdown_version = nil thing.cached_markdown_version = nil
thing.refresh_markdown_cache! thing.refresh_markdown_cache
expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION) expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION)
end end
end end
context 'do_update: true' do describe '#refresh_markdown_cache!' do
before do
thing.foo = updated_markdown
end
it 'fills all html fields' do it 'fills all html fields' do
thing.refresh_markdown_cache!(do_update: true) thing.refresh_markdown_cache!
expect(thing.foo_html).to eq(updated_html) expect(thing.foo_html).to eq(updated_html)
expect(thing.foo_html_changed?).to be_truthy expect(thing.foo_html_changed?).to be_truthy
...@@ -219,7 +222,7 @@ describe CacheMarkdownField do ...@@ -219,7 +222,7 @@ describe CacheMarkdownField do
expect(thing).to receive(:persisted?).and_return(false) expect(thing).to receive(:persisted?).and_return(false)
expect(thing).not_to receive(:update_columns) expect(thing).not_to receive(:update_columns)
thing.refresh_markdown_cache!(do_update: true) thing.refresh_markdown_cache!
end end
it 'saves the changes using #update_columns' do it 'saves the changes using #update_columns' do
...@@ -227,8 +230,7 @@ describe CacheMarkdownField do ...@@ -227,8 +230,7 @@ describe CacheMarkdownField do
expect(thing).to receive(:update_columns) expect(thing).to receive(:update_columns)
.with("foo_html" => updated_html, "baz_html" => "", "cached_markdown_version" => CacheMarkdownField::CACHE_VERSION) .with("foo_html" => updated_html, "baz_html" => "", "cached_markdown_version" => CacheMarkdownField::CACHE_VERSION)
thing.refresh_markdown_cache!(do_update: true) thing.refresh_markdown_cache!
end
end end
end end
......
...@@ -115,6 +115,13 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do ...@@ -115,6 +115,13 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
go! go!
end end
it "calls a reactive_cache_updated only once if content did not change on subsequent update" do
expect(instance).to receive(:calculate_reactive_cache).twice
expect(instance).to receive(:reactive_cache_updated).once
2.times { instance.exclusively_update_reactive_cache! }
end
context 'and #calculate_reactive_cache raises an exception' do context 'and #calculate_reactive_cache raises an exception' do
before do before do
stub_reactive_cache(instance, "preexisting") stub_reactive_cache(instance, "preexisting")
......
...@@ -12,10 +12,10 @@ describe Group, 'Routable' do ...@@ -12,10 +12,10 @@ describe Group, 'Routable' do
it { is_expected.to have_many(:redirect_routes).dependent(:destroy) } it { is_expected.to have_many(:redirect_routes).dependent(:destroy) }
end end
describe 'Geo secondary' do describe 'GitLab read-only instance' do
it 'does not save route if route is not present' do it 'does not save route if route is not present' do
group.route.path = '' group.route.path = ''
allow(Gitlab::Geo).to receive(:secondary?).and_return(true) allow(Gitlab::Database).to receive(:read_only?).and_return(true)
expect(group).to receive(:update_route).and_call_original expect(group).to receive(:update_route).and_call_original
expect { group.full_path }.to change { Route.count }.by(0) expect { group.full_path }.to change { Route.count }.by(0)
......
...@@ -385,6 +385,7 @@ describe Environment do ...@@ -385,6 +385,7 @@ describe Environment do
describe '#rollout_status' do describe '#rollout_status' do
let(:project) { create(:kubernetes_project) } let(:project) { create(:kubernetes_project) }
subject { environment.rollout_status } subject { environment.rollout_status }
context 'when the environment has rollout status' do context 'when the environment has rollout status' do
......
...@@ -824,8 +824,8 @@ describe Project do ...@@ -824,8 +824,8 @@ describe Project do
end.to change { project.has_external_issue_tracker}.to(false) end.to change { project.has_external_issue_tracker}.to(false)
end end
it 'does not cache data when in a secondary gitlab geo node' do it 'does not cache data when in a read-only GitLab instance' do
allow(Gitlab::Geo).to receive(:secondary?) { true } allow(Gitlab::Database).to receive(:read_only?) { true }
expect do expect do
project.cache_has_external_issue_tracker project.cache_has_external_issue_tracker
...@@ -854,8 +854,8 @@ describe Project do ...@@ -854,8 +854,8 @@ describe Project do
end.to change { project.has_external_wiki}.to(false) end.to change { project.has_external_wiki}.to(false)
end end
it 'does not cache data when in a secondary gitlab geo node' do it 'does not cache data when in a read-only GitLab instance' do
allow(Gitlab::Geo).to receive(:secondary?) { true } allow(Gitlab::Database).to receive(:read_only?) { true }
expect do expect do
project.cache_has_external_wiki project.cache_has_external_wiki
...@@ -2930,7 +2930,7 @@ describe Project do ...@@ -2930,7 +2930,7 @@ describe Project do
expect(project.migrate_to_hashed_storage!).to be_truthy expect(project.migrate_to_hashed_storage!).to be_truthy
end end
it 'flags as readonly' do it 'flags as read-only' do
expect { project.migrate_to_hashed_storage! }.to change { project.repository_read_only }.to(true) expect { project.migrate_to_hashed_storage! }.to change { project.repository_read_only }.to(true)
end end
...@@ -3057,7 +3057,7 @@ describe Project do ...@@ -3057,7 +3057,7 @@ describe Project do
expect(project.migrate_to_hashed_storage!).to be_nil expect(project.migrate_to_hashed_storage!).to be_nil
end end
it 'does not flag as readonly' do it 'does not flag as read-only' do
expect { project.migrate_to_hashed_storage! }.not_to change { project.repository_read_only } expect { project.migrate_to_hashed_storage! }.not_to change { project.repository_read_only }
end end
end end
......
...@@ -877,8 +877,7 @@ describe 'Git LFS API and storage' do ...@@ -877,8 +877,7 @@ describe 'Git LFS API and storage' do
end end
end end
describe 'when handling lfs batch request on a secondary Geo node' do describe 'when handling lfs batch request on a read-only GitLab instance' do
let!(:primary) { create(:geo_node, :primary) }
let(:authorization) { authorize_user } let(:authorization) { authorize_user }
let(:project) { create(:project) } let(:project) { create(:project) }
let(:path) { "#{project.http_url_to_repo}/info/lfs/objects/batch" } let(:path) { "#{project.http_url_to_repo}/info/lfs/objects/batch" }
...@@ -887,7 +886,7 @@ describe 'Git LFS API and storage' do ...@@ -887,7 +886,7 @@ describe 'Git LFS API and storage' do
end end
before do before do
allow(Gitlab::Geo).to receive(:secondary?) { true } allow(Gitlab::Database).to receive(:read_only?) { true }
project.team << [user, :master] project.team << [user, :master]
enable_lfs enable_lfs
end end
...@@ -902,7 +901,7 @@ describe 'Git LFS API and storage' do ...@@ -902,7 +901,7 @@ describe 'Git LFS API and storage' do
post_lfs_json path, body.merge('operation' => 'upload'), headers post_lfs_json path, body.merge('operation' => 'upload'), headers
expect(response).to have_gitlab_http_status(403) expect(response).to have_gitlab_http_status(403)
expect(json_response).to include('message' => "You cannot write to a secondary GitLab Geo instance. Please use #{project.http_url_to_repo} instead.") expect(json_response).to include('message' => 'You cannot write to this read-only GitLab instance.')
end end
end end
......
require 'spec_helper' require 'spec_helper'
describe EnvironmentEntity do describe EnvironmentEntity do
include KubernetesHelpers
let(:user) { create(:user) } let(:user) { create(:user) }
let(:environment) { create(:environment) } let(:environment) { create(:environment) }
...@@ -50,12 +52,11 @@ describe EnvironmentEntity do ...@@ -50,12 +52,11 @@ describe EnvironmentEntity do
before do before do
stub_licensed_features(deploy_board: true) stub_licensed_features(deploy_board: true)
allow(environment).to receive(:deployment_service_ready?).and_return(true) allow(environment).to receive(:deployment_service_ready?).and_return(true)
allow(environment).to receive(:rollout_status).and_return(kube_deployment_rollout_status)
end end
it 'exposes rollout_status_path' do it 'exposes rollout_status' do
expected = '/' + [environment.project.full_path, 'environments', environment.id, 'status.json'].join('/') expect(subject).to include(:rollout_status)
expect(subject[:rollout_status_path]).to eq(expected)
end end
end end
...@@ -65,7 +66,7 @@ describe EnvironmentEntity do ...@@ -65,7 +66,7 @@ describe EnvironmentEntity do
allow(environment).to receive(:deployment_service_ready?).and_return(true) allow(environment).to receive(:deployment_service_ready?).and_return(true)
end end
it 'does not expose rollout_status_path' do it 'does not expose rollout_status' do
expect(subject[:rollout_status_path]).to be_blank expect(subject[:rollout_status_path]).to be_blank
end end
end end
......
...@@ -54,7 +54,7 @@ describe EnvironmentSerializer do ...@@ -54,7 +54,7 @@ describe EnvironmentSerializer do
context 'when representing environments within folders' do context 'when representing environments within folders' do
let(:serializer) do let(:serializer) do
described_class.new(project: project).within_folders described_class.new(current_user: user, project: project).within_folders
end end
let(:resource) { Environment.all } let(:resource) { Environment.all }
...@@ -123,7 +123,7 @@ describe EnvironmentSerializer do ...@@ -123,7 +123,7 @@ describe EnvironmentSerializer do
let(:pagination) { { page: 1, per_page: 2 } } let(:pagination) { { page: 1, per_page: 2 } }
let(:serializer) do let(:serializer) do
described_class.new(project: project) described_class.new(current_user: user, project: project)
.with_pagination(request, response) .with_pagination(request, response)
end end
...@@ -169,7 +169,7 @@ describe EnvironmentSerializer do ...@@ -169,7 +169,7 @@ describe EnvironmentSerializer do
context 'when grouping environments within folders' do context 'when grouping environments within folders' do
let(:serializer) do let(:serializer) do
described_class.new(project: project) described_class.new(current_user: user, project: project)
.with_pagination(request, response) .with_pagination(request, response)
.within_folders .within_folders
end end
......
...@@ -7,11 +7,29 @@ describe RolloutStatusEntity do ...@@ -7,11 +7,29 @@ describe RolloutStatusEntity do
described_class.new(rollout_status, request: double) described_class.new(rollout_status, request: double)
end end
let(:rollout_status) { ::Gitlab::Kubernetes::RolloutStatus.from_specs(kube_deployment) }
subject { entity.as_json } subject { entity.as_json }
it { is_expected.to have_key(:instances) } context 'when kube deployment is valid' do
it { is_expected.to have_key(:completion) } let(:rollout_status) { kube_deployment_rollout_status }
it { is_expected.to have_key(:is_completed) }
it { is_expected.to have_key(:valid) } it "exposes status" do
is_expected.to include(:status)
end
it "exposes deployment data" do
is_expected.to include(:instances, :completion, :is_completed)
end
end
context 'when kube deployment is empty' do
let(:rollout_status) { empty_deployment_rollout_status }
it "exposes status" do
is_expected.to include(:status)
end
it "does not expose deployment data" do
is_expected.not_to include(:instances, :completion, :is_completed)
end
end
end end
...@@ -20,7 +20,7 @@ describe Projects::HashedStorageMigrationService do ...@@ -20,7 +20,7 @@ describe Projects::HashedStorageMigrationService do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_truthy expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_truthy
end end
it 'updates project to be hashed and not readonly' do it 'updates project to be hashed and not read-only' do
service.execute service.execute
expect(project.hashed_storage?).to be_truthy expect(project.hashed_storage?).to be_truthy
......
...@@ -39,9 +39,9 @@ describe Users::ActivityService do ...@@ -39,9 +39,9 @@ describe Users::ActivityService do
end end
end end
context 'when in Geo secondary node' do context 'when in GitLab read-only instance' do
before do before do
allow(Gitlab::Geo).to receive(:secondary?).and_return(true) allow(Gitlab::Database).to receive(:read_only?).and_return(true)
end end
it 'does not update last_activity_at' do it 'does not update last_activity_at' do
......
...@@ -122,4 +122,12 @@ module KubernetesHelpers ...@@ -122,4 +122,12 @@ module KubernetesHelpers
terminal terminal
end end
end end
def kube_deployment_rollout_status
::Gitlab::Kubernetes::RolloutStatus.from_specs(kube_deployment)
end
def empty_deployment_rollout_status
::Gitlab::Kubernetes::RolloutStatus.from_specs()
end
end end
...@@ -12,6 +12,28 @@ describe RepositoryForkWorker do ...@@ -12,6 +12,28 @@ describe RepositoryForkWorker do
end end
describe "#perform" do describe "#perform" do
describe 'when a worker was reset without cleanup' do
let(:jid) { '12345678' }
let(:started_project) { create(:project, :repository, :import_started) }
it 'creates a new repository from a fork' do
allow(subject).to receive(:jid).and_return(jid)
expect(shell).to receive(:fork_repository).with(
'/test/path',
project.full_path,
project.repository_storage_path,
fork_project.namespace.full_path
).and_return(true)
subject.perform(
project.id,
'/test/path',
project.full_path,
fork_project.namespace.full_path)
end
end
it "creates a new repository from a fork" do it "creates a new repository from a fork" do
expect(shell).to receive(:fork_repository).with( expect(shell).to receive(:fork_repository).with(
'/test/path', '/test/path',
......
...@@ -6,6 +6,23 @@ describe RepositoryImportWorker do ...@@ -6,6 +6,23 @@ describe RepositoryImportWorker do
subject { described_class.new } subject { described_class.new }
describe '#perform' do describe '#perform' do
context 'when worker was reset without cleanup' do
let(:jid) { '12345678' }
let(:started_project) { create(:project, :import_started, import_jid: jid) }
it 'imports the project successfully' do
allow(subject).to receive(:jid).and_return(jid)
expect_any_instance_of(Projects::ImportService).to receive(:execute)
.and_return({ status: :ok })
expect_any_instance_of(Repository).to receive(:expire_emptiness_caches)
expect_any_instance_of(Project).to receive(:import_finish)
subject.perform(project.id)
end
end
context 'when the import was successful' do context 'when the import was successful' do
it 'imports a project' do it 'imports a project' do
expect_any_instance_of(Projects::ImportService).to receive(:execute) expect_any_instance_of(Projects::ImportService).to receive(:execute)
......
...@@ -2,10 +2,12 @@ require 'rails_helper' ...@@ -2,10 +2,12 @@ require 'rails_helper'
describe RepositoryUpdateMirrorWorker do describe RepositoryUpdateMirrorWorker do
describe '#perform' do describe '#perform' do
let(:jid) { '12345678' }
let!(:project) { create(:project, :mirror, :import_scheduled) } let!(:project) { create(:project, :mirror, :import_scheduled) }
before do before do
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(true) allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(true)
allow(subject).to receive(:jid).and_return(jid)
end end
it 'sets status as finished when update mirror service executes successfully' do it 'sets status as finished when update mirror service executes successfully' do
...@@ -36,16 +38,22 @@ describe RepositoryUpdateMirrorWorker do ...@@ -36,16 +38,22 @@ describe RepositoryUpdateMirrorWorker do
expect(project.reload.import_status).to eq('failed') expect(project.reload.import_status).to eq('failed')
end end
context 'when worker was reset without cleanup' do
let(:started_project) { create(:project, :mirror, :import_started, import_jid: jid) }
it 'sets status as finished when update mirror service executes successfully' do
expect_any_instance_of(Projects::UpdateMirrorService).to receive(:execute).and_return(status: :success)
expect { subject.perform(started_project.id) }.to change { started_project.reload.import_status }.to('finished')
end
end
context 'reschedule mirrors' do context 'reschedule mirrors' do
before do before do
allow_any_instance_of(Projects::UpdateMirrorService).to receive(:execute).and_return(status: :success) allow_any_instance_of(Projects::UpdateMirrorService).to receive(:execute).and_return(status: :success)
end end
context 'when we obtain the lease' do context 'when we obtain the lease' do
before do
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(true)
end
it 'performs UpdateAllMirrorsWorker when reschedule_immediately? returns true' do it 'performs UpdateAllMirrorsWorker when reschedule_immediately? returns true' do
allow(Gitlab::Mirror).to receive(:reschedule_immediately?).and_return(true) allow(Gitlab::Mirror).to receive(:reschedule_immediately?).and_return(true)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment