Commit e98aadf1 authored by Achilleas Pipinellis's avatar Achilleas Pipinellis

Merge branch 'ce-to-ee-2018-10-30' into 'master'

CE upstream - 2018-10-30 14:22 UTC

Closes gitlab-ce#51128, gitlab-ce#51142, gitlab-ce#53072, gitlab-ce#53153, gitlab-ce#53317, gitlab-org/quality/staging#14, and #6067

See merge request gitlab-org/gitlab-ee!8151
parents be38783e 127807b7
@@ -153,13 +153,9 @@ export default {
     },
     setDiscussions() {
       if (this.isNotesFetched && !this.assignedDiscussions && !this.isLoading) {
-        requestIdleCallback(
-          () =>
-            this.assignDiscussionsToDiff().then(() => {
-              this.assignedDiscussions = true;
-            }),
-          { timeout: 1000 },
-        );
+        this.assignedDiscussions = true;
+
+        requestIdleCallback(() => this.assignDiscussionsToDiff(), { timeout: 1000 });
       }
     },
     adjustView() {
...
@@ -133,7 +133,7 @@ export default {
       },
       right: {
         ...line.right,
-        discussions: right ? line.right.discussions.concat(discussion) : [],
+        discussions: right && !left ? line.right.discussions.concat(discussion) : [],
       },
     };
   }
...
@@ -22,9 +22,7 @@ export default {
     return { currentValue: this.defaultValue };
   },
   computed: {
-    ...mapGetters([
-      'getNotesDataByProp',
-    ]),
+    ...mapGetters(['getNotesDataByProp']),
     currentFilter() {
       if (!this.currentValue) return this.filters[0];
       return this.filters.find(filter => filter.value === this.currentValue);
@@ -51,7 +49,7 @@ export default {
     <button
       id="discussion-filter-dropdown"
       ref="dropdownToggle"
-      class="btn btn-default"
+      class="btn btn-default qa-discussion-filter"
       data-toggle="dropdown"
       aria-expanded="false"
     >
@@ -69,6 +67,7 @@ export default {
       >
        <button
          :class="{ 'is-active': filter.value === currentValue }"
+          class="qa-filter-options"
          type="button"
          @click="selectFilter(filter.value)"
        >
...
 <script>
 import { s__, sprintf } from '~/locale';
-import { formatTime } from '~/lib/utils/datetime_utility';
 import eventHub from '../event_hub';
 import icon from '../../vue_shared/components/icon.vue';
 import tooltip from '../../vue_shared/directives/tooltip';
+import GlCountdown from '~/vue_shared/components/gl_countdown.vue';

 export default {
   directives: {
@@ -11,6 +11,7 @@ export default {
   },
   components: {
     icon,
+    GlCountdown,
   },
   props: {
     actions: {
@@ -51,11 +52,6 @@ export default {
       return !action.playable;
     },
-    remainingTime(action) {
-      const remainingMilliseconds = new Date(action.scheduled_at).getTime() - Date.now();
-
-      return formatTime(Math.max(0, remainingMilliseconds));
-    },
   },
 };
 </script>
@@ -100,7 +96,7 @@ export default {
           class="pull-right"
         >
           <icon name="clock" />
-          {{ remainingTime(action) }}
+          <gl-countdown :end-date-string="action.scheduled_at" />
         </span>
       </button>
     </li>
...
@@ -10,6 +10,7 @@ import { visitUrl } from '../../lib/utils/url_utility';
 import createFlash from '../../flash';
 import MemoryUsage from './memory_usage.vue';
 import StatusIcon from './mr_widget_status_icon.vue';
+import ReviewAppLink from './review_app_link.vue';
 import MRWidgetService from '../services/mr_widget_service';

 export default {
@@ -21,6 +22,7 @@ export default {
     Icon,
     TooltipOnTruncate,
     FilteredSearchDropdown,
+    ReviewAppLink,
   },
   directives: {
     tooltip,
@@ -63,6 +65,12 @@ export default {
     deployedText() {
       return this.$options.deployedTextMap[this.deployment.status];
     },
+    shouldRenderDropdown() {
+      return (
+        this.enableCiEnvironmentsStatusChanges &&
+        (this.deployment.changes && this.deployment.changes.length > 0)
+      );
+    },
   },
   methods: {
     stopEnvironment() {
@@ -133,7 +141,7 @@ export default {
     <div>
       <template v-if="hasExternalUrls">
         <filtered-search-dropdown
-          v-if="enableCiEnvironmentsStatusChanges"
+          v-if="shouldRenderDropdown"
          class="js-mr-wigdet-deployment-dropdown inline"
          :items="deployment.changes"
          :main-action-link="deployment.external_url"
@@ -143,18 +151,10 @@ export default {
            slot="mainAction"
            slot-scope="slotProps"
          >
-            <a
-              :href="deployment.external_url"
-              target="_blank"
-              rel="noopener noreferrer nofollow"
-              class="deploy-link js-deploy-url inline"
-              :class="slotProps.className"
-            >
-              <span>
-                {{ __('View app') }}
-                <icon name="external-link" />
-              </span>
-            </a>
+            <review-app-link
+              :link="deployment.external_url"
+              :css-class="`deploy-link js-deploy-url inline ${slotProps.className}`"
+            />
          </template>
          <template
@@ -177,18 +177,11 @@ export default {
            </a>
          </template>
        </filtered-search-dropdown>
-        <a
+        <review-app-link
          v-else
-          :href="deployment.external_url"
-          target="_blank"
-          rel="noopener noreferrer nofollow"
-          class="js-deploy-url js-deploy-url-feature-flag deploy-link btn btn-default btn-sm inline"
-        >
-          <span>
-            {{ __('View app') }}
-            <icon name="external-link" />
-          </span>
-        </a>
+          :link="deployment.external_url"
+          css-class="js-deploy-url js-deploy-url-feature-flag deploy-link btn btn-default btn-sm inline"
+        />
       </template>
       <loading-button
         v-if="deployment.stop_url"
...
<script>
import Icon from '~/vue_shared/components/icon.vue';
export default {
components: {
Icon,
},
props: {
link: {
type: String,
required: true,
},
cssClass: {
type: String,
required: true,
},
},
};
</script>
<template>
<a
:href="link"
target="_blank"
rel="noopener noreferrer nofollow"
:class="cssClass"
>
{{ __('View app') }}
<icon name="external-link" />
</a>
</template>
@@ -74,49 +74,7 @@
   }

   .controllers {
-    display: flex;
-    justify-content: center;
-    align-items: center;
-
-    svg {
-      height: 15px;
-      display: block;
-      fill: $gl-text-color;
-    }
-
-    .controllers-buttons {
-      color: $gl-text-color;
-      margin: 0 $grid-size;
-
-      &:last-child {
-        margin-right: 0;
-      }
-    }
-
-    .btn-scroll.animate {
-      .first-triangle {
-        animation: blinking-scroll-button 1s ease infinite;
-        animation-delay: 0.3s;
-      }
-
-      .second-triangle {
-        animation: blinking-scroll-button 1s ease infinite;
-        animation-delay: 0.2s;
-      }
-
-      .third-triangle {
-        animation: blinking-scroll-button 1s ease infinite;
-      }
-
-      &:disabled {
-        opacity: 1;
-      }
-    }
-
-    .btn-scroll:disabled {
-      opacity: 0.35;
-      cursor: not-allowed;
-    }
+    @include build-controllers(15px, center, false, 0);
   }
 }
...
@@ -46,6 +46,8 @@ class ApplicationController < ActionController::Base
     :git_import_enabled?, :gitlab_project_import_enabled?,
     :manifest_import_enabled?

+  DEFAULT_GITLAB_CACHE_CONTROL = "#{ActionDispatch::Http::Cache::Response::DEFAULT_CACHE_CONTROL}, no-store".freeze
+
   rescue_from Encoding::CompatibilityError do |exception|
     log_exception(exception)
     render "errors/encoding", layout: "errors", status: 500
@@ -252,6 +254,13 @@ class ApplicationController < ActionController::Base
     headers['X-XSS-Protection'] = '1; mode=block'
     headers['X-UA-Compatible'] = 'IE=edge'
     headers['X-Content-Type-Options'] = 'nosniff'
+
+    if current_user
+      # Adds `no-store` to the DEFAULT_CACHE_CONTROL, to prevent security
+      # concerns due to caching private data.
+      headers['Cache-Control'] = DEFAULT_GITLAB_CACHE_CONTROL
+      headers["Pragma"] = "no-cache" # HTTP 1.0 compatibility
+    end
   end

   def validate_user_service_ticket!
...
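For orientation, a minimal sketch of what the new constant evaluates to, assuming ActionDispatch's default Cache-Control of "max-age=0, private, must-revalidate" (the value exercised by the controller spec further down in this diff):

# Sketch only: shows the header value signed-in responses now receive.
rails_default = "max-age=0, private, must-revalidate" # assumed ActionDispatch default
DEFAULT_GITLAB_CACHE_CONTROL = "#{rails_default}, no-store".freeze

DEFAULT_GITLAB_CACHE_CONTROL # => "max-age=0, private, must-revalidate, no-store"
# Pragma: no-cache is also set for HTTP 1.0 caches.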
@@ -3,7 +3,7 @@
 class PersonalAccessTokensFinder
   attr_accessor :params

-  delegate :build, :find, :find_by, to: :execute
+  delegate :build, :find, :find_by, :find_by_token, to: :execute

   def initialize(params = {})
     @params = params
...
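A hedged sketch of how callers elsewhere in this change set use the newly delegated method; the token value below is a placeholder:

# Sketch: find_by_token goes through the digest-aware lookup defined by
# TokenAuthenticatable instead of a plain find_by(token: ...).
finder = PersonalAccessTokensFinder.new(state: 'active')
token  = finder.find_by_token('example-token-value') # hypothetical value
user   = token&.user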
@@ -5,57 +5,50 @@ module TokenAuthenticatable
   private

-  def write_new_token(token_field)
-    new_token = generate_available_token(token_field)
-    write_attribute(token_field, new_token)
-  end
-
-  def generate_available_token(token_field)
-    loop do
-      token = generate_token(token_field)
-      break token unless self.class.unscoped.find_by(token_field => token)
-    end
-  end
-
-  def generate_token(token_field)
-    Devise.friendly_token
-  end
-
   class_methods do
-    def authentication_token_fields
-      @token_fields || []
-    end
-
     private # rubocop:disable Lint/UselessAccessModifier

-    def add_authentication_token_field(token_field)
+    def add_authentication_token_field(token_field, options = {})
       @token_fields = [] unless @token_fields
+
+      if @token_fields.include?(token_field)
+        raise ArgumentError.new("#{token_field} already configured via add_authentication_token_field")
+      end
+
       @token_fields << token_field

+      attr_accessor :cleartext_tokens
+
+      strategy = if options[:digest]
+                   TokenAuthenticatableStrategies::Digest.new(self, token_field, options)
+                 else
+                   TokenAuthenticatableStrategies::Insecure.new(self, token_field, options)
+                 end
+
       define_singleton_method("find_by_#{token_field}") do |token|
-        find_by(token_field => token) if token
+        strategy.find_token_authenticatable(token)
       end

-      define_method("ensure_#{token_field}") do
-        current_token = read_attribute(token_field)
-        current_token.blank? ? write_new_token(token_field) : current_token
+      define_method(token_field) do
+        strategy.get_token(self)
       end

       define_method("set_#{token_field}") do |token|
-        write_attribute(token_field, token) if token
+        strategy.set_token(self, token)
+      end
+
+      define_method("ensure_#{token_field}") do
+        strategy.ensure_token(self)
       end

       # Returns a token, but only saves when the database is in read & write mode
       define_method("ensure_#{token_field}!") do
-        send("reset_#{token_field}!") if read_attribute(token_field).blank? # rubocop:disable GitlabSecurity/PublicSend
-        read_attribute(token_field)
+        strategy.ensure_token!(self)
       end

       # Resets the token, but only saves when the database is in read & write mode
       define_method("reset_#{token_field}!") do
-        write_new_token(token_field)
-        save! if Gitlab::Database.read_write?
+        strategy.reset_token!(self)
       end
     end
   end
...
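As a hedged illustration of the new API (mirroring the PersonalAccessToken change later in this diff), a model now opts into hashed token storage as sketched below; ExampleToken is a hypothetical model:

# Sketch: a model with token and token_digest columns opting into the digest strategy.
class ExampleToken < ActiveRecord::Base # hypothetical model
  include TokenAuthenticatable

  add_authentication_token_field :token, digest: true, fallback: true
end

# The generated methods now delegate to a strategy object:
#   ExampleToken.find_by_token('value') # digest lookup, with plaintext fallback
#   record.ensure_token!                # persists only the SHA256 digest
#   record.reset_token!                 # rotates the token, saving when the DB is writable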
# frozen_string_literal: true
module TokenAuthenticatableStrategies
class Base
def initialize(klass, token_field, options)
@klass = klass
@token_field = token_field
@options = options
end
def find_token_authenticatable(instance, unscoped = false)
raise NotImplementedError
end
def get_token(instance)
raise NotImplementedError
end
def set_token(instance)
raise NotImplementedError
end
def ensure_token(instance)
write_new_token(instance) unless token_set?(instance)
end
# Returns a token, but only saves when the database is in read & write mode
def ensure_token!(instance)
reset_token!(instance) unless token_set?(instance)
get_token(instance)
end
# Resets the token, but only saves when the database is in read & write mode
def reset_token!(instance)
write_new_token(instance)
instance.save! if Gitlab::Database.read_write?
end
protected
def write_new_token(instance)
new_token = generate_available_token
set_token(instance, new_token)
end
def generate_available_token
loop do
token = generate_token
break token unless find_token_authenticatable(token, true)
end
end
def generate_token
@options[:token_generator] ? @options[:token_generator].call : Devise.friendly_token
end
def relation(unscoped)
unscoped ? @klass.unscoped : @klass
end
def token_set?(instance)
raise NotImplementedError
end
def token_field_name
@token_field
end
end
end
# frozen_string_literal: true
module TokenAuthenticatableStrategies
class Digest < Base
def find_token_authenticatable(token, unscoped = false)
return unless token
token_authenticatable = relation(unscoped).find_by(token_field_name => Gitlab::CryptoHelper.sha256(token))
if @options[:fallback]
token_authenticatable ||= fallback_strategy.find_token_authenticatable(token)
end
token_authenticatable
end
def get_token(instance)
token = instance.cleartext_tokens&.[](@token_field)
token ||= fallback_strategy.get_token(instance) if @options[:fallback]
token
end
def set_token(instance, token)
return unless token
instance.cleartext_tokens ||= {}
instance.cleartext_tokens[@token_field] = token
instance[token_field_name] = Gitlab::CryptoHelper.sha256(token)
instance[@token_field] = nil if @options[:fallback]
end
protected
def fallback_strategy
@fallback_strategy ||= TokenAuthenticatableStrategies::Insecure.new(@klass, @token_field, @options)
end
def token_set?(instance)
token_digest = instance.read_attribute(token_field_name)
token_digest ||= instance.read_attribute(@token_field) if @options[:fallback]
token_digest.present?
end
def token_field_name
"#{@token_field}_digest"
end
end
end
# frozen_string_literal: true
module TokenAuthenticatableStrategies
class Insecure < Base
def find_token_authenticatable(token, unscoped = false)
relation(unscoped).find_by(@token_field => token) if token
end
def get_token(instance)
instance.read_attribute(@token_field)
end
def set_token(instance, token)
instance[@token_field] = token if token
end
protected
def token_set?(instance)
instance.read_attribute(@token_field).present?
end
end
end
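A hedged sketch of how the digest strategy behaves at runtime, based on the strategies above; the record and values are illustrative only:

# Sketch: the cleartext value lives only in memory on the instance
# (cleartext_tokens); the database column stores the salted SHA256 digest.
pat = PersonalAccessToken.new(name: 'example', scopes: [:api]) # hypothetical record
pat.ensure_token
pat.token        # => cleartext token, readable while the object is in memory
pat.token_digest # => Gitlab::CryptoHelper.sha256(pat.token)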
@@ -8,7 +8,7 @@ class LfsObject < ActiveRecord::Base
   has_many :lfs_objects_projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
   has_many :projects, through: :lfs_objects_projects

-  scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
+  scope :with_files_stored_locally, -> { where(file_store: LfsObjectUploader::Store::LOCAL) }

   validates :oid, presence: true, uniqueness: true
@@ -27,7 +27,7 @@ class LfsObject < ActiveRecord::Base
   end

   def local_store?
-    [nil, LfsObjectUploader::Store::LOCAL].include?(self.file_store)
+    file_store == LfsObjectUploader::Store::LOCAL
   end

   # rubocop: disable DestroyAll
...
@@ -3,7 +3,7 @@
 class PersonalAccessToken < ActiveRecord::Base
   include Expirable
   include TokenAuthenticatable

-  add_authentication_token_field :token
+  add_authentication_token_field :token, digest: true, fallback: true

   REDIS_EXPIRY_TIME = 3.minutes
@@ -33,16 +33,22 @@ class PersonalAccessToken < ActiveRecord::Base
   def self.redis_getdel(user_id)
     Gitlab::Redis::SharedState.with do |redis|
-      token = redis.get(redis_shared_state_key(user_id))
+      encrypted_token = redis.get(redis_shared_state_key(user_id))
       redis.del(redis_shared_state_key(user_id))
-      token
+      begin
+        Gitlab::CryptoHelper.aes256_gcm_decrypt(encrypted_token)
+      rescue => ex
+        logger.warn "Failed to decrypt PersonalAccessToken value stored in Redis for User ##{user_id}: #{ex.class}"
+        encrypted_token
+      end
     end
   end

   def self.redis_store!(user_id, token)
+    encrypted_token = Gitlab::CryptoHelper.aes256_gcm_encrypt(token)
     Gitlab::Redis::SharedState.with do |redis|
-      redis.set(redis_shared_state_key(user_id), token, ex: REDIS_EXPIRY_TIME)
+      redis.set(redis_shared_state_key(user_id), encrypted_token, ex: REDIS_EXPIRY_TIME)

       token
     end
   end
...
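A hedged sketch of the new Redis round trip; the identifiers are placeholders:

# Sketch: the token is AES-256-GCM encrypted before being cached in Redis
# and decrypted on retrieval, falling back to the raw value (with a logged
# warning) if decryption fails.
PersonalAccessToken.redis_store!(user_id, 'new-token-value') # hypothetical user_id and token
PersonalAccessToken.redis_getdel(user_id)                    # => "new-token-value"; the key is deleted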
@@ -30,7 +30,7 @@ class User < ActiveRecord::Base
   ignore_column :email_provider
   ignore_column :authentication_token

-  add_authentication_token_field :incoming_email_token
+  add_authentication_token_field :incoming_email_token, token_generator: -> { SecureRandom.hex.to_i(16).to_s(36) }
   add_authentication_token_field :feed_token

   default_value_for :admin, false
@@ -476,7 +476,7 @@ class User < ActiveRecord::Base
   def find_by_personal_access_token(token_string)
     return unless token_string

-    PersonalAccessTokensFinder.new(state: 'active').find_by(token: token_string)&.user # rubocop: disable CodeReuse/Finder
+    PersonalAccessTokensFinder.new(state: 'active').find_by_token(token_string)&.user # rubocop: disable CodeReuse/Finder
   end

   # Returns a user for the given SSH key.
@@ -1486,15 +1486,6 @@ class User < ActiveRecord::Base
     end
   end

-  def generate_token(token_field)
-    if token_field == :incoming_email_token
-      # Needs to be all lowercase and alphanumeric because it's gonna be used in an email address.
-      SecureRandom.hex.to_i(16).to_s(36)
-    else
-      super
-    end
-  end
-
   def self.unique_internal(scope, username, email_pattern, &block)
     scope.first || create_unique_internal(scope, username, email_pattern, &block)
   end
...
@@ -110,16 +110,10 @@ class MergeRequestPresenter < Gitlab::View::Presenter::Delegated
     namespace = source_project_namespace
     branch = source_branch

-    if source_branch_exists?
-      namespace = link_to(namespace, project_path(source_project))
-      branch = link_to(branch, project_tree_path(source_project, source_branch))
-    end
+    namespace_link = source_branch_exists? ? link_to(namespace, project_path(source_project)) : ERB::Util.html_escape(namespace)
+    branch_link = source_branch_exists? ? link_to(branch, project_tree_path(source_project, source_branch)) : ERB::Util.html_escape(branch)

-    if for_fork?
-      namespace + ":" + branch
-    else
-      branch
-    end
+    for_fork? ? "#{namespace_link}:#{branch_link}" : branch_link
   end

   def closing_issues_links
...
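A hedged illustration of why escaping the branch and namespace names closes the XSS hole; this is standard ERB::Util behaviour:

# Sketch: when the source branch no longer exists, the raw name is escaped
# instead of being interpolated into the page as HTML.
ERB::Util.html_escape('<img src=x onerror=alert(1)>')
# => "&lt;img src=x onerror=alert(1)&gt;"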
@@ -87,7 +87,7 @@ module MergeRequests
         .where.not(target_project: @project).to_a

       filter_merge_requests(merge_requests).each do |merge_request|
-        if merge_request.source_branch == @push.branch_name || @push.force_push?
+        if branch_and_project_match?(merge_request) || @push.force_push?
           merge_request.reload_diff(current_user)
         else
           mr_commit_ids = merge_request.commit_shas
@@ -106,6 +106,11 @@ module MergeRequests
     end
     # rubocop: enable CodeReuse/ActiveRecord

+    def branch_and_project_match?(merge_request)
+      merge_request.source_project == @project &&
+        merge_request.source_branch == @push.branch_name
+    end
+
     def reset_merge_when_pipeline_succeeds
       merge_requests_for_source_branch.each(&:reset_merge_when_pipeline_succeeds)
     end
...
@@ -5,7 +5,7 @@
 - else
   - search_path_url = search_path

-%header.navbar.navbar-gitlab.qa-navbar.navbar-expand-sm.navbar-gitlab-new.js-navbar
+%header.navbar.navbar-gitlab.qa-navbar.navbar-expand-sm.js-navbar
   %a.sr-only.gl-accessibility{ href: "#content-body", tabindex: "1" } Skip to content
   .container-fluid
     .header-content
...
---
title: Fix XSS in merge request source branch name
merge_request:
author:
type: security
---
title: Only render the dropdown for review app changes when there is a list of changed
  files to show; otherwise render the regular review app button
merge_request:
author:
type: other
---
title: 'Fix #53298: JupyterHub restarts should work without errors'
merge_request: 22671
author: Amit Rathi
type: fixed
---
title: Escape entity title while autocomplete template rendering to prevent XSS
merge_request: 2556
author:
type: security
---
title: Persist only SHA digest of PersonalAccessToken#token
merge_request:
author:
type: security
---
title: Fix extra merge request versions created from forked merge requests
merge_request: 22611
author:
type: fixed
---
title: Validate Wiki attachments are valid temporary files
merge_request:
author:
type: security
---
title: Enhance performance of counting local LFS objects
merge_request: 22143
author:
type: performance
---
title: Add dynamic timer for delayed jobs in pipelines list
merge_request: 22621
author:
type: changed
# frozen_string_literal: true
class AddTokenDigestToPersonalAccessTokens < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
change_column :personal_access_tokens, :token, :string, null: true
add_column :personal_access_tokens, :token_digest, :string
end
def down
remove_column :personal_access_tokens, :token_digest
change_column :personal_access_tokens, :token, :string, null: false
end
end
# frozen_string_literal: true
class AddIndexToTokenDigestOnPersonalAccessTokens < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :personal_access_tokens, :token_digest, unique: true
end
def down
remove_concurrent_index :personal_access_tokens, :token_digest if index_exists?(:personal_access_tokens, :token_digest)
end
end
# frozen_string_literal: true
class AddIndexToLfsObjectsFileStore < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :lfs_objects, :file_store
end
def down
remove_concurrent_index :lfs_objects, :file_store
end
end
class ScheduleDigestPersonalAccessTokens < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 10_000
MIGRATION = 'DigestColumn'
DELAY_INTERVAL = 5.minutes.to_i
disable_ddl_transaction!
class PersonalAccessToken < ActiveRecord::Base
include EachBatch
self.table_name = 'personal_access_tokens'
end
def up
PersonalAccessToken.where('token is NOT NULL').each_batch(of: BATCH_SIZE) do |batch, index|
range = batch.pluck('MIN(id)', 'MAX(id)').first
BackgroundMigrationWorker.perform_in(index * DELAY_INTERVAL, MIGRATION, ['PersonalAccessToken', :token, :token_digest, *range])
end
end
def down
# raise ActiveRecord::IrreversibleMigration
end
end
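For reference, a hedged sketch of what each scheduled batch looks like; it matches the migration spec near the end of this diff, and the id range is illustrative:

# Sketch: every BATCH_SIZE rows with a plaintext token become one delayed
# DigestColumn background migration job.
BackgroundMigrationWorker.perform_in(
  5.minutes, 'DigestColumn', ['PersonalAccessToken', :token, :token_digest, 1, 10_000]
)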
@@ -1619,6 +1619,7 @@ ActiveRecord::Schema.define(version: 20181017131623) do
     t.integer "file_store"
   end

+  add_index "lfs_objects", ["file_store"], name: "index_lfs_objects_on_file_store", using: :btree
   add_index "lfs_objects", ["oid"], name: "index_lfs_objects_on_oid", unique: true, using: :btree

   create_table "lfs_objects_projects", force: :cascade do |t|
@@ -2104,7 +2105,7 @@ ActiveRecord::Schema.define(version: 20181017131623) do
   create_table "personal_access_tokens", force: :cascade do |t|
     t.integer "user_id", null: false
-    t.string "token", null: false
+    t.string "token"
     t.string "name", null: false
     t.boolean "revoked", default: false
     t.date "expires_at"
@@ -2112,9 +2113,11 @@ ActiveRecord::Schema.define(version: 20181017131623) do
     t.datetime "updated_at", null: false
     t.string "scopes", default: "--- []\n", null: false
     t.boolean "impersonation", default: false, null: false
+    t.string "token_digest"
   end

   add_index "personal_access_tokens", ["token"], name: "index_personal_access_tokens_on_token", unique: true, using: :btree
+  add_index "personal_access_tokens", ["token_digest"], name: "index_personal_access_tokens_on_token_digest", unique: true, using: :btree
   add_index "personal_access_tokens", ["user_id"], name: "index_personal_access_tokens_on_user_id", using: :btree

   create_table "plans", force: :cascade do |t|
...
@@ -37,9 +37,9 @@ Review Apps are automatically deployed by each pipeline, both in
   review app manually, and is also started by GitLab once a branch is deleted
 - [TBD] Review apps are cleaned up regularly using a pipeline schedule that runs
   the [`scripts/review_apps/automated_cleanup.rb`][automated_cleanup.rb] script
-- If you're unable to log in using the `root` username and password the
-  deployment may have failed. Stop the review app via the `stop_review`
-  manual job and then retry the `review` job to redeploy the review app.
+- If you're unable to log in using the `root` username and password, the
+  deployment may have failed. Stop the Review App via the `stop_review`
+  manual job and then retry the `review` job to redeploy the Review App.

 [^1]: We use the `CNG-mirror` project so that the `CNG`, (**C**loud **N**ative **G**itLab), project's registry is
   not overloaded with a lot of transient Docker images.
...
# frozen_string_literal: true
# This module overrides the Grape type validator defined in
# https://github.com/ruby-grape/grape/blob/master/lib/grape/validations/types/file.rb
module API
module Validations
module Types
class SafeFile < ::Grape::Validations::Types::File
def value_coerced?(value)
super && value[:tempfile].is_a?(Tempfile)
end
end
end
end
end
@@ -6,7 +6,7 @@ module API
       def commit_params(attrs)
         {
           file_name: attrs[:file][:filename],
-          file_content: File.read(attrs[:file][:tempfile]),
+          file_content: attrs[:file][:tempfile].read,
           branch_name: attrs[:branch]
         }
       end
@@ -100,7 +100,7 @@ module API
         success Entities::WikiAttachment
       end
       params do
-        requires :file, type: File, desc: 'The attachment file to be uploaded'
+        requires :file, type: ::API::Validations::Types::SafeFile, desc: 'The attachment file to be uploaded'
         optional :branch, type: String, desc: 'The name of the branch'
       end
       post ":id/wikis/attachments", requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
...
@@ -153,17 +153,15 @@ module Gitlab
       end
       # rubocop: enable CodeReuse/ActiveRecord

-      # rubocop: disable CodeReuse/ActiveRecord
       def personal_access_token_check(password)
         return unless password.present?

-        token = PersonalAccessTokensFinder.new(state: 'active').find_by(token: password)
+        token = PersonalAccessTokensFinder.new(state: 'active').find_by_token(password)

         if token && valid_scoped_token?(token, available_scopes)
           Gitlab::Auth::Result.new(token.user, nil, :personal_access_token, abilities_for_scopes(token.scopes))
         end
       end
-      # rubocop: enable CodeReuse/ActiveRecord

       def valid_oauth_token?(token)
         token && token.accessible? && valid_scoped_token?(token, [:api])
...
@@ -75,7 +75,6 @@ module Gitlab
         end
       end

-      # rubocop: disable CodeReuse/ActiveRecord
       def find_personal_access_token
         token =
           current_request.params[PRIVATE_TOKEN_PARAM].presence ||
@@ -84,9 +83,8 @@ module Gitlab
         return unless token

         # Expiration, revocation and scopes are verified in `validate_access_token!`
-        PersonalAccessToken.find_by(token: token) || raise(UnauthorizedError)
+        PersonalAccessToken.find_by_token(token) || raise(UnauthorizedError)
       end
-      # rubocop: enable CodeReuse/ActiveRecord

       def find_oauth_access_token
         token = Doorkeeper::OAuth::Token.from_request(current_request, *Doorkeeper.configuration.access_token_methods)
...
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class DigestColumn
class PersonalAccessToken < ActiveRecord::Base
self.table_name = 'personal_access_tokens'
end
def perform(model, attribute_from, attribute_to, start_id, stop_id)
model = model.constantize if model.is_a?(String)
model.transaction do
relation = model.where(id: start_id..stop_id).where.not(attribute_from => nil).lock
relation.each do |instance|
instance.update_columns(attribute_to => Gitlab::CryptoHelper.sha256(instance.read_attribute(attribute_from)),
attribute_from => nil)
end
end
end
end
end
end
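A hedged sketch of a single worker run, matching the arguments scheduled by the post-deployment migration above; the id range is illustrative:

# Sketch: hashes the plaintext column into the digest column and nulls the
# original value for one id range, inside a transaction.
Gitlab::BackgroundMigration::DigestColumn.new.perform(
  'PersonalAccessToken', :token, :token_digest, 1, 10_000
)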
@@ -14,10 +14,10 @@ module Gitlab
           test_case = create_test_case(test_case)
           test_suite.add_test_case(test_case)
         end
-      rescue REXML::ParseException => e
-        raise JunitParserError, "XML parsing failed: #{e.message}"
-      rescue => e
-        raise JunitParserError, "JUnit parsing failed: #{e.message}"
+      rescue REXML::ParseException
+        raise JunitParserError, "XML parsing failed"
+      rescue
+        raise JunitParserError, "JUnit parsing failed"
       end

       private
...
# frozen_string_literal: true
module Gitlab
module CryptoHelper
extend self
AES256_GCM_OPTIONS = {
algorithm: 'aes-256-gcm',
key: Settings.attr_encrypted_db_key_base_truncated,
iv: Settings.attr_encrypted_db_key_base_truncated[0..11]
}.freeze
def sha256(value)
salt = Settings.attr_encrypted_db_key_base_truncated
::Digest::SHA256.base64digest("#{value}#{salt}")
end
def aes256_gcm_encrypt(value)
encrypted_token = Encryptor.encrypt(AES256_GCM_OPTIONS.merge(value: value))
Base64.encode64(encrypted_token)
end
def aes256_gcm_decrypt(value)
return unless value
encrypted_token = Base64.decode64(value)
Encryptor.decrypt(AES256_GCM_OPTIONS.merge(value: encrypted_token))
end
end
end
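A hedged usage sketch of the helper, as it is used by the strategy and model changes in this merge:

# Sketch: a salted SHA256 digest for token lookup, plus an AES-256-GCM
# round trip for the value temporarily held in Redis.
digest     = Gitlab::CryptoHelper.sha256('token-01')             # stored in token_digest
ciphertext = Gitlab::CryptoHelper.aes256_gcm_encrypt('token-01')
Gitlab::CryptoHelper.aes256_gcm_decrypt(ciphertext)              # => "token-01"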
@@ -32,6 +32,7 @@ module Gitlab
         end

         validate_localhost!(addrs_info) unless allow_localhost
+        validate_loopback!(addrs_info) unless allow_localhost
         validate_local_network!(addrs_info) unless allow_local_network
         validate_link_local!(addrs_info) unless allow_local_network
@@ -86,6 +87,12 @@ module Gitlab
         raise BlockedUrlError, "Requests to localhost are not allowed"
       end

+      def validate_loopback!(addrs_info)
+        return unless addrs_info.any? { |addr| addr.ipv4_loopback? || addr.ipv6_loopback? }
+
+        raise BlockedUrlError, "Requests to loopback addresses are not allowed"
+      end
+
       def validate_local_network!(addrs_info)
         return unless addrs_info.any? { |addr| addr.ipv4_private? || addr.ipv6_sitelocal? }
...
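A hedged sketch of the resulting behaviour, mirroring the new url_blocker specs further down:

# Sketch: loopback addresses beyond 127.0.0.1 are now rejected unless
# allow_localhost is explicitly set.
Gitlab::UrlBlocker.blocked_url?('https://127.0.0.2/foo/foo.git')           # => true
Gitlab::UrlBlocker.blocked_url?('http://127.0.0.2', allow_localhost: true) # => false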
 require_relative '../../app/models/concerns/token_authenticatable.rb'
+require_relative '../../app/models/concerns/token_authenticatable_strategies/base.rb'
+require_relative '../../app/models/concerns/token_authenticatable_strategies/insecure.rb'
+require_relative '../../app/models/concerns/token_authenticatable_strategies/digest.rb'

 namespace :tokens do
   desc "Reset all GitLab incoming email tokens"
@@ -26,13 +29,6 @@ class TmpUser < ActiveRecord::Base
   self.table_name = 'users'

-  def reset_incoming_email_token!
-    write_new_token(:incoming_email_token)
-    save!(validate: false)
-  end
-
-  def reset_feed_token!
-    write_new_token(:feed_token)
-    save!(validate: false)
-  end
+  add_authentication_token_field :incoming_email_token, token_generator: -> { SecureRandom.hex.to_i(16).to_s(36) }
+  add_authentication_token_field :feed_token
 end
@@ -7,35 +7,42 @@ module QA
       class Show < Page::Base
         include Page::Component::Issuable::Common

-        view 'app/views/projects/issues/show.html.haml' do
-          element :issue_details, '.issue-details' # rubocop:disable QA/ElementWithPattern
-          element :title, '.title' # rubocop:disable QA/ElementWithPattern
-        end
-
         view 'app/views/shared/notes/_form.html.haml' do
           element :new_note_form, 'new-note' # rubocop:disable QA/ElementWithPattern
           element :new_note_form, 'attr: :note' # rubocop:disable QA/ElementWithPattern
         end

-        view 'app/views/shared/notes/_comment_button.html.haml' do
-          element :comment_button, '%strong Comment' # rubocop:disable QA/ElementWithPattern
+        view 'app/assets/javascripts/notes/components/comment_form.vue' do
+          element :comment_button
+          element :comment_input
         end

-        def issue_title
-          find('.issue-details .title').text
+        view 'app/assets/javascripts/notes/components/discussion_filter.vue' do
+          element :discussion_filter
+          element :filter_options
         end

         # Adds a comment to an issue
         # attachment option should be an absolute path
         def comment(text, attachment: nil)
-          fill_in(with: text, name: 'note[note]')
+          fill_element :comment_input, text

           unless attachment.nil?
             QA::Page::Component::Dropzone.new(self, '.new-note')
               .attach_file(attachment)
           end

-          click_on 'Comment'
+          click_element :comment_button
         end
+
+        def select_comments_only_filter
+          click_element :discussion_filter
+          all_elements(:filter_options).last.click
+        end
+
+        def select_all_activities_filter
+          click_element :discussion_filter
+          all_elements(:filter_options).first.click
+        end
       end
     end
...
# frozen_string_literal: true
module QA
context 'Plan' do
describe 'filter issue comments activities' do
let(:issue_title) { 'issue title' }
it 'user filters comments and activities in an issue' do
Runtime::Browser.visit(:gitlab, Page::Main::Login)
Page::Main::Login.act { sign_in_using_credentials }
Factory::Resource::Issue.fabricate! do |issue|
issue.title = issue_title
end
expect(page).to have_content(issue_title)
Page::Project::Issue::Show.perform do |show_page|
show_page.select_comments_only_filter
show_page.comment('/confidential')
show_page.comment('My own comment')
expect(show_page).not_to have_content("made the issue confidential")
expect(show_page).to have_content("My own comment")
show_page.select_all_activities_filter
expect(show_page).to have_content("made the issue confidential")
expect(show_page).to have_content("My own comment")
end
end
end
end
end
@@ -25,6 +25,7 @@ module QA
           push.file_content = "Test with unicode characters ❤✓€❄"
         end

+        Page::Project::Show.perform(&:wait_for_push)
         merge_request.visit!

         expect(page).to have_text('to be squashed')
...
@@ -792,4 +792,30 @@ describe ApplicationController do
       end
     end
   end
+
+  context 'control headers' do
+    controller(described_class) do
+      def index
+        render json: :ok
+      end
+    end
+
+    context 'user not logged in' do
+      it 'sets the default headers' do
+        get :index
+
+        expect(response.headers['Cache-Control']).to be_nil
+      end
+    end
+
+    context 'user logged in' do
+      it 'sets the default headers' do
+        sign_in(user)
+
+        get :index
+
+        expect(response.headers['Cache-Control']).to eq 'max-age=0, private, must-revalidate, no-store'
+      end
+    end
+  end
 end
@@ -5,6 +5,17 @@ shared_examples 'content not cached without revalidation' do
   end
 end

+shared_examples 'content not cached without revalidation and no-store' do
+  it 'ensures content will not be cached without revalidation' do
+    # Fixed in newer versions of ActionPack, it will only output a single `private`.
+    if Gitlab.rails5?
+      expect(subject['Cache-Control']).to eq('max-age=0, private, must-revalidate, no-store')
+    else
+      expect(subject['Cache-Control']).to eq('max-age=0, private, must-revalidate, private, no-store')
+    end
+  end
+end
+
 describe UploadsController do
   let!(:user) { create(:user, avatar: fixture_file_upload("spec/fixtures/dk.png", "image/png")) }
@@ -177,7 +188,7 @@ describe UploadsController do
           expect(response).to have_gitlab_http_status(200)
         end

-        it_behaves_like 'content not cached without revalidation' do
+        it_behaves_like 'content not cached without revalidation and no-store' do
           subject do
             get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'dk.png'
@@ -239,7 +250,7 @@ describe UploadsController do
           expect(response).to have_gitlab_http_status(200)
         end

-        it_behaves_like 'content not cached without revalidation' do
+        it_behaves_like 'content not cached without revalidation and no-store' do
           subject do
             get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'dk.png'
@@ -292,7 +303,7 @@ describe UploadsController do
           expect(response).to have_gitlab_http_status(200)
         end

-        it_behaves_like 'content not cached without revalidation' do
+        it_behaves_like 'content not cached without revalidation and no-store' do
           subject do
             get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'dk.png'
@@ -344,7 +355,7 @@ describe UploadsController do
           expect(response).to have_gitlab_http_status(200)
         end

-        it_behaves_like 'content not cached without revalidation' do
+        it_behaves_like 'content not cached without revalidation and no-store' do
           subject do
             get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'dk.png'
@@ -388,7 +399,7 @@ describe UploadsController do
           expect(response).to have_gitlab_http_status(200)
         end

-        it_behaves_like 'content not cached without revalidation' do
+        it_behaves_like 'content not cached without revalidation and no-store' do
           subject do
             get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'dk.png'
@@ -445,7 +456,7 @@ describe UploadsController do
           expect(response).to have_gitlab_http_status(200)
         end

-        it_behaves_like 'content not cached without revalidation' do
+        it_behaves_like 'content not cached without revalidation and no-store' do
           subject do
             get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'dk.png'
@@ -498,7 +509,7 @@ describe UploadsController do
           expect(response).to have_gitlab_http_status(200)
         end

-        it_behaves_like 'content not cached without revalidation' do
+        it_behaves_like 'content not cached without revalidation and no-store' do
           subject do
             get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'dk.png'
...
@@ -322,6 +322,22 @@ describe 'Project' do
     end
   end

+  context 'content is not cached after signing out', :js do
+    let(:user) { create(:user, project_view: 'activity') }
+    let(:project) { create(:project, :repository) }
+
+    it 'does not load activity', :js do
+      project.add_maintainer(user)
+      sign_in(user)
+      visit project_path(project)
+      sign_out(user)
+
+      page.evaluate_script('window.history.back()')
+
+      expect(page).not_to have_selector('.event-item')
+    end
+  end
+
   def remove_with_confirm(button_text, confirm_with)
     click_button button_text
     fill_in 'confirm_name_input', with: confirm_with
...
@@ -221,6 +221,7 @@ describe('DiffsStoreMutations', () => {
       expect(state.diffFiles[0].parallelDiffLines[0].left.discussions.length).toEqual(1);
       expect(state.diffFiles[0].parallelDiffLines[0].left.discussions[0].id).toEqual(1);
+      expect(state.diffFiles[0].parallelDiffLines[0].right.discussions).toEqual([]);

       expect(state.diffFiles[0].highlightedDiffLines[0].discussions.length).toEqual(1);
       expect(state.diffFiles[0].highlightedDiffLines[0].discussions[0].id).toEqual(1);
...
@@ -62,9 +62,13 @@ describe('Pipelines Actions dropdown', () => {
     );
   };

-  beforeEach(() => {
+  beforeEach(done => {
     spyOn(Date, 'now').and.callFake(() => new Date('2063-04-04T00:42:00Z').getTime());
     vm = mountComponent(Component, { actions: [scheduledJobAction, expiredJobAction] });
+
+    Vue.nextTick()
+      .then(done)
+      .catch(done.fail);
   });

   it('emits postAction event after confirming', () => {
...
@@ -211,6 +211,26 @@ describe('Deployment component', () => {
       });
     });

+    describe('without changes', () => {
+      beforeEach(() => {
+        window.gon = window.gon || {};
+        window.gon.features = window.gon.features || {};
+        window.gon.features.ciEnvironmentsStatusChanges = true;
+        delete deploymentMockData.changes;
+
+        vm = mountComponent(Component, { deployment: { ...deploymentMockData } });
+      });
+
+      afterEach(() => {
+        delete window.gon.features.ciEnvironmentsStatusChanges;
+      });
+
+      it('renders the link to the review app without dropdown', () => {
+        expect(vm.$el.querySelector('.js-mr-wigdet-deployment-dropdown')).toBeNull();
+        expect(vm.$el.querySelector('.js-deploy-url-feature-flag')).not.toBeNull();
+      });
+    });
+
     describe('deployment status', () => {
       describe('running', () => {
         beforeEach(() => {
...
import Vue from 'vue';
import component from '~/vue_merge_request_widget/components/review_app_link.vue';
import mountComponent from '../../helpers/vue_mount_component_helper';
describe('review app link', () => {
const Component = Vue.extend(component);
const props = {
link: '/review',
cssClass: 'js-link',
};
let vm;
let el;
beforeEach(() => {
vm = mountComponent(Component, props);
el = vm.$el;
});
afterEach(() => {
vm.$destroy();
});
it('renders provided link as href attribute', () => {
expect(el.getAttribute('href')).toEqual(props.link);
});
it('renders provided cssClass as class attribute', () => {
expect(el.getAttribute('class')).toEqual(props.cssClass);
});
it('renders View app text', () => {
expect(el.textContent.trim()).toEqual('View app');
});
it('renders svg icon', () => {
expect(el.querySelector('svg')).not.toBeNull();
});
});
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::BackgroundMigration::DigestColumn, :migration, schema: 20180913142237 do
let(:personal_access_tokens) { table(:personal_access_tokens) }
let(:users) { table(:users) }
subject { described_class.new }
describe '#perform' do
context 'token is not yet hashed' do
before do
users.create(id: 1, email: 'user@example.com', projects_limit: 10)
personal_access_tokens.create!(id: 1, user_id: 1, name: 'pat-01', token: 'token-01')
end
it 'saves token digest' do
expect { subject.perform(PersonalAccessToken, :token, :token_digest, 1, 2) }.to(
change { PersonalAccessToken.find(1).token_digest }.from(nil).to(Gitlab::CryptoHelper.sha256('token-01')))
end
it 'erases token' do
expect { subject.perform(PersonalAccessToken, :token, :token_digest, 1, 2) }.to(
change { PersonalAccessToken.find(1).token }.from('token-01').to(nil))
end
end
context 'token is already hashed' do
before do
users.create(id: 1, email: 'user@example.com', projects_limit: 10)
personal_access_tokens.create!(id: 1, user_id: 1, name: 'pat-01', token_digest: 'token-digest-01')
end
it 'does not change existing token digest' do
expect { subject.perform(PersonalAccessToken, :token, :token_digest, 1, 2) }.not_to(
change { PersonalAccessToken.find(1).token_digest })
end
it 'leaves token empty' do
expect { subject.perform(PersonalAccessToken, :token, :token_digest, 1, 2) }.not_to(
change { PersonalAccessToken.find(1).token }.from(nil))
end
end
end
end
@@ -29,6 +29,16 @@ describe Gitlab::UrlBlocker do
       expect(described_class.blocked_url?('https://gitlab.com/foo/foo.git', protocols: ['http'])).to be true
     end

+    it 'returns true for localhost IPs' do
+      expect(described_class.blocked_url?('https://0.0.0.0/foo/foo.git')).to be true
+      expect(described_class.blocked_url?('https://[::1]/foo/foo.git')).to be true
+      expect(described_class.blocked_url?('https://127.0.0.1/foo/foo.git')).to be true
+    end
+
+    it 'returns true for loopback IP' do
+      expect(described_class.blocked_url?('https://127.0.0.2/foo/foo.git')).to be true
+    end
+
     it 'returns true for alternative version of 127.0.0.1 (0177.1)' do
       expect(described_class.blocked_url?('https://0177.1:65535/foo/foo.git')).to be true
     end
@@ -84,6 +94,16 @@ describe Gitlab::UrlBlocker do
       end
     end

+    it 'allows localhost endpoints' do
+      expect(described_class).not_to be_blocked_url('http://0.0.0.0', allow_localhost: true)
+      expect(described_class).not_to be_blocked_url('http://localhost', allow_localhost: true)
+      expect(described_class).not_to be_blocked_url('http://127.0.0.1', allow_localhost: true)
+    end
+
+    it 'allows loopback endpoints' do
+      expect(described_class).not_to be_blocked_url('http://127.0.0.2', allow_localhost: true)
+    end
+
     it 'allows IPv4 link-local endpoints' do
       expect(described_class).not_to be_blocked_url('http://169.254.169.254')
       expect(described_class).not_to be_blocked_url('http://169.254.168.100')
@@ -122,7 +142,7 @@ describe Gitlab::UrlBlocker do
     end

     def stub_domain_resolv(domain, ip)
-      allow(Addrinfo).to receive(:getaddrinfo).with(domain, any_args).and_return([double(ip_address: ip, ipv4_private?: true, ipv6_link_local?: false)])
+      allow(Addrinfo).to receive(:getaddrinfo).with(domain, any_args).and_return([double(ip_address: ip, ipv4_private?: true, ipv6_link_local?: false, ipv4_loopback?: false, ipv6_loopback?: false)])
     end

     def unstub_domain_resolv
...
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180913142237_schedule_digest_personal_access_tokens.rb')
describe ScheduleDigestPersonalAccessTokens, :migration, :sidekiq do
let(:personal_access_tokens) { table(:personal_access_tokens) }
let(:users) { table(:users) }
before do
stub_const("#{described_class.name}::BATCH_SIZE", 4)
users.create(id: 1, email: 'user@example.com', projects_limit: 10)
personal_access_tokens.create!(id: 1, user_id: 1, name: 'pat-01', token: 'token-01')
personal_access_tokens.create!(id: 2, user_id: 1, name: 'pat-02', token: 'token-02')
personal_access_tokens.create!(id: 3, user_id: 1, name: 'pat-03', token_digest: 'token_digest')
personal_access_tokens.create!(id: 4, user_id: 1, name: 'pat-04', token: 'token-04')
personal_access_tokens.create!(id: 5, user_id: 1, name: 'pat-05', token: 'token-05')
personal_access_tokens.create!(id: 6, user_id: 1, name: 'pat-06', token: 'token-06')
end
it 'correctly schedules background migrations' do
Sidekiq::Testing.fake! do
migrate!
expect(described_class::MIGRATION).to(
be_scheduled_delayed_migration(
5.minutes, 'PersonalAccessToken', 'token', 'token_digest', 1, 5))
expect(described_class::MIGRATION).to(
be_scheduled_delayed_migration(
10.minutes, 'PersonalAccessToken', 'token', 'token_digest', 6, 6))
expect(BackgroundMigrationWorker.jobs.size).to eq 2
end
end
it 'schedules background migrations' do
perform_enqueued_jobs do
plain_text_token = 'token IS NOT NULL'
expect(personal_access_tokens.where(plain_text_token).count).to eq 5
migrate!
expect(personal_access_tokens.where(plain_text_token).count).to eq 0
end
end
end
@@ -2,8 +2,6 @@ require 'spec_helper'

 shared_examples 'TokenAuthenticatable' do
   describe 'dynamically defined methods' do
-    it { expect(described_class).to be_private_method_defined(:generate_token) }
-    it { expect(described_class).to be_private_method_defined(:write_new_token) }
     it { expect(described_class).to respond_to("find_by_#{token_field}") }
     it { is_expected.to respond_to("ensure_#{token_field}") }
     it { is_expected.to respond_to("set_#{token_field}") }
@@ -66,13 +64,275 @@ describe ApplicationSetting, 'TokenAuthenticatable' do
   end

   describe 'multiple token fields' do
-    before do
+    before(:all) do
       described_class.send(:add_authentication_token_field, :yet_another_token)
     end

-    describe '.token_fields' do
-      subject { described_class.authentication_token_fields }
-
-      it { is_expected.to include(:runners_registration_token, :yet_another_token) }
-    end
+    it { is_expected.to respond_to(:ensure_runners_registration_token) }
+    it { is_expected.to respond_to(:ensure_yet_another_token) }
   end
describe 'setting same token field multiple times' do
subject { described_class.send(:add_authentication_token_field, :runners_registration_token) }
it 'raises error' do
expect {subject}.to raise_error(ArgumentError)
end
end
end
describe PersonalAccessToken, 'TokenAuthenticatable' do
let(:personal_access_token_name) { 'test-pat-01' }
let(:token_value) { 'token' }
let(:user) { create(:user) }
let(:personal_access_token) do
described_class.new(name: personal_access_token_name,
user_id: user.id,
scopes: [:api],
token: token,
token_digest: token_digest)
end
before do
allow(Devise).to receive(:friendly_token).and_return(token_value)
end
describe '.find_by_token' do
subject { PersonalAccessToken.find_by_token(token_value) }
before do
personal_access_token.save
end
context 'token_digest already exists' do
let(:token) { nil }
let(:token_digest) { Gitlab::CryptoHelper.sha256(token_value) }
it 'finds the token' do
expect(subject).not_to be_nil
expect(subject.name).to eql(personal_access_token_name)
end
end
context 'token_digest does not exist' do
let(:token) { token_value }
let(:token_digest) { nil }
it 'finds the token' do
expect(subject).not_to be_nil
expect(subject.name).to eql(personal_access_token_name)
end
end
end
describe '#set_token' do
let(:new_token_value) { 'new-token' }
subject { personal_access_token.set_token(new_token_value) }
context 'token_digest already exists' do
let(:token) { nil }
let(:token_digest) { Gitlab::CryptoHelper.sha256(token_value) }
it 'overwrites token_digest' do
subject
expect(personal_access_token.read_attribute('token')).to be_nil
expect(personal_access_token.token).to eql(new_token_value)
expect(personal_access_token.token_digest).to eql(Gitlab::CryptoHelper.sha256(new_token_value))
end
end
context 'token_digest does not exist but token does' do
let(:token) { token_value }
let(:token_digest) { nil }
it 'creates new token_digest and clears token' do
subject
expect(personal_access_token.read_attribute('token')).to be_nil
expect(personal_access_token.token).to eql(new_token_value)
expect(personal_access_token.token_digest).to eql(Gitlab::CryptoHelper.sha256(new_token_value))
end
end
context 'token_digest does not exist, nor token' do
let(:token) { nil }
let(:token_digest) { nil }
it 'creates new token_digest' do
subject
expect(personal_access_token.read_attribute('token')).to be_nil
expect(personal_access_token.token).to eql(new_token_value)
expect(personal_access_token.token_digest).to eql(Gitlab::CryptoHelper.sha256(new_token_value))
end
end
end
describe '#ensure_token' do
subject { personal_access_token.ensure_token }
context 'token_digest already exists' do
let(:token) { nil }
let(:token_digest) { Gitlab::CryptoHelper.sha256(token_value) }
it 'does not change token fields' do
subject
expect(personal_access_token.read_attribute('token')).to be_nil
expect(personal_access_token.token).to be_nil
expect(personal_access_token.token_digest).to eql(Gitlab::CryptoHelper.sha256(token_value))
end
end
context 'token_digest does not exist but token does' do
let(:token) { token_value }
let(:token_digest) { nil }
it 'does not change token fields' do
subject
expect(personal_access_token.read_attribute('token')).to eql(token_value)
expect(personal_access_token.token).to eql(token_value)
expect(personal_access_token.token_digest).to be_nil
end
end
context 'token_digest does not exist, nor token' do
let(:token) { nil }
let(:token_digest) { nil }
it 'creates token_digest' do
subject
expect(personal_access_token.read_attribute('token')).to be_nil
expect(personal_access_token.token).to eql(token_value)
expect(personal_access_token.token_digest).to eql(Gitlab::CryptoHelper.sha256(token_value))
end
end
end
describe '#ensure_token!' do
subject { personal_access_token.ensure_token! }
context 'token_digest already exists' do
let(:token) { nil }
let(:token_digest) { Gitlab::CryptoHelper.sha256(token_value) }
it 'does not change token fields' do
subject
expect(personal_access_token.read_attribute('token')).to be_nil
expect(personal_access_token.token).to be_nil
expect(personal_access_token.token_digest).to eql(Gitlab::CryptoHelper.sha256(token_value))
end
end
context 'token_digest does not exist but token does' do
let(:token) { token_value }
let(:token_digest) { nil }
it 'does not change token fields' do
subject
expect(personal_access_token.read_attribute('token')).to eql(token_value)
expect(personal_access_token.token).to eql(token_value)
expect(personal_access_token.token_digest).to be_nil
end
end
context 'token_digest does not exist, nor token' do
let(:token) { nil }
let(:token_digest) { nil }
it 'creates token_digest' do
subject
expect(personal_access_token.read_attribute('token')).to be_nil
expect(personal_access_token.token).to eql(token_value)
expect(personal_access_token.token_digest).to eql(Gitlab::CryptoHelper.sha256(token_value))
end
end
end
describe '#reset_token!' do
subject { personal_access_token.reset_token! }
context 'token_digest already exists' do
let(:token) { nil }
let(:token_digest) { Gitlab::CryptoHelper.sha256('old-token') }
it 'creates new token_digest' do
subject
expect(personal_access_token.read_attribute('token')).to be_nil
expect(personal_access_token.token).to eql(token_value)
expect(personal_access_token.token_digest).to eql(Gitlab::CryptoHelper.sha256(token_value))
end
end
context 'token_digest does not exist but token does' do
let(:token) { 'old-token' }
let(:token_digest) { nil }
it 'creates new token_digest and clears token' do
subject
expect(personal_access_token.read_attribute('token')).to be_nil
expect(personal_access_token.token).to eql(token_value)
expect(personal_access_token.token_digest).to eql(Gitlab::CryptoHelper.sha256(token_value))
end
end
context 'token_digest does not exist, nor token' do
let(:token) { nil }
let(:token_digest) { nil }
it 'creates new token_digest' do
subject
expect(personal_access_token.read_attribute('token')).to be_nil
expect(personal_access_token.token).to eql(token_value)
expect(personal_access_token.token_digest).to eql(Gitlab::CryptoHelper.sha256(token_value))
end
end
context 'token_digest exists and newly generated token would be the same' do
let(:token) { nil }
let(:token_digest) { Gitlab::CryptoHelper.sha256('old-token') }
before do
personal_access_token.save
allow(Devise).to receive(:friendly_token).and_return(
'old-token', token_value, 'boom!')
end
it 'regenerates a new token_digest' do
subject
expect(personal_access_token.read_attribute('token')).to be_nil
expect(personal_access_token.token).to eql(token_value)
expect(personal_access_token.token_digest).to eql(Gitlab::CryptoHelper.sha256(token_value))
end
end
context 'token exists and newly generated token would be the same' do
let(:token) { 'old-token' }
let(:token_digest) { nil }
before do
personal_access_token.save
allow(Devise).to receive(:friendly_token).and_return(
'old-token', token_value, 'boom!')
end
it 'regenerates a new token_digest' do
subject
expect(personal_access_token.read_attribute('token')).to be_nil
expect(personal_access_token.token).to eql(token_value)
expect(personal_access_token.token_digest).to eql(Gitlab::CryptoHelper.sha256(token_value))
end
end end
end end
end end
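Reviewer note: a brief usage sketch of the digest behaviour the examples above assert; the token value and the pre-existing user record are hypothetical, and only methods exercised by the spec appear.
pat = PersonalAccessToken.new(name: 'example', user_id: user.id, scopes: [:api])

pat.ensure_token              # generates a value and sets only its SHA-256 digest
pat.read_attribute('token')   # => nil -- the plain value is never stored in the token column
pat.token_digest              # => Gitlab::CryptoHelper.sha256(<generated value>)

pat.set_token('new-token')    # swaps the digest for sha256('new-token')
pat.reset_token!              # regenerates, retrying until the new digest differs from the old one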
...@@ -2,12 +2,6 @@ require 'spec_helper' ...@@ -2,12 +2,6 @@ require 'spec_helper'
describe LfsObject do describe LfsObject do
describe '#local_store?' do describe '#local_store?' do
it 'returns true when file_store is nil' do
subject.file_store = nil
expect(subject.local_store?).to eq true
end
it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
subject.file_store = LfsObjectUploader::Store::LOCAL subject.file_store = LfsObjectUploader::Store::LOCAL
...@@ -83,19 +77,6 @@ describe LfsObject do ...@@ -83,19 +77,6 @@ describe LfsObject do
describe 'file is being stored' do describe 'file is being stored' do
let(:lfs_object) { create(:lfs_object, :with_file) } let(:lfs_object) { create(:lfs_object, :with_file) }
context 'when object has nil store' do
before do
lfs_object.update_column(:file_store, nil)
lfs_object.reload
end
it 'is stored locally' do
expect(lfs_object.file_store).to be(nil)
expect(lfs_object.file).to be_file_storage
expect(lfs_object.file.object_store).to eq(ObjectStorage::Store::LOCAL)
end
end
context 'when existing object has local store' do context 'when existing object has local store' do
it 'is stored locally' do it 'is stored locally' do
expect(lfs_object.file_store).to be(ObjectStorage::Store::LOCAL) expect(lfs_object.file_store).to be(ObjectStorage::Store::LOCAL)
......
...@@ -49,8 +49,9 @@ describe PersonalAccessToken do ...@@ -49,8 +49,9 @@ describe PersonalAccessToken do
describe 'Redis storage' do describe 'Redis storage' do
let(:user_id) { 123 } let(:user_id) { 123 }
let(:token) { 'abc000foo' } let(:token) { 'KS3wegQYXBLYhQsciwsj' }
context 'reading encrypted data' do
before do before do
subject.redis_store!(user_id, token) subject.redis_store!(user_id, token)
end end
...@@ -58,9 +59,26 @@ describe PersonalAccessToken do ...@@ -58,9 +59,26 @@ describe PersonalAccessToken do
it 'returns stored data' do it 'returns stored data' do
expect(subject.redis_getdel(user_id)).to eq(token) expect(subject.redis_getdel(user_id)).to eq(token)
end end
end
context 'reading unencrypted data' do
before do
Gitlab::Redis::SharedState.with do |redis|
redis.set(described_class.redis_shared_state_key(user_id),
token,
ex: PersonalAccessToken::REDIS_EXPIRY_TIME)
end
end
it 'returns stored data unmodified' do
expect(subject.redis_getdel(user_id)).to eq(token)
end
end
context 'after deletion' do context 'after deletion' do
before do before do
subject.redis_store!(user_id, token)
expect(subject.redis_getdel(user_id)).to eq(token) expect(subject.redis_getdel(user_id)).to eq(token)
end end
......
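Reviewer note: a minimal sketch of the backwards-compatible read path the two contexts above imply — decrypt when possible, otherwise hand back the raw value written before encryption was introduced. The AES helper call and the rescue fallback are assumptions about the implementation, not a copy of it.
def self.redis_getdel(user_id)
  Gitlab::Redis::SharedState.with do |redis|
    redis_key = redis_shared_state_key(user_id)
    encrypted_token = redis.get(redis_key)
    redis.del(redis_key)

    begin
      Gitlab::CryptoHelper.aes256_gcm_decrypt(encrypted_token)
    rescue StandardError
      # Data written before encryption (the 'reading unencrypted data' context)
      # is returned unmodified.
      encrypted_token
    end
  end
end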
...@@ -758,6 +758,14 @@ describe User do ...@@ -758,6 +758,14 @@ describe User do
expect(user.incoming_email_token).not_to be_blank expect(user.incoming_email_token).not_to be_blank
end end
it 'uses SecureRandom to generate the incoming email token' do
expect(SecureRandom).to receive(:hex).and_return('3b8ca303')
user = create(:user)
expect(user.incoming_email_token).to eql('gitlab')
end
end end
describe '#ensure_user_rights_and_limits' do describe '#ensure_user_rights_and_limits' do
......
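Reviewer note: the 'gitlab' expectation in the new incoming-email-token example above is not a placeholder. Assuming the generator derives the token as SecureRandom.hex.to_i(16).to_s(36), the stubbed hex value converts exactly to that string:
'3b8ca303'.to_i(16)   # => 999072515
999072515.to_s(36)    # => "gitlab"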
...@@ -508,6 +508,15 @@ describe MergeRequestPresenter do ...@@ -508,6 +508,15 @@ describe MergeRequestPresenter do
is_expected is_expected
.to eq("<a href=\"/#{resource.source_project.full_path}/tree/#{resource.source_branch}\">#{resource.source_branch}</a>") .to eq("<a href=\"/#{resource.source_project.full_path}/tree/#{resource.source_branch}\">#{resource.source_branch}</a>")
end end
it 'escapes html, when source_branch does not exist' do
xss_attempt = "<img src='x' onerror=alert('bad stuff') />"
allow(resource).to receive(:source_branch) { xss_attempt }
allow(resource).to receive(:source_branch_exists?) { false }
is_expected.to eq(ERB::Util.html_escape(xss_attempt))
end
end end
describe '#rebase_path' do describe '#rebase_path' do
......
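Reviewer note: a hypothetical sketch (not the presenter's actual code) of the fallback the new example exercises — link to the branch when it exists, otherwise render the user-controlled name escaped:
def source_branch_link
  if source_branch_exists?
    link_to(source_branch, project_tree_path(source_project, source_branch))
  else
    ERB::Util.html_escape(source_branch)
  end
end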
...@@ -158,6 +158,16 @@ describe API::Wikis do ...@@ -158,6 +158,16 @@ describe API::Wikis do
expect(json_response.size).to eq(1) expect(json_response.size).to eq(1)
expect(json_response['error']).to eq('file is missing') expect(json_response['error']).to eq('file is missing')
end end
it 'responds with validation error on invalid temp file' do
payload[:file] = { tempfile: '/etc/hosts' }
post(api(url, user), payload)
expect(response).to have_gitlab_http_status(400)
expect(json_response.size).to eq(1)
expect(json_response['error']).to eq('file is invalid')
end
end end
describe 'GET /projects/:id/wikis' do describe 'GET /projects/:id/wikis' do
......
...@@ -324,6 +324,66 @@ describe MergeRequests::RefreshService do ...@@ -324,6 +324,66 @@ describe MergeRequests::RefreshService do
end end
end end
context 'forked projects with the same source branch name as target branch' do
let!(:first_commit) do
@fork_project.repository.create_file(@user, 'test1.txt', 'Test data',
message: 'Test commit',
branch_name: 'master')
end
let!(:second_commit) do
@fork_project.repository.create_file(@user, 'test2.txt', 'More test data',
message: 'Second test commit',
branch_name: 'master')
end
let!(:forked_master_mr) do
create(:merge_request,
source_project: @fork_project,
source_branch: 'master',
target_branch: 'master',
target_project: @project)
end
let(:force_push_commit) { @project.commit('feature').id }
it 'should reload a new diff for a push to the forked project' do
expect do
service.new(@fork_project, @user).execute(@oldrev, first_commit, 'refs/heads/master')
reload_mrs
end.to change { forked_master_mr.merge_request_diffs.count }.by(1)
end
it 'should reload a new diff for a force push to the source branch' do
expect do
service.new(@fork_project, @user).execute(@oldrev, force_push_commit, 'refs/heads/master')
reload_mrs
end.to change { forked_master_mr.merge_request_diffs.count }.by(1)
end
it 'should reload a new diff for a force push to the target branch' do
expect do
service.new(@project, @user).execute(@oldrev, force_push_commit, 'refs/heads/master')
reload_mrs
end.to change { forked_master_mr.merge_request_diffs.count }.by(1)
end
it 'should reload a new diff for a push to the target project that contains a commit in the MR' do
expect do
service.new(@project, @user).execute(@oldrev, first_commit, 'refs/heads/master')
reload_mrs
end.to change { forked_master_mr.merge_request_diffs.count }.by(1)
end
it 'should not increase the diff count for a new push to target branch' do
new_commit = @project.repository.create_file(@user, 'new-file.txt', 'A new file',
message: 'This is a test',
branch_name: 'master')
expect do
service.new(@project, @user).execute(@newrev, new_commit, 'refs/heads/master')
reload_mrs
end.not_to change { forked_master_mr.merge_request_diffs.count }
end
end
context 'push to origin repo target branch after fork project was removed' do context 'push to origin repo target branch after fork project was removed' do
before do before do
@fork_project.destroy @fork_project.destroy
......
...@@ -16,7 +16,7 @@ singleuser: ...@@ -16,7 +16,7 @@ singleuser:
lifecycleHooks: lifecycleHooks:
postStart: postStart:
exec: exec:
command: ["git", "clone", "https://gitlab.com/gitlab-org/nurtch-demo.git", "DevOps-Runbook-Demo"] command: ["sh", "-c", "git clone https://gitlab.com/gitlab-org/nurtch-demo.git DevOps-Runbook-Demo || true"]
ingress: ingress:
enabled: true enabled: true
......