Commit e2d686ec authored by Rémy Coutable

Merge remote-tracking branch 'origin/master' into ce-to-ee-2017-11-08

Signed-off-by: Rémy Coutable <remy@rymai.me>
parents 2882c934 224e7784
@@ -7,7 +7,7 @@ const Api = {
   groupProjectsPath: '/api/:version/groups/:id/projects.json',
   projectsPath: '/api/:version/projects.json',
   projectLabelsPath: '/:namespace_path/:project_path/labels',
-  groupLabelsPath: '/groups/:namespace_path/labels',
+  groupLabelsPath: '/groups/:namespace_path/-/labels',
   licensePath: '/api/:version/templates/licenses/:key',
   gitignorePath: '/api/:version/templates/gitignores/:key',
   gitlabCiYmlPath: '/api/:version/templates/gitlab_ci_ymls/:key',
...
@@ -8,6 +8,7 @@ const unknownClass = 'geo-node-unknown';
 const healthyIcon = 'fa-check';
 const unhealthyIcon = 'fa-times';
 const unknownIcon = 'fa-times';
+const notAvailable = 'Not Available';

 class GeoNodeStatus {
   constructor(el) {
@@ -49,7 +50,19 @@ class GeoNodeStatus {
   }

   static formatCountAndPercentage(count, total, percentage) {
-    return `${gl.text.addDelimiter(count)}/${gl.text.addDelimiter(total)} (${percentage})`;
+    if (count !== null || total != null) {
+      return `${gl.text.addDelimiter(count)}/${gl.text.addDelimiter(total)} (${percentage})`;
+    }
+
+    return notAvailable;
+  }
+
+  static formatCount(count) {
+    if (count !== null) {
+      return gl.text.addDelimiter(count);
+    }
+
+    return notAvailable;
   }

   getStatus() {
@@ -73,21 +86,21 @@ class GeoNodeStatus {
       status.repositories_count,
       status.repositories_synced_in_percentage);
-    const repoFailedText = gl.text.addDelimiter(status.repositories_failed_count);
+    const repoFailedText = GeoNodeStatus.formatCount(status.repositories_failed_count);

     const lfsText = GeoNodeStatus.formatCountAndPercentage(
       status.lfs_objects_synced_count,
       status.lfs_objects_count,
       status.lfs_objects_synced_in_percentage);
-    const lfsFailedText = gl.text.addDelimiter(status.lfs_objects_failed_count);
+    const lfsFailedText = GeoNodeStatus.formatCount(status.lfs_objects_failed_count);

     const attachmentText = GeoNodeStatus.formatCountAndPercentage(
       status.attachments_synced_count,
       status.attachments_count,
       status.attachments_synced_in_percentage);
-    const attachmentFailedText = gl.text.addDelimiter(status.attachments_failed_count);
+    const attachmentFailedText = GeoNodeStatus.formatCount(status.attachments_failed_count);

     this.$repositoriesSynced.text(repoText);
     this.$repositoriesFailed.text(repoFailedText);
@@ -96,14 +109,14 @@ class GeoNodeStatus {
     this.$attachmentsSynced.text(attachmentText);
     this.$attachmentsFailed.text(attachmentFailedText);

-    let eventDate = 'N/A';
-    let cursorDate = 'N/A';
+    let eventDate = notAvailable;
+    let cursorDate = notAvailable;

-    if (status.last_event_timestamp !== null) {
+    if (status.last_event_timestamp !== null && status.last_event_timestamp > 0) {
       eventDate = gl.utils.formatDate(new Date(status.last_event_timestamp * 1000));
     }

-    if (status.cursor_last_event_timestamp !== null) {
+    if (status.cursor_last_event_timestamp !== null && status.cursor_last_event_timestamp > 0) {
       cursorDate = gl.utils.formatDate(new Date(status.cursor_last_event_timestamp * 1000));
     }
...
import PipelineStage from '../../pipelines/components/stage.vue';
import ciIcon from '../../vue_shared/components/ci_icon.vue';
import icon from '../../vue_shared/components/icon.vue';
import linkedPipelinesMiniList from '../../vue_shared/components/linked_pipelines_mini_list.vue';
export default {
name: 'MRWidgetPipeline',
props: {
mr: { type: Object, required: true },
},
components: {
'pipeline-stage': PipelineStage,
ciIcon,
icon,
linkedPipelinesMiniList,
},
computed: {
hasPipeline() {
return this.mr.pipeline && Object.keys(this.mr.pipeline).length > 0;
},
hasCIError() {
const { hasCI, ciStatus } = this.mr;
return hasCI && !ciStatus;
},
stageText() {
return this.mr.pipeline.details.stages.length > 1 ? 'stages' : 'stage';
},
status() {
return this.mr.pipeline.details.status || {};
},
/* We typically set defaults ([]) in the store or prop declarations, but because triggered
* and triggeredBy are appended to `pipeline`, we can't set defaults in the store, and we
* need to check their length here to prevent initializing linked-pipeline-mini-lists
* unnecessarily. */
triggered() {
return this.mr.pipeline.triggered || [];
},
triggeredBy() {
const response = this.mr.pipeline.triggered_by;
return response ? [response] : [];
},
},
template: `
<div
v-if="hasPipeline || hasCIError"
class="mr-widget-heading">
<div class="ci-widget media">
<template v-if="hasCIError">
<div class="ci-status-icon ci-status-icon-failed ci-error js-ci-error append-right-10">
<span
aria-hidden="true">
<icon
name="status_failed"/>
</span>
</div>
<div class="media-body">
Could not connect to the CI server. Please check your settings and try again
</div>
</template>
<template v-else-if="hasPipeline">
<div class="ci-status-icon append-right-10">
<a
class="icon-link"
:href="this.status.details_path">
<ci-icon :status="status" />
</a>
</div>
<div class="media-body">
<span>
Pipeline
<a
:href="mr.pipeline.path"
class="pipeline-id">#{{mr.pipeline.id}}</a>
</span>
<span class="mr-widget-pipeline-graph">
<span class="stage-cell">
<linked-pipelines-mini-list
v-if="triggeredBy.length"
:triggered-by="triggeredBy"
/>
<div
v-if="mr.pipeline.details.stages.length > 0"
v-for="(stage, index) in mr.pipeline.details.stages"
class="stage-container dropdown js-mini-pipeline-graph"
:class="{
'has-downstream': index === mr.pipeline.details.stages.length - 1 && triggered.length
}">
<pipeline-stage :stage="stage" />
</div>
<linked-pipelines-mini-list
v-if="triggered.length"
:triggered="triggered"
/>
</span>
</span>
<span>
{{mr.pipeline.details.status.label}} for
<a
:href="mr.pipeline.commit.commit_path"
class="commit-sha js-commit-link">
{{mr.pipeline.commit.short_id}}</a>.
</span>
<span
v-if="mr.pipeline.coverage"
class="js-mr-coverage">
Coverage {{mr.pipeline.coverage}}%
</span>
</div>
</template>
</div>
</div>
`,
};
<script>
import pipelineStage from '../../pipelines/components/stage.vue';
import ciIcon from '../../vue_shared/components/ci_icon.vue';
import icon from '../../vue_shared/components/icon.vue';
import linkedPipelinesMiniList from '../../vue_shared/components/linked_pipelines_mini_list.vue';
export default {
name: 'MRWidgetPipeline',
props: {
pipeline: {
type: Object,
required: true,
},
// This prop needs to be camelCase, html attributes are case insensitive
// https://vuejs.org/v2/guide/components.html#camelCase-vs-kebab-case
hasCi: {
type: Boolean,
required: false,
},
ciStatus: {
type: String,
required: false,
},
},
components: {
pipelineStage,
ciIcon,
icon,
linkedPipelinesMiniList,
},
computed: {
hasPipeline() {
return this.pipeline && Object.keys(this.pipeline).length > 0;
},
hasCIError() {
return this.hasCi && !this.ciStatus;
},
status() {
return this.pipeline.details &&
this.pipeline.details.status ? this.pipeline.details.status : {};
},
hasStages() {
return this.pipeline.details &&
this.pipeline.details.stages &&
this.pipeline.details.stages.length;
},
/* We typically set defaults ([]) in the store or prop declarations, but because triggered
* and triggeredBy are appended to `pipeline`, we can't set defaults in the store, and we
* need to check their length here to prevent initializing linked-pipeline-mini-lists
* unnecessarily. */
triggered() {
return this.pipeline.triggered || [];
},
triggeredBy() {
const response = this.pipeline.triggered_by;
return response ? [response] : [];
},
},
};
</script>
<template>
<div
v-if="hasPipeline || hasCIError"
class="mr-widget-heading">
<div class="ci-widget media">
<template v-if="hasCIError">
<div class="ci-status-icon ci-status-icon-failed ci-error js-ci-error append-right-10">
<icon
name="status_failed"/>
</div>
<div class="media-body">
Could not connect to the CI server. Please check your settings and try again
</div>
</template>
<template v-else-if="hasPipeline">
<a
class="append-right-10"
:href="this.status.details_path">
<ci-icon :status="status" />
</a>
<div class="media-body">
Pipeline
<a
:href="pipeline.path"
class="pipeline-id">
#{{pipeline.id}}
</a>
{{pipeline.details.status.label}} for
<a
:href="pipeline.commit.commit_path"
class="commit-sha js-commit-link">
{{pipeline.commit.short_id}}</a>.
<span class="mr-widget-pipeline-graph">
<span class="stage-cell">
<linked-pipelines-mini-list
v-if="triggeredBy.length"
:triggered-by="triggeredBy"
/>
<div
v-if="hasStages"
v-for="(stage, i) in pipeline.details.stages"
:key="i"
class="stage-container dropdown js-mini-pipeline-graph"
:class="{
'has-downstream': i === pipeline.details.stages.length - 1 && triggered.length
}">
<pipeline-stage :stage="stage" />
</div>
<linked-pipelines-mini-list
v-if="triggered.length"
:triggered="triggered"
/>
</span>
</span>
<template v-if="pipeline.coverage">
Coverage {{pipeline.coverage}}%
</template>
</div>
</template>
</div>
</div>
</template>
@@ -13,7 +13,7 @@ export { default as Vue } from 'vue';
 export { default as SmartInterval } from '~/smart_interval';
 export { default as WidgetHeader } from './components/mr_widget_header';
 export { default as WidgetMergeHelp } from './components/mr_widget_merge_help';
-export { default as WidgetPipeline } from './components/mr_widget_pipeline';
+export { default as WidgetPipeline } from './components/mr_widget_pipeline.vue';
 export { default as WidgetDeployment } from './components/mr_widget_deployment';
 export { default as WidgetRelatedLinks } from './components/mr_widget_related_links';
 export { default as MergedState } from './components/states/mr_widget_merged';
...
@@ -234,7 +234,10 @@ export default {
     <mr-widget-header :mr="mr" />
     <mr-widget-pipeline
       v-if="shouldRenderPipelines"
-      :mr="mr" />
+      :pipeline="mr.pipeline"
+      :ci-status="mr.ciStatus"
+      :has-ci="mr.hasCI"
+      />
     <mr-widget-deployment
       v-if="shouldRenderDeployments"
       :mr="mr"
...
@@ -74,9 +74,10 @@ module LfsRequest
   def lfs_upload_access?
     return false unless project.lfs_enabled?
+    return false unless has_authentication_ability?(:push_code)
     return false if project.above_size_limit? || objects_exceed_repo_limit?

-    has_authentication_ability?(:push_code) && can?(user, :push_code, project)
+    lfs_deploy_token? || can?(user, :push_code, project)
   end

   def lfs_deploy_token?
...
@@ -43,7 +43,7 @@ class Import::GithubController < Import::BaseController
     @target_namespace = find_or_create_namespace(namespace_path, current_user.namespace_path)

     if can?(current_user, :create_projects, @target_namespace)
-      @project = Gitlab::GithubImport::ProjectCreator.new(repo, @project_name, @target_namespace, current_user, access_params, type: provider).execute
+      @project = Gitlab::LegacyGithubImport::ProjectCreator.new(repo, @project_name, @target_namespace, current_user, access_params, type: provider).execute
     else
       render 'unauthorized'
     end
@@ -52,7 +52,7 @@ class Import::GithubController < Import::BaseController
   private

   def client
-    @client ||= Gitlab::GithubImport::Client.new(session[access_token_key], client_options)
+    @client ||= Gitlab::LegacyGithubImport::Client.new(session[access_token_key], client_options)
   end

   def verify_import_enabled
...
@@ -10,9 +10,6 @@ class Projects::CommitsController < Projects::ApplicationController
   before_action :set_commits

   def show
-    @note_counts = project.notes.where(commit_id: @commits.map(&:id))
-      .group(:commit_id).count
-
     @merge_request = MergeRequestsFinder.new(current_user, project_id: @project.id).execute.opened
       .find_by(source_project: @project, source_branch: @ref, target_branch: @repository.root_ref)
...
@@ -112,9 +112,6 @@ class Projects::MergeRequests::CreationsController < Projects::MergeRequests::Ap
     @commits = prepare_commits_for_rendering(@merge_request.commits)
     @commit = @merge_request.diff_head_commit

-    @note_counts = Note.where(commit_id: @commits.map(&:id))
-      .group(:commit_id).count
-
     @labels = LabelsFinder.new(current_user, project_id: @project.id).execute

     set_pipeline_variables
...
@@ -83,8 +83,6 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
     # Get commits from repository
     # or from cache if already merged
     @commits = prepare_commits_for_rendering(@merge_request.commits)
-    @note_counts = Note.where(commit_id: @commits.map(&:id))
-      .group(:commit_id).count

     render json: { html: view_to_html_string('projects/merge_requests/_commits') }
   end
...
@@ -422,7 +422,7 @@ module Ci
     end

     def notes
-      Note.for_commit_id(sha)
+      project.notes.for_commit_id(sha)
     end

     def process!
...
@@ -47,4 +47,8 @@ class ExternalIssue
     id
   end
+
+  def notes
+    Note.none
+  end
 end
@@ -22,6 +22,10 @@ module Geo
                class_name: 'Geo::RepositoriesChangedEvent',
                foreign_key: :repositories_changed_event_id

+    belongs_to :hashed_storage_migrated_event,
+               class_name: 'Geo::HashedStorageMigratedEvent',
+               foreign_key: :hashed_storage_migrated_event_id
+
     def self.latest_event
       order(id: :desc).first
     end
@@ -31,7 +35,8 @@ module Geo
       repository_updated_event ||
         repository_deleted_event ||
         repository_renamed_event ||
-        repositories_changed_event
+        repositories_changed_event ||
+        hashed_storage_migrated_event
     end

     def project_id
...
module Geo
class HashedStorageMigratedEvent < ActiveRecord::Base
include Geo::Model
belongs_to :project
validates :project, :repository_storage_name, :repository_storage_path,
:old_disk_path, :new_disk_path, :old_wiki_disk_path,
:new_wiki_disk_path, :new_storage_version, presence: true
end
end
@@ -132,7 +132,7 @@ class GeoNodeStatus < ActiveRecord::Base
   private

   def sync_percentage(total, synced)
-    return 0 if total.zero?
+    return 0 if !total.present? || total.zero?

     (synced.to_f / total.to_f) * 100.0
   end
...
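For illustration only (not part of the hunk above): the new guard keeps the percentage helper safe when a node has not reported any totals yet, since calling `zero?` on `nil` would raise. A minimal sketch in plain Ruby, with `nil?` standing in for Rails' `!present?`:

```ruby
# Sketch of the guarded calculation; `nil?` stands in for !total.present?.
def sync_percentage(total, synced)
  return 0 if total.nil? || total.zero?

  (synced.to_f / total.to_f) * 100.0
end

sync_percentage(nil, 10)  # => 0 (previously: NoMethodError on nil.zero?)
sync_percentage(200, 50)  # => 25.0
```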
@@ -601,7 +601,7 @@ class MergeRequest < ActiveRecord::Base
     commit_notes = Note
       .except(:order)
       .where(project_id: [source_project_id, target_project_id])
-      .where(noteable_type: 'Commit', commit_id: commit_ids)
+      .for_commit_id(commit_ids)

     # We're using a UNION ALL here since this results in better performance
     # compared to using OR statements. We're using UNION ALL since the queries
...
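The comment above is cut off in this hunk; the pattern it refers to combines relations with `Gitlab::SQL::Union` (the same helper used in the `User.by_any_email` change further down) so each branch can use its own index instead of a single `OR` query. A hedged, illustrative sketch — the relation names are hypothetical stand-ins, not the truncated method body:

```ruby
# Illustrative only: combine two Note relations via a SQL union rather than OR.
# `commit_notes` and `mr_notes` are hypothetical relations for this sketch.
union = Gitlab::SQL::Union.new([commit_notes, mr_notes])

Note.from("(#{union.to_sql}) #{Note.table_name}")
```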
@@ -368,6 +368,7 @@ class Project < ActiveRecord::Base
   scope :abandoned, -> { where('projects.last_activity_at < ?', 6.months.ago) }
   scope :excluding_project, ->(project) { where.not(id: project) }
+  scope :import_started, -> { where(import_status: 'started') }

   state_machine :import_status, initial: :none do
     event :import_schedule do
@@ -1190,6 +1191,10 @@ class Project < ActiveRecord::Base
     !!repository.exists?
   end

+  def wiki_repository_exists?
+    wiki.repository_exists?
+  end
+
   # update visibility_level of forks
   def update_forks_visibility_level
     return unless visibility_level < visibility_level_was
@@ -1433,6 +1438,31 @@ class Project < ActiveRecord::Base
     reload_repository!
   end

+  def after_import
+    repository.after_import
+    import_finish
+    remove_import_jid
+    update_project_counter_caches
+  end
+
+  def update_project_counter_caches
+    classes = [
+      Projects::OpenIssuesCountService,
+      Projects::OpenMergeRequestsCountService
+    ]
+
+    classes.each do |klass|
+      klass.new(self).refresh_cache
+    end
+  end
+
+  def remove_import_jid
+    return unless import_jid
+
+    Gitlab::SidekiqStatus.unset(import_jid)
+    update_column(:import_jid, nil)
+  end
+
   def running_or_pending_build_count(force: false)
     Rails.cache.fetch(['projects', id, 'running_or_pending_build_count'], force: force) do
       builds.running_or_pending.count(:all)
@@ -1686,6 +1716,17 @@ class Project < ActiveRecord::Base
     Gitlab::ReferenceCounter.new(gl_repository(is_wiki: wiki))
   end

+  # Refreshes the expiration time of the associated import job ID.
+  #
+  # This method can be used by asynchronous importers to refresh the status,
+  # preventing the StuckImportJobsWorker from marking the import as failed.
+  def refresh_import_jid_expiration
+    return unless import_jid
+
+    Gitlab::SidekiqStatus
+      .set(import_jid, StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION)
+  end
+
   private

   def storage
...
@@ -1029,6 +1029,10 @@ class Repository
     raw_repository.fetch_source_branch!(source_repository.raw_repository, source_branch, local_ref)
   end

+  def remote_exists?(name)
+    raw_repository.remote_exists?(name)
+  end
+
   def compare_source_branch(target_branch_name, source_repository, source_branch_name, straight:)
     raw_repository.compare_source_branch(target_branch_name, source_repository.raw_repository, source_branch_name, straight: straight)
   end
...
@@ -277,18 +277,23 @@ class User < ActiveRecord::Base
       end
     end

+    def for_github_id(id)
+      joins(:identities)
+        .where(identities: { provider: :github, extern_uid: id.to_s })
+    end
+
     # Find a User by their primary email or any associated secondary email
     def find_by_any_email(email)
-      sql = 'SELECT *
-      FROM users
-      WHERE id IN (
-        SELECT id FROM users WHERE email = :email
-        UNION
-        SELECT emails.user_id FROM emails WHERE email = :email
-      )
-      LIMIT 1;'
-
-      User.find_by_sql([sql, { email: email }]).first
+      by_any_email(email).take
+    end
+
+    # Returns a relation containing all the users for the given Email address
+    def by_any_email(email)
+      users = where(email: email)
+      emails = joins(:emails).where(emails: { email: email })
+      union = Gitlab::SQL::Union.new([users, emails])
+
+      from("(#{union.to_sql}) #{table_name}")
     end
...
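A quick usage sketch of the reworked lookup above (the address is illustrative):

```ruby
# Matches a primary address on users.email or a secondary address in emails.
User.by_any_email('user@example.com')      # => ActiveRecord::Relation of matching users
User.find_by_any_email('user@example.com') # => first match or nil
```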
module Geo
class HashedStorageMigratedEventStore < EventStore
self.event_type = :hashed_storage_migrated_event
private
def build_event
Geo::HashedStorageMigratedEvent.new(
project: project,
old_storage_version: old_storage_version,
new_storage_version: project.storage_version,
repository_storage_name: project.repository.storage,
repository_storage_path: project.repository_storage_path,
old_disk_path: old_disk_path,
new_disk_path: project.disk_path,
old_wiki_disk_path: old_wiki_disk_path,
new_wiki_disk_path: project.wiki.disk_path
)
end
def old_storage_version
params.fetch(:old_storage_version)
end
def old_disk_path
params.fetch(:old_disk_path)
end
def old_wiki_disk_path
params.fetch(:old_wiki_disk_path)
end
end
end
module Geo
class HashedStorageMigrationService
attr_reader :project_id, :old_disk_path, :new_disk_path, :old_storage_version
def initialize(project_id, old_disk_path:, new_disk_path:, old_storage_version:)
@project_id = project_id
@old_disk_path = old_disk_path
@new_disk_path = new_disk_path
@old_storage_version = old_storage_version
end
def async_execute
Geo::HashedStorageMigrationWorker.perform_async(
project_id,
old_disk_path,
new_disk_path,
old_storage_version
)
end
def execute
project = Project.find(project_id)
project.expire_caches_before_rename(old_disk_path)
if migrating_from_legacy_storage?(project)
Geo::MoveRepositoryService.new(project, old_disk_path, new_disk_path).execute
end
true
end
private
def migrating_from_legacy_storage?(project)
from_legacy_storage? && project.hashed_storage?(:repository)
end
def from_legacy_storage?
old_storage_version.nil? || old_storage_version.zero?
end
end
end
@@ -2,34 +2,26 @@ module Geo
   class MoveRepositoryService
     include Gitlab::ShellAdapter

-    attr_reader :id, :name, :old_path_with_namespace, :new_path_with_namespace
+    attr_reader :project, :old_disk_path, :new_disk_path

-    def initialize(id, name, old_path_with_namespace, new_path_with_namespace)
-      @id = id
-      @name = name
-      @old_path_with_namespace = old_path_with_namespace
-      @new_path_with_namespace = new_path_with_namespace
-    end
-
-    def async_execute
-      GeoRepositoryMoveWorker.perform_async(id, name, old_path_with_namespace, new_path_with_namespace)
+    def initialize(project, old_disk_path, new_disk_path)
+      @project = project
+      @old_disk_path = old_disk_path
+      @new_disk_path = new_disk_path
     end

     def execute
-      project = Project.find(id)
-      project.expire_caches_before_rename(old_path_with_namespace)
-
       # Make sure target directory exists (used when transferring repositories)
       project.ensure_storage_path_exists

       if gitlab_shell.mv_repository(project.repository_storage_path,
-                                    old_path_with_namespace, new_path_with_namespace)
+                                    old_disk_path, new_disk_path)
         # If repository moved successfully we need to send update instructions to users.
         # However we cannot allow rollback since we moved repository
         # So basically we mute exceptions in next actions
         begin
           gitlab_shell.mv_repository(project.repository_storage_path,
-                                     "#{old_path_with_namespace}.wiki", "#{new_path_with_namespace}.wiki")
+                                     "#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
         rescue
           # Returning false does not rollback after_* transaction but gives
           # us information about failing some of tasks
@@ -38,7 +30,7 @@ module Geo
         else
           # if we cannot move namespace directory we should rollback
           # db changes in order to prevent out of sync between db and fs
-          raise Exception.new('repository cannot be renamed')
+          raise StandardError.new('Repository cannot be renamed')
         end

         true
...
@@ -8,7 +8,7 @@ module Geo
     end

     def execute
-      GeoNode.create(params).persisted?
+      GeoNode.create(params)
     end
   end
 end
module Geo
class RenameRepositoryService
attr_reader :project_id, :old_disk_path, :new_disk_path
def initialize(project_id, old_disk_path, new_disk_path)
@project_id = project_id
@old_disk_path = old_disk_path
@new_disk_path = new_disk_path
end
def async_execute
Geo::RenameRepositoryWorker.perform_async(project_id, old_disk_path, new_disk_path)
end
def execute
project = Project.find(project_id)
project.expire_caches_before_rename(old_disk_path)
return true if project.hashed_storage?(:repository)
Geo::MoveRepositoryService.new(project, old_disk_path, new_disk_path).execute
end
end
end
@@ -4,7 +4,7 @@ module Projects
     prepend ::EE::Projects::HashedStorageMigrationService

-    attr_reader :old_disk_path, :new_disk_path
+    attr_reader :old_disk_path, :new_disk_path, :old_wiki_disk_path, :old_storage_version

     def initialize(project, logger = nil)
       @project = project
@@ -17,6 +17,7 @@ module Projects
       @old_disk_path = project.disk_path
       has_wiki = project.wiki.repository_exists?

+      @old_storage_version = project.storage_version
       project.storage_version = Storage::HashedProject::STORAGE_VERSION
       project.ensure_storage_path_exists

@@ -25,7 +26,8 @@ module Projects
       result = move_repository(@old_disk_path, @new_disk_path)

       if has_wiki
-        result &&= move_repository("#{@old_disk_path}.wiki", "#{@new_disk_path}.wiki")
+        @old_wiki_disk_path = "#{@old_disk_path}.wiki"
+        result &&= move_repository(@old_wiki_disk_path, "#{@new_disk_path}.wiki")
       end

       unless result
...
@@ -4,6 +4,18 @@ module Projects
     Error = Class.new(StandardError)

+    # Returns true if this importer is supposed to perform its work in the
+    # background.
+    #
+    # This method will only return `true` if async importing is explicitly
+    # supported by an importer class (`Gitlab::GithubImport::ParallelImporter`
+    # for example).
+    def async?
+      return false unless has_importer?
+
+      !!importer_class.try(:async?)
+    end
+
     def execute
       add_repository_to_project unless project.gitlab_project_import?
@@ -75,12 +87,16 @@ module Projects
       end
     end

+    def importer_class
+      Gitlab::ImportSources.importer(project.import_type)
+    end
+
     def has_importer?
       Gitlab::ImportSources.importer_names.include?(project.import_type)
     end

     def importer
-      Gitlab::ImportSources.importer(project.import_type).new(project)
+      importer_class.new(project)
     end

     def unknown_url?
...
@@ -481,17 +481,7 @@ module SystemNoteService
   #
   # Returns Boolean
   def cross_reference_exists?(noteable, mentioner)
-    # Initial scope should be system notes of this noteable type
-    notes = Note.system.where(noteable_type: noteable.class)
-
-    notes =
-      if noteable.is_a?(Commit)
-        # Commits have non-integer IDs, so they're stored in `commit_id`
-        notes.where(commit_id: noteable.id)
-      else
-        notes.where(noteable_id: noteable.id)
-      end
-
+    notes = noteable.notes.system
     notes_for_mentioner(mentioner, noteable, notes).exists?
   end
...
 - ref = local_assigns.fetch(:ref)
-- show_project_name = local_assigns.fetch(:show_project_name, false)
-- if @note_counts
-  - note_count = @note_counts.fetch(commit.id, 0)
-- else
-  - notes = commit.notes
-  - note_count = notes.user.count
-
-- cache_key = [project.full_path, commit.id, current_application_settings, note_count, @path.presence, current_controller?(:commits), I18n.locale]
+- cache_key = [project.full_path, commit.id, current_application_settings, @path.presence, current_controller?(:commits), I18n.locale]
 - cache_key.push(commit.status(ref)) if commit.status(ref)
+
+-# EE-only
+- show_project_name = local_assigns.fetch(:show_project_name, false)
+- cache_key << show_project_name

 = cache(cache_key, expires_in: 1.day) do
   %li.commit.flex-row.js-toggle-container{ id: "commit-#{commit.short_id}" }
...
# frozen_string_literal: true
module Gitlab
module GithubImport
# NotifyUponDeath can be included into a GitHub worker class if it should
# notify any JobWaiter instances upon being moved to the Sidekiq dead queue.
#
# Note that this will only notify the waiter upon graceful termination, a
# SIGKILL will still result in the waiter _not_ being notified.
#
# Workers including this module must have jobs passed where the last
# argument is the key to notify, as a String.
module NotifyUponDeath
extend ActiveSupport::Concern
included do
# If a job is being exhausted we still want to notify the
# AdvanceStageWorker. This prevents the entire import from getting stuck
# just because 1 job threw too many errors.
sidekiq_retries_exhausted do |job|
args = job['args']
jid = job['jid']
if args.length == 3 && (key = args.last) && key.is_a?(String)
JobWaiter.notify(key, jid)
end
end
end
end
end
end
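The contract above — the waiter key passed as the last job argument — is what lets `JobWaiter` coordinate the stage workers further down. A rough sketch of the producer side, with hypothetical argument names, assuming `JobWaiter.new` defaults its key when given only a job count:

```ruby
# Illustrative producer: enqueue one job per record and wait on a shared key.
waiter = JobWaiter.new(notes.count)

notes.each do |note|
  # The waiter key is passed last, which NotifyUponDeath (and the workers) expect.
  Gitlab::GithubImport::ImportNoteWorker.perform_async(project.id, note.to_h, waiter.key)
end

# Each finished (or dead) job calls JobWaiter.notify(key, jid); wait blocks
# until all jobs have reported in or the timeout passes.
waiter.wait(30)
```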
# frozen_string_literal: true
module Gitlab
module GithubImport
# ObjectImporter defines the base behaviour for every Sidekiq worker that
# imports a single resource such as a note or pull request.
module ObjectImporter
extend ActiveSupport::Concern
included do
include Sidekiq::Worker
include GithubImport::Queue
include ReschedulingMethods
include NotifyUponDeath
end
# project - An instance of `Project` to import the data into.
# client - An instance of `Gitlab::GithubImport::Client`
# hash - A Hash containing the details of the object to import.
def import(project, client, hash)
object = representation_class.from_json_hash(hash)
importer_class.new(object, project, client).execute
counter.increment(project: project.path_with_namespace)
end
def counter
@counter ||= Gitlab::Metrics.counter(counter_name, counter_description)
end
# Returns the representation class to use for the object. This class must
# define the class method `from_json_hash`.
def representation_class
raise NotImplementedError
end
# Returns the class to use for importing the object.
def importer_class
raise NotImplementedError
end
# Returns the name (as a Symbol) of the Prometheus counter.
def counter_name
raise NotImplementedError
end
# Returns the description (as a String) of the Prometheus counter.
def counter_description
raise NotImplementedError
end
end
end
end
module Gitlab
module GithubImport
module Queue
extend ActiveSupport::Concern
included do
# If a job produces an error it may block a stage from advancing
# forever. To prevent this from happening we prevent jobs from going to
# the dead queue. This does mean some resources may not be imported, but
# this is better than a project being stuck in the "import" state
# forever.
sidekiq_options queue: 'github_importer', dead: false, retry: 5
end
end
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
# Module that provides methods shared by the various workers used for
# importing GitHub projects.
module ReschedulingMethods
# project_id - The ID of the GitLab project to import the note into.
# hash - A Hash containing the details of the GitHub object to import.
# notify_key - The Redis key to notify upon completion, if any.
def perform(project_id, hash, notify_key = nil)
project = Project.find_by(id: project_id)
return notify_waiter(notify_key) unless project
client = GithubImport.new_client_for(project, parallel: true)
if try_import(project, client, hash)
notify_waiter(notify_key)
else
# In the event of hitting the rate limit we want to reschedule the job
# so it's retried after our rate limit has been reset.
self.class
.perform_in(client.rate_limit_resets_in, project.id, hash, notify_key)
end
end
def try_import(*args)
import(*args)
true
rescue RateLimitError
false
end
def notify_waiter(key = nil)
JobWaiter.notify(key, jid) if key
end
end
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
module StageMethods
# project_id - The ID of the GitLab project to import the data into.
def perform(project_id)
return unless (project = find_project(project_id))
client = GithubImport.new_client_for(project)
try_import(client, project)
end
# client - An instance of Gitlab::GithubImport::Client.
# project - An instance of Project.
def try_import(client, project)
import(client, project)
rescue RateLimitError
self.class.perform_in(client.rate_limit_resets_in, project.id)
end
def find_project(id)
# If the project has been marked as failed we want to bail out
# automatically.
Project.import_started.find_by(id: id)
end
end
end
end
module Geo
class HashedStorageMigrationWorker
include Sidekiq::Worker
include GeoQueue
def perform(project_id, old_disk_path, new_disk_path, old_storage_version)
Geo::HashedStorageMigrationService.new(
project_id,
old_disk_path: old_disk_path,
new_disk_path: new_disk_path,
old_storage_version: old_storage_version
).execute
end
end
end
module Geo
class RenameRepositoryWorker
include Sidekiq::Worker
include GeoQueue
def perform(project_id, old_disk_path, new_disk_path)
Geo::RenameRepositoryService.new(project_id, old_disk_path, new_disk_path).execute
end
end
end
class GeoRepositoryMoveWorker
include Sidekiq::Worker
include GeoQueue
def perform(id, name, old_path_with_namespace, new_path_with_namespace)
Geo::MoveRepositoryService.new(id, name, old_path_with_namespace, new_path_with_namespace).execute
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
# AdvanceStageWorker is a worker used by the GitHub importer to wait for a
# number of jobs to complete, without blocking a thread. Once all jobs have
# been completed this worker will advance the import process to the next
# stage.
class AdvanceStageWorker
include Sidekiq::Worker
sidekiq_options queue: 'github_importer_advance_stage', dead: false
INTERVAL = 30.seconds.to_i
# The number of seconds to wait (while blocking the thread) before
# continuing to the next waiter.
BLOCKING_WAIT_TIME = 5
# The known importer stages and their corresponding Sidekiq workers.
STAGES = {
issues_and_diff_notes: Stage::ImportIssuesAndDiffNotesWorker,
notes: Stage::ImportNotesWorker,
finish: Stage::FinishImportWorker
}.freeze
# project_id - The ID of the project being imported.
# waiters - A Hash mapping Gitlab::JobWaiter keys to the number of
# remaining jobs.
# next_stage - The name of the next stage to start when all jobs have been
# completed.
def perform(project_id, waiters, next_stage)
return unless (project = find_project(project_id))
new_waiters = wait_for_jobs(waiters)
if new_waiters.empty?
# We refresh the import JID here so workers importing individual
# resources (e.g. notes) don't have to do this all the time, reducing
# the pressure on Redis. We _only_ do this once all jobs are done so
# we don't get stuck forever if one or more jobs failed to notify the
# JobWaiter.
project.refresh_import_jid_expiration
STAGES.fetch(next_stage.to_sym).perform_async(project_id)
else
self.class.perform_in(INTERVAL, project_id, new_waiters, next_stage)
end
end
def wait_for_jobs(waiters)
waiters.each_with_object({}) do |(key, remaining), new_waiters|
waiter = JobWaiter.new(remaining, key)
# We wait for a brief moment of time so we don't reschedule if we can
# complete the work fast enough.
waiter.wait(BLOCKING_WAIT_TIME)
next unless waiter.jobs_remaining.positive?
new_waiters[waiter.key] = waiter.jobs_remaining
end
end
def find_project(id)
# We only care about the import JID so we can refresh it. We also only
# want the project if it hasn't been marked as failed yet. It's possible
# the import gets marked as stuck when jobs of the current stage failed
# somehow.
Project.select(:import_jid).import_started.find_by(id: id)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
class ImportDiffNoteWorker
include ObjectImporter
def representation_class
Representation::DiffNote
end
def importer_class
Importer::DiffNoteImporter
end
def counter_name
:github_importer_imported_diff_notes
end
def counter_description
'The number of imported GitHub pull request review comments'
end
end
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
class ImportIssueWorker
include ObjectImporter
def representation_class
Representation::Issue
end
def importer_class
Importer::IssueAndLabelLinksImporter
end
def counter_name
:github_importer_imported_issues
end
def counter_description
'The number of imported GitHub issues'
end
end
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
class ImportNoteWorker
include ObjectImporter
def representation_class
Representation::Note
end
def importer_class
Importer::NoteImporter
end
def counter_name
:github_importer_imported_notes
end
def counter_description
'The number of imported GitHub comments'
end
end
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
class ImportPullRequestWorker
include ObjectImporter
def representation_class
Representation::PullRequest
end
def importer_class
Importer::PullRequestImporter
end
def counter_name
:github_importer_imported_pull_requests
end
def counter_description
'The number of imported GitHub pull requests'
end
end
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
class RefreshImportJidWorker
include Sidekiq::Worker
include GithubImport::Queue
# The interval to schedule new instances of this job at.
INTERVAL = 1.minute.to_i
def self.perform_in_the_future(*args)
perform_in(INTERVAL, *args)
end
# project_id - The ID of the project that is being imported.
# check_job_id - The ID of the job for which to check the status.
def perform(project_id, check_job_id)
return unless (project = find_project(project_id))
if SidekiqStatus.running?(check_job_id)
# As long as the repository is being cloned we want to keep refreshing
# the import JID status.
project.refresh_import_jid_expiration
self.class.perform_in_the_future(project_id, check_job_id)
end
# If the job is no longer running there's nothing else we need to do. If
# the clone job completed successfully it will have scheduled the next
# stage, if it died there's nothing we can do anyway.
end
def find_project(id)
Project.select(:import_jid).import_started.find_by(id: id)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
module Stage
class FinishImportWorker
include Sidekiq::Worker
include GithubImport::Queue
include StageMethods
# project - An instance of Project.
def import(_, project)
project.after_import
report_import_time(project)
end
def report_import_time(project)
duration = Time.zone.now - project.created_at
path = project.path_with_namespace
histogram.observe({ project: path }, duration)
counter.increment
logger.info("GitHub importer finished for #{path} in #{duration.round(2)} seconds")
end
def histogram
@histogram ||= Gitlab::Metrics.histogram(
:github_importer_total_duration_seconds,
'Total time spent importing GitHub projects, in seconds'
)
end
def counter
@counter ||= Gitlab::Metrics.counter(
:github_importer_imported_projects,
'The number of imported GitHub projects'
)
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
module Stage
class ImportBaseDataWorker
include Sidekiq::Worker
include GithubImport::Queue
include StageMethods
# These importers are fast enough that we can just run them in the same
# thread.
IMPORTERS = [
Importer::LabelsImporter,
Importer::MilestonesImporter,
Importer::ReleasesImporter
].freeze
# client - An instance of Gitlab::GithubImport::Client.
# project - An instance of Project.
def import(client, project)
IMPORTERS.each do |klass|
klass.new(project, client).execute
end
project.refresh_import_jid_expiration
ImportPullRequestsWorker.perform_async(project.id)
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
module Stage
class ImportIssuesAndDiffNotesWorker
include Sidekiq::Worker
include GithubImport::Queue
include StageMethods
# The importers to run in this stage. Issues can't be imported earlier
# on as we also use these to enrich pull requests with assigned labels.
IMPORTERS = [
Importer::IssuesImporter,
Importer::DiffNotesImporter
].freeze
# client - An instance of Gitlab::GithubImport::Client.
# project - An instance of Project.
def import(client, project)
waiters = IMPORTERS.each_with_object({}) do |klass, hash|
waiter = klass.new(project, client).execute
hash[waiter.key] = waiter.jobs_remaining
end
AdvanceStageWorker.perform_async(project.id, waiters, :notes)
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
module Stage
class ImportNotesWorker
include Sidekiq::Worker
include GithubImport::Queue
include StageMethods
# client - An instance of Gitlab::GithubImport::Client.
# project - An instance of Project.
def import(client, project)
waiter = Importer::NotesImporter
.new(project, client)
.execute
AdvanceStageWorker.perform_async(
project.id,
{ waiter.key => waiter.jobs_remaining },
:finish
)
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
module Stage
class ImportPullRequestsWorker
include Sidekiq::Worker
include GithubImport::Queue
include StageMethods
# client - An instance of Gitlab::GithubImport::Client.
# project - An instance of Project.
def import(client, project)
waiter = Importer::PullRequestsImporter
.new(project, client)
.execute
project.refresh_import_jid_expiration
AdvanceStageWorker.perform_async(
project.id,
{ waiter.key => waiter.jobs_remaining },
:issues_and_diff_notes
)
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
module Stage
class ImportRepositoryWorker
include Sidekiq::Worker
include GithubImport::Queue
include StageMethods
# client - An instance of Gitlab::GithubImport::Client.
# project - An instance of Project.
def import(client, project)
# In extreme cases it's possible for a clone to take more than the
# import job expiration time. To work around this we schedule a
# separate job that will periodically run and refresh the import
# expiration time.
RefreshImportJidWorker.perform_in_the_future(project.id, jid)
importer = Importer::RepositoryImporter.new(project, client)
return unless importer.execute
counter.increment
ImportBaseDataWorker.perform_async(project.id)
end
def counter
Gitlab::Metrics.counter(
:github_importer_imported_repositories,
'The number of imported GitHub repositories'
)
end
end
end
end
end
@@ -17,11 +17,16 @@ class RepositoryImportWorker
                 import_url: project.import_url,
                 path: project.full_path)

-    result = Projects::ImportService.new(project, project.creator).execute
+    service = Projects::ImportService.new(project, project.creator)
+    result = service.execute
+
+    # Some importers may perform their work asynchronously. In this case it's up
+    # to those importers to mark the import process as complete.
+    return if service.async?

     raise ImportError, result[:message] if result[:status] == :error

-    project.repository.after_import
-    project.import_finish
+    project.after_import

     # Explicitly enqueue mirror for update so
     # that upstream remote is created and fetched
...
---
title: Geo - Does not move projects backed by hashed storage when handling renamed events
merge_request: 3066
author:
type: fixed
---
title: Improve performance of commits list by fully using DB index when getting commit
note counts
merge_request:
author:
type: performance
@@ -134,7 +134,6 @@ def instrument_classes(instrumentation)
   instrumentation.instrument_instance_methods(Gitlab::BitbucketImport::Importer)
   instrumentation.instrument_instance_methods(Bitbucket::Connection)

-  instrumentation.instrument_instance_methods(Github::Client)
   instrumentation.instrument_instance_methods(Geo::RepositorySyncWorker)
...
@@ -106,5 +106,11 @@ constraints(GroupUrlConstrainer.new) do
     Gitlab::Routing.redirect_legacy_paths(self, :labels, :milestones, :group_members,
                                           :edit, :issues, :merge_requests, :projects,
                                           :activity)
+
+    ## EE-specific
+    Gitlab::Routing.redirect_legacy_paths(self, :analytics, :ldap, :ldap_group_links,
+                                          :notification_setting, :audit_events,
+                                          :pipeline_quota, :hooks, :boards)
+    ## EE-specific
   end
 end
@@ -40,6 +40,8 @@
   - [upload_checksum, 1]
   - [repository_fork, 1]
   - [repository_import, 1]
+  - [github_importer, 1]
+  - [github_importer_advance_stage, 1]
   - [project_service, 1]
   - [delete_user, 1]
   - [delete_merged_branches, 1]
...
class CreateGeoHashedStorageMigratedEvents < ActiveRecord::Migration
DOWNTIME = false
def change
create_table :geo_hashed_storage_migrated_events, id: :bigserial do |t|
t.references :project, index: true, foreign_key: { on_delete: :cascade }, null: false
t.text :repository_storage_name, null: false
t.text :repository_storage_path, null: false
t.text :old_disk_path, null: false
t.text :new_disk_path, null: false
t.text :old_wiki_disk_path, null: false
t.text :new_wiki_disk_path, null: false
t.integer :old_storage_version, limit: 2
t.integer :new_storage_version, null: false, limit: 2
end
add_column :geo_event_log, :hashed_storage_migrated_event_id, :integer, limit: 8
end
end
class AddGeoHashedStorageMigratedEventsForeignKey < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_foreign_key :geo_event_log, :geo_hashed_storage_migrated_events,
column: :hashed_storage_migrated_event_id, on_delete: :cascade
end
def down
remove_foreign_key :geo_event_log, column: :hashed_storage_migrated_event_id
end
end
@@ -11,7 +11,7 @@
 #
 # It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema.define(version: 20171107090120) do
+ActiveRecord::Schema.define(version: 20171107144726) do

   # These are extensions that must be enabled in order to support this database
   enable_extension "plpgsql"
@@ -861,6 +861,7 @@ ActiveRecord::Schema.define(version: 20171107090120) do
     t.integer "repository_renamed_event_id", limit: 8
     t.integer "repositories_changed_event_id", limit: 8
     t.integer "repository_created_event_id", limit: 8
+    t.integer "hashed_storage_migrated_event_id", limit: 8
   end

   add_index "geo_event_log", ["repositories_changed_event_id"], name: "index_geo_event_log_on_repositories_changed_event_id", using: :btree
@@ -869,6 +870,20 @@ ActiveRecord::Schema.define(version: 20171107090120) do
   add_index "geo_event_log", ["repository_renamed_event_id"], name: "index_geo_event_log_on_repository_renamed_event_id", using: :btree
   add_index "geo_event_log", ["repository_updated_event_id"], name: "index_geo_event_log_on_repository_updated_event_id", using: :btree

+  create_table "geo_hashed_storage_migrated_events", id: :bigserial, force: :cascade do |t|
+    t.integer "project_id", null: false
+    t.text "repository_storage_name", null: false
+    t.text "repository_storage_path", null: false
+    t.text "old_disk_path", null: false
+    t.text "new_disk_path", null: false
+    t.text "old_wiki_disk_path", null: false
+    t.text "new_wiki_disk_path", null: false
+    t.integer "old_storage_version", limit: 2
+    t.integer "new_storage_version", limit: 2, null: false
+  end
+
+  add_index "geo_hashed_storage_migrated_events", ["project_id"], name: "index_geo_hashed_storage_migrated_events_on_project_id", using: :btree
+
   create_table "geo_node_namespace_links", force: :cascade do |t|
     t.integer "geo_node_id", null: false
     t.integer "namespace_id", null: false
@@ -2393,11 +2408,13 @@ ActiveRecord::Schema.define(version: 20171107090120) do
   add_foreign_key "gcp_clusters", "projects", on_delete: :cascade
   add_foreign_key "gcp_clusters", "services", on_delete: :nullify
   add_foreign_key "gcp_clusters", "users", on_delete: :nullify
+  add_foreign_key "geo_event_log", "geo_hashed_storage_migrated_events", column: "hashed_storage_migrated_event_id", name: "fk_27548c6db3", on_delete: :cascade
   add_foreign_key "geo_event_log", "geo_repositories_changed_events", column: "repositories_changed_event_id", name: "fk_4a99ebfd60", on_delete: :cascade
   add_foreign_key "geo_event_log", "geo_repository_created_events", column: "repository_created_event_id", name: "fk_9b9afb1916", on_delete: :cascade
   add_foreign_key "geo_event_log", "geo_repository_deleted_events", column: "repository_deleted_event_id", name: "fk_c4b1c1f66e", on_delete: :cascade
   add_foreign_key "geo_event_log", "geo_repository_renamed_events", column: "repository_renamed_event_id", name: "fk_86c84214ec", on_delete: :cascade
   add_foreign_key "geo_event_log", "geo_repository_updated_events", column: "repository_updated_event_id", on_delete: :cascade
+  add_foreign_key "geo_hashed_storage_migrated_events", "projects", on_delete: :cascade
   add_foreign_key "geo_node_namespace_links", "geo_nodes", on_delete: :cascade
   add_foreign_key "geo_node_namespace_links", "namespaces", on_delete: :cascade
   add_foreign_key "geo_node_statuses", "geo_nodes", on_delete: :cascade
...
@@ -203,6 +203,7 @@ have access to GitLab administration tools and settings.
 - [GitLab performance monitoring with InfluxDB](administration/monitoring/performance/introduction.md): Configure GitLab and InfluxDB for measuring performance metrics.
 - [GitLab performance monitoring with Prometheus](administration/monitoring/prometheus/index.md): Configure GitLab and Prometheus for measuring performance metrics.
 - [Monitoring uptime](user/admin_area/monitoring/health_check.md): Check the server status using the health check endpoint.
+- [Monitoring GitHub imports](administration/monitoring/github_imports.md)

 ### Performance
...
# Monitoring GitHub imports
>**Note:**
Available since [GitLab 10.2][14731].
The GitHub importer exposes various Prometheus metrics that you can use to
monitor the health and progress of the importer.
## Import Duration Times
| Name | Type |
|------------------------------------------|-----------|
| `github_importer_total_duration_seconds` | histogram |
This metric tracks the total time spent (in seconds) importing a project (from
project creation until the import process finishes), for every imported project.
The name of the project is stored in the `project` label in the format
`namespace/name` (e.g. `gitlab-org/gitlab-ce`).
## Number of imported projects
| Name | Type |
|-------------------------------------|---------|
| `github_importer_imported_projects` | counter |
This metric tracks the total number of projects imported over time. This metric
does not expose any labels.
## Number of GitHub API calls
| Name | Type |
|---------------------------------|---------|
| `github_importer_request_count` | counter |
This metric tracks the total number of GitHub API calls performed over time, for
all projects. This metric does not expose any labels.
## Rate limit errors
| Name | Type |
|-----------------------------------|---------|
| `github_importer_rate_limit_hits` | counter |
This metric tracks the number of times we hit the GitHub rate limit, for all
projects. This metric does not expose any labels.
## Number of imported issues
| Name | Type |
|-----------------------------------|---------|
| `github_importer_imported_issues` | counter |
This metric tracks the number of imported issues across all projects.
The name of the project is stored in the `project` label in the format
`namespace/name` (e.g. `gitlab-org/gitlab-ce`).
## Number of imported pull requests
| Name | Type |
|------------------------------------------|---------|
| `github_importer_imported_pull_requests` | counter |
This metric tracks the number of imported pull requests across all projects.
The name of the project is stored in the `project` label in the format
`namespace/name` (e.g. `gitlab-org/gitlab-ce`).
## Number of imported comments
| Name | Type |
|----------------------------------|---------|
| `github_importer_imported_notes` | counter |
This metric tracks the number of imported comments across all projects.
The name of the project is stored in the `project` label in the format
`namespace/name` (e.g. `gitlab-org/gitlab-ce`).
## Number of imported pull request review comments
| Name | Type |
|---------------------------------------|---------|
| `github_importer_imported_diff_notes` | counter |
This metric tracks the number of imported pull request review comments across all projects.
The name of the project is stored in the `project` label in the format
`namespace/name` (e.g. `gitlab-org/gitlab-ce`).
## Number of imported repositories
| Name | Type |
|-----------------------------------------|---------|
| `github_importer_imported_repositories` | counter |
This metric tracks the number of imported repositories across all projects. This
metric does not expose any labels.
[14731]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/14731
...@@ -38,6 +38,7 @@ comments: false ...@@ -38,6 +38,7 @@ comments: false
- [Gotchas](gotchas.md) to avoid - [Gotchas](gotchas.md) to avoid
- [Issue and merge requests state models](object_state_models.md) - [Issue and merge requests state models](object_state_models.md)
- [How to dump production data to staging](db_dump.md) - [How to dump production data to staging](db_dump.md)
- [Working with the GitHub importer](github_importer.md)
## Performance guides ## Performance guides
......
# Working with the GitHub importer
In GitLab 10.2 a new version of the GitHub importer was introduced. This new
importer performs its work in parallel using Sidekiq, greatly reducing the time
necessary to import GitHub projects into a GitLab instance.
The GitHub importer offers two different types of importers: a sequential
importer and a parallel importer. The Rake task `import:github` uses the
sequential importer, while everything else uses the parallel importer. The
difference between these two importers is quite simple: the sequential importer
does all work in a single thread, making it more useful for debugging purposes
or Rake tasks. The parallel importer on the other hand uses Sidekiq.
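The difference is easiest to see from the entry points. The snippet below is a minimal sketch only: the class names follow the directory layout described below, while the `token:` option and the exact call shape are assumptions rather than the exact production API.

```ruby
# Minimal sketch; constructor arguments are assumptions, not the exact API.
project = Project.find_by_full_path('group/project')

# Sequential importer: runs everything in the calling thread, which is what
# the import:github Rake task relies on and what makes debugging easier.
Gitlab::GithubImport::SequentialImporter
  .new(project, token: 'personal-access-token')
  .execute

# Parallel importer: only schedules the first Sidekiq job and returns; the
# rest of the import happens asynchronously, stage by stage.
Gitlab::GithubImport::ParallelImporter
  .new(project)
  .execute
```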
## Requirements
* GitLab CE 10.2.0 or newer.
* Sidekiq workers that process the `github_importer` and
`github_importer_advance_stage` queues (this is enabled by default).
* Octokit (used for interacting with the GitHub API)
## Code structure
The importer's codebase is broken up into the following directories:
* `lib/gitlab/github_import`: this directory contains most of the code such as
the classes used for importing resources.
* `app/workers/gitlab/github_import`: this directory contains the Sidekiq
workers.
* `app/workers/concerns/gitlab/github_import`: this directory contains a few
modules reused by the various Sidekiq workers.
## Architecture overview
When a GitHub project is imported we schedule and execute a job for the
`RepositoryImportWorker` worker, just like all other importers. However, unlike
other importers we don't immediately perform all the necessary work. Instead the
work is divided into separate stages, with each stage consisting of a set of
Sidekiq jobs that are executed. Between every stage a job is scheduled that periodically
checks if all work of the current stage is completed, advancing the import
process to the next stage when this is the case. The worker handling this is
called `Gitlab::GithubImport::AdvanceStageWorker`.
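As a rough illustration of this flow, a stage worker schedules one job per resource, counts those jobs against a `Gitlab::JobWaiter` key, and then hands over to `AdvanceStageWorker`. Everything in the sketch below (the worker names, the helper returning resources, the `:next_stage` symbol) is illustrative rather than the actual production code:

```ruby
# Hypothetical, simplified stage worker; names are placeholders.
module Gitlab
  module GithubImport
    module Stage
      class ExampleStageWorker
        include Sidekiq::Worker

        def perform(project_id)
          project = Project.find_by(id: project_id)
          return unless project

          # The JobWaiter key is the Redis key that AdvanceStageWorker later
          # polls to know whether all jobs of this stage have finished.
          waiter = Gitlab::JobWaiter.new
          jobs_scheduled = 0

          resources_for(project).each do |resource|
            ExampleResourceWorker.perform_async(project.id, resource, waiter.key)
            jobs_scheduled += 1
          end

          # Hand over: project ID, the waiter key(s) with their pending job
          # counts, and the name of the next stage.
          AdvanceStageWorker.perform_async(
            project.id,
            { waiter.key => jobs_scheduled },
            :next_stage
          )
        end

        def resources_for(project)
          # Placeholder for whatever collection this stage imports.
          []
        end
      end
    end
  end
end
```

Each per-resource worker is then expected to notify the waiter key once it finishes, which is how `AdvanceStageWorker` (described below) can tell when the stage is complete.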
## Stages
### 1. RepositoryImportWorker
This worker will kick off the import process by simply scheduling a job for the
next worker.
### 2. Stage::ImportRepositoryWorker
This worker will import the repository and wiki, scheduling the next stage when
done.
### 3. Stage::ImportBaseDataWorker
This worker will import base data such as labels, milestones, and releases. This
work is done in a single thread since it can be performed fast enough that we
don't need to perform this work in parallel.
### 4. Stage::ImportPullRequestsWorker
This worker will import all pull requests. For every pull request a job for the
`Gitlab::GithubImport::ImportPullRequestWorker` worker is scheduled.
### 5. Stage::ImportIssuesAndDiffNotesWorker
This worker will import all issues and pull request comments. For every issue we
schedule a job for the `Gitlab::GithubImport::ImportIssueWorker` worker. For
pull request comments we instead schedule jobs for the
`Gitlab::GithubImport::DiffNoteImporter` worker.
This worker processes both issues and diff notes in parallel so we don't need to
schedule a separate stage and wait for the previous one to complete.
Issues are imported separately from pull requests because only the "issues" API
includes labels for both issue and pull requests. Importing issues and setting
label links in the same worker removes the need for performing a separate crawl
through the API data, reducing the number of API calls necessary to import a
project.
### 6. Stage::ImportNotesWorker
This worker imports regular comments for both issues and pull requests. For
every comment we schedule a job for the
`Gitlab::GithubImport::ImportNoteWorker` worker.
Regular comments have to be imported at the end since the GitHub API used
returns comments for both issues and pull requests. This means we have to wait
for all issues and pull requests to be imported before we can import regular
comments.
### 7. Stage::FinishImportWorker
This worker will wrap up the import process by performing some housekeeping
(such as flushing any caches) and by marking the import as completed.
## Advancing stages
Advancing stages is done in one of two ways:
1. Scheduling the worker for the next stage directly.
2. Scheduling a job for `Gitlab::GithubImport::AdvanceStageWorker` which will
advance the stage when all work of the current stage has been completed.
The first approach should only be used by workers that perform all their work in
a single thread, while `AdvanceStageWorker` should be used for everything else.
The way `AdvanceStageWorker` works is fairly simple. When scheduling a job it
will be given a project ID, a list of Redis keys, and the name of the next
stage. The Redis keys (produced by `Gitlab::JobWaiter`) are used to check if the
currently running stage has been completed or not. If the stage has not yet been
completed `AdvanceStageWorker` will reschedule itself. Once a stage finishes
`AdvanceStageWorker` will refresh the import JID (more on this below) and
schedule the worker of the next stage.
To reduce the number of `AdvanceStageWorker` jobs scheduled this worker will
briefly wait for jobs to complete before deciding what the next action should
be. For small projects this may slow down the import process a bit, but it will
also reduce pressure on the system as a whole.
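Condensed into code, the decision loop looks roughly like the sketch below; the interval, the `wait_for_jobs` helper, and the stage lookup are simplifications of the behaviour described above, not the production implementation:

```ruby
# Hypothetical, condensed version of the behaviour described above.
class AdvanceStageWorkerSketch
  include Sidekiq::Worker

  INTERVAL = 30 # seconds to wait before checking the stage again

  def perform(project_id, waiters, next_stage)
    project = Project.find_by(id: project_id)
    return unless project

    # Briefly wait on the JobWaiter keys and keep only those still pending.
    remaining = wait_for_jobs(waiters)

    if remaining.empty?
      # Stage finished: refresh the import JID and kick off the next stage.
      project.refresh_import_jid_expiration
      worker_for_stage(next_stage).perform_async(project_id)
    else
      # Stage still running: check again later with the remaining keys.
      self.class.perform_in(INTERVAL, project_id, remaining, next_stage)
    end
  end

  def wait_for_jobs(waiters)
    # Placeholder: would use Gitlab::JobWaiter to briefly wait on each key
    # and drop the keys whose jobs have all completed.
    waiters
  end

  def worker_for_stage(name)
    # Placeholder: would map the stage name to its Stage::* worker class.
    Object.const_get("Gitlab::GithubImport::Stage::#{name.to_s.camelize}Worker")
  end
end
```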
## Refreshing import JIDs
GitLab includes a worker called `StuckImportJobsWorker` that will periodically
run and mark project imports as failed if they have been running for more than
15 hours. For GitHub projects this poses a bit of a problem: importing large
projects could take several hours depending on how often we hit the GitHub rate
limit (more on this below), but we don't want `StuckImportJobsWorker` to mark
our import as failed because of this.
To prevent this from happening we periodically refresh the expiration time of
the import process. This works by storing the JID of the import job in the
database, then refreshing this JID's TTL at various stages throughout the import
process. This is done by calling `Project#refresh_import_jid_expiration`. By
refreshing this TTL we can ensure our import does not get marked as failed so
long as we're still performing work.
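A minimal sketch of such a refresh, assuming the JID is stored in `Project#import_jid` and its liveness is tracked through `Gitlab::SidekiqStatus` (both assumptions for illustration):

```ruby
# Illustrative sketch; the storage of the JID and the exact expiration value
# are assumptions.
class Project < ActiveRecord::Base
  IMPORT_JOBS_EXPIRATION = 15.hours.to_i

  def refresh_import_jid_expiration
    return unless import_jid

    # Bump the TTL on the JID so StuckImportJobsWorker keeps treating the
    # import as alive while stages are still being processed.
    Gitlab::SidekiqStatus.set(import_jid, IMPORT_JOBS_EXPIRATION)
  end
end
```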
## GitHub rate limit
GitHub has a rate limit of 5,000 API calls per hour. The number of requests
necessary to import a project is largely dominated by the number of unique users
involved in a project (e.g. issue authors), because we need the Email address of
every user in order to map them to GitLab users. Other data such as issue pages
and comments typically only requires a few dozen requests to import.
We handle this by doing the following:
1. Once we hit the rate limit all jobs will automatically reschedule themselves
in such a way that they are not executed until the rate limit has been reset.
2. We cache the mapping of GitHub users to GitLab users in Redis.
More information on user caching can be found below.
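As an illustration of the first point, a worker that hits the limit mid-import might requeue itself along these lines; the error class and the method returning the time until the limit resets are assumptions:

```ruby
# Hypothetical sketch of rescheduling a job around the GitHub rate limit.
class ImportSomethingWorkerSketch
  include Sidekiq::Worker

  def perform(project_id, payload)
    import(project_id, payload)
  rescue RateLimitError => error
    # Don't fail or retry immediately: requeue this exact job for when the
    # rate limit window resets (error.reset_in is an assumed accessor).
    self.class.perform_in(error.reset_in, project_id, payload)
  end

  def import(project_id, payload)
    # Placeholder for the actual GitHub API calls and database writes.
  end
end
```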
## Caching user lookups
When mapping GitHub users to GitLab users we need to (in the worst case)
perform:
1. One API call to get the user's Email address.
2. Two database queries to see if a corresponding GitLab user exists. One query
will try to find the user based on the GitHub user ID, while the second query
is used to find the user using their GitHub Email address.
Because this process is quite expensive we cache the result of these lookups in
Redis. For every user looked up we store three keys:
1. A Redis key mapping GitHub usernames to their Email addresses.
2. A Redis key mapping a GitHub Email address to a GitLab user ID.
3. A Redis key mapping a GitHub user ID to a GitLab user ID.
There are two types of lookups we cache:
1. A positive lookup, meaning we found a GitLab user ID.
2. A negative lookup, meaning we didn't find a GitLab user ID. Caching this
prevents us from performing the same work for users that we know don't exist
in our GitLab database.
The expiration time of these keys is 24 hours. When retrieving a positive lookup
from the cache we refresh its TTL automatically. The TTL of negative lookups is
never refreshed.
Because of this caching layer it's possible newly registered GitLab accounts
won't be linked to their corresponding GitHub accounts. This however will sort
itself out once the cached keys expire.
The user cache lookup is shared across projects. This means that the more
projects get imported the fewer GitHub API calls will be needed.
The code for this resides in:
* `lib/gitlab/github_import/user_finder.rb`
* `lib/gitlab/github_import/caching.rb`
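A condensed sketch of that lookup order follows. The key names, the email-fetching helper, and the GitHub-identity query are invented for illustration; the real logic lives in the files listed above:

```ruby
# Illustrative only; key names and helpers are placeholders, not the
# production implementation in user_finder.rb/caching.rb.
def gitlab_user_id_for(github_user)
  id_key      = "github-import/example/user-id/#{github_user[:id]}"
  checked_key = "github-import/example/user-checked/#{github_user[:id]}"

  cached_id = Gitlab::GithubImport::Caching.read_integer(id_key)
  return cached_id if cached_id

  # A "checked" marker without an ID is a cached negative lookup: we already
  # know this GitHub user has no matching GitLab account.
  return nil if Gitlab::GithubImport::Caching.read_integer(checked_key)

  # Worst case: one GitHub API call for the Email address, then up to two
  # database queries to find the matching GitLab user.
  email   = fetch_github_email(github_user)                # placeholder API call
  user_id = find_id_by_github_identity(github_user[:id])   # placeholder query 1
  user_id ||= email && User.by_any_email(email)&.id        # query 2

  Gitlab::GithubImport::Caching.write(id_key, user_id) if user_id
  Gitlab::GithubImport::Caching.write(checked_key, 1)

  user_id
end
```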
## Mapping labels and milestones
To reduce pressure on the database we do not query it when setting labels and
milestones on issues and merge requests. Instead we cache this data when we
import labels and milestones, then we reuse this cache when assigning them to
issues/merge requests. Similar to the user lookups these cache keys are expired
automatically after 24 hours of not being used.
Unlike the user lookup caches these label and milestone caches are scoped to the
project that is being imported.
The code for this resides in:
* `lib/gitlab/github_import/label_finder.rb`
* `lib/gitlab/github_import/milestone_finder.rb`
* `lib/gitlab/github_import/caching.rb`
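A rough sketch of that per-project cache (the class and key names are illustrative, not the actual finder classes listed above):

```ruby
# Hypothetical per-project label cache; illustrates the caching idea only.
class LabelCacheSketch
  def initialize(project)
    @project = project
  end

  # Called once, right after the labels have been imported.
  def build_cache
    @project.labels.pluck(:title, :id).each do |title, id|
      Gitlab::GithubImport::Caching.write(cache_key_for(title), id)
    end
  end

  # Called when assigning labels to issues/merge requests, avoiding a
  # database query per label.
  def id_for(title)
    Gitlab::GithubImport::Caching.read_integer(cache_key_for(title))
  end

  private

  def cache_key_for(title)
    # Scoped to the project being imported, unlike the user lookup cache.
    "github-import/example/label-finder/#{@project.id}/#{title}"
  end
end
```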
...@@ -68,15 +68,17 @@ The following guide assumes that: ...@@ -68,15 +68,17 @@ The following guide assumes that:
``` ```
1. Omnibus GitLab has already a replication user called `gitlab_replicator`. 1. Omnibus GitLab has already a replication user called `gitlab_replicator`.
You must set its password manually. Replace `thepassword` with a strong You must set its password manually. You will be prompted to enter a
password: password:
```bash ```bash
sudo -u gitlab-psql /opt/gitlab/embedded/bin/psql -h /var/opt/gitlab/postgresql \ gitlab-ctl set-replication-password
-d template1 \
-c "ALTER USER gitlab_replicator WITH ENCRYPTED PASSWORD 'thepassword'"
``` ```
This command will also read `postgresql['sql_replication_user']` Omnibus
setting in case you have changed `gitlab_replicator` username to something
else.
1. Edit `/etc/gitlab/gitlab.rb` and add the following. Note that GitLab 9.1 added 1. Edit `/etc/gitlab/gitlab.rb` and add the following. Note that GitLab 9.1 added
the `geo_primary_role` configuration variable: the `geo_primary_role` configuration variable:
......
...@@ -24,6 +24,8 @@ constrains of a Sidekiq worker. ...@@ -24,6 +24,8 @@ constrains of a Sidekiq worker.
- the milestones (GitLab 8.7+) - the milestones (GitLab 8.7+)
- the labels (GitLab 8.7+) - the labels (GitLab 8.7+)
- the release note descriptions (GitLab 8.12+) - the release note descriptions (GitLab 8.12+)
- the pull request review comments (GitLab 10.2+)
- the regular issue and pull request comments
- References to pull requests and issues are preserved (GitLab 8.7+) - References to pull requests and issues are preserved (GitLab 8.7+)
- Repository public access is retained. If a repository is private in GitHub - Repository public access is retained. If a repository is private in GitHub
it will be created as private in GitLab as well. it will be created as private in GitLab as well.
...@@ -43,10 +45,13 @@ the case the namespace is taken, the repository will be imported under the user' ...@@ -43,10 +45,13 @@ the case the namespace is taken, the repository will be imported under the user'
namespace that started the import process. namespace that started the import process.
The importer will also import branches on forks of projects related to open pull The importer will also import branches on forks of projects related to open pull
requests. These branches will be imported with a naming scheume similar to requests. These branches will be imported with a naming scheme similar to
GH-SHA-Username/Pull-Request-number/fork-name/branch. This may lead to a discrepancy GH-SHA-Username/Pull-Request-number/fork-name/branch. This may lead to a discrepancy
in branches compared to the GitHub Repository. in branches compared to the GitHub Repository.
For a more technical description and an overview of the architecture you can
refer to [Working with the GitHub importer][gh-import-dev-docs].
## Importing your GitHub repositories ## Importing your GitHub repositories
The importer page is visible when you create a new project. The importer page is visible when you create a new project.
...@@ -121,7 +126,29 @@ If you want, you can import all your GitHub projects in one go by hitting ...@@ -121,7 +126,29 @@ If you want, you can import all your GitHub projects in one go by hitting
You can also choose a different name for the project and a different namespace, You can also choose a different name for the project and a different namespace,
if you have the privileges to do so. if you have the privileges to do so.
## Making the import process go faster
For large projects it may take a while to import all data. To reduce the time
necessary you can increase the number of Sidekiq workers that process the
following queues:
* `github_importer`
* `github_importer_advance_stage`
For an optimal experience we recommend having at least 4 Sidekiq processes (each
running a number of threads equal to the number of CPU cores) that _only_
process these queues. We also recommend that these processes run on separate
servers. For 4 servers with 8 cores this means you can import up to 32 objects
(e.g. issues) in parallel.
Reducing the time spent cloning a repository can be done by increasing network
throughput, CPU capacity, and the performance of the disks that store the Git
repositories for your GitLab instance (e.g. by using high-performance SSDs).
Increasing the number of Sidekiq workers will _not_ reduce the time spent
cloning repositories.
[gh-import]: ../../../integration/github.md "GitHub integration" [gh-import]: ../../../integration/github.md "GitHub integration"
[gh-rake]: ../../../administration/raketasks/github_import.md "GitHub rake task" [gh-rake]: ../../../administration/raketasks/github_import.md "GitHub rake task"
[gh-integration]: #authorize-access-to-your-repositories-using-the-github-integration [gh-integration]: #authorize-access-to-your-repositories-using-the-github-integration
[gh-token]: #authorize-access-to-your-repositories-using-a-personal-access-token [gh-token]: #authorize-access-to-your-repositories-using-a-personal-access-token
[gh-import-dev-docs]: ../../../development/github_importer.md "Working with the GitHub importer"
...@@ -26,7 +26,10 @@ export default { ...@@ -26,7 +26,10 @@ export default {
<mr-widget-header :mr="mr" /> <mr-widget-header :mr="mr" />
<mr-widget-pipeline <mr-widget-pipeline
v-if="shouldRenderPipelines" v-if="shouldRenderPipelines"
:mr="mr" /> :pipeline="mr.pipeline"
:ci-status="mr.ciStatus"
:has-ci="mr.hasCI"
/>
<mr-widget-deployment <mr-widget-deployment
v-if="shouldRenderDeployments" v-if="shouldRenderDeployments"
:mr="mr" :mr="mr"
......
...@@ -14,12 +14,12 @@ class Admin::GeoNodesController < Admin::ApplicationController ...@@ -14,12 +14,12 @@ class Admin::GeoNodesController < Admin::ApplicationController
end end
def create def create
if Geo::NodeCreateService.new(geo_node_params).execute @node = Geo::NodeCreateService.new(geo_node_params).execute
if @node.persisted?
redirect_to admin_geo_nodes_path, notice: 'Node was successfully created.' redirect_to admin_geo_nodes_path, notice: 'Node was successfully created.'
else else
@nodes = GeoNode.all @nodes = GeoNode.all
@node = GeoNode.new(geo_node_params)
flash.now[:alert] = 'Failed to create new node'
render :index render :index
end end
......
...@@ -445,7 +445,6 @@ module EE ...@@ -445,7 +445,6 @@ module EE
end end
alias_method :merge_requests_ff_only_enabled?, :merge_requests_ff_only_enabled alias_method :merge_requests_ff_only_enabled?, :merge_requests_ff_only_enabled
# TODO: check storage type and NOOP when not using Legacy
def rename_repo def rename_repo
raise NotImplementedError unless defined?(super) raise NotImplementedError unless defined?(super)
......
...@@ -5,10 +5,11 @@ module EE ...@@ -5,10 +5,11 @@ module EE
raise NotImplementedError.new unless defined?(super) raise NotImplementedError.new unless defined?(super)
super do super do
::Geo::RepositoryRenamedEventStore.new( ::Geo::HashedStorageMigratedEventStore.new(
project, project,
old_path: File.basename(old_disk_path), old_storage_version: old_storage_version,
old_path_with_namespace: old_disk_path old_disk_path: old_disk_path,
old_wiki_disk_path: old_wiki_disk_path
).create ).create
end end
end end
......
...@@ -117,7 +117,7 @@ module API ...@@ -117,7 +117,7 @@ module API
commit = user_project.commit(params[:sha]) commit = user_project.commit(params[:sha])
not_found! 'Commit' unless commit not_found! 'Commit' unless commit
notes = user_project.notes.where(commit_id: commit.id).order(:created_at) notes = commit.notes.order(:created_at)
present paginate(notes), with: Entities::CommitNote present paginate(notes), with: Entities::CommitNote
end end
......
...@@ -106,7 +106,7 @@ module API ...@@ -106,7 +106,7 @@ module API
commit = user_project.commit(params[:sha]) commit = user_project.commit(params[:sha])
not_found! 'Commit' unless commit not_found! 'Commit' unless commit
notes = Note.where(commit_id: commit.id).order(:created_at) notes = commit.notes.order(:created_at)
present paginate(notes), with: ::API::Entities::CommitNote present paginate(notes), with: ::API::Entities::CommitNote
end end
......
...@@ -5,6 +5,10 @@ class Feature ...@@ -5,6 +5,10 @@ class Feature
class FlipperFeature < Flipper::Adapters::ActiveRecord::Feature class FlipperFeature < Flipper::Adapters::ActiveRecord::Feature
# Using `self.table_name` won't work. ActiveRecord bug? # Using `self.table_name` won't work. ActiveRecord bug?
superclass.table_name = 'features' superclass.table_name = 'features'
def self.feature_names
pluck(:key)
end
end end
class FlipperGate < Flipper::Adapters::ActiveRecord::Gate class FlipperGate < Flipper::Adapters::ActiveRecord::Gate
...@@ -22,11 +26,19 @@ class Feature ...@@ -22,11 +26,19 @@ class Feature
flipper.feature(key) flipper.feature(key)
end end
def persisted_names
if RequestStore.active?
RequestStore[:flipper_persisted_names] ||= FlipperFeature.feature_names
else
FlipperFeature.feature_names
end
end
def persisted?(feature) def persisted?(feature)
# Flipper creates on-memory features when asked for a not-yet-created one. # Flipper creates on-memory features when asked for a not-yet-created one.
# If we want to check if a feature has been actually set, we look for it # If we want to check if a feature has been actually set, we look for it
# on the persisted features list. # on the persisted features list.
all.map(&:name).include?(feature.name) persisted_names.include?(feature.name)
end end
def enabled?(key, thing = nil) def enabled?(key, thing = nil)
......
module Github
class Client
TIMEOUT = 60
DEFAULT_PER_PAGE = 100
attr_reader :connection, :rate_limit
def initialize(options)
@connection = Faraday.new(url: options.fetch(:url, root_endpoint)) do |faraday|
faraday.options.open_timeout = options.fetch(:timeout, TIMEOUT)
faraday.options.timeout = options.fetch(:timeout, TIMEOUT)
faraday.authorization 'token', options.fetch(:token)
faraday.adapter :net_http
faraday.ssl.verify = verify_ssl
end
@rate_limit = RateLimit.new(connection)
end
def get(url, query = {})
exceed, reset_in = rate_limit.get
sleep reset_in if exceed
Github::Response.new(connection.get(url, { per_page: DEFAULT_PER_PAGE }.merge(query)))
end
private
def root_endpoint
custom_endpoint || github_endpoint
end
def custom_endpoint
github_omniauth_provider.dig('args', 'client_options', 'site')
end
def verify_ssl
# If there is no config, we're connecting to github.com
# and we should verify ssl.
github_omniauth_provider.fetch('verify_ssl', true)
end
def github_endpoint
OmniAuth::Strategies::GitHub.default_options[:client_options][:site]
end
def github_omniauth_provider
@github_omniauth_provider ||=
Gitlab.config.omniauth.providers
.find { |provider| provider.name == 'github' }
.to_h
end
end
end
module Github
class Collection
attr_reader :options
def initialize(options)
@options = options
end
def fetch(url, query = {})
return [] if url.blank?
Enumerator.new do |yielder|
loop do
response = client.get(url, query)
response.body.each { |item| yielder << item }
raise StopIteration unless response.rels.key?(:next)
url = response.rels[:next]
end
end.lazy
end
private
def client
@client ||= Github::Client.new(options)
end
end
end
module Github
RepositoryFetchError = Class.new(StandardError)
end
module Github
class Import
class Issue < ::Issue
self.table_name = 'issues'
self.reset_callbacks :save
self.reset_callbacks :create
self.reset_callbacks :commit
self.reset_callbacks :update
self.reset_callbacks :validate
end
end
end
module Github
class Import
class LegacyDiffNote < ::LegacyDiffNote
self.table_name = 'notes'
self.store_full_sti_class = false
self.reset_callbacks :commit
self.reset_callbacks :update
self.reset_callbacks :validate
end
end
end
module Github
class Import
class MergeRequest < ::MergeRequest
self.table_name = 'merge_requests'
self.reset_callbacks :create
self.reset_callbacks :save
self.reset_callbacks :commit
self.reset_callbacks :update
self.reset_callbacks :validate
end
end
end
module Github
class Import
class Note < ::Note
self.table_name = 'notes'
self.store_full_sti_class = false
self.reset_callbacks :save
self.reset_callbacks :commit
self.reset_callbacks :update
self.reset_callbacks :validate
end
end
end
module Github
class RateLimit
SAFE_REMAINING_REQUESTS = 100
SAFE_RESET_TIME = 500
RATE_LIMIT_URL = '/rate_limit'.freeze
attr_reader :connection
def initialize(connection)
@connection = connection
end
def get
response = connection.get(RATE_LIMIT_URL)
# GitHub Rate Limit API returns 404 when the rate limit is disabled
return false unless response.status != 404
body = Oj.load(response.body, class_cache: false, mode: :compat)
remaining = body.dig('rate', 'remaining').to_i
reset_in = body.dig('rate', 'reset').to_i
exceed = remaining <= SAFE_REMAINING_REQUESTS
[exceed, reset_in]
end
end
end
module Github
class Repositories
attr_reader :options
def initialize(options)
@options = options
end
def fetch
Collection.new(options).fetch(repos_url)
end
private
def repos_url
'/user/repos'
end
end
end
module Github
module Representation
class Base
def initialize(raw, options = {})
@raw = raw
@options = options
end
def id
raw['id']
end
def url
raw['url']
end
def created_at
raw['created_at']
end
def updated_at
raw['updated_at']
end
private
attr_reader :raw, :options
end
end
end
module Github
module Representation
class Branch < Representation::Base
attr_reader :repository
def user
raw.dig('user', 'login') || 'unknown'
end
def repo?
raw['repo'].present?
end
def repo
return unless repo?
@repo ||= Github::Representation::Repo.new(raw['repo'])
end
def ref
raw['ref']
end
def sha
raw['sha']
end
def short_sha
Commit.truncate_sha(sha)
end
def valid?
sha.present? && ref.present?
end
def restore!(name)
repository.create_branch(name, sha)
rescue Gitlab::Git::Repository::InvalidRef => e
Rails.logger.error("#{self.class.name}: Could not restore branch #{name}: #{e}")
end
def remove!(name)
repository.delete_branch(name)
rescue Gitlab::Git::Repository::DeleteBranchError => e
Rails.logger.error("#{self.class.name}: Could not remove branch #{name}: #{e}")
end
private
def repository
@repository ||= options.fetch(:repository)
end
end
end
end
module Github
module Representation
class Comment < Representation::Base
def note
raw['body'] || ''
end
def author
@author ||= Github::Representation::User.new(raw['user'], options)
end
def commit_id
raw['commit_id']
end
def line_code
return unless on_diff?
parsed_lines = Gitlab::Diff::Parser.new.parse(diff_hunk.lines)
generate_line_code(parsed_lines.to_a.last)
end
private
def generate_line_code(line)
Gitlab::Git.diff_line_code(file_path, line.new_pos, line.old_pos)
end
def on_diff?
diff_hunk.present?
end
def diff_hunk
raw['diff_hunk']
end
def file_path
raw['path']
end
end
end
end
module Github
module Representation
class Issuable < Representation::Base
def iid
raw['number']
end
def title
raw['title']
end
def description
raw['body'] || ''
end
def milestone
return unless raw['milestone'].present?
@milestone ||= Github::Representation::Milestone.new(raw['milestone'])
end
def author
@author ||= Github::Representation::User.new(raw['user'], options)
end
def labels?
raw['labels'].any?
end
def labels
@labels ||= Array(raw['labels']).map do |label|
Github::Representation::Label.new(label, options)
end
end
end
end
end
module Github
module Representation
class Issue < Representation::Issuable
def state
raw['state'] == 'closed' ? 'closed' : 'opened'
end
def comments?
raw['comments'] > 0
end
def pull_request?
raw['pull_request'].present?
end
def assigned?
raw['assignees'].present?
end
def assignees
@assignees ||= Array(raw['assignees']).map do |user|
Github::Representation::User.new(user, options)
end
end
end
end
end
module Github
module Representation
class Label < Representation::Base
def color
"##{raw['color']}"
end
def title
raw['name']
end
end
end
end
module Github
module Representation
class Milestone < Representation::Base
def iid
raw['number']
end
def title
raw['title']
end
def description
raw['description']
end
def due_date
raw['due_on']
end
def state
raw['state'] == 'closed' ? 'closed' : 'active'
end
end
end
end
module Github
module Representation
class PullRequest < Representation::Issuable
delegate :sha, to: :source_branch, prefix: true
delegate :sha, to: :target_branch, prefix: true
def source_project
project
end
def source_branch_name
# Mimic the "user:branch" displayed in the MR widget,
# i.e. "Request to merge rymai:add-external-mounts into master"
cross_project? ? "#{source_branch.user}:#{source_branch.ref}" : source_branch.ref
end
def target_project
project
end
def target_branch_name
target_branch.ref
end
def state
return 'merged' if raw['state'] == 'closed' && raw['merged_at'].present?
return 'closed' if raw['state'] == 'closed'
'opened'
end
def opened?
state == 'opened'
end
def valid?
source_branch.valid? && target_branch.valid?
end
def assigned?
raw['assignee'].present?
end
def assignee
return unless assigned?
@assignee ||= Github::Representation::User.new(raw['assignee'], options)
end
private
def project
@project ||= options.fetch(:project)
end
def source_branch
@source_branch ||= Representation::Branch.new(raw['head'], repository: project.repository)
end
def target_branch
@target_branch ||= Representation::Branch.new(raw['base'], repository: project.repository)
end
def cross_project?
return true unless source_branch.repo?
source_branch.repo.id != target_branch.repo.id
end
end
end
end
module Github
module Representation
class Release < Representation::Base
def description
raw['body']
end
def tag
raw['tag_name']
end
def valid?
!raw['draft']
end
end
end
end
module Github
module Representation
class Repo < Representation::Base
end
end
end
module Github
module Representation
class User < Representation::Base
def email
return @email if defined?(@email)
@email = Github::User.new(username, options).get.fetch('email', nil)
end
def username
raw['login']
end
end
end
end
module Github
class Response
attr_reader :raw, :headers, :status
def initialize(response)
@raw = response
@headers = response.headers
@status = response.status
end
def body
Oj.load(raw.body, class_cache: false, mode: :compat)
end
def rels
links = headers['Link'].to_s.split(', ').map do |link|
href, name = link.match(/<(.*?)>; rel="(\w+)"/).captures
[name.to_sym, href]
end
Hash[*links.flatten]
end
end
end
module Github
class User
attr_reader :username, :options
def initialize(username, options)
@username = username
@options = options
end
def get
client.get(user_url).body
end
private
def client
@client ||= Github::Client.new(options)
end
def user_url
"/users/#{username}"
end
end
end
...@@ -26,7 +26,7 @@ module Gitlab ...@@ -26,7 +26,7 @@ module Gitlab
result = result =
service_request_check(login, password, project) || service_request_check(login, password, project) ||
build_access_token_check(login, password) || build_access_token_check(login, password) ||
lfs_token_check(login, password) || lfs_token_check(login, password, project) ||
oauth_access_token_check(login, password) || oauth_access_token_check(login, password) ||
personal_access_token_check(password) || personal_access_token_check(password) ||
user_with_password_for_git(login, password) || user_with_password_for_git(login, password) ||
...@@ -147,7 +147,7 @@ module Gitlab ...@@ -147,7 +147,7 @@ module Gitlab
end.flatten.uniq end.flatten.uniq
end end
def lfs_token_check(login, password) def lfs_token_check(login, password, project)
deploy_key_matches = login.match(/\Alfs\+deploy-key-(\d+)\z/) deploy_key_matches = login.match(/\Alfs\+deploy-key-(\d+)\z/)
actor = actor =
...@@ -164,6 +164,8 @@ module Gitlab ...@@ -164,6 +164,8 @@ module Gitlab
authentication_abilities = authentication_abilities =
if token_handler.user? if token_handler.user?
full_authentication_abilities full_authentication_abilities
elsif token_handler.deploy_key_pushable?(project)
read_write_authentication_abilities
else else
read_authentication_abilities read_authentication_abilities
end end
...@@ -209,10 +211,15 @@ module Gitlab ...@@ -209,10 +211,15 @@ module Gitlab
] ]
end end
def full_authentication_abilities def read_write_authentication_abilities
read_authentication_abilities + [ read_authentication_abilities + [
:push_code, :push_code,
:create_container_image, :create_container_image
]
end
def full_authentication_abilities
read_write_authentication_abilities + [
:admin_container_image :admin_container_image
] ]
end end
......
...@@ -112,20 +112,41 @@ module Gitlab ...@@ -112,20 +112,41 @@ module Gitlab
end end
end end
def self.bulk_insert(table, rows) # Bulk inserts a number of rows into a table, optionally returning their
# IDs.
#
# table - The name of the table to insert the rows into.
# rows - An Array of Hash instances, each mapping the columns to their
# values.
# return_ids - When set to true the return value will be an Array of IDs of
# the inserted rows, this only works on PostgreSQL.
def self.bulk_insert(table, rows, return_ids: false)
return if rows.empty? return if rows.empty?
keys = rows.first.keys keys = rows.first.keys
columns = keys.map { |key| connection.quote_column_name(key) } columns = keys.map { |key| connection.quote_column_name(key) }
return_ids = false if mysql?
tuples = rows.map do |row| tuples = rows.map do |row|
row.values_at(*keys).map { |value| connection.quote(value) } row.values_at(*keys).map { |value| connection.quote(value) }
end end
connection.execute <<-EOF sql = <<-EOF
INSERT INTO #{table} (#{columns.join(', ')}) INSERT INTO #{table} (#{columns.join(', ')})
VALUES #{tuples.map { |tuple| "(#{tuple.join(', ')})" }.join(', ')} VALUES #{tuples.map { |tuple| "(#{tuple.join(', ')})" }.join(', ')}
EOF EOF
if return_ids
sql << 'RETURNING id'
end
result = connection.execute(sql)
if return_ids
result.values.map { |tuple| tuple[0].to_i }
else
[]
end
end end
def self.sanitize_timestamp(timestamp) def self.sanitize_timestamp(timestamp)
......
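The `return_ids:` option added to `Gitlab::Database.bulk_insert` above can be used roughly as follows; the table and columns are illustrative, and note that the rows bypass model validations and callbacks entirely:

```ruby
# Minimal usage sketch of the bulk_insert API documented above.
rows = [
  { title: 'First label',  project_id: 1, color: '#ff0000' },
  { title: 'Second label', project_id: 1, color: '#00ff00' }
]

# On PostgreSQL this returns the IDs of the inserted rows; on MySQL the
# return_ids option is ignored and an empty Array is returned.
ids = Gitlab::Database.bulk_insert('labels', rows, return_ids: true)
```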
...@@ -76,6 +76,8 @@ module Gitlab ...@@ -76,6 +76,8 @@ module Gitlab
handle_repositories_changed(event_log.repositories_changed_event) handle_repositories_changed(event_log.repositories_changed_event)
elsif event_log.repository_renamed_event elsif event_log.repository_renamed_event
handle_repository_renamed(event_log) handle_repository_renamed(event_log)
elsif event_log.hashed_storage_migrated_event
handle_hashed_storage_migrated(event_log)
end end
end end
end end
...@@ -182,8 +184,8 @@ module Gitlab ...@@ -182,8 +184,8 @@ module Gitlab
old_path = event.old_path_with_namespace old_path = event.old_path_with_namespace
new_path = event.new_path_with_namespace new_path = event.new_path_with_namespace
job_id = ::Geo::MoveRepositoryService job_id = ::Geo::RenameRepositoryService
.new(event.project_id, '', old_path, new_path) .new(event.project_id, old_path, new_path)
.async_execute .async_execute
log_event_info( log_event_info(
...@@ -195,6 +197,28 @@ module Gitlab ...@@ -195,6 +197,28 @@ module Gitlab
job_id: job_id) job_id: job_id)
end end
def handle_hashed_storage_migrated(event_log)
event = event_log.hashed_storage_migrated_event
return unless event.project_id
job_id = ::Geo::HashedStorageMigrationService.new(
event.project_id,
old_disk_path: event.old_disk_path,
new_disk_path: event.new_disk_path,
old_storage_version: event.old_storage_version
).async_execute
log_event_info(
event_log.created_at,
message: 'Migrating project to hashed storage',
project_id: event.project_id,
old_storage_version: event.old_storage_version,
new_storage_version: event.new_storage_version,
old_disk_path: event.old_disk_path,
new_disk_path: event.new_disk_path,
job_id: job_id)
end
def find_or_initialize_registry(project_id, attrs) def find_or_initialize_registry(project_id, attrs)
registry = ::Geo::ProjectRegistry.find_or_initialize_by(project_id: project_id) registry = ::Geo::ProjectRegistry.find_or_initialize_by(project_id: project_id)
registry.assign_attributes(attrs) registry.assign_attributes(attrs)
......
...@@ -920,6 +920,11 @@ module Gitlab ...@@ -920,6 +920,11 @@ module Gitlab
false false
end end
# Returns true if a remote exists.
def remote_exists?(name)
rugged.remotes[name].present?
end
# Update the specified remote using the values in the +options+ hash # Update the specified remote using the values in the +options+ hash
# #
# Example # Example
......
module Gitlab
module GithubImport
def self.new_client_for(project, token: nil, parallel: true)
token_to_use = token || project.import_data&.credentials&.fetch(:user)
Client.new(token_to_use, parallel: parallel)
end
# Inserts a raw row and returns the ID of the inserted row.
#
# attributes - The attributes/columns to set.
# relation - An ActiveRecord::Relation to use for finding the ID of the row
# when using MySQL.
def self.insert_and_return_id(attributes, relation)
# We use bulk_insert here so we can bypass any queries executed by
# callbacks or validation rules, as doing this wouldn't scale when
# importing very large projects.
result = Gitlab::Database
.bulk_insert(relation.table_name, [attributes], return_ids: true)
# MySQL doesn't support returning the IDs of a bulk insert in a way that
# is not a pain, so in this case we'll issue an extra query instead.
result.first ||
relation.where(iid: attributes[:iid]).limit(1).pluck(:id).first
end
# Returns the ID of the ghost user.
def self.ghost_user_id
key = 'github-import/ghost-user-id'
Caching.read_integer(key) || Caching.write(key, User.select(:id).ghost.id)
end
end
end
# frozen_string_literal: true
module Gitlab
module GithubImport
module BulkImporting
# Builds and returns an Array of objects to bulk insert into the
# database.
#
# enum - An Enumerable that returns the objects to turn into database
# rows.
def build_database_rows(enum)
enum.each_with_object([]) do |(object, _), rows|
rows << build(object) unless already_imported?(object)
end
end
# Bulk inserts the given rows into the database.
def bulk_insert(model, rows, batch_size: 100)
rows.each_slice(batch_size) do |slice|
Gitlab::Database.bulk_insert(model.table_name, slice)
end
end
end
end
end