Commit 12162339 authored by Clement Ho

Merge branch 'master' into auto-search-when-state-changed

parents 63746d2d 94644565
@@ -56,6 +56,8 @@
       if (job.import_status === 'finished') {
         job_item.removeClass("active").addClass("success");
         return status_field.html('<span><i class="fa fa-check"></i> done</span>');
+      } else if (job.import_status === 'scheduled') {
+        return status_field.html("<i class='fa fa-spinner fa-spin'></i> scheduled");
       } else if (job.import_status === 'started') {
         return status_field.html("<i class='fa fa-spinner fa-spin'></i> started");
       } else {
...
@@ -14,14 +14,7 @@ class Projects::ImportsController < Projects::ApplicationController
     @project.import_url = params[:project][:import_url]

     if @project.save
-      @project.reload
-      if @project.import_failed?
-        @project.import_retry
-      else
-        @project.import_start
-        @project.add_import_job
-      end
+      @project.reload.import_schedule
     end

     redirect_to namespace_project_import_path(@project.namespace, @project)
...
@@ -199,7 +199,7 @@ class ApplicationSetting < ActiveRecord::Base
     ApplicationSetting.define_attribute_methods
   end

-  def self.defaults_ce
+  def self.defaults
     {
       after_sign_up_text: nil,
       akismet_enabled: false,
...
@@ -250,10 +250,6 @@ class ApplicationSetting < ActiveRecord::Base
     }
   end

-  def self.defaults
-    defaults_ce
-  end
-
   def self.create_from_defaults
     create(defaults)
   end
...
@@ -165,7 +165,7 @@ class Project < ActiveRecord::Base
   has_many :todos, dependent: :destroy
   has_many :notification_settings, dependent: :destroy, as: :source

-  has_one :import_data, dependent: :destroy, class_name: "ProjectImportData"
+  has_one :import_data, dependent: :delete, class_name: "ProjectImportData"
   has_one :project_feature, dependent: :destroy
   has_one :statistics, class_name: 'ProjectStatistics', dependent: :delete
   has_many :container_repositories, dependent: :destroy
...
@@ -298,8 +298,16 @@ class Project < ActiveRecord::Base
   scope :excluding_project, ->(project) { where.not(id: project) }

   state_machine :import_status, initial: :none do
+    event :import_schedule do
+      transition [:none, :finished, :failed] => :scheduled
+    end
+
+    event :force_import_start do
+      transition [:none, :finished, :failed] => :started
+    end
+
     event :import_start do
-      transition [:none, :finished] => :started
+      transition scheduled: :started
     end

     event :import_finish do
...
@@ -307,18 +315,23 @@ class Project < ActiveRecord::Base
     end

     event :import_fail do
-      transition started: :failed
+      transition [:scheduled, :started] => :failed
     end

     event :import_retry do
       transition failed: :started
     end

+    state :scheduled
     state :started
     state :finished
     state :failed

-    after_transition any => :finished, do: :reset_cache_and_import_attrs
+    after_transition [:none, :finished, :failed] => :scheduled do |project, _|
+      project.run_after_commit { add_import_job }
+    end
+
+    after_transition started: :finished, do: :reset_cache_and_import_attrs
   end

   class << self
...
@@ -532,9 +545,17 @@ class Project < ActiveRecord::Base
   end

   def import_in_progress?
+    import_started? || import_scheduled?
+  end
+
+  def import_started?
     import? && import_status == 'started'
   end

+  def import_scheduled?
+    import_status == 'scheduled'
+  end
+
   def import_failed?
     import_status == 'failed'
   end
...
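Note: the hunk above is the heart of the change. Imports now pass through a new `scheduled` state, and the Sidekiq job is enqueued from the `after_transition` hook instead of by each caller; the worker itself is what moves the record to `started`. Below is a minimal, standalone sketch of that flow, assuming the plain `state_machines` gem (the `ImportState` class and `enqueue_job` method are illustrative stand-ins, not code from this commit):

```ruby
# A condensed sketch of the new import flow, assuming the state_machines gem.
require 'state_machines'

class ImportState
  state_machine :status, initial: :none do
    event(:import_schedule)    { transition [:none, :finished, :failed] => :scheduled }
    event(:force_import_start) { transition [:none, :finished, :failed] => :started }
    event(:import_start)       { transition scheduled: :started }
    event(:import_finish)      { transition started: :finished }
    event(:import_fail)        { transition [:scheduled, :started] => :failed }

    # Scheduling is what enqueues the background job; the worker later
    # flips the record from scheduled to started.
    after_transition any => :scheduled do |import, _transition|
      import.enqueue_job
    end
  end

  def enqueue_job
    puts 'would enqueue RepositoryImportWorker / RepositoryForkWorker here'
  end
end

import = ImportState.new
import.import_schedule # enqueues the job; status == "scheduled"
import.import_start    # done by the worker; status == "started"
import.import_finish   # status == "finished"
```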
@@ -48,15 +48,14 @@ module Projects
       save_project_and_import_data(import_data)

-      @project.import_start if @project.import?
       after_create_actions if @project.persisted?

       if @project.errors.empty?
-        @project.add_import_job if @project.import?
+        @project.import_schedule if @project.import?
       else
         fail(error: @project.errors.full_messages.join(', '))
       end

       @project
     rescue ActiveRecord::RecordInvalid => e
       message = "Unable to save #{e.record.type}: #{e.record.errors.full_messages.join(", ")} "
...
 class RepositoryForkWorker
+  ForkError = Class.new(StandardError)
+
   include Sidekiq::Worker
   include Gitlab::ShellAdapter
   include DedicatedSidekiqQueue
@@ -8,29 +10,31 @@ class RepositoryForkWorker
                            source_path: source_path,
                            target_path: target_path)

-    project = Project.find_by_id(project_id)
-
-    unless project.present?
-      logger.error("Project #{project_id} no longer exists!")
-      return
-    end
+    project = Project.find(project_id)
+    project.import_start

     result = gitlab_shell.fork_repository(forked_from_repository_storage_path, source_path,
                                           project.repository_storage_path, target_path)
-
-    unless result
-      logger.error("Unable to fork project #{project_id} for repository #{source_path} -> #{target_path}")
-      project.mark_import_as_failed('The project could not be forked.')
-      return
-    end
+    raise ForkError, "Unable to fork project #{project_id} for repository #{source_path} -> #{target_path}" unless result

     project.repository.after_import
+    raise ForkError, "Project #{project_id} had an invalid repository after fork" unless project.valid_repo?

-    unless project.valid_repo?
-      logger.error("Project #{project_id} had an invalid repository after fork")
-      project.mark_import_as_failed('The forked repository is invalid.')
-      return
-    end
-
     project.import_finish
+  rescue ForkError => ex
+    fail_fork(project, ex.message)
+    raise
+  rescue => ex
+    return unless project
+
+    fail_fork(project, ex.message)
+    raise ForkError, "#{ex.class} #{ex.message}"
   end
+
+  private
+
+  def fail_fork(project, message)
+    Rails.logger.error(message)
+    project.mark_import_as_failed(message)
+  end
 end
 class RepositoryImportWorker
+  ImportError = Class.new(StandardError)
+
   include Sidekiq::Worker
   include DedicatedSidekiqQueue
@@ -10,6 +12,8 @@ class RepositoryImportWorker
     @project = Project.find(project_id)
     @current_user = @project.creator

+    project.import_start
+
     Gitlab::Metrics.add_event(:import_repository,
                               import_url: @project.import_url,
                               path: @project.path_with_namespace)
@@ -17,13 +21,23 @@ class RepositoryImportWorker
     project.update_columns(import_jid: self.jid, import_error: nil)

     result = Projects::ImportService.new(project, current_user).execute
-
-    if result[:status] == :error
-      project.mark_import_as_failed(result[:message])
-      return
-    end
+    raise ImportError, result[:message] if result[:status] == :error

     project.repository.after_import
     project.import_finish
+  rescue ImportError => ex
+    fail_import(project, ex.message)
+    raise
+  rescue => ex
+    return unless project
+
+    fail_import(project, ex.message)
+    raise ImportError, "#{ex.class} #{ex.message}"
+  end
+
+  private
+
+  def fail_import(project, message)
+    project.mark_import_as_failed(message)
   end
 end
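Both workers above now share the same error-handling shape: an expected failure raises a worker-specific error class, the rescue marks the import as failed before re-raising (so Sidekiq still records a failed job), and unexpected exceptions are wrapped in that same class after the project has been marked; the fork worker additionally logs and bails out when the project lookup itself fails. A minimal standalone sketch of the pattern follows; `DemoImportWorker`, `FakeProject`, and `run_import` are illustrative stand-ins, not code from this commit:

```ruby
# Stand-in for the few Project state-machine methods the worker touches.
FakeProject = Struct.new(:import_status, :import_error) do
  def import_start;  self.import_status = 'started';  end
  def import_finish; self.import_status = 'finished'; end

  def mark_import_as_failed(message)
    self.import_status = 'failed'
    self.import_error  = message
  end
end

class DemoImportWorker
  ImportError = Class.new(StandardError)

  def perform(project)
    project.import_start

    result = run_import(project) # stands in for Projects::ImportService#execute
    raise ImportError, result[:message] if result[:status] == :error

    project.import_finish
  rescue ImportError => ex
    # Expected failure: record it on the project, then let the job fail too.
    fail_import(project, ex.message)
    raise
  rescue => ex
    # Unexpected failure: still mark the project, then re-raise wrapped.
    fail_import(project, ex.message)
    raise ImportError, "#{ex.class} #{ex.message}"
  end

  private

  def run_import(_project)
    { status: :error, message: 'could not clone repository' }
  end

  def fail_import(project, message)
    project.mark_import_as_failed(message)
  end
end

project = FakeProject.new('scheduled', nil)
begin
  DemoImportWorker.new.perform(project)
rescue DemoImportWorker::ImportError => e
  puts "job failed: #{e.message}"                 # => could not clone repository
  puts "import_status: #{project.import_status}"  # => failed
end
```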
@@ -37,7 +37,7 @@ class GithubImport
   end

   def import!
-    @project.import_start
+    @project.force_import_start

     timings = Benchmark.measure do
       Github::Import.new(@project, @options).execute
...
@@ -7,5 +7,9 @@ FactoryGirl.define do
       link.forked_from_project.reload
       link.forked_to_project.reload
     end
+
+    trait :forked_to_empty_project do
+      association :forked_to_project, factory: :empty_project
+    end
   end
 end
@@ -26,6 +26,22 @@ FactoryGirl.define do
     visibility_level Gitlab::VisibilityLevel::PRIVATE
   end

+  trait :import_scheduled do
+    import_status :scheduled
+  end
+
+  trait :import_started do
+    import_status :started
+  end
+
+  trait :import_finished do
+    import_status :finished
+  end
+
+  trait :import_failed do
+    import_status :failed
+  end
+
   trait :archived do
     archived true
   end
...
@@ -68,9 +68,14 @@ feature 'Diffs URL', js: true, feature: true do
     let(:merge_request) { create(:merge_request_with_diffs, source_project: forked_project, target_project: project, author: author_user) }
     let(:changelog_id) { Digest::SHA1.hexdigest("CHANGELOG") }

+    before do
+      forked_project.repository.after_import
+    end
+
     context 'as author' do
       it 'shows direct edit link' do
         login_as(author_user)
         visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request)

         # Throws `Capybara::Poltergeist::InvalidSelector` if we try to use `#hash` syntax
@@ -81,6 +86,7 @@ feature 'Diffs URL', js: true, feature: true do
     context 'as user who needs to fork' do
       it 'shows fork/cancel confirmation' do
         login_as(user)
         visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request)

         # Throws `Capybara::Poltergeist::InvalidSelector` if we try to use `#hash` syntax
...
@@ -10,6 +10,9 @@ describe('Pipeline details header', () => {
   beforeEach(() => {
     HeaderComponent = Vue.extend(headerComponent);

+    const threeWeeksAgo = new Date();
+    threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
+
     props = {
       pipeline: {
         details: {
...
@@ -22,7 +25,7 @@ describe('Pipeline details header', () => {
         },
       },
       id: 123,
-      created_at: '2017-05-08T14:57:39.781Z',
+      created_at: threeWeeksAgo.toISOString(),
       user: {
         web_url: 'path',
         name: 'Foo',
...
@@ -50,7 +50,7 @@ describe Project, models: true do
     it { is_expected.to have_one(:external_wiki_service).dependent(:destroy) }
     it { is_expected.to have_one(:project_feature).dependent(:destroy) }
     it { is_expected.to have_one(:statistics).class_name('ProjectStatistics').dependent(:delete) }
-    it { is_expected.to have_one(:import_data).class_name('ProjectImportData').dependent(:destroy) }
+    it { is_expected.to have_one(:import_data).class_name('ProjectImportData').dependent(:delete) }
     it { is_expected.to have_one(:last_event).class_name('Event') }
     it { is_expected.to have_one(:forked_from_project).through(:forked_project_link) }
     it { is_expected.to have_many(:commit_statuses) }
...
@@ -1446,16 +1446,13 @@ describe Project, models: true do
   end

   describe 'Project import job' do
-    let(:project) { create(:empty_project) }
-    let(:mirror) { false }
+    let(:project) { create(:empty_project, import_url: generate(:url)) }

     before do
       allow_any_instance_of(Gitlab::Shell).to receive(:import_repository)
         .with(project.repository_storage_path, project.path_with_namespace, project.import_url)
         .and_return(true)
-      allow(project).to receive(:repository_exists?).and_return(true)

       expect_any_instance_of(Repository).to receive(:after_import)
         .and_call_original
     end
...
@@ -1463,8 +1460,7 @@ describe Project, models: true do
     it 'imports a project' do
       expect_any_instance_of(RepositoryImportWorker).to receive(:perform).and_call_original

-      project.import_start
-      project.add_import_job
+      project.import_schedule

       expect(project.reload.import_status).to eq('finished')
     end
...
@@ -1551,7 +1547,7 @@ describe Project, models: true do
   describe '#add_import_job' do
     context 'forked' do
-      let(:forked_project_link) { create(:forked_project_link) }
+      let(:forked_project_link) { create(:forked_project_link, :forked_to_empty_project) }
       let(:forked_from_project) { forked_project_link.forked_from_project }
       let(:project) { forked_project_link.forked_to_project }
...
@@ -1565,9 +1561,9 @@ describe Project, models: true do
     end

     context 'not forked' do
-      let(:project) { create(:empty_project) }
-
       it 'schedules a RepositoryImportWorker job' do
+        project = create(:empty_project, import_url: generate(:url))
+
         expect(RepositoryImportWorker).to receive(:perform_async).with(project.id)

         project.add_import_job
...
@@ -161,15 +161,13 @@ describe Projects::CreateService, '#execute', services: true do
   end

   context 'when a bad service template is created' do
-    before do
-      create(:service, type: 'DroneCiService', project: nil, template: true, active: true)
-    end
-
     it 'reports an error in the imported project' do
       opts[:import_url] = 'http://www.gitlab.com/gitlab-org/gitlab-ce'
+      create(:service, type: 'DroneCiService', project: nil, template: true, active: true)

       project = create_project(user, opts)

-      expect(project.errors.full_messages_for(:base).first).to match /Unable to save project. Error: Unable to save DroneCiService/
+      expect(project.errors.full_messages_for(:base).first).to match(/Unable to save project. Error: Unable to save DroneCiService/)
       expect(project.services.count).to eq 0
     end
   end
...
 require 'spec_helper'

 describe RepositoryForkWorker do
-  let(:project) { create(:project, :repository) }
+  let(:project) { create(:project, :repository, :import_scheduled) }
   let(:fork_project) { create(:project, :repository, forked_from_project: project) }
   let(:shell) { Gitlab::Shell.new }
@@ -46,15 +46,27 @@ describe RepositoryForkWorker do
     end

     it "handles bad fork" do
+      source_path = project.full_path
+      target_path = fork_project.namespace.full_path
+      error_message = "Unable to fork project #{project.id} for repository #{source_path} -> #{target_path}"
+
       expect(shell).to receive(:fork_repository).and_return(false)
-      expect(subject.logger).to receive(:error)
-
-      subject.perform(
-        project.id,
-        '/test/path',
-        project.full_path,
-        fork_project.namespace.full_path)
+
+      expect do
+        subject.perform(project.id, '/test/path', source_path, target_path)
+      end.to raise_error(RepositoryForkWorker::ForkError, error_message)
+    end
+
+    it 'handles unexpected error' do
+      source_path = project.full_path
+      target_path = fork_project.namespace.full_path
+
+      allow_any_instance_of(Gitlab::Shell).to receive(:fork_repository).and_raise(RuntimeError)
+
+      expect do
+        subject.perform(project.id, '/test/path', source_path, target_path)
+      end.to raise_error(RepositoryForkWorker::ForkError)
+      expect(project.reload.import_status).to eq('failed')
     end
   end
 end
 require 'spec_helper'

 describe RepositoryImportWorker do
-  let(:project) { create(:empty_project) }
+  let(:project) { create(:empty_project, :import_scheduled) }

   subject { described_class.new }
@@ -21,15 +21,26 @@ describe RepositoryImportWorker do
     context 'when the import has failed' do
       it 'hide the credentials that were used in the import URL' do
         error = %q{remote: Not Found fatal: repository 'https://user:pass@test.com/root/repoC.git/' not found }

-        expect_any_instance_of(Projects::ImportService).to receive(:execute).
-          and_return({ status: :error, message: error })
+        expect_any_instance_of(Projects::ImportService).to receive(:execute).and_return({ status: :error, message: error })
         allow(subject).to receive(:jid).and_return('123')

-        subject.perform(project.id)
+        expect do
+          subject.perform(project.id)
+        end.to raise_error(RepositoryImportWorker::ImportError, error)
+        expect(project.reload.import_error).to include("https://*****:*****@test.com/root/repoC.git/")
         expect(project.reload.import_jid).not_to be_nil
       end
     end

+    context 'with unexpected error' do
+      it 'marks import as failed' do
+        allow_any_instance_of(Projects::ImportService).to receive(:execute).and_raise(RuntimeError)
+
+        expect do
+          subject.perform(project.id)
+        end.to raise_error(RepositoryImportWorker::ImportError)
+        expect(project.reload.import_status).to eq('failed')
+      end
+    end
   end
 end