Commit 02ab65d4 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 44113533
@@ -155,11 +155,10 @@ Rails/ApplicationRecord:
     # as they need to be as decoupled from application code as possible
     - db/**/*.rb
     - lib/gitlab/background_migration/**/*.rb
+    - ee/lib/ee/gitlab/background_migration/**/*.rb
     - lib/gitlab/database/**/*.rb
     - spec/**/*.rb
     - ee/db/**/*.rb
-    - ee/lib/gitlab/background_migration/**/*.rb
-    - ee/lib/ee/gitlab/background_migration/**/*.rb
     - ee/spec/**/*.rb

 # GitLab ###################################################################
@@ -233,7 +232,8 @@ RSpec/FactoriesInMigrationSpecs:
     - 'spec/migrations/**/*.rb'
     - 'ee/spec/migrations/**/*.rb'
     - 'spec/lib/gitlab/background_migration/**/*.rb'
-    - 'ee/spec/lib/gitlab/background_migration/**/*.rb'
+    - 'spec/lib/ee/gitlab/background_migration/**/*.rb'
+    - 'ee/spec/lib/ee/gitlab/background_migration/**/*.rb'

 Cop/IncludeActionViewContext:
   Enabled: true
@@ -365,4 +365,4 @@ Style/MultilineWhenThen:
   Enabled: false

 Style/FloatDivision:
-  Enabled: false
\ No newline at end of file
+  Enabled: false
@@ -16,21 +16,7 @@ export const redirectToUrl = (self, url) => visitUrl(url);
 export const setInitialData = ({ commit }, data) => commit(types.SET_INITIAL_DATA, data);

 export const discardAllChanges = ({ state, commit, dispatch }) => {
-  state.changedFiles.forEach(file => {
-    if (file.tempFile || file.prevPath) dispatch('closeFile', file);
-
-    if (file.tempFile) {
-      dispatch('deleteEntry', file.path);
-    } else if (file.prevPath) {
-      dispatch('renameEntry', {
-        path: file.path,
-        name: file.prevName,
-        parentPath: file.prevParentPath,
-      });
-    } else {
-      commit(types.DISCARD_FILE_CHANGES, file.path);
-    }
-  });
+  state.changedFiles.forEach(file => dispatch('restoreOriginalFile', file.path));

   commit(types.REMOVE_ALL_CHANGES_FILES);
 };
......
@@ -191,38 +191,47 @@ export const setFileViewMode = ({ commit }, { file, viewMode }) => {
   commit(types.SET_FILE_VIEWMODE, { file, viewMode });
 };

-export const discardFileChanges = ({ dispatch, state, commit, getters }, path) => {
+export const restoreOriginalFile = ({ dispatch, state, commit }, path) => {
   const file = state.entries[path];
+  const isDestructiveDiscard = file.tempFile || file.prevPath;

   if (file.deleted && file.parentPath) {
     dispatch('restoreTree', file.parentPath);
   }

-  if (file.tempFile || file.prevPath) {
+  if (isDestructiveDiscard) {
     dispatch('closeFile', file);
+  }

-    if (file.tempFile) {
-      dispatch('deleteEntry', file.path);
-    } else {
-      commit(types.DISCARD_FILE_CHANGES, file.path);
-      dispatch('renameEntry', {
-        path: file.path,
-        name: file.prevName,
-        parentPath: file.prevParentPath,
-      });
-    }
+  if (file.tempFile) {
+    dispatch('deleteEntry', file.path);
   } else {
-    commit(types.DISCARD_FILE_CHANGES, path);
+    commit(types.DISCARD_FILE_CHANGES, file.path);
   }

-  if (getters.activeFile && file.path === getters.activeFile.path) {
+  if (file.prevPath) {
+    dispatch('renameEntry', {
+      path: file.path,
+      name: file.prevName,
+      parentPath: file.prevParentPath,
+    });
+  }
+};
+
+export const discardFileChanges = ({ dispatch, state, commit, getters }, path) => {
+  const file = state.entries[path];
+  const isDestructiveDiscard = file.tempFile || file.prevPath;
+
+  dispatch('restoreOriginalFile', path);
+
+  if (!isDestructiveDiscard && file.path === getters.activeFile?.path) {
     dispatch('updateDelayViewerUpdated', true)
       .then(() => {
         router.push(`/project${file.url}`);
       })
       .catch(e => {
         throw e;
       });
   }

   commit(types.REMOVE_FILE_FROM_CHANGED, path);
......
 - type = local_assigns.fetch(:type)
 - board = local_assigns.fetch(:board, nil)
+- show_sorting_dropdown = local_assigns.fetch(:show_sorting_dropdown, true)
 - is_not_boards_modal_or_productivity_analytics = type != :boards_modal && type != :productivity_analytics
 - block_css_class = is_not_boards_modal_or_productivity_analytics ? 'row-content-block second-block' : ''
 - user_can_admin_list = board && can?(current_user, :admin_list, board.resource_parent)
@@ -169,5 +170,5 @@
     - if @project
       #js-add-issues-btn.prepend-left-10{ data: { can_admin_list: can?(current_user, :admin_list, @project) } }
       #js-toggle-focus-btn
-  - elsif is_not_boards_modal_or_productivity_analytics
+  - elsif is_not_boards_modal_or_productivity_analytics && show_sorting_dropdown
     = render 'shared/issuable/sort_dropdown'
---
title: 'Fix: undefined background migration classes for EE-CE downgrades'
merge_request: 22160
author:
type: fixed
---
title: Fix discard all to behave like discard single file in Web IDE
merge_request: 22572
author:
type: fixed
@@ -161,7 +161,7 @@ test:
   only: [merge_requests]
   except:
     variables:
-      $CI_COMMIT_REF_NAME =~ /^docs-/
+      - $CI_COMMIT_REF_NAME =~ /^docs-/
 ```

 ## Important notes about merge requests from forked projects
......
@@ -17,34 +17,29 @@ NOTE: **Note:**
 Coming over to GitLab from Jenkins? Check out our [reference](../jenkins/index.md)
 for converting your pre-existing pipelines over to our format.

-GitLab offers a [continuous integration][ci] service. If you
-[add a `.gitlab-ci.yml` file][yaml] to the root directory of your repository,
-and configure your GitLab project to use a [Runner], then each commit or
-push triggers your CI [pipeline].
+GitLab offers a [continuous integration](https://about.gitlab.com/product/continuous-integration/) service. For each commit or push to trigger your CI
+[pipeline](../pipelines.md), you must:

-The `.gitlab-ci.yml` file tells the GitLab Runner what to do. By default it runs
-a pipeline with three [stages]: `build`, `test`, and `deploy`. You don't need to
-use all three stages; stages with no jobs are simply ignored.
+- Add a [`.gitlab-ci.yml` file](#creating-a-gitlab-ciyml-file) to your repository's root directory.
+- Ensure your project is configured to use a [Runner](#configuring-a-runner).

-If everything runs OK (no non-zero return values), you'll get a nice green
-checkmark associated with the commit. This makes it
-easy to see whether a commit caused any of the tests to fail before
-you even look at the code.
+The `.gitlab-ci.yml` file tells the GitLab Runner what to do. A simple pipeline commonly has
+three [stages](../yaml/README.md#stages):

-Most projects use GitLab's CI service to run the test suite so that
-developers get immediate feedback if they broke something.
+- `build`
+- `test`
+- `deploy`

-There's a growing trend to use continuous delivery and continuous deployment to
-automatically deploy tested code to staging and production environments.
+You do not need to use all three stages; stages with no jobs are ignored.

-So in brief, the steps needed to have a working CI can be summed up to:
+The pipeline appears under the project's **CI/CD > Pipelines** page. If everything runs OK (no non-zero
+return values), you get a green check mark associated with the commit. This makes it easy to see
+whether a commit caused any of the tests to fail before you even look at the job (test) log. Many projects use
+GitLab's CI service to run the test suite, so developers get immediate feedback if they broke
+something.

-1. Add `.gitlab-ci.yml` to the root directory of your repository
-1. Configure a Runner
-
-From there on, on every push to your Git repository, the Runner will
-automatically start the pipeline and the pipeline will appear under the
-project's **Pipelines** page.
+It's also common to use pipelines to automatically deploy
+tested code to staging and production environments.

 ---
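As an illustration of the workflow described above, a minimal `.gitlab-ci.yml` covering the three common stages might look like the following sketch (job names and `script` commands are placeholders, not taken from this commit):

```yaml
stages:
  - build
  - test
  - deploy

build-job:            # placeholder job name
  stage: build
  script: echo "Compiling..."

test-job:
  stage: test
  script: echo "Running the test suite..."

deploy-job:
  stage: deploy
  script: echo "Deploying to production..."
  only:
    - master
```

Once this file is committed and a Runner is available, every push starts a pipeline that runs these jobs stage by stage.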
@@ -237,9 +232,4 @@ CI with various languages.
 [runner-install]: https://docs.gitlab.com/runner/install/
 [blog-ci]: https://about.gitlab.com/blog/2015/05/06/why-were-replacing-gitlab-ci-jobs-with-gitlab-ci-dot-yml/
 [examples]: ../examples/README.md
-[ci]: https://about.gitlab.com/product/continuous-integration/
-[yaml]: ../yaml/README.md
-[runner]: ../runners/README.md
 [enabled]: ../enable_or_disable_ci.md
-[stages]: ../yaml/README.md#stages
-[pipeline]: ../pipelines.md
@@ -2719,10 +2719,10 @@ can lead to errors during the deployment.
 To avoid these errors, the `resource_group` attribute can be used to ensure that
 the Runner will not run certain jobs simultaneously.

-When the `resource_group` key is defined in a job in `.gitlab-ci.yml`,
-job runs are mutually exclusive across different pipelines in the same project.
+When the `resource_group` key is defined for a job in `.gitlab-ci.yml`,
+job executions are mutually exclusive across different pipelines for the same project.
 If multiple jobs belonging to the same resource group are enqueued simultaneously,
-only one of them will be picked by the Runner, and the other jobs will wait until the
+only one of the jobs will be picked by the Runner, and the other jobs will wait until the
 `resource_group` is free.

 Here is a simple example:
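(The example itself lies outside the changed lines of this hunk. For illustration only, a job limited by `resource_group` could look like the sketch below; the job name and `script` are placeholders.)

```yaml
deploy-to-production:
  script: ./deploy production   # placeholder deploy command
  resource_group: production    # only one job in this group runs at a time across pipelines
```

Any other pipeline that enqueues a job with the same `resource_group` value waits until the running job finishes.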
......
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # rubocop: disable Style/Documentation
    class BackfillVersionDataFromGitaly
      def perform(issue_id)
      end
    end
  end
end

Gitlab::BackgroundMigration::BackfillVersionDataFromGitaly.prepend_if_ee('EE::Gitlab::BackgroundMigration::BackfillVersionDataFromGitaly')

# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # rubocop: disable Style/Documentation
    class GenerateGitlabSubscriptions
      def perform(start_id, stop_id)
      end
    end
  end
end

Gitlab::BackgroundMigration::GenerateGitlabSubscriptions.prepend_if_ee('EE::Gitlab::BackgroundMigration::GenerateGitlabSubscriptions')

# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # rubocop: disable Style/Documentation
    class MigrateApproverToApprovalRules
      # @param target_type [String] class of target, either 'MergeRequest' or 'Project'
      # @param target_id [Integer] id of target
      def perform(target_type, target_id, sync_code_owner_rule: true)
      end
    end
  end
end

Gitlab::BackgroundMigration::MigrateApproverToApprovalRules.prepend_if_ee('EE::Gitlab::BackgroundMigration::MigrateApproverToApprovalRules')

# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # rubocop: disable Style/Documentation
    class MigrateApproverToApprovalRulesCheckProgress
      def perform
      end
    end
  end
end

Gitlab::BackgroundMigration::MigrateApproverToApprovalRulesCheckProgress.prepend_if_ee('EE::Gitlab::BackgroundMigration::MigrateApproverToApprovalRulesCheckProgress')

# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # rubocop: disable Style/Documentation
    class MigrateApproverToApprovalRulesInBatch
      def perform(start_id, end_id)
      end
    end
  end
end

Gitlab::BackgroundMigration::MigrateApproverToApprovalRulesInBatch.prepend_if_ee('EE::Gitlab::BackgroundMigration::MigrateApproverToApprovalRulesInBatch')

# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # rubocop: disable Style/Documentation
    class MoveEpicIssuesAfterEpics
      def perform(start_id, stop_id)
      end
    end
  end
end

Gitlab::BackgroundMigration::MoveEpicIssuesAfterEpics.prepend_if_ee('EE::Gitlab::BackgroundMigration::MoveEpicIssuesAfterEpics')

# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # This background migration creates any approver rule records according
    # to the given merge request IDs range. A _single_ INSERT is issued for the given range.
    class PopulateAnyApprovalRuleForMergeRequests
      def perform(from_id, to_id)
      end
    end
  end
end

Gitlab::BackgroundMigration::PopulateAnyApprovalRuleForMergeRequests.prepend_if_ee('EE::Gitlab::BackgroundMigration::PopulateAnyApprovalRuleForMergeRequests')

# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # This background migration creates any approver rule records according
    # to the given project IDs range. A _single_ INSERT is issued for the given range.
    class PopulateAnyApprovalRuleForProjects
      def perform(from_id, to_id)
      end
    end
  end
end

Gitlab::BackgroundMigration::PopulateAnyApprovalRuleForProjects.prepend_if_ee('EE::Gitlab::BackgroundMigration::PopulateAnyApprovalRuleForProjects')

# frozen_string_literal: true
#
# rubocop:disable Style/Documentation
# This job is added to fix https://gitlab.com/gitlab-org/gitlab/issues/30229
# It's not used anywhere else.
# Can be removed in GitLab 13.*
module Gitlab
  module BackgroundMigration
    class PruneOrphanedGeoEvents
      def perform(table_name)
      end
    end
  end
end

Gitlab::BackgroundMigration::PruneOrphanedGeoEvents.prepend_if_ee('EE::Gitlab::BackgroundMigration::PruneOrphanedGeoEvents')

# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # rubocop: disable Style/Documentation
    class UpdateAuthorizedKeysFileSince
      def perform(cutoff_datetime)
      end
    end
  end
end

Gitlab::BackgroundMigration::UpdateAuthorizedKeysFileSince.prepend_if_ee('EE::Gitlab::BackgroundMigration::UpdateAuthorizedKeysFileSince')

# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # rubocop: disable Style/Documentation
    class UpdateVulnerabilityConfidence
      def perform(start_id, stop_id)
      end
    end
  end
end

Gitlab::BackgroundMigration::UpdateVulnerabilityConfidence.prepend_if_ee('EE::Gitlab::BackgroundMigration::UpdateVulnerabilityConfidence')
@@ -12,6 +12,15 @@ module Gitlab
       validations do
         validates :config, array_or_string: true

+        validate do
+          next unless opt(:max_size)
+          next unless config.is_a?(Array)
+
+          if config.size > opt(:max_size)
+            errors.add(:config, "is too long (maximum is #{opt(:max_size)})")
+          end
+        end
       end

       def self.aspects
......
@@ -9,7 +9,7 @@ module Gitlab
         @message = message
       end

-      def execute(project)
+      def execute(upload_parent:, uploader_class:)
         attachments = []

         message.attachments.each do |attachment|
@@ -23,7 +23,7 @@ module Gitlab
             content_type: attachment.content_type
           }

-          uploader = UploadService.new(project, file).execute
+          uploader = UploadService.new(upload_parent, file, uploader_class).execute
           attachments << uploader.to_h if uploader
         ensure
           tmp.close!
......
@@ -41,13 +41,20 @@ module Gitlab
       end

       def add_attachments(reply)
-        attachments = Email::AttachmentUploader.new(mail).execute(project)
+        attachments = Email::AttachmentUploader.new(mail).execute(upload_params)

         reply + attachments.map do |link|
           "\n\n#{link[:markdown]}"
         end.join
       end

+      def upload_params
+        {
+          upload_parent: project,
+          uploader_class: FileUploader
+        }
+      end
+
       def validate_permission!(permission)
         raise UserNotFoundError unless author
         raise UserBlockedError if author.blocked?
......
@@ -5639,12 +5639,30 @@ msgstr ""
 msgid "CycleAnalytics|No stages selected"
 msgstr ""

+msgid "CycleAnalytics|Number of tasks"
+msgstr ""
+
+msgid "CycleAnalytics|Showing %{subject} and %{selectedLabelsCount} labels"
+msgstr ""
+
+msgid "CycleAnalytics|Showing data for group '%{groupName}' and %{selectedProjectCount} projects from %{startDate} to %{endDate}"
+msgstr ""
+
+msgid "CycleAnalytics|Showing data for group '%{groupName}' from %{startDate} to %{endDate}"
+msgstr ""
+
 msgid "CycleAnalytics|Stages"
 msgstr ""

+msgid "CycleAnalytics|Tasks by type"
+msgstr ""
+
 msgid "CycleAnalytics|Total days to completion"
 msgstr ""

+msgid "CycleAnalytics|Type of work"
+msgstr ""
+
 msgid "CycleAnalytics|group dropdown filter"
 msgstr ""

@@ -5687,6 +5705,9 @@ msgstr ""
 msgid "Data is still calculating..."
 msgstr ""

+msgid "Date"
+msgstr ""
+
 msgid "Date picker"
 msgstr ""
......
@@ -619,107 +619,113 @@ describe('IDE store file actions', () => {
     });
   });

-  describe('discardFileChanges', () => {
+  describe('with changed file', () => {
     let tmpFile;

     beforeEach(() => {
-      jest.spyOn(eventHub, '$on').mockImplementation(() => {});
-      jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
-
       tmpFile = file('tempFile');
       tmpFile.content = 'testing';
       tmpFile.raw = ORIGINAL_CONTENT;
       store.state.changedFiles.push(tmpFile);
       store.state.entries[tmpFile.path] = tmpFile;
+
+      jest.spyOn(store, 'dispatch');
     });

-    it('resets file content', done => {
-      store
-        .dispatch('discardFileChanges', tmpFile.path)
-        .then(() => {
-          expect(tmpFile.content).toBe(ORIGINAL_CONTENT);
-
-          done();
-        })
-        .catch(done.fail);
-    });
-
-    it('removes file from changedFiles array', done => {
-      store
-        .dispatch('discardFileChanges', tmpFile.path)
-        .then(() => {
-          expect(store.state.changedFiles.length).toBe(0);
-
-          done();
-        })
-        .catch(done.fail);
-    });
-
-    it('closes temp file and deletes it', () => {
-      tmpFile.tempFile = true;
-      tmpFile.opened = true;
-      tmpFile.parentPath = 'parentFile';
-      store.state.entries.parentFile = file('parentFile');
-
-      actions.discardFileChanges(store, tmpFile.path);
-
-      expect(store.dispatch).toHaveBeenCalledWith('closeFile', tmpFile);
-      expect(store.dispatch).toHaveBeenCalledWith('deleteEntry', tmpFile.path);
-    });
-
-    describe('with renamed file', () => {
-      beforeEach(() => {
-        Object.assign(tmpFile, {
-          prevPath: 'parentPath/old_name',
-          prevName: 'old_name',
-          prevParentPath: 'parentPath',
-        });
-
-        store.state.entries.parentPath = file('parentPath');
-
-        actions.discardFileChanges(store, tmpFile.path);
-      });
-
-      it('renames the file to its original name and closes it if it was open', () => {
-        expect(store.dispatch).toHaveBeenCalledWith('closeFile', tmpFile);
-        expect(store.dispatch).toHaveBeenCalledWith('renameEntry', {
-          path: 'tempFile',
-          name: 'old_name',
-          parentPath: 'parentPath',
-        });
-      });
-
-      it('resets file content', () => {
-        expect(tmpFile.content).toBe(ORIGINAL_CONTENT);
-      });
-    });
-
-    it('pushes route for active file', done => {
-      tmpFile.active = true;
-      store.state.openFiles.push(tmpFile);
-
-      store
-        .dispatch('discardFileChanges', tmpFile.path)
-        .then(() => {
-          expect(router.push).toHaveBeenCalledWith(`/project${tmpFile.url}`);
-
-          done();
-        })
-        .catch(done.fail);
-    });
-
-    it('emits eventHub event to dispose cached model', done => {
-      store
-        .dispatch('discardFileChanges', tmpFile.path)
-        .then(() => {
-          expect(eventHub.$emit).toHaveBeenCalled();
-
-          done();
-        })
-        .catch(done.fail);
+    describe('restoreOriginalFile', () => {
+      it('resets file content', () =>
+        store.dispatch('restoreOriginalFile', tmpFile.path).then(() => {
+          expect(tmpFile.content).toBe(ORIGINAL_CONTENT);
+        }));
+
+      it('closes temp file and deletes it', () => {
+        tmpFile.tempFile = true;
+        tmpFile.opened = true;
+        tmpFile.parentPath = 'parentFile';
+        store.state.entries.parentFile = file('parentFile');
+
+        actions.restoreOriginalFile(store, tmpFile.path);
+
+        expect(store.dispatch).toHaveBeenCalledWith('closeFile', tmpFile);
+        expect(store.dispatch).toHaveBeenCalledWith('deleteEntry', tmpFile.path);
+      });
+
+      describe('with renamed file', () => {
+        beforeEach(() => {
+          Object.assign(tmpFile, {
+            prevPath: 'parentPath/old_name',
+            prevName: 'old_name',
+            prevParentPath: 'parentPath',
+          });
+
+          store.state.entries.parentPath = file('parentPath');
+
+          actions.restoreOriginalFile(store, tmpFile.path);
+        });
+
+        it('renames the file to its original name and closes it if it was open', () => {
+          expect(store.dispatch).toHaveBeenCalledWith('closeFile', tmpFile);
+          expect(store.dispatch).toHaveBeenCalledWith('renameEntry', {
+            path: 'tempFile',
+            name: 'old_name',
+            parentPath: 'parentPath',
+          });
+        });
+
+        it('resets file content', () => {
+          expect(tmpFile.content).toBe(ORIGINAL_CONTENT);
+        });
+      });
+    });
+
+    describe('discardFileChanges', () => {
+      beforeEach(() => {
+        jest.spyOn(eventHub, '$on').mockImplementation(() => {});
+        jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
+      });
+
+      describe('with regular file', () => {
+        beforeEach(() => {
+          actions.discardFileChanges(store, tmpFile.path);
+        });
+
+        it('restores original file', () => {
+          expect(store.dispatch).toHaveBeenCalledWith('restoreOriginalFile', tmpFile.path);
+        });
+
+        it('removes file from changedFiles array', () => {
+          expect(store.state.changedFiles.length).toBe(0);
+        });
+
+        it('does not push a new route', () => {
+          expect(router.push).not.toHaveBeenCalled();
+        });
+
+        it('emits eventHub event to dispose cached model', () => {
+          actions.discardFileChanges(store, tmpFile.path);
+
+          expect(eventHub.$emit).toHaveBeenCalledWith(
+            `editor.update.model.new.content.${tmpFile.key}`,
+            ORIGINAL_CONTENT,
+          );
+          expect(eventHub.$emit).toHaveBeenCalledWith(
+            `editor.update.model.dispose.unstaged-${tmpFile.key}`,
+            ORIGINAL_CONTENT,
+          );
+        });
+      });
+
+      describe('with active file', () => {
+        beforeEach(() => {
+          tmpFile.active = true;
+          store.state.openFiles.push(tmpFile);
+
+          actions.discardFileChanges(store, tmpFile.path);
+        });
+
+        it('pushes route for active file', () => {
+          expect(router.push).toHaveBeenCalledWith(`/project${tmpFile.url}`);
+        });
+      });
     });
   });
 });
......
@@ -61,24 +61,25 @@ describe('Multi-file store actions', () => {
   });

   describe('discardAllChanges', () => {
-    let f;
+    const paths = ['to_discard', 'another_one_to_discard'];

     beforeEach(() => {
-      f = file('discardAll');
-      f.changed = true;
-      store.state.openFiles.push(f);
-      store.state.changedFiles.push(f);
-      store.state.entries[f.path] = f;
+      paths.forEach(path => {
+        const f = file(path);
+        f.changed = true;
+
+        store.state.openFiles.push(f);
+        store.state.changedFiles.push(f);
+        store.state.entries[f.path] = f;
+      });
     });

-    it('discards changes in file', done => {
-      store
-        .dispatch('discardAllChanges')
-        .then(() => {
-          expect(store.state.openFiles.changed).toBeFalsy();
-        })
-        .then(done)
-        .catch(done.fail);
+    it('discards all changes in file', () => {
+      const expectedCalls = paths.map(path => ['restoreOriginalFile', path]);
+
+      discardAllChanges(store);
+
+      expect(store.dispatch.calls.allArgs()).toEqual(jasmine.arrayContaining(expectedCalls));
     });

     it('removes all files from changedFiles state', done => {
@@ -86,64 +87,11 @@ describe('Multi-file store actions', () => {
         .dispatch('discardAllChanges')
         .then(() => {
           expect(store.state.changedFiles.length).toBe(0);
-          expect(store.state.openFiles.length).toBe(1);
+          expect(store.state.openFiles.length).toBe(2);
         })
         .then(done)
        .catch(done.fail);
     });
-
-    it('closes the temp file and deletes it if it was open', done => {
-      f.tempFile = true;
-
-      testAction(
-        discardAllChanges,
-        undefined,
-        store.state,
-        [{ type: types.REMOVE_ALL_CHANGES_FILES }],
-        [
-          { type: 'closeFile', payload: jasmine.objectContaining({ path: 'discardAll' }) },
-          { type: 'deleteEntry', payload: 'discardAll' },
-        ],
-        done,
-      );
-    });
-
-    it('renames the file to its original name and closes it if it was open', done => {
-      Object.assign(f, {
-        prevPath: 'parent/path/old_name',
-        prevName: 'old_name',
-        prevParentPath: 'parent/path',
-      });
-
-      testAction(
-        discardAllChanges,
-        undefined,
-        store.state,
-        [{ type: types.REMOVE_ALL_CHANGES_FILES }],
-        [
-          { type: 'closeFile', payload: jasmine.objectContaining({ path: 'discardAll' }) },
-          {
-            type: 'renameEntry',
-            payload: { path: 'discardAll', name: 'old_name', parentPath: 'parent/path' },
-          },
-        ],
-        done,
-      );
-    });
-
-    it('discards file changes on all other files', done => {
-      testAction(
-        discardAllChanges,
-        undefined,
-        store.state,
-        [
-          { type: types.DISCARD_FILE_CHANGES, payload: 'discardAll' },
-          { type: types.REMOVE_ALL_CHANGES_FILES },
-        ],
-        [],
-        done,
-      );
-    });
   });

   describe('closeAllFiles', () => {
......
@@ -9,7 +9,7 @@ describe Gitlab::Email::AttachmentUploader do
   let(:message) { Mail::Message.new(message_raw) }

   it "uploads all attachments and returns their links" do
-    links = described_class.new(message).execute(project)
+    links = described_class.new(message).execute(upload_parent: project, uploader_class: FileUploader)

     link = links.first
     expect(link).not_to be_nil
......
@@ -181,10 +181,21 @@ describe Gitlab::Email::Handler::CreateNoteHandler do
     it_behaves_like 'a reply to existing comment'

     it "adds all attachments" do
+      expect_next_instance_of(Gitlab::Email::AttachmentUploader) do |uploader|
+        expect(uploader).to receive(:execute).with(upload_parent: project, uploader_class: FileUploader).and_return(
+          [
+            {
+              url: "uploads/image.png",
+              alt: "image",
+              markdown: markdown
+            }
+          ]
+        )
+      end
+
       receiver.execute

       note = noteable.notes.last

       expect(note.note).to include(markdown)
     end
......
@@ -3461,44 +3461,7 @@ d3-zoom@1:
     d3-selection "1"
     d3-transition "1"

-d3@^5.12.0, d3@^5.7.0:
-  version "5.12.0"
-  resolved "https://registry.yarnpkg.com/d3/-/d3-5.12.0.tgz#0ddeac879c28c882317cd439b495290acd59ab61"
-  integrity sha512-flYVMoVuhPFHd9zVCe2BxIszUWqBcd5fvQGMNRmSiBrgdnh6Vlruh60RJQTouAK9xPbOB0plxMvBm4MoyODXNg==
-  dependencies:
-    d3-array "1"
-    d3-axis "1"
-    d3-brush "1"
-    d3-chord "1"
-    d3-collection "1"
-    d3-color "1"
-    d3-contour "1"
-    d3-dispatch "1"
-    d3-drag "1"
-    d3-dsv "1"
-    d3-ease "1"
-    d3-fetch "1"
-    d3-force "1"
-    d3-format "1"
-    d3-geo "1"
-    d3-hierarchy "1"
-    d3-interpolate "1"
-    d3-path "1"
-    d3-polygon "1"
-    d3-quadtree "1"
-    d3-random "1"
-    d3-scale "2"
-    d3-scale-chromatic "1"
-    d3-selection "1"
-    d3-shape "1"
-    d3-time "1"
-    d3-time-format "2"
-    d3-timer "1"
-    d3-transition "1"
-    d3-voronoi "1"
-    d3-zoom "1"
-
-d3@^5.14:
+d3@^5.12.0, d3@^5.14, d3@^5.7.0:
   version "5.15.0"
   resolved "https://registry.yarnpkg.com/d3/-/d3-5.15.0.tgz#ffd44958e6a3cb8a59a84429c45429b8bca5677a"
   integrity sha512-C+E80SL2nLLtmykZ6klwYj5rPqB5nlfN5LdWEAVdWPppqTD8taoJi2PxLZjPeYT8FFRR2yucXq+kBlOnnvZeLg==
......