Commit 02ab65d4 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 44113533
......@@ -155,11 +155,10 @@ Rails/ApplicationRecord:
# as they need to be as decoupled from application code as possible
- db/**/*.rb
- lib/gitlab/background_migration/**/*.rb
- ee/lib/ee/gitlab/background_migration/**/*.rb
- lib/gitlab/database/**/*.rb
- spec/**/*.rb
- ee/db/**/*.rb
- ee/lib/gitlab/background_migration/**/*.rb
- ee/lib/ee/gitlab/background_migration/**/*.rb
- ee/spec/**/*.rb
# GitLab ###################################################################
......@@ -233,7 +232,8 @@ RSpec/FactoriesInMigrationSpecs:
- 'spec/migrations/**/*.rb'
- 'ee/spec/migrations/**/*.rb'
- 'spec/lib/gitlab/background_migration/**/*.rb'
- 'ee/spec/lib/gitlab/background_migration/**/*.rb'
- 'spec/lib/ee/gitlab/background_migration/**/*.rb'
- 'ee/spec/lib/ee/gitlab/background_migration/**/*.rb'
Cop/IncludeActionViewContext:
Enabled: true
......@@ -365,4 +365,4 @@ Style/MultilineWhenThen:
Enabled: false
Style/FloatDivision:
Enabled: false
\ No newline at end of file
Enabled: false
......@@ -16,21 +16,7 @@ export const redirectToUrl = (self, url) => visitUrl(url);
export const setInitialData = ({ commit }, data) => commit(types.SET_INITIAL_DATA, data);
export const discardAllChanges = ({ state, commit, dispatch }) => {
state.changedFiles.forEach(file => {
if (file.tempFile || file.prevPath) dispatch('closeFile', file);
if (file.tempFile) {
dispatch('deleteEntry', file.path);
} else if (file.prevPath) {
dispatch('renameEntry', {
path: file.path,
name: file.prevName,
parentPath: file.prevParentPath,
});
} else {
commit(types.DISCARD_FILE_CHANGES, file.path);
}
});
state.changedFiles.forEach(file => dispatch('restoreOriginalFile', file.path));
commit(types.REMOVE_ALL_CHANGES_FILES);
};
......
......@@ -191,38 +191,47 @@ export const setFileViewMode = ({ commit }, { file, viewMode }) => {
commit(types.SET_FILE_VIEWMODE, { file, viewMode });
};
export const discardFileChanges = ({ dispatch, state, commit, getters }, path) => {
export const restoreOriginalFile = ({ dispatch, state, commit }, path) => {
const file = state.entries[path];
const isDestructiveDiscard = file.tempFile || file.prevPath;
if (file.deleted && file.parentPath) {
dispatch('restoreTree', file.parentPath);
}
if (file.tempFile || file.prevPath) {
if (isDestructiveDiscard) {
dispatch('closeFile', file);
}
if (file.tempFile) {
dispatch('deleteEntry', file.path);
} else {
commit(types.DISCARD_FILE_CHANGES, file.path);
dispatch('renameEntry', {
path: file.path,
name: file.prevName,
parentPath: file.prevParentPath,
});
}
if (file.tempFile) {
dispatch('deleteEntry', file.path);
} else {
commit(types.DISCARD_FILE_CHANGES, path);
if (getters.activeFile && file.path === getters.activeFile.path) {
dispatch('updateDelayViewerUpdated', true)
.then(() => {
router.push(`/project${file.url}`);
})
.catch(e => {
throw e;
});
}
commit(types.DISCARD_FILE_CHANGES, file.path);
}
if (file.prevPath) {
dispatch('renameEntry', {
path: file.path,
name: file.prevName,
parentPath: file.prevParentPath,
});
}
};
export const discardFileChanges = ({ dispatch, state, commit, getters }, path) => {
const file = state.entries[path];
const isDestructiveDiscard = file.tempFile || file.prevPath;
dispatch('restoreOriginalFile', path);
if (!isDestructiveDiscard && file.path === getters.activeFile?.path) {
dispatch('updateDelayViewerUpdated', true)
.then(() => {
router.push(`/project${file.url}`);
})
.catch(e => {
throw e;
});
}
commit(types.REMOVE_FILE_FROM_CHANGED, path);
......
- type = local_assigns.fetch(:type)
- board = local_assigns.fetch(:board, nil)
- show_sorting_dropdown = local_assigns.fetch(:show_sorting_dropdown, true)
- is_not_boards_modal_or_productivity_analytics = type != :boards_modal && type != :productivity_analytics
- block_css_class = is_not_boards_modal_or_productivity_analytics ? 'row-content-block second-block' : ''
- user_can_admin_list = board && can?(current_user, :admin_list, board.resource_parent)
......@@ -169,5 +170,5 @@
- if @project
#js-add-issues-btn.prepend-left-10{ data: { can_admin_list: can?(current_user, :admin_list, @project) } }
#js-toggle-focus-btn
- elsif is_not_boards_modal_or_productivity_analytics
- elsif is_not_boards_modal_or_productivity_analytics && show_sorting_dropdown
= render 'shared/issuable/sort_dropdown'
---
title: 'Fix: undefined background migration classes for EE-CE downgrades'
merge_request: 22160
author:
type: fixed
---
title: Fix discard all to behave like discard single file in Web IDE
merge_request: 22572
author:
type: fixed
......@@ -161,7 +161,7 @@ test:
only: [merge_requests]
except:
variables:
$CI_COMMIT_REF_NAME =~ /^docs-/
- $CI_COMMIT_REF_NAME =~ /^docs-/
```
## Important notes about merge requests from forked projects
......
......@@ -17,34 +17,29 @@ NOTE: **Note:**
Coming over to GitLab from Jenkins? Check out our [reference](../jenkins/index.md)
for converting your pre-existing pipelines over to our format.
GitLab offers a [continuous integration][ci] service. If you
[add a `.gitlab-ci.yml` file][yaml] to the root directory of your repository,
and configure your GitLab project to use a [Runner], then each commit or
push triggers your CI [pipeline].
GitLab offers a [continuous integration](https://about.gitlab.com/product/continuous-integration/) service. For each commit or push to trigger your CI
[pipeline](../pipelines.md), you must:
The `.gitlab-ci.yml` file tells the GitLab Runner what to do. By default it runs
a pipeline with three [stages]: `build`, `test`, and `deploy`. You don't need to
use all three stages; stages with no jobs are simply ignored.
- Add a [`.gitlab-ci.yml` file](#creating-a-gitlab-ciyml-file) to your repository's root directory.
- Ensure your project is configured to use a [Runner](#configuring-a-runner).
If everything runs OK (no non-zero return values), you'll get a nice green
checkmark associated with the commit. This makes it
easy to see whether a commit caused any of the tests to fail before
you even look at the code.
The `.gitlab-ci.yml` file tells the GitLab Runner what to do. A simple pipeline commonly has
three [stages](../yaml/README.md#stages):
Most projects use GitLab's CI service to run the test suite so that
developers get immediate feedback if they broke something.
- `build`
- `test`
- `deploy`
There's a growing trend to use continuous delivery and continuous deployment to
automatically deploy tested code to staging and production environments.
You do not need to use all three stages; stages with no jobs are ignored.
So in brief, the steps needed to have a working CI can be summed up to:
The pipeline appears under the project's **CI/CD > Pipelines** page. If everything runs OK (no non-zero
return values), you get a green check mark associated with the commit. This makes it easy to see
whether a commit caused any of the tests to fail before you even look at the job (test) log. Many projects use
GitLab's CI service to run the test suite, so developers get immediate feedback if they broke
something.
1. Add `.gitlab-ci.yml` to the root directory of your repository
1. Configure a Runner
From there on, on every push to your Git repository, the Runner will
automatically start the pipeline and the pipeline will appear under the
project's **Pipelines** page.
It's also common to use pipelines to automatically deploy
tested code to staging and production environments.
---
......@@ -237,9 +232,4 @@ CI with various languages.
[runner-install]: https://docs.gitlab.com/runner/install/
[blog-ci]: https://about.gitlab.com/blog/2015/05/06/why-were-replacing-gitlab-ci-jobs-with-gitlab-ci-dot-yml/
[examples]: ../examples/README.md
[ci]: https://about.gitlab.com/product/continuous-integration/
[yaml]: ../yaml/README.md
[runner]: ../runners/README.md
[enabled]: ../enable_or_disable_ci.md
[stages]: ../yaml/README.md#stages
[pipeline]: ../pipelines.md
......@@ -2719,10 +2719,10 @@ can lead to errors during the deployment.
To avoid these errors, the `resource_group` attribute can be used to ensure that
the Runner will not run certain jobs simultaneously.
When the `resource_group` key is defined in a job in `.gitlab-ci.yml`,
job runs are mutually exclusive across different pipelines in the same project.
When the `resource_group` key is defined for a job in `.gitlab-ci.yml`,
job executions are mutually exclusive across different pipelines for the same project.
If multiple jobs belonging to the same resource group are enqueued simultaneously,
only one of them will be picked by the Runner, and the other jobs will wait until the
only one of the jobs will be picked by the Runner, and the other jobs will wait until the
`resource_group` is free.
Here is a simple example:
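A minimal sketch, assuming a single deployment job (the job name and script are placeholders, not taken from the collapsed lines below):

```yaml
deploy-to-production:
  stage: deploy
  script:
    - echo "Deploying"            # placeholder deploy command
  resource_group: production      # only one job in this group runs at a time
```

With `resource_group: production`, if this job is enqueued by two pipelines at once, the Runner picks one job and the other waits until the resource group is free.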
......
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class BackfillVersionDataFromGitaly
def perform(issue_id)
end
end
end
end
Gitlab::BackgroundMigration::BackfillVersionDataFromGitaly.prepend_if_ee('EE::Gitlab::BackgroundMigration::BackfillVersionDataFromGitaly')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class GenerateGitlabSubscriptions
def perform(start_id, stop_id)
end
end
end
end
Gitlab::BackgroundMigration::GenerateGitlabSubscriptions.prepend_if_ee('EE::Gitlab::BackgroundMigration::GenerateGitlabSubscriptions')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class MigrateApproverToApprovalRules
# @param target_type [String] class of target, either 'MergeRequest' or 'Project'
# @param target_id [Integer] id of target
def perform(target_type, target_id, sync_code_owner_rule: true)
end
end
end
end
Gitlab::BackgroundMigration::MigrateApproverToApprovalRules.prepend_if_ee('EE::Gitlab::BackgroundMigration::MigrateApproverToApprovalRules')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class MigrateApproverToApprovalRulesCheckProgress
def perform
end
end
end
end
Gitlab::BackgroundMigration::MigrateApproverToApprovalRulesCheckProgress.prepend_if_ee('EE::Gitlab::BackgroundMigration::MigrateApproverToApprovalRulesCheckProgress')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class MigrateApproverToApprovalRulesInBatch
def perform(start_id, end_id)
end
end
end
end
Gitlab::BackgroundMigration::MigrateApproverToApprovalRulesInBatch.prepend_if_ee('EE::Gitlab::BackgroundMigration::MigrateApproverToApprovalRulesInBatch')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class MoveEpicIssuesAfterEpics
def perform(start_id, stop_id)
end
end
end
end
Gitlab::BackgroundMigration::MoveEpicIssuesAfterEpics.prepend_if_ee('EE::Gitlab::BackgroundMigration::MoveEpicIssuesAfterEpics')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This background migration creates any approver rule records according
# to the given merge request IDs range. A _single_ INSERT is issued for the given range.
class PopulateAnyApprovalRuleForMergeRequests
def perform(from_id, to_id)
end
end
end
end
Gitlab::BackgroundMigration::PopulateAnyApprovalRuleForMergeRequests.prepend_if_ee('EE::Gitlab::BackgroundMigration::PopulateAnyApprovalRuleForMergeRequests')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This background migration creates any approver rule records according
# to the given project IDs range. A _single_ INSERT is issued for the given range.
class PopulateAnyApprovalRuleForProjects
def perform(from_id, to_id)
end
end
end
end
Gitlab::BackgroundMigration::PopulateAnyApprovalRuleForProjects.prepend_if_ee('EE::Gitlab::BackgroundMigration::PopulateAnyApprovalRuleForProjects')
# frozen_string_literal: true
#
# rubocop:disable Style/Documentation
# This job is added to fix https://gitlab.com/gitlab-org/gitlab/issues/30229
# It's not used anywhere else.
# Can be removed in GitLab 13.*
module Gitlab
module BackgroundMigration
class PruneOrphanedGeoEvents
def perform(table_name)
end
end
end
end
Gitlab::BackgroundMigration::PruneOrphanedGeoEvents.prepend_if_ee('EE::Gitlab::BackgroundMigration::PruneOrphanedGeoEvents')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class UpdateAuthorizedKeysFileSince
def perform(cutoff_datetime)
end
end
end
end
Gitlab::BackgroundMigration::UpdateAuthorizedKeysFileSince.prepend_if_ee('EE::Gitlab::BackgroundMigration::UpdateAuthorizedKeysFileSince')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class UpdateVulnerabilityConfidence
def perform(start_id, stop_id)
end
end
end
end
Gitlab::BackgroundMigration::UpdateVulnerabilityConfidence.prepend_if_ee('EE::Gitlab::BackgroundMigration::UpdateVulnerabilityConfidence')
......@@ -12,6 +12,15 @@ module Gitlab
validations do
validates :config, array_or_string: true
validate do
next unless opt(:max_size)
next unless config.is_a?(Array)
if config.size > opt(:max_size)
errors.add(:config, "is too long (maximum is #{opt(:max_size)})")
end
end
end
def self.aspects
......
......@@ -9,7 +9,7 @@ module Gitlab
@message = message
end
def execute(project)
def execute(upload_parent:, uploader_class:)
attachments = []
message.attachments.each do |attachment|
......@@ -23,7 +23,7 @@ module Gitlab
content_type: attachment.content_type
}
uploader = UploadService.new(project, file).execute
uploader = UploadService.new(upload_parent, file, uploader_class).execute
attachments << uploader.to_h if uploader
ensure
tmp.close!
......
......@@ -41,13 +41,20 @@ module Gitlab
end
def add_attachments(reply)
attachments = Email::AttachmentUploader.new(mail).execute(project)
attachments = Email::AttachmentUploader.new(mail).execute(upload_params)
reply + attachments.map do |link|
"\n\n#{link[:markdown]}"
end.join
end
def upload_params
{
upload_parent: project,
uploader_class: FileUploader
}
end
def validate_permission!(permission)
raise UserNotFoundError unless author
raise UserBlockedError if author.blocked?
......
......@@ -5639,12 +5639,30 @@ msgstr ""
msgid "CycleAnalytics|No stages selected"
msgstr ""
msgid "CycleAnalytics|Number of tasks"
msgstr ""
msgid "CycleAnalytics|Showing %{subject} and %{selectedLabelsCount} labels"
msgstr ""
msgid "CycleAnalytics|Showing data for group '%{groupName}' and %{selectedProjectCount} projects from %{startDate} to %{endDate}"
msgstr ""
msgid "CycleAnalytics|Showing data for group '%{groupName}' from %{startDate} to %{endDate}"
msgstr ""
msgid "CycleAnalytics|Stages"
msgstr ""
msgid "CycleAnalytics|Tasks by type"
msgstr ""
msgid "CycleAnalytics|Total days to completion"
msgstr ""
msgid "CycleAnalytics|Type of work"
msgstr ""
msgid "CycleAnalytics|group dropdown filter"
msgstr ""
......@@ -5687,6 +5705,9 @@ msgstr ""
msgid "Data is still calculating..."
msgstr ""
msgid "Date"
msgstr ""
msgid "Date picker"
msgstr ""
......
......@@ -619,107 +619,113 @@ describe('IDE store file actions', () => {
});
});
describe('discardFileChanges', () => {
describe('with changed file', () => {
let tmpFile;
beforeEach(() => {
jest.spyOn(eventHub, '$on').mockImplementation(() => {});
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
tmpFile = file('tempFile');
tmpFile.content = 'testing';
tmpFile.raw = ORIGINAL_CONTENT;
store.state.changedFiles.push(tmpFile);
store.state.entries[tmpFile.path] = tmpFile;
jest.spyOn(store, 'dispatch');
});
it('resets file content', done => {
store
.dispatch('discardFileChanges', tmpFile.path)
.then(() => {
describe('restoreOriginalFile', () => {
it('resets file content', () =>
store.dispatch('restoreOriginalFile', tmpFile.path).then(() => {
expect(tmpFile.content).toBe(ORIGINAL_CONTENT);
}));
done();
})
.catch(done.fail);
});
it('closes temp file and deletes it', () => {
tmpFile.tempFile = true;
tmpFile.opened = true;
tmpFile.parentPath = 'parentFile';
store.state.entries.parentFile = file('parentFile');
it('removes file from changedFiles array', done => {
store
.dispatch('discardFileChanges', tmpFile.path)
.then(() => {
expect(store.state.changedFiles.length).toBe(0);
actions.restoreOriginalFile(store, tmpFile.path);
done();
})
.catch(done.fail);
});
expect(store.dispatch).toHaveBeenCalledWith('closeFile', tmpFile);
expect(store.dispatch).toHaveBeenCalledWith('deleteEntry', tmpFile.path);
});
it('closes temp file and deletes it', () => {
tmpFile.tempFile = true;
tmpFile.opened = true;
tmpFile.parentPath = 'parentFile';
store.state.entries.parentFile = file('parentFile');
describe('with renamed file', () => {
beforeEach(() => {
Object.assign(tmpFile, {
prevPath: 'parentPath/old_name',
prevName: 'old_name',
prevParentPath: 'parentPath',
});
actions.discardFileChanges(store, tmpFile.path);
store.state.entries.parentPath = file('parentPath');
expect(store.dispatch).toHaveBeenCalledWith('closeFile', tmpFile);
expect(store.dispatch).toHaveBeenCalledWith('deleteEntry', tmpFile.path);
});
actions.restoreOriginalFile(store, tmpFile.path);
});
describe('with renamed file', () => {
beforeEach(() => {
Object.assign(tmpFile, {
prevPath: 'parentPath/old_name',
prevName: 'old_name',
prevParentPath: 'parentPath',
it('renames the file to its original name and closes it if it was open', () => {
expect(store.dispatch).toHaveBeenCalledWith('closeFile', tmpFile);
expect(store.dispatch).toHaveBeenCalledWith('renameEntry', {
path: 'tempFile',
name: 'old_name',
parentPath: 'parentPath',
});
});
store.state.entries.parentPath = file('parentPath');
it('resets file content', () => {
expect(tmpFile.content).toBe(ORIGINAL_CONTENT);
});
});
});
actions.discardFileChanges(store, tmpFile.path);
describe('discardFileChanges', () => {
beforeEach(() => {
jest.spyOn(eventHub, '$on').mockImplementation(() => {});
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
});
it('renames the file to its original name and closes it if it was open', () => {
expect(store.dispatch).toHaveBeenCalledWith('closeFile', tmpFile);
expect(store.dispatch).toHaveBeenCalledWith('renameEntry', {
path: 'tempFile',
name: 'old_name',
parentPath: 'parentPath',
describe('with regular file', () => {
beforeEach(() => {
actions.discardFileChanges(store, tmpFile.path);
});
});
it('resets file content', () => {
expect(tmpFile.content).toBe(ORIGINAL_CONTENT);
});
});
it('restores original file', () => {
expect(store.dispatch).toHaveBeenCalledWith('restoreOriginalFile', tmpFile.path);
});
it('pushes route for active file', done => {
tmpFile.active = true;
store.state.openFiles.push(tmpFile);
it('removes file from changedFiles array', () => {
expect(store.state.changedFiles.length).toBe(0);
});
store
.dispatch('discardFileChanges', tmpFile.path)
.then(() => {
expect(router.push).toHaveBeenCalledWith(`/project${tmpFile.url}`);
it('does not push a new route', () => {
expect(router.push).not.toHaveBeenCalled();
});
done();
})
.catch(done.fail);
});
it('emits eventHub event to dispose cached model', () => {
actions.discardFileChanges(store, tmpFile.path);
expect(eventHub.$emit).toHaveBeenCalledWith(
`editor.update.model.new.content.${tmpFile.key}`,
ORIGINAL_CONTENT,
);
expect(eventHub.$emit).toHaveBeenCalledWith(
`editor.update.model.dispose.unstaged-${tmpFile.key}`,
ORIGINAL_CONTENT,
);
});
});
it('emits eventHub event to dispose cached model', done => {
store
.dispatch('discardFileChanges', tmpFile.path)
.then(() => {
expect(eventHub.$emit).toHaveBeenCalled();
describe('with active file', () => {
beforeEach(() => {
tmpFile.active = true;
store.state.openFiles.push(tmpFile);
done();
})
.catch(done.fail);
actions.discardFileChanges(store, tmpFile.path);
});
it('pushes route for active file', () => {
expect(router.push).toHaveBeenCalledWith(`/project${tmpFile.url}`);
});
});
});
});
......
......@@ -61,24 +61,25 @@ describe('Multi-file store actions', () => {
});
describe('discardAllChanges', () => {
let f;
const paths = ['to_discard', 'another_one_to_discard'];
beforeEach(() => {
f = file('discardAll');
f.changed = true;
paths.forEach(path => {
const f = file(path);
f.changed = true;
store.state.openFiles.push(f);
store.state.changedFiles.push(f);
store.state.entries[f.path] = f;
store.state.openFiles.push(f);
store.state.changedFiles.push(f);
store.state.entries[f.path] = f;
});
});
it('discards changes in file', done => {
store
.dispatch('discardAllChanges')
.then(() => {
expect(store.state.openFiles.changed).toBeFalsy();
})
.then(done)
.catch(done.fail);
it('discards all changes in file', () => {
const expectedCalls = paths.map(path => ['restoreOriginalFile', path]);
discardAllChanges(store);
expect(store.dispatch.calls.allArgs()).toEqual(jasmine.arrayContaining(expectedCalls));
});
it('removes all files from changedFiles state', done => {
......@@ -86,64 +87,11 @@ describe('Multi-file store actions', () => {
.dispatch('discardAllChanges')
.then(() => {
expect(store.state.changedFiles.length).toBe(0);
expect(store.state.openFiles.length).toBe(1);
expect(store.state.openFiles.length).toBe(2);
})
.then(done)
.catch(done.fail);
});
it('closes the temp file and deletes it if it was open', done => {
f.tempFile = true;
testAction(
discardAllChanges,
undefined,
store.state,
[{ type: types.REMOVE_ALL_CHANGES_FILES }],
[
{ type: 'closeFile', payload: jasmine.objectContaining({ path: 'discardAll' }) },
{ type: 'deleteEntry', payload: 'discardAll' },
],
done,
);
});
it('renames the file to its original name and closes it if it was open', done => {
Object.assign(f, {
prevPath: 'parent/path/old_name',
prevName: 'old_name',
prevParentPath: 'parent/path',
});
testAction(
discardAllChanges,
undefined,
store.state,
[{ type: types.REMOVE_ALL_CHANGES_FILES }],
[
{ type: 'closeFile', payload: jasmine.objectContaining({ path: 'discardAll' }) },
{
type: 'renameEntry',
payload: { path: 'discardAll', name: 'old_name', parentPath: 'parent/path' },
},
],
done,
);
});
it('discards file changes on all other files', done => {
testAction(
discardAllChanges,
undefined,
store.state,
[
{ type: types.DISCARD_FILE_CHANGES, payload: 'discardAll' },
{ type: types.REMOVE_ALL_CHANGES_FILES },
],
[],
done,
);
});
});
describe('closeAllFiles', () => {
......
......@@ -9,7 +9,7 @@ describe Gitlab::Email::AttachmentUploader do
let(:message) { Mail::Message.new(message_raw) }
it "uploads all attachments and returns their links" do
links = described_class.new(message).execute(project)
links = described_class.new(message).execute(upload_parent: project, uploader_class: FileUploader)
link = links.first
expect(link).not_to be_nil
......
......@@ -181,10 +181,21 @@ describe Gitlab::Email::Handler::CreateNoteHandler do
it_behaves_like 'a reply to existing comment'
it "adds all attachments" do
expect_next_instance_of(Gitlab::Email::AttachmentUploader) do |uploader|
expect(uploader).to receive(:execute).with(upload_parent: project, uploader_class: FileUploader).and_return(
[
{
url: "uploads/image.png",
alt: "image",
markdown: markdown
}
]
)
end
receiver.execute
note = noteable.notes.last
expect(note.note).to include(markdown)
end
......
......@@ -3461,44 +3461,7 @@ d3-zoom@1:
d3-selection "1"
d3-transition "1"
d3@^5.12.0, d3@^5.7.0:
version "5.12.0"
resolved "https://registry.yarnpkg.com/d3/-/d3-5.12.0.tgz#0ddeac879c28c882317cd439b495290acd59ab61"
integrity sha512-flYVMoVuhPFHd9zVCe2BxIszUWqBcd5fvQGMNRmSiBrgdnh6Vlruh60RJQTouAK9xPbOB0plxMvBm4MoyODXNg==
dependencies:
d3-array "1"
d3-axis "1"
d3-brush "1"
d3-chord "1"
d3-collection "1"
d3-color "1"
d3-contour "1"
d3-dispatch "1"
d3-drag "1"
d3-dsv "1"
d3-ease "1"
d3-fetch "1"
d3-force "1"
d3-format "1"
d3-geo "1"
d3-hierarchy "1"
d3-interpolate "1"
d3-path "1"
d3-polygon "1"
d3-quadtree "1"
d3-random "1"
d3-scale "2"
d3-scale-chromatic "1"
d3-selection "1"
d3-shape "1"
d3-time "1"
d3-time-format "2"
d3-timer "1"
d3-transition "1"
d3-voronoi "1"
d3-zoom "1"
d3@^5.14:
d3@^5.12.0, d3@^5.14, d3@^5.7.0:
version "5.15.0"
resolved "https://registry.yarnpkg.com/d3/-/d3-5.15.0.tgz#ffd44958e6a3cb8a59a84429c45429b8bca5677a"
integrity sha512-C+E80SL2nLLtmykZ6klwYj5rPqB5nlfN5LdWEAVdWPppqTD8taoJi2PxLZjPeYT8FFRR2yucXq+kBlOnnvZeLg==
......