Commit f058b52b authored by Douwe Maan

Merge branch 'master' into new-resolvable-discussion

parents 130a1e86 7d3f37b0
......@@ -356,3 +356,5 @@ gem 'sys-filesystem', '~> 1.1.6'
# Gitaly GRPC client
gem 'gitaly', '~> 0.5.0'
gem 'toml-rb', '~> 0.3.15', require: false
......@@ -117,6 +117,7 @@ GEM
chronic_duration (0.10.6)
numerizer (~> 0.1.1)
chunky_png (1.3.5)
citrus (3.0.2)
cliver (0.3.2)
coderay (1.1.1)
coercible (1.0.0)
......@@ -784,6 +785,8 @@ GEM
tilt (2.0.6)
timecop (0.8.1)
timfel-krb5-auth (0.8.3)
toml-rb (0.3.15)
citrus (~> 3.0, > 3.0)
tool (0.2.3)
truncato (0.7.8)
htmlentities (~> 4.3.1)
......@@ -1015,6 +1018,7 @@ DEPENDENCIES
test_after_commit (~> 1.1)
thin (~> 1.7.0)
timecop (~> 0.8.0)
toml-rb (~> 0.3.15)
truncato (~> 0.7.8)
u2f (~> 0.2.1)
uglifier (~> 2.7.2)
......
......@@ -73,11 +73,20 @@ These types of merge requests need special consideration:
and a dedicated team with front-end, back-end, and UX.
* **Small features**: any other feature request.
**Large features** must be with a maintainer **by the 1st**. It's OK if they
aren't completely done, but this allows the maintainer enough time to make the
decision about whether this can make it in before the freeze. If the maintainer
doesn't think it will make it, they should inform the developers working on it
and the Product Manager responsible for the feature.
**Large features** must be with a maintainer **by the 1st**. This means that:
* There is a merge request (even if it's WIP).
* The person (or people, if it needs both a frontend and a backend maintainer) who
will ultimately be responsible for merging it has been pinged on the MR.
It's OK if the merge request isn't completely done, but this allows the maintainer
enough time to decide whether it can make it in before the freeze. If the
maintainer doesn't think it will make it, they should inform the developers
working on it and the Product Manager responsible for the feature.
The maintainer can also choose to assign a reviewer to perform an initial
review; either way, the maintainer is unlikely to be surprised by receiving an
MR later in the cycle.
**Small features** must be with a reviewer (not necessarily a maintainer) **by the
3rd**.
......
......@@ -31,7 +31,7 @@
svg {
width: 20px;
height: auto;
height: 20px;
fill: $gl-text-color-secondary;
}
......
......@@ -19,7 +19,7 @@ ul.notes {
svg {
width: 18px;
height: auto;
height: 18px;
fill: $gray-darkest;
position: absolute;
left: 30px;
......
......@@ -7,7 +7,7 @@ module SystemNoteHelper
'closed' => 'icon_status_closed',
'time_tracking' => 'icon_stopwatch',
'assignee' => 'icon_user',
'title' => 'icon_pencil',
'title' => 'icon_edit',
'task' => 'icon_check_square_o',
'label' => 'icon_tags',
'cross_reference' => 'icon_random',
......
......@@ -10,6 +10,7 @@
= custom_icon("icon_code_fork")
.event-title
%span.author_name= link_to_author event
%span{ class: event.action_name }
- if event.target
= event.action_name
......
......@@ -2,6 +2,7 @@
= custom_icon("icon_status_open")
.event-title
%span.author_name= link_to_author event
%span{ class: event.action_name }
= event_action_name(event)
......
......@@ -2,6 +2,7 @@
= custom_icon("icon_comment_o")
.event-title
%span.author_name= link_to_author event
= event.action_name
= event_note_title_html(event)
......
......@@ -7,6 +7,7 @@
= custom_icon("icon_commit")
.event-title
%span.author_name= link_to_author event
%span.pushed #{event.action_name} #{event.ref_type}
%strong
- commits_link = namespace_project_commits_path(project.namespace, project, event.ref_name)
......
......@@ -7,7 +7,7 @@
#blob-content-holder.tree-holder
.file-holder
= render "projects/blob/header", blob: @blob
= render "projects/blob/header", blob: @blob, blame: true
.table-responsive.file-content.blame.code.js-syntax-highlight
%table
......
- blame = local_assigns.fetch(:blame, false)
.js-file-title.file-title-flex-parent
.file-header-content
= blob_icon blob.mode, blob.name
......@@ -12,14 +13,14 @@
.file-actions.hidden-xs
.btn-group{ role: "group" }<
= copy_blob_content_button(blob) if blob_text_viewable?(blob)
= copy_blob_content_button(blob) if !blame && blob_text_viewable?(blob)
= open_raw_file_button(namespace_project_raw_path(@project.namespace, @project, @id))
= view_on_environment_button(@commit.sha, @path, @environment) if @environment
.btn-group{ role: "group" }<
-# only show normal/blame view links for text files
- if blob_text_viewable?(blob)
- if current_page? namespace_project_blame_path(@project.namespace, @project, @id)
- if blame
= link_to 'Normal view', namespace_project_blob_path(@project.namespace, @project, @id),
class: 'btn btn-sm'
- else
......
---
title: Upgrade webpack to v2.3.3 and webpack-dev-server to v2.4.2
merge_request: 10552
author:
---
title: Add indication for closed or merged issuables in GFM
merge_request: 9462
author: Adam Buckland
class Spinach::Features::ProjectMergeRequestsAcceptance < Spinach::FeatureSteps
include LoginHelpers
include GitlabRoutingHelper
include WaitForAjax
step 'I am on the Merge Request detail page' do
visit merge_request_path(@merge_request)
......@@ -20,10 +21,18 @@ class Spinach::Features::ProjectMergeRequestsAcceptance < Spinach::FeatureSteps
step 'I should see the Remove Source Branch button' do
expect(page).to have_link('Remove source branch')
# Wait for AJAX requests to complete so they don't blow up if they are
# only handled after `DatabaseCleaner` has already run
wait_for_ajax
end
step 'I should not see the Remove Source Branch button' do
expect(page).not_to have_link('Remove source branch')
# Wait for AJAX requests to complete so they don't blow up if they are
# only handled after `DatabaseCleaner` has already run
wait_for_ajax
end
step 'There is an open Merge Request' do
......
......@@ -97,7 +97,7 @@ module SharedProject
step 'I should see project "Shop" activity feed' do
project = Project.find_by(name: "Shop")
expect(page).to have_content "pushed new branch fix at #{project.name_with_namespace}"
expect(page).to have_content "#{@user.name} pushed new branch fix at #{project.name_with_namespace}"
end
step 'I should see project settings' do
......
module Banzai
module Filter
# HTML filter that appends state information to issuable links.
# Runs as a post-process filter as issuable state might change whilst
# Markdown is in the cache.
#
# This filter supports cross-project references.
class IssuableStateFilter < HTML::Pipeline::Filter
VISIBLE_STATES = %w(closed merged).freeze
def call
extractor = Banzai::IssuableExtractor.new(project, current_user)
issuables = extractor.extract([doc])
issuables.each do |node, issuable|
if VISIBLE_STATES.include?(issuable.state)
node.children.last.content += " [#{issuable.state}]"
end
end
doc
end
private
def current_user
context[:current_user]
end
def project
context[:project]
end
end
end
end
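For reference, a minimal sketch of how the new filter above could be exercised from a GitLab Rails console (the `issue`, `project`, and `user` records are assumed to exist; the HTML mirrors the GFM links built in the spec further down):

    html = %(<a href="" class="gfm" data-reference-type="issue" data-issue="#{issue.id}">#1</a>)
    doc  = Banzai::Filter::IssuableStateFilter.call(html, project: project, current_user: user)
    doc.css('a').last.text
    # => "#1 [closed]" for a closed issue; open and reopened issues pass through unchanged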
......@@ -7,7 +7,7 @@ module Banzai
#
class RedactorFilter < HTML::Pipeline::Filter
def call
Redactor.new(project, current_user).redact([doc])
Redactor.new(project, current_user).redact([doc]) unless context[:skip_redaction]
doc
end
......
module Banzai
# Extract references to issuables from multiple documents
# This populates the RequestStore cache used in Banzai::ReferenceParser::IssueParser
# and Banzai::ReferenceParser::MergeRequestParser.
# Populating the cache should happen before processing documents one-by-one,
# so we can avoid the N+1 queries problem.
class IssuableExtractor
QUERY = %q(
descendant-or-self::a[contains(concat(" ", @class, " "), " gfm ")]
[@data-reference-type="issue" or @data-reference-type="merge_request"]
).freeze
attr_reader :project, :user
def initialize(project, user)
@project = project
@user = user
end
# Returns Hash in the form { node => issuable_instance }
def extract(documents)
nodes = documents.flat_map do |document|
document.xpath(QUERY)
end
issue_parser = Banzai::ReferenceParser::IssueParser.new(project, user)
merge_request_parser = Banzai::ReferenceParser::MergeRequestParser.new(project, user)
issue_parser.issues_for_nodes(nodes).merge(
merge_request_parser.merge_requests_for_nodes(nodes)
)
end
end
end
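A short usage sketch for the extractor, under the same assumed console setup; the node construction mirrors the `html_to_node` helper in the spec below:

    link = Nokogiri::HTML.fragment(
      %(<a href="" class="gfm" data-reference-type="issue" data-issue="#{issue.id}">text</a>)
    ).children[0]
    Banzai::IssuableExtractor.new(project, user).extract([link])
    # => { link => issue }; repeated calls within one request are served from the RequestStore cache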
......@@ -31,7 +31,8 @@ module Banzai
#
# Returns the same input objects.
def render(objects, attribute)
documents = render_objects(objects, attribute)
documents = render_documents(objects, attribute)
documents = post_process_documents(documents, objects, attribute)
redacted = redact_documents(documents)
objects.each_with_index do |object, index|
......@@ -41,9 +42,24 @@ module Banzai
end
end
# Renders the attribute of every given object.
def render_objects(objects, attribute)
render_attributes(objects, attribute)
private
def render_documents(objects, attribute)
pipeline = HTML::Pipeline.new([])
objects.map do |object|
pipeline.to_document(Banzai.render_field(object, attribute))
end
end
def post_process_documents(documents, objects, attribute)
# Called here to populate the cache; refer to the IssuableExtractor docs
IssuableExtractor.new(project, user).extract(documents)
documents.zip(objects).map do |document, object|
context = context_for(object, attribute)
Banzai::Pipeline[:post_process].to_document(document, context)
end
end
# Redacts the list of documents.
......@@ -57,25 +73,15 @@ module Banzai
# Returns a Banzai context for the given object and attribute.
def context_for(object, attribute)
context = base_context.dup
context = context.merge(object.banzai_render_context(attribute))
context
end
# Renders the attributes of a set of objects.
#
# Returns an Array of `Nokogiri::HTML::Document`.
def render_attributes(objects, attribute)
objects.map do |object|
string = Banzai.render_field(object, attribute)
context = context_for(object, attribute)
Banzai::Pipeline[:relative_link].to_document(string, context)
end
base_context.merge(object.banzai_render_context(attribute))
end
def base_context
@base_context ||= @redaction_context.merge(current_user: user, project: project)
@base_context ||= @redaction_context.merge(
current_user: user,
project: project,
skip_redaction: true
)
end
end
end
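The net effect of the renderer changes, sketched against the updated spec below (a GitLab console and ActiveRecord models are assumed):

    renderer = Banzai::ObjectRenderer.new(project, user)
    note     = Note.new(note: 'hello', note_html: '<p>hello</p>')
    renderer.render([note], :note)
    note.redacted_note_html           # => "<p>hello</p>"
    note.user_visible_reference_count # => 0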
......@@ -4,6 +4,7 @@ module Banzai
def self.filters
FilterArray[
Filter::RelativeLinkFilter,
Filter::IssuableStateFilter,
Filter::RedactorFilter
]
end
......
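With `IssuableStateFilter` registered here, the post-process pipeline appends state labels before `RedactorFilter` strips links the user cannot read. The entry point, as used by the renderer above, is roughly:

    Banzai::Pipeline[:post_process].to_document(doc, project: project, current_user: user)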
......@@ -62,8 +62,7 @@ module Banzai
nodes.select do |node|
if node.has_attribute?(project_attr)
node_id = node.attr(project_attr).to_i
can_read_reference?(user, projects[node_id])
can_read_reference?(user, projects[node])
else
true
end
......@@ -112,12 +111,12 @@ module Banzai
per_project
end
# Returns a Hash containing objects for an attribute grouped per their
# IDs.
# Returns a Hash containing objects for an attribute grouped per the
# nodes that reference them.
#
# The returned Hash uses the following format:
#
# { id value => row }
# { node => row }
#
# nodes - An Array of HTML nodes to process.
#
......@@ -132,9 +131,14 @@ module Banzai
return {} if nodes.empty?
ids = unique_attribute_values(nodes, attribute)
rows = collection_objects_for_ids(collection, ids)
collection_objects = collection_objects_for_ids(collection, ids)
objects_by_id = collection_objects.index_by(&:id)
rows.index_by(&:id)
nodes.each_with_object({}) do |node, hash|
if node.has_attribute?(attribute)
hash[node] = objects_by_id[node.attr(attribute).to_i]
end
end
end
# Returns an Array containing all unique values of an attribute of the
......@@ -201,7 +205,7 @@ module Banzai
#
# The returned Hash uses the following format:
#
# { project ID => project }
# { node => project }
#
def projects_for_nodes(nodes)
@projects_for_nodes ||=
......
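The shape change in `grouped_objects_for_nodes`, shown with illustrative values (`parser` is an assumed BaseParser subclass instance and `link` a Nokogiri node carrying a `data-user` attribute, as in the spec below):

    parser.grouped_objects_for_nodes([link], User, 'data-user')
    # before this commit: { user.id => user }
    # after this commit:  { link => user }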
......@@ -13,14 +13,14 @@ module Banzai
issues_readable_by_user(issues.values, user).to_set
nodes.select do |node|
readable_issues.include?(issue_for_node(issues, node))
readable_issues.include?(issues[node])
end
end
def referenced_by(nodes)
issues = issues_for_nodes(nodes)
nodes.map { |node| issue_for_node(issues, node) }.uniq
nodes.map { |node| issues[node] }.compact.uniq
end
def issues_for_nodes(nodes)
......@@ -44,12 +44,6 @@ module Banzai
self.class.data_attribute
)
end
private
def issue_for_node(issues, node)
issues[node.attr(self.class.data_attribute).to_i]
end
end
end
end
......@@ -3,6 +3,14 @@ module Banzai
class MergeRequestParser < BaseParser
self.reference_type = :merge_request
def merge_requests_for_nodes(nodes)
@merge_requests_for_nodes ||= grouped_objects_for_nodes(
nodes,
MergeRequest.all,
self.class.data_attribute
)
end
def references_relation
MergeRequest.includes(:author, :assignee, :target_project)
end
......
......@@ -49,7 +49,7 @@ module Banzai
# Check if project belongs to a group which
# user can read.
def can_read_group_reference?(node, user, groups)
node_group = groups[node.attr('data-group').to_i]
node_group = groups[node]
node_group && can?(user, :read_group, node_group)
end
......@@ -74,8 +74,8 @@ module Banzai
if project && project_id && project.id == project_id.to_i
true
elsif project_id && user_id
project = projects[project_id.to_i]
user = users[user_id.to_i]
project = projects[node]
user = users[node]
project && user ? project.team.member?(user) : false
else
......
......@@ -19,5 +19,19 @@ namespace :gitlab do
run_command!([command])
end
end
desc "GitLab | Print storage configuration in TOML format"
task storage_config: :environment do
require 'toml'
puts "# Gitaly storage configuration generated from #{Gitlab.config.source} on #{Time.current.to_s(:long)}"
puts "# This is in TOML format suitable for use in Gitaly's config.toml file."
config = Gitlab.config.repositories.storages.map do |key, val|
{ name: key, path: val['path'] }
end
puts TOML.dump(storage: config)
end
end
end
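Running the new task prints output along these lines (storage names and paths are the illustrative values from the spec below; the header carries the real config source and timestamp):

    $ bundle exec rake gitlab:gitaly:storage_config
    # Gitaly storage configuration generated from /path/to/gitlab.yml on April 12, 2017 12:00
    # This is in TOML format suitable for use in Gitaly's config.toml file.
    [[storage]]
    name = "default"
    path = "/path/to/default"

    [[storage]]
    name = "nfs_01"
    path = "/path/to/nfs_01"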
......@@ -44,6 +44,10 @@ FactoryGirl.define do
state :reopened
end
trait :locked do
state :locked
end
trait :simple do
source_branch "feature"
target_branch "master"
......
......@@ -21,20 +21,20 @@ feature 'Project member activity', feature: true, js: true do
context 'when a user joins the project' do
before { visit_activities_and_wait_with_event(Event::JOINED) }
it { is_expected.to eq("joined project") }
it { is_expected.to eq("#{user.name} joined project") }
end
context 'when a user leaves the project' do
before { visit_activities_and_wait_with_event(Event::LEFT) }
it { is_expected.to eq("left project") }
it { is_expected.to eq("#{user.name} left project") }
end
context 'when a users membership expires for the project' do
before { visit_activities_and_wait_with_event(Event::EXPIRED) }
it "presents the correct message" do
message = "removed due to membership expiration from project"
message = "#{user.name} removed due to membership expiration from project"
is_expected.to eq(message)
end
end
......
require 'spec_helper'
describe Banzai::Filter::IssuableStateFilter, lib: true do
include ActionView::Helpers::UrlHelper
include FilterSpecHelper
let(:user) { create(:user) }
def create_link(data)
link_to('text', '', class: 'gfm has-tooltip', data: data)
end
it 'ignores non-GFM links' do
html = %(See <a href="https://google.com/">Google</a>)
doc = filter(html, current_user: user)
expect(doc.css('a').last.text).to eq('Google')
end
it 'ignores non-issuable links' do
project = create(:empty_project, :public)
link = create_link(project: project, reference_type: 'issue')
doc = filter(link, current_user: user)
expect(doc.css('a').last.text).to eq('text')
end
context 'for issue references' do
it 'ignores open issue references' do
issue = create(:issue)
link = create_link(issue: issue.id, reference_type: 'issue')
doc = filter(link, current_user: user)
expect(doc.css('a').last.text).to eq('text')
end
it 'ignores reopened issue references' do
reopened_issue = create(:issue, :reopened)
link = create_link(issue: reopened_issue.id, reference_type: 'issue')
doc = filter(link, current_user: user)
expect(doc.css('a').last.text).to eq('text')
end
it 'appends [closed] to closed issue references' do
closed_issue = create(:issue, :closed)
link = create_link(issue: closed_issue.id, reference_type: 'issue')
doc = filter(link, current_user: user)
expect(doc.css('a').last.text).to eq('text [closed]')
end
end
context 'for merge request references' do
it 'ignores open merge request references' do
mr = create(:merge_request)
link = create_link(merge_request: mr.id, reference_type: 'merge_request')
doc = filter(link, current_user: user)
expect(doc.css('a').last.text).to eq('text')
end
it 'ignores reopened merge request references' do
mr = create(:merge_request, :reopened)
link = create_link(merge_request: mr.id, reference_type: 'merge_request')
doc = filter(link, current_user: user)
expect(doc.css('a').last.text).to eq('text')
end
it 'ignores locked merge request references' do
mr = create(:merge_request, :locked)
link = create_link(merge_request: mr.id, reference_type: 'merge_request')
doc = filter(link, current_user: user)
expect(doc.css('a').last.text).to eq('text')
end
it 'appends [closed] to closed merge request references' do
mr = create(:merge_request, :closed)
link = create_link(merge_request: mr.id, reference_type: 'merge_request')
doc = filter(link, current_user: user)
expect(doc.css('a').last.text).to eq('text [closed]')
end
it 'appends [merged] to merged merge request references' do
mr = create(:merge_request, :merged)
link = create_link(merge_request: mr.id, reference_type: 'merge_request')
doc = filter(link, current_user: user)
expect(doc.css('a').last.text).to eq('text [merged]')
end
end
end
......@@ -15,6 +15,16 @@ describe Banzai::Filter::RedactorFilter, lib: true do
link_to('text', '', class: 'gfm', data: data)
end
it 'skips when the skip_redaction flag is set' do
user = create(:user)
project = create(:empty_project)
link = reference_link(project: project.id, reference_type: 'test')
doc = filter(link, current_user: user, skip_redaction: true)
expect(doc.css('a').length).to eq 1
end
context 'with data-project' do
let(:parser_class) do
Class.new(Banzai::ReferenceParser::BaseParser) do
......
require 'spec_helper'
describe Banzai::IssuableExtractor, lib: true do
let(:project) { create(:empty_project) }
let(:user) { create(:user) }
let(:extractor) { described_class.new(project, user) }
let(:issue) { create(:issue, project: project) }
let(:merge_request) { create(:merge_request, source_project: project) }
let(:issue_link) do
html_to_node(
"<a href='' data-issue='#{issue.id}' data-reference-type='issue' class='gfm'>text</a>"
)
end
let(:merge_request_link) do
html_to_node(
"<a href='' data-merge-request='#{merge_request.id}' data-reference-type='merge_request' class='gfm'>text</a>"
)
end
def html_to_node(html)
Nokogiri::HTML.fragment(
html
).children[0]
end
it 'returns instances of issuables for nodes with references' do
result = extractor.extract([issue_link, merge_request_link])
expect(result).to eq(issue_link => issue, merge_request_link => merge_request)
end
describe 'caching' do
before do
RequestStore.begin!
end
after do
RequestStore.end!
RequestStore.clear!
end
it 'saves records to cache' do
extractor.extract([issue_link, merge_request_link])
second_call_queries = ActiveRecord::QueryRecorder.new do
extractor.extract([issue_link, merge_request_link])
end.count
expect(second_call_queries).to eq 0
end
end
end
......@@ -3,128 +3,51 @@ require 'spec_helper'
describe Banzai::ObjectRenderer do
let(:project) { create(:empty_project) }
let(:user) { project.owner }
def fake_object(attrs = {})
object = double(attrs.merge("new_record?" => true, "destroyed?" => true))
allow(object).to receive(:markdown_cache_field_for).with(:note).and_return(:note_html)
allow(object).to receive(:banzai_render_context).with(:note).and_return(project: nil, author: nil)
allow(object).to receive(:update_column).with(:note_html, anything).and_return(true)
object
end
let(:renderer) { described_class.new(project, user, custom_value: 'value') }
let(:object) { Note.new(note: 'hello', note_html: '<p>hello</p>') }
describe '#render' do
it 'renders and redacts an Array of objects' do
renderer = described_class.new(project, user)
object = fake_object(note: 'hello', note_html: nil)
expect(renderer).to receive(:render_objects).with([object], :note).
and_call_original
expect(renderer).to receive(:redact_documents).
with(an_instance_of(Array)).
and_call_original
expect(object).to receive(:redacted_note_html=).with('<p dir="auto">hello</p>')
expect(object).to receive(:user_visible_reference_count=).with(0)
renderer.render([object], :note)
end
end
describe '#render_objects' do
it 'renders an Array of objects' do
object = fake_object(note: 'hello', note_html: nil)
renderer = described_class.new(project, user)
expect(renderer).to receive(:render_attributes).with([object], :note).
and_call_original
rendered = renderer.render_objects([object], :note)
expect(rendered).to be_an_instance_of(Array)
expect(rendered[0]).to be_an_instance_of(Nokogiri::HTML::DocumentFragment)
end
end
describe '#redact_documents' do
it 'redacts a set of documents and returns them as an Array of Hashes' do
doc = Nokogiri::HTML.fragment('<p>hello</p>')
renderer = described_class.new(project, user)
expect_any_instance_of(Banzai::Redactor).to receive(:redact).
with([doc]).
and_call_original
redacted = renderer.redact_documents([doc])
expect(redacted.count).to eq(1)
expect(redacted.first[:visible_reference_count]).to eq(0)
expect(redacted.first[:document].to_html).to eq('<p>hello</p>')
expect(object.redacted_note_html).to eq '<p>hello</p>'
expect(object.user_visible_reference_count).to eq 0
end
end
describe '#context_for' do
let(:object) { fake_object(note: 'hello') }
let(:renderer) { described_class.new(project, user) }
it 'calls Banzai::Redactor to perform redaction' do
expect_any_instance_of(Banzai::Redactor).to receive(:redact).and_call_original
it 'returns a Hash' do
expect(renderer.context_for(object, :note)).to be_an_instance_of(Hash)
end
it 'includes the banzai render context for the object' do
expect(object).to receive(:banzai_render_context).with(:note).and_return(foo: :bar)
context = renderer.context_for(object, :note)
expect(context).to have_key(:foo)
expect(context[:foo]).to eq(:bar)
end
end
describe '#render_attributes' do
it 'renders the attribute of a list of objects' do
objects = [fake_object(note: 'hello', note_html: nil), fake_object(note: 'bye', note_html: nil)]
renderer = described_class.new(project, user)
objects.each do |object|
expect(Banzai).to receive(:render_field).with(object, :note).and_call_original
end
docs = renderer.render_attributes(objects, :note)
expect(docs[0]).to be_an_instance_of(Nokogiri::HTML::DocumentFragment)
expect(docs[0].to_html).to eq('<p dir="auto">hello</p>')
expect(docs[1]).to be_an_instance_of(Nokogiri::HTML::DocumentFragment)
expect(docs[1].to_html).to eq('<p dir="auto">bye</p>')
end
it 'returns when no objects to render' do
objects = []
renderer = described_class.new(project, user, pipeline: :note)
expect(renderer.render_attributes(objects, :note)).to eq([])
renderer.render([object], :note)
end
end
describe '#base_context' do
let(:context) do
described_class.new(project, user, foo: :bar).base_context
end
it 'retrieves field content using Banzai.render_field' do
expect(Banzai).to receive(:render_field).with(object, :note).and_call_original
it 'returns a Hash' do
expect(context).to be_an_instance_of(Hash)
end
it 'includes the custom attributes' do
expect(context[:foo]).to eq(:bar)
renderer.render([object], :note)
end
it 'includes the current user' do
expect(context[:current_user]).to eq(user)
end
it 'passes context to PostProcessPipeline' do
another_user = create(:user)
another_project = create(:empty_project)
object = Note.new(
note: 'hello',
note_html: 'hello',
author: another_user,
project: another_project
)
expect(Banzai::Pipeline::PostProcessPipeline).to receive(:to_document).with(
anything,
hash_including(
skip_redaction: true,
current_user: user,
project: another_project,
author: another_user,
custom_value: 'value'
)
).and_call_original
it 'includes the current project' do
expect(context[:project]).to eq(project)
renderer.render([object], :note)
end
end
end
......@@ -92,16 +92,26 @@ describe Banzai::ReferenceParser::BaseParser, lib: true do
end
describe '#grouped_objects_for_nodes' do
it 'returns a Hash grouping objects per ID' do
nodes = [double(:node)]
it 'returns a Hash grouping objects per node' do
link = double(:link)
expect(link).to receive(:has_attribute?).
with('data-user').
and_return(true)
expect(link).to receive(:attr).
with('data-user').
and_return(user.id.to_s)
nodes = [link]
expect(subject).to receive(:unique_attribute_values).
with(nodes, 'data-user').
and_return([user.id])
and_return([user.id.to_s])
hash = subject.grouped_objects_for_nodes(nodes, User, 'data-user')
expect(hash).to eq({ user.id => user })
expect(hash).to eq({ link => user })
end
it 'returns an empty Hash when the list of nodes is empty' do
......
......@@ -67,6 +67,16 @@ describe Banzai::ReferenceParser::IssueParser, lib: true do
expect(subject.referenced_by([])).to eq([])
end
end
context 'when issue with given ID does not exist' do
before do
link['data-issue'] = '-1'
end
it 'returns an empty Array' do
expect(subject.referenced_by([link])).to eq([])
end
end
end
end
......@@ -75,7 +85,7 @@ describe Banzai::ReferenceParser::IssueParser, lib: true do
link['data-issue'] = issue.id.to_s
nodes = [link]
expect(subject.issues_for_nodes(nodes)).to eq({ issue.id => issue })
expect(subject.issues_for_nodes(nodes)).to eq({ link => issue })
end
end
end
......@@ -180,6 +180,15 @@ describe Banzai::ReferenceParser::UserParser, lib: true do
expect(subject.nodes_user_can_reference(user, [link])).to eq([])
end
it 'returns the nodes if the project attribute value equals the current project ID' do
other_user = create(:user)
link['data-project'] = project.id.to_s
link['data-author'] = other_user.id.to_s
expect(subject.nodes_user_can_reference(user, [link])).to eq([link])
end
end
context 'when the link does not have a data-author attribute' do
......
......@@ -75,4 +75,36 @@ describe 'gitlab:gitaly namespace rake task' do
end
end
end
describe 'storage_config' do
it 'prints storage configuration in a TOML format' do
config = {
'default' => { 'path' => '/path/to/default' },
'nfs_01' => { 'path' => '/path/to/nfs_01' },
}
allow(Gitlab.config.repositories).to receive(:storages).and_return(config)
orig_stdout = $stdout
$stdout = StringIO.new
header = ''
Timecop.freeze do
header = <<~TOML
# Gitaly storage configuration generated from #{Gitlab.config.source} on #{Time.current.to_s(:long)}
# This is in TOML format suitable for use in Gitaly's config.toml file.
TOML
run_rake_task('gitlab:gitaly:storage_config')
end
output = $stdout.string
$stdout = orig_stdout
expect(output).to include(header)
parsed_output = TOML.parse(output)
config.each do |name, params|
expect(parsed_output['storage']).to include({ 'name' => name, 'path' => params['path'] })
end
end
end
end