Commit 600422fa authored by GitLab Bot

Automatic merge of gitlab-org/gitlab master

parents 0a73bfba ae991fd6
@@ -20,7 +20,6 @@ class InvitesController < ApplicationController
   def accept
     if member.accept_invite!(current_user)
-      track_invitation_reminders_experiment('accepted')
       redirect_to invite_details[:path], notice: _("You have been granted %{member_human_access} access to %{title} %{name}.") %
         { member_human_access: member.human_access, title: invite_details[:title], name: invite_details[:name] }
     else
@@ -107,8 +106,4 @@ class InvitesController < ApplicationController
       }
     end
   end
-
-  def track_invitation_reminders_experiment(action)
-    track_experiment_event(:invitation_reminders, action, subject: member)
-  end
 end
@@ -63,15 +63,6 @@ module Emails
         subject: subject_line,
         layout: 'unknown_user_mailer'
       )
-
-      if Gitlab::Experimentation.active?(:invitation_reminders)
-        Gitlab::Tracking.event(
-          Gitlab::Experimentation.get_experiment(:invitation_reminders).tracking_category,
-          'sent',
-          property: Gitlab::Experimentation.in_experiment_group?(:invitation_reminders, subject: member.invite_email) ? 'experimental_group' : 'control_group',
-          label: Digest::MD5.hexdigest(member.to_global_id.to_s)
-        )
-      end
     end

     def member_invited_reminder_email(member_source_type, member_id, token, reminder_index)
...
@@ -14,8 +14,6 @@ module Members
     end

     def execute
-      return unless experiment_enabled?
-
       reminder_index = days_on_which_to_send_reminders.index(days_after_invitation_sent)
       return unless reminder_index
@@ -24,10 +22,6 @@ module Members
     private

-    def experiment_enabled?
-      Gitlab::Experimentation.in_experiment_group?(:invitation_reminders, subject: invitation.invite_email)
-    end
-
     def days_after_invitation_sent
       (Date.today - invitation.created_at.to_date).to_i
     end
...
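For orientation, here is a minimal Ruby sketch of the lookup that `execute` performs above, assuming the default schedule of reminders on the second, fifth, and tenth day after the invitation was sent (per the documentation and specs further down). The actual `days_on_which_to_send_reminders` is not shown in this diff and also adapts the schedule to the invitation's expiry date, so the constant and helper below are illustrative only.

```ruby
require 'date'

# Illustrative only: the real days_on_which_to_send_reminders (not shown in
# this diff) shortens the schedule for invitations that expire sooner.
DEFAULT_REMINDER_DAYS = [2, 5, 10].freeze

# Mirrors: reminder_index = days_on_which_to_send_reminders.index(days_after_invitation_sent)
def reminder_index_for(invitation_created_at, today: Date.today)
  days_after_invitation_sent = (today - invitation_created_at.to_date).to_i
  DEFAULT_REMINDER_DAYS.index(days_after_invitation_sent) # nil on non-reminder days
end

reminder_index_for(Date.today - 5) # => 1 (the second reminder goes out)
reminder_index_for(Date.today - 3) # => nil (no reminder today)
```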
@@ -8,8 +8,6 @@ class MemberInvitationReminderEmailsWorker # rubocop:disable Scalability/Idempot
   urgency :low

   def perform
-    return unless Gitlab::Experimentation.active?(:invitation_reminders)
-
     Member.not_accepted_invitations.not_expired.last_ten_days_excluding_today.find_in_batches do |invitations|
       invitations.each do |invitation|
         Members::InvitationReminderEmailService.new(invitation).execute
...
---
title: Add invitation reminders
merge_request: 47920
author:
type: added
@@ -23,9 +23,10 @@ GitLab end users do not have direct access to Gitaly. Gitaly only manages Git
 repository access for GitLab. Other types of GitLab data aren't accessed using Gitaly.

 CAUTION: **Caution:**
-From GitLab 13.0, Gitaly support for NFS is deprecated. In GitLab 14.0, Gitaly support
-for NFS is scheduled to be removed. Upgrade to [Gitaly Cluster](praefect.md) as soon as
-possible.
+From GitLab 13.0, Gitaly support for NFS is deprecated. As of GitLab 14.0, NFS-related issues
+with Gitaly will no longer be addressed. Upgrade to [Gitaly Cluster](praefect.md) as soon as
+possible. Watch for [tools to enable bulk move](https://gitlab.com/groups/gitlab-org/-/epics/4916)
+of projects to Gitaly Cluster.

 ## Architecture
...
---
stage: configure
group: configure
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
comments: false
description: 'GitLab to Kubernetes communication'
---
# GitLab to Kubernetes communication
The goal of this document is to define how GitLab can communicate with Kubernetes
and in-cluster services through the GitLab Kubernetes Agent.
## Challenges
### Lack of network connectivity
For various features that exist today, GitLab communicates with Kubernetes by directly
or indirectly calling its API endpoints. This works well, as long as a network
path from GitLab to the cluster exists, which isn't always the case:
- GitLab.com and a self-managed cluster, where the cluster is not exposed to the Internet.
- GitLab.com and a cloud-vendor managed cluster, where the cluster is not exposed to the Internet.
- Self-managed GitLab and a cloud-vendor managed cluster, where the cluster is not
exposed to the Internet and there is no private peering between the cloud network
and the customer's network.
This last item is the hardest to address, because something has to change to create a
network path. This feature gives the customer an extra option (exposing only the
`gitlab-kas` domain, rather than the whole GitLab instance) in addition to the existing
options (peering the networks, or exposing one of the two sides).
Even if technically possible, it's almost always undesirable to expose a Kubernetes
cluster's API to the Internet for security reasons. As a result, our customers
are reluctant to do so, and are faced with a choice of security versus the features
GitLab provides for connected clusters.
This trade-off applies not only to the Kubernetes API, but to all APIs exposed by services
running on a customer's cluster that GitLab may need to access. For example,
Prometheus running in a cluster must be exposed for the GitLab integration to access it.
### Cluster-admin permissions
Both current integrations - building your own cluster (certificate-based) and a GitLab-managed
cluster in a cloud - require granting full `cluster-admin` access to GitLab. Credentials
are stored on the GitLab side, which is yet another security concern for our customers.
For more discussion on these issues, read
[issue #212810](https://gitlab.com/gitlab-org/gitlab/-/issues/212810).
## GitLab Kubernetes Agent epic
To address these challenges and provide some new features, the Configure group
is building an active in-cluster component that inverts the
direction of communication:
1. The customer installs an agent into their cluster.
1. The agent connects to GitLab.com or their self-managed GitLab instance,
receiving commands from it.
The customer does not need to provide any credentials to GitLab, and
is in full control of what permissions the agent has.
For more information, visit the
[GitLab Kubernetes Agent repository](https://gitlab.com/gitlab-org/cluster-integration/gitlab-agent) or
[the epic](https://gitlab.com/groups/gitlab-org/-/epics/3329).
### Request routing
Agents connect to the server-side component called GitLab Kubernetes Agent Server
(`gitlab-kas`) and keep an open connection that waits for commands. The
difficulty with this approach is routing requests from GitLab to the correct agent.
Each cluster may contain multiple logical agents, and each may be running as multiple
replicas (`Pod`s), connected to an arbitrary `gitlab-kas` instance.
Existing and new features require real-time access to the APIs of the cluster
and (optionally) to the APIs of components running in the cluster. As a result, it's
difficult to pass the information back and forth using a more traditional polling approach.
A good example to illustrate the real-time need is Prometheus integration.
If we wanted to draw real-time graphs, we would need direct access to the Prometheus API
to make queries and quickly return results. `gitlab-kas` could expose the Prometheus API
to GitLab, and transparently route traffic to one of the correct agents connected
at the moment. The agent then would stream the request to Prometheus and stream the response back.
## Proposal
Implement request routing in `gitlab-kas`. Encapsulate and hide all related
complexity from the main application by providing a clean API to work with Kubernetes
and the agents.
The above does not necessarily mean proxying Kubernetes' API directly, but that
is possible should we need it.
Which APIs `gitlab-kas` provides depends on the features being developed, but first
we must solve the request routing problem: it blocks any and all features
that require direct communication with agents, Kubernetes, or in-cluster services.

A detailed implementation proposal with all technical details is in
[`kas_request_routing.md`](https://gitlab.com/gitlab-org/cluster-integration/gitlab-agent/-/blob/master/doc/kas_request_routing.md).
```mermaid
flowchart LR
subgraph "Kubernetes 1"
agentk1p1["agentk 1, Pod1"]
agentk1p2["agentk 1, Pod2"]
end
subgraph "Kubernetes 2"
agentk2p1["agentk 2, Pod1"]
end
subgraph "Kubernetes 3"
agentk3p1["agentk 3, Pod1"]
end
subgraph kas
kas1["kas 1"]
kas2["kas 2"]
kas3["kas 3"]
end
GitLab["GitLab Rails"]
Redis
GitLab -- "gRPC to any kas" --> kas
kas1 -- register connected agents --> Redis
kas2 -- register connected agents --> Redis
kas1 -- lookup agent --> Redis
agentk1p1 -- "gRPC" --> kas1
agentk1p2 -- "gRPC" --> kas2
agentk2p1 -- "gRPC" --> kas1
agentk3p1 -- "gRPC" --> kas2
```
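The diagram can be read as a registry pattern: each `kas` instance records which agents are currently connected to it, and any instance can look up a peer that holds a live connection before forwarding a request. The sketch below illustrates that idea in Ruby under stated assumptions: `gitlab-kas` itself is written in Go, and the class name, Redis key layout, and TTL used here are hypothetical, chosen only to mirror the "register connected agents" and "lookup agent" arrows above.

```ruby
require 'redis'
require 'securerandom'

# Illustrative sketch of the registration/lookup arrows in the diagram above.
# The real gitlab-kas is written in Go; names, keys, and TTLs are assumptions.
class AgentConnectionRegistry
  TTL_SECONDS = 30 # connections re-register periodically, so stale entries expire

  def initialize(redis:, kas_instance_id: SecureRandom.uuid)
    @redis = redis
    @kas_instance_id = kas_instance_id
  end

  # Called when an agentk replica opens its long-lived connection to this kas instance.
  def register(agent_id)
    @redis.sadd(key_for(agent_id), @kas_instance_id)
    @redis.expire(key_for(agent_id), TTL_SECONDS)
  end

  # Called when that connection closes.
  def unregister(agent_id)
    @redis.srem(key_for(agent_id), @kas_instance_id)
  end

  # Called by whichever kas instance received a request from GitLab Rails:
  # pick an instance that currently holds a live connection for the agent,
  # so the request can be tunnelled to it.
  def lookup(agent_id)
    @redis.srandmember(key_for(agent_id))
  end

  private

  def key_for(agent_id)
    "kas:connected_agents:#{agent_id}"
  end
end

# Usage sketch:
#   registry = AgentConnectionRegistry.new(redis: Redis.new)
#   registry.register(42) # agentk 42 connected to this kas instance
#   registry.lookup(42)   # => ID of a kas instance with a live connection to agent 42
```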
### Iterations
Iterations are tracked in [the dedicated epic](https://gitlab.com/groups/gitlab-org/-/epics/4591).
## Who
Proposal:
<!-- vale gitlab.Spelling = NO -->
| Role | Who
|------------------------------|-------------------------|
| Author | Mikhail Mazurskiy |
| Architecture Evolution Coach | Andrew Newdigate |
| Engineering Leader | Nicholas Klick |
| Domain Expert | Thong Kuah |
| Domain Expert | Graeme Gillies |
| Security Expert | Vitor Meireles De Sousa |
DRIs:
| Role | Who
|------------------------------|------------------------|
| Product Lead | Viktor Nagy |
| Engineering Leader | Nicholas Klick |
| Domain Expert | Mikhail Mazurskiy |
<!-- vale gitlab.Spelling = YES -->
@@ -356,5 +356,5 @@ The state files attached to a project can be found under Operations / Terraform.
 You can only remove a state file by making a request to the API, like the following example:

 ```shell
-curl --header "Private-Token: <your_access_token>" --request DELETE "https://gitlab.example.com/api/v4/projects/<your_project_id/terraform/state/<your_state_name>"
+curl --header "Private-Token: <your_access_token>" --request DELETE "https://gitlab.example.com/api/v4/projects/<your_project_id>/terraform/state/<your_state_name>"
 ```
@@ -96,6 +96,9 @@ invitation, change their access level, or even delete them.
 ![Invite user members list](img/add_user_email_accept.png)

+While the invitation is pending, the system automatically sends reminder emails on the
+second, fifth, and tenth day after the invitation was initially sent.
+
 After the user accepts the invitation, they are prompted to create a new
 GitLab account using the same e-mail address the invitation was sent to.
...
@@ -48,18 +48,60 @@ module Elastic
       ::Elastic::ProcessBookkeepingService.track!(self)
     end

-    def maintain_elasticsearch_update
+    def maintain_elasticsearch_update(updated_attributes: previous_changes.keys)
+      updated_attributes = updated_attributes.map(&:to_s) # allow callers to pass symbols but stay consistent by using strings
       ::Elastic::ProcessBookkeepingService.track!(self)
+
+      associations_to_update = associations_needing_elasticsearch_update(updated_attributes)
+      if associations_to_update.present?
+        ElasticAssociationIndexerWorker.perform_async(self.class.name, id, associations_to_update)
+      end
     end

     def maintain_elasticsearch_destroy
       ::Elastic::ProcessBookkeepingService.track!(self)
     end

+    # Override in the child object if there are associations that need to be
+    # updated when specific fields are updated
+    def associations_needing_elasticsearch_update(updated_attributes)
+      self.class.elastic_index_dependants.map do |dependant|
+        association_name = dependant[:association_name]
+        on_change = dependant[:on_change]
+
+        next nil unless updated_attributes.include?(on_change.to_s)
+
+        association_name.to_s
+      end.compact.uniq
+    end
+
     class_methods do
       def __elasticsearch__
         @__elasticsearch__ ||= ::Elastic::MultiVersionClassProxy.new(self)
       end

+      # Mark a dependant association as needing to be updated when a specific
+      # field in this object changes. For example, to update project.issues in
+      # the index when project.visibility_level changes, declare:
+      #
+      #   elastic_index_dependant_association :issues, on_change: :visibility_level
+      #
+      def elastic_index_dependant_association(association_name, on_change:)
+        # This concern is also used by non-ActiveRecord models, but this method
+        # is not applicable for those, so we raise.
+        raise "elastic_index_dependant_association is not applicable as this class is not an ActiveRecord model." unless self < ActiveRecord::Base
+
+        # Validate these are actually correct associations before sending to
+        # Sidekiq to avoid errors occurring when the job is picked up.
+        raise "Invalid association to index. \"#{association_name}\" is either not a collection or not an association. Hint: You must declare the has_many before declaring elastic_index_dependant_association." unless reflect_on_association(association_name)&.collection?
+
+        elastic_index_dependants << { association_name: association_name, on_change: on_change }
+      end
+
+      def elastic_index_dependants
+        @elastic_index_dependants ||= []
+      end
     end
   end
 end
@@ -100,6 +100,8 @@ module EE
       has_many :incident_management_oncall_schedules, class_name: 'IncidentManagement::OncallSchedule', inverse_of: :project

+      elastic_index_dependant_association :issues, on_change: :visibility_level
+
       scope :with_shared_runners_limit_enabled, -> do
         if ::Ci::Runner.has_shared_runners_with_non_zero_public_cost?
           with_shared_runners
...
@@ -17,23 +17,24 @@ class VulnerabilityPresenter < Gitlab::View::Presenter::Delegated
     "#{file}:#{line}"
   end

-  def location_link
-    return location_text unless blob_path
-
-    "#{root_url}#{blob_path}"
+  def location_link_with_raw_path
+    location_link_for(raw_path)
+  end
+
+  def location_link
+    location_link_for(blob_path)
   end

-  def blob_path
+  def raw_path
     return unless file

-    branch = finding.pipelines&.last&.sha || project.default_branch
-    path = project_blob_path(vulnerability.project, File.join(branch, file))
-    return unless path
-
-    path = path.gsub(/^\//, '')
-
-    add_line_numbers(path, finding.location['start_line'], finding.location['end_line'])
+    path_with_line_number(project_raw_path(vulnerability.project, File.join(pipeline_branch, file)))
+  end
+
+  def blob_path
+    return unless file
+
+    path_with_line_number(project_blob_path(vulnerability.project, File.join(pipeline_branch, file)))
   end

   def scanner
@@ -46,6 +47,24 @@ class VulnerabilityPresenter < Gitlab::View::Presenter::Delegated

   private

+  def location_link_for(path)
+    return location_text unless path
+
+    "#{root_url}#{path}"
+  end
+
+  def pipeline_branch
+    finding.pipelines&.last&.sha || project.default_branch
+  end
+
+  def path_with_line_number(path)
+    return unless path
+
+    path = path.gsub(/^\//, '')
+
+    add_line_numbers(path, finding.location['start_line'], finding.location['end_line'])
+  end
+
   def root_url
     Gitlab::Routing.url_helpers.root_url
   end
...
@@ -16,7 +16,7 @@ module EE
       override :post_update_hooks
       def post_update_hooks(updated_project_ids)
         ::Project.id_in(updated_project_ids).find_each do |project|
-          project.maintain_elasticsearch_update if project.maintaining_elasticsearch?
+          project.maintain_elasticsearch_update(updated_attributes: [:visibility_level]) if project.maintaining_elasticsearch?
         end

         super
       end
...
@@ -37,7 +37,7 @@ class ScanSecurityReportSecretsWorker # rubocop:disable Scalability/IdempotentWo
       {
         type: revocation_type(vulnerability_finding),
         token: vulnerability_finding.metadata['raw_source_code_extract'],
-        location: vulnerability_finding.vulnerability.present.location_link
+        location: vulnerability_finding.vulnerability.present.location_link_with_raw_path
       }
     end
   end
...
---
title: Pass the 'raw' URL instead of 'blob' URL in revocation API call
merge_request: 49170
author:
type: added
@@ -13,6 +13,7 @@ module Elastic
         end

         data['assignee_id'] = safely_read_attribute_for_elasticsearch(:assignee_ids)
+        data['visibility_level'] = target.project.visibility_level

         # protect against missing project_feature and set visibility to PRIVATE
         # if the project_feature is missing on a project
...
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Elastic::ApplicationVersionedSearch do
let(:klass) do
Class.new(ApplicationRecord) do
include Elastic::ApplicationVersionedSearch
has_many :widgets
end
end
describe '.elastic_index_dependant_association' do
it 'adds the associations to elastic_index_dependants' do
klass.elastic_index_dependant_association(:widgets, on_change: :title)
expect(klass.elastic_index_dependants).to include({
association_name: :widgets,
on_change: :title
})
end
context 'when the association does not exist' do
it 'raises an error' do
expect { klass.elastic_index_dependant_association(:foo_bars, on_change: :bar) }
.to raise_error("Invalid association to index. \"foo_bars\" is either not a collection or not an association. Hint: You must declare the has_many before declaring elastic_index_dependant_association.")
end
end
context 'when the class is not an ApplicationRecord' do
let(:not_application_record) do
Class.new do
include Elastic::ApplicationVersionedSearch
end
end
it 'raises an error' do
expect { not_application_record.elastic_index_dependant_association(:widgets, on_change: :title) }
.to raise_error("elastic_index_dependant_association is not applicable as this class is not an ActiveRecord model.")
end
end
end
end
@@ -107,6 +107,7 @@ RSpec.describe Issue, :elastic do
     it "returns json with all needed elements" do
       assignee = create(:user)
+      project = create(:project, :internal)
       issue = create :issue, project: project, assignees: [assignee]

       expected_hash = issue.attributes.extract!(
@@ -129,7 +130,8 @@ RSpec.describe Issue, :elastic do
       })

       expected_hash['assignee_id'] = [assignee.id]
-      expected_hash['issues_access_level'] = issue.project.project_feature.issues_access_level
+      expected_hash['issues_access_level'] = ProjectFeature::ENABLED
+      expected_hash['visibility_level'] = Gitlab::VisibilityLevel::INTERNAL

       expect(issue.__elasticsearch__.as_indexed_json).to eq(expected_hash)
     end
...
@@ -2638,4 +2638,30 @@ RSpec.describe Project do
       project.add_template_export_job(current_user: user)
     end
   end
+
+  context 'indexing updates in Elasticsearch', :elastic do
+    before do
+      stub_ee_application_setting(elasticsearch_indexing: true)
+    end
+
+    context 'on update' do
+      let(:project) { create(:project, :public) }
+
+      context 'when updating the visibility_level' do
+        it 'triggers ElasticAssociationIndexerWorker to update issues' do
+          expect(ElasticAssociationIndexerWorker).to receive(:perform_async).with('Project', project.id, ['issues'])
+
+          project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+        end
+      end
+
+      context 'when changing the title' do
+        it 'does not trigger ElasticAssociationIndexerWorker to update issues' do
+          expect(ElasticAssociationIndexerWorker).not_to receive(:perform_async)
+
+          project.update!(title: 'The new title')
+        end
+      end
+    end
+  end
 end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe VulnerabilityPresenter do
let(:project) { create(:project) }
let(:pipeline) { create(:ci_pipeline, :success, project: project) }
let(:finding) { create(:vulnerabilities_finding, :with_secret_detection, pipelines: [pipeline], project: project) }
subject { described_class.new(finding.vulnerability) }
describe '#location_link_with_raw_path' do
it 'returns the location link in raw format' do
path = subject.location_link_with_raw_path
expect(path).to include('raw')
expect(path).to include(finding.file)
expect(path).to include("#L#{finding.location['start_line']}")
end
end
describe '#location_link' do
it 'returns the location link in blob format' do
path = subject.location_link
expect(path).to include('blob')
expect(path).to include(finding.file)
expect(path).to include("#L#{finding.location['start_line']}")
end
end
end
@@ -21,14 +21,17 @@ RSpec.describe Groups::TransferService, '#execute' do
       stub_ee_application_setting(elasticsearch_indexing: true)
     end

-    it 'reindexes projects', :elastic do
+    it 'reindexes projects and associated issues', :elastic do
       project1 = create(:project, :repository, :public, namespace: group)
       project2 = create(:project, :repository, :public, namespace: group)
       project3 = create(:project, :repository, :private, namespace: group)

       expect(Elastic::ProcessBookkeepingService).to receive(:track!).with(project1)
+      expect(ElasticAssociationIndexerWorker).to receive(:perform_async).with('Project', project1.id, ['issues'])
       expect(Elastic::ProcessBookkeepingService).to receive(:track!).with(project2)
+      expect(ElasticAssociationIndexerWorker).to receive(:perform_async).with('Project', project2.id, ['issues'])
       expect(Elastic::ProcessBookkeepingService).not_to receive(:track!).with(project3)
+      expect(ElasticAssociationIndexerWorker).not_to receive(:perform_async).with('Project', project3.id, ['issues'])

       transfer_service.execute(new_group)
...
@@ -66,6 +66,7 @@ RSpec.describe ScanSecurityReportSecretsWorker do
         expect(key[:type]).to eql(revocation_key_type)
         expect(key[:token]).to eql(api_key)
         expect(key[:location]).to include(file)
+        expect(key[:location]).to include('raw')
       end
     end
   end
...
@@ -66,10 +66,6 @@ module Gitlab
         tracking_category: 'Growth::Acquisition::Experiment::InviteEmail',
         use_backwards_compatible_subject_index: true
       },
-      invitation_reminders: {
-        tracking_category: 'Growth::Acquisition::Experiment::InvitationReminders',
-        use_backwards_compatible_subject_index: true
-      },
       group_only_trials: {
         tracking_category: 'Growth::Conversion::Experiment::GroupOnlyTrials',
         use_backwards_compatible_subject_index: true
...
@@ -7,16 +7,18 @@ module QA
       #
       # git config --global receive.advertisepushoptions true

-      branch = "push-options-test-#{SecureRandom.hex(8)}"
-      title = "MR push options test #{SecureRandom.hex(8)}"
-      commit_message = 'Add README.md'
+      let(:branch) { "push-options-test-#{SecureRandom.hex(8)}" }
+      let(:title) { "MR push options test #{SecureRandom.hex(8)}" }
+      let(:commit_message) { 'Add README.md' }

-      project = Resource::Project.fabricate_via_api! do |project|
-        project.name = 'merge-request-push-options'
-        project.initialize_with_readme = true
+      let(:project) do
+        Resource::Project.fabricate_via_api! do |project|
+          project.name = 'merge-request-push-options'
+          project.initialize_with_readme = true
+        end
       end

-      it 'sets labels', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/1032' do
+      def create_new_mr_via_push
         Resource::Repository::ProjectPush.fabricate! do |push|
           push.project = project
           push.commit_message = commit_message
@@ -27,6 +29,10 @@ module QA
             label: %w[one two three]
           }
         end
+      end
+
+      it 'sets labels', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/1032' do
+        create_new_mr_via_push

         merge_request = project.merge_request_with_title(title)
@@ -35,7 +41,11 @@ module QA
       end

       context 'when labels are set already' do
-        it 'removes them', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/1033' do
+        before do
+          create_new_mr_via_push
+        end
+
+        it 'removes them on subsequent push', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/1033' do
           Resource::Repository::ProjectPush.fabricate! do |push|
             push.project = project
             push.file_content = "Unlabel test #{SecureRandom.hex(8)}"
...
@@ -22,43 +22,6 @@ RSpec.describe InvitesController, :snowplow do
     end
   end

-  shared_examples "tracks the 'accepted' event for the invitation reminders experiment" do
-    before do
-      stub_experiment(invitation_reminders: true)
-      stub_experiment_for_subject(invitation_reminders: experimental_group)
-    end
-
-    context 'when in the control group' do
-      let(:experimental_group) { false }
-
-      it "tracks the 'accepted' event" do
-        request
-
-        expect_snowplow_event(
-          category: 'Growth::Acquisition::Experiment::InvitationReminders',
-          label: md5_member_global_id,
-          property: 'control_group',
-          action: 'accepted'
-        )
-      end
-    end
-
-    context 'when in the experimental group' do
-      let(:experimental_group) { true }
-
-      it "tracks the 'accepted' event" do
-        request
-
-        expect_snowplow_event(
-          category: 'Growth::Acquisition::Experiment::InvitationReminders',
-          label: md5_member_global_id,
-          property: 'experimental_group',
-          action: 'accepted'
-        )
-      end
-    end
-  end
-
   describe 'GET #show' do
     subject(:request) { get :show, params: params }
@@ -87,7 +50,6 @@ RSpec.describe InvitesController, :snowplow do
       expect(flash[:notice]).to be_nil
     end

-    it_behaves_like "tracks the 'accepted' event for the invitation reminders experiment"
     it_behaves_like 'invalid token'
   end
@@ -119,7 +81,6 @@ RSpec.describe InvitesController, :snowplow do
     subject(:request) { post :accept, params: params }

-    it_behaves_like "tracks the 'accepted' event for the invitation reminders experiment"
     it_behaves_like 'invalid token'
   end
...
@@ -16,7 +16,6 @@ RSpec.describe Gitlab::Experimentation::EXPERIMENTS do
       :contact_sales_btn_in_app,
       :customize_homepage,
       :invite_email,
-      :invitation_reminders,
       :group_only_trials,
       :default_to_issues_board
     ]
...
@@ -1450,38 +1450,6 @@ RSpec.describe Notify do
     subject { described_class.member_invited_email('Group', group_member.id, group_member.invite_token) }

-    shared_examples "tracks the 'sent' event for the invitation reminders experiment" do
-      before do
-        stub_experiment(invitation_reminders: true)
-        allow(Gitlab::Experimentation).to receive(:in_experiment_group?).with(:invitation_reminders, subject: group_member.invite_email).and_return(experimental_group)
-      end
-
-      it "tracks the 'sent' event", :snowplow do
-        subject.deliver_now
-
-        expect_snowplow_event(
-          category: 'Growth::Acquisition::Experiment::InvitationReminders',
-          label: Digest::MD5.hexdigest(group_member.to_global_id.to_s),
-          property: experimental_group ? 'experimental_group' : 'control_group',
-          action: 'sent'
-        )
-      end
-    end
-
-    describe 'tracking for the invitation reminders experiment' do
-      context 'when invite email is in the experimental group' do
-        let(:experimental_group) { true }
-
-        it_behaves_like "tracks the 'sent' event for the invitation reminders experiment"
-      end
-
-      context 'when invite email is in the control group' do
-        let(:experimental_group) { false }
-
-        it_behaves_like "tracks the 'sent' event for the invitation reminders experiment"
-      end
-    end
-
     it_behaves_like 'an email sent from GitLab'
     it_behaves_like 'it should not have Gmail Actions links'
     it_behaves_like "a user cannot unsubscribe through footer link"
...
@@ -9,67 +9,49 @@ RSpec.describe Members::InvitationReminderEmailService do
   let_it_be(:frozen_time) { Date.today.beginning_of_day }
   let_it_be(:invitation) { build(:group_member, :invited, created_at: frozen_time) }

-  context 'when the experiment is disabled' do
-    before do
-      allow(Gitlab::Experimentation).to receive(:in_experiment_group?).and_return(false)
-      invitation.expires_at = frozen_time + 2.days
-    end
-
-    it 'does not send an invitation' do
-      travel_to(frozen_time + 1.day) do
-        expect(invitation).not_to receive(:send_invitation_reminder)
-
-        subject
-      end
-    end
-  end
-
-  context 'when the experiment is enabled' do
-    before do
-      allow(Gitlab::Experimentation).to receive(:in_experiment_group?).and_return(true)
-      invitation.expires_at = frozen_time + expires_at_days.days if expires_at_days
-    end
+  before do
+    invitation.expires_at = frozen_time + expires_at_days.days if expires_at_days
+  end

   using RSpec::Parameterized::TableSyntax

   where(:expires_at_days, :send_reminder_at_days) do
     0   | []
     1   | []
     2   | [1]
     3   | [1, 2]
     4   | [1, 2, 3]
     5   | [1, 2, 4]
     6   | [1, 3, 5]
     7   | [1, 3, 5]
     8   | [2, 3, 6]
     9   | [2, 4, 7]
     10  | [2, 4, 8]
     11  | [2, 4, 8]
     12  | [2, 5, 9]
     13  | [2, 5, 10]
     14  | [2, 5, 10]
     15  | [2, 5, 10]
     nil | [2, 5, 10]
   end

   with_them do
     # Create an invitation today with an expiration date from 0 to 10 days in the future or without an expiration date
     # We chose 10 days here, because we fetch invitations that were created at most 10 days ago.
     (0..10).each do |day|
       it 'sends an invitation reminder only on the expected days' do
         next if day > (expires_at_days || 10) # We don't need to test after the invitation has already expired

         # We are traveling in a loop from today to 10 days from now
         travel_to(frozen_time + day.days) do
           # Given an expiration date and the number of days after the creation of the invitation based on the current day in the loop, a reminder may be sent
           if (reminder_index = send_reminder_at_days.index(day))
             expect(invitation).to receive(:send_invitation_reminder).with(reminder_index)
           else
             expect(invitation).not_to receive(:send_invitation_reminder)
           end

           subject
         end
       end
     end
   end
...
@@ -10,30 +10,12 @@ RSpec.describe MemberInvitationReminderEmailsWorker do
       create(:group_member, :invited, created_at: 2.days.ago)
     end

-    context 'feature flag disabled' do
-      before do
-        stub_experiment(invitation_reminders: false)
-      end
-
-      it 'does not attempt to execute the invitation reminder service' do
-        expect(Members::InvitationReminderEmailService).not_to receive(:new)
-
-        subject
-      end
-    end
-
-    context 'feature flag enabled' do
-      before do
-        stub_experiment(invitation_reminders: true)
-      end
-
-      it 'executes the invitation reminder email service' do
-        expect_next_instance_of(Members::InvitationReminderEmailService) do |service|
-          expect(service).to receive(:execute)
-        end
-
-        subject
-      end
+    it 'executes the invitation reminder email service' do
+      expect_next_instance_of(Members::InvitationReminderEmailService) do |service|
+        expect(service).to receive(:execute)
+      end
+
+      subject
     end
   end
 end