Commit 53f8e502 authored by Tiger Watson

Merge branch '36385-encrypt-integration-properties' into 'master'

Add Integration.encrypted_properties

See merge request gitlab-org/gitlab!80219
parents dc32671d 4f727ada
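The new columns mirror the plaintext `properties` serializer: `copy_properties_to_encrypted_properties` keeps the `encrypted_properties_tmp` virtual attribute in sync after initialize and before save, and a background migration backfills existing rows. A minimal sketch of the intended behaviour, using a hypothetical subclass (real integrations declare their own `prop_accessor`s):

```ruby
# Hypothetical subclass, for illustration only.
class SomeIntegration < Integration
  prop_accessor :url
end

record = SomeIntegration.new(url: 'https://example.com')

record.properties               #=> { "url" => "https://example.com" }
record.encrypted_properties_tmp #=> the same hash, stored AES-256-GCM encrypted
                                #   in encrypted_properties / encrypted_properties_iv
record.save!
```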
@@ -49,6 +49,16 @@ class Integration < ApplicationRecord
serialize :properties, JSON # rubocop:disable Cop/ActiveRecordSerialize
attr_encrypted :encrypted_properties_tmp,
attribute: :encrypted_properties,
mode: :per_attribute_iv,
key: Settings.attr_encrypted_db_key_base_32,
algorithm: 'aes-256-gcm',
marshal: true,
marshaler: ::Gitlab::Json,
encode: false,
encode_iv: false
alias_attribute :type, :type_new
default_value_for :active, false
@@ -67,6 +77,8 @@ class Integration < ApplicationRecord
default_value_for :wiki_page_events, true
after_initialize :initialize_properties
after_initialize :copy_properties_to_encrypted_properties
before_save :copy_properties_to_encrypted_properties
after_commit :reset_updated_properties
@@ -123,8 +135,10 @@ class Integration < ApplicationRecord
def #{arg}=(value)
self.properties ||= {}
self.encrypted_properties_tmp = properties
updated_properties['#{arg}'] = #{arg} unless #{arg}_changed?
self.properties['#{arg}'] = value
self.encrypted_properties_tmp['#{arg}'] = value
end
def #{arg}_changed?
@@ -354,6 +368,12 @@ class Integration < ApplicationRecord
self.properties = {} if has_attribute?(:properties) && properties.nil?
end
def copy_properties_to_encrypted_properties
self.encrypted_properties_tmp = properties
rescue ActiveModel::MissingAttributeError
# ignore - this can happen for records built using a restricted select list
end
def title
# implement inside child
end
@@ -394,7 +414,21 @@ class Integration < ApplicationRecord
# return a hash of columns => values suitable for passing to insert_all
def to_integration_hash
column = self.class.attribute_aliases.fetch('type', 'type')
copy_properties_to_encrypted_properties
as_json(except: %w[id instance project_id group_id encrypted_properties_tmp])
.merge(column => type)
.merge(reencrypt_properties)
end
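# Re-encrypts the current properties with a freshly generated IV and returns
# the encrypted column values merged into to_integration_hash for insert_all.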
def reencrypt_properties
unless properties.nil? || properties.empty?
alg = self.class.encrypted_attributes[:encrypted_properties_tmp][:algorithm]
iv = generate_iv(alg)
ep = self.class.encrypt(:encrypted_properties_tmp, properties, { iv: iv })
end
{ 'encrypted_properties' => ep, 'encrypted_properties_iv' => iv }
end
def to_data_fields_hash
......
# frozen_string_literal: true
class AddIntegrationsEncryptedProperties < Gitlab::Database::Migration[1.0]
def change
add_column :integrations, :encrypted_properties, :binary
add_column :integrations, :encrypted_properties_iv, :binary
end
end
# frozen_string_literal: true
class EncryptIntegrationProperties < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
MIGRATION = 'EncryptIntegrationProperties'
BATCH_SIZE = 1_000
INTERVAL = 2.minutes.to_i
def up
queue_background_migration_jobs_by_range_at_intervals(
define_batchable_model('integrations').all,
MIGRATION,
INTERVAL,
track_jobs: true,
batch_size: BATCH_SIZE
)
end
def down
# this migration is not reversible
end
end
9d98618a1e9fd0474c45ac54420fc64a1d90ad77f36be594337e5b117fccdadb
\ No newline at end of file
1593e935601ae1f2ab788109687bb40bad026f3f425339a39c8d13d3e4c7e306
\ No newline at end of file
@@ -16093,6 +16093,8 @@ CREATE TABLE integrations (
type_new text,
vulnerability_events boolean DEFAULT false NOT NULL,
archive_trace_events boolean DEFAULT false NOT NULL,
encrypted_properties bytea,
encrypted_properties_iv bytea,
CONSTRAINT check_a948a0aa7e CHECK ((char_length(type_new) <= 255))
);
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Migrates the integration.properties column from plaintext to encrypted text.
class EncryptIntegrationProperties
# The Integration model, with just the relevant bits.
class Integration < ActiveRecord::Base
include EachBatch
ALGORITHM = 'aes-256-gcm'
self.table_name = 'integrations'
self.inheritance_column = :_type_disabled
scope :with_properties, -> { where.not(properties: nil) }
scope :not_already_encrypted, -> { where(encrypted_properties: nil) }
scope :for_batch, ->(range) { where(id: range) }
attr_encrypted :encrypted_properties_tmp,
attribute: :encrypted_properties,
mode: :per_attribute_iv,
key: ::Settings.attr_encrypted_db_key_base_32,
algorithm: ALGORITHM,
marshal: true,
marshaler: ::Gitlab::Json,
encode: false,
encode_iv: false
# See 'Integration#reencrypt_properties'
def encrypt_properties
data = ::Gitlab::Json.parse(properties)
iv = generate_iv(ALGORITHM)
ep = self.class.encrypt(:encrypted_properties_tmp, data, { iv: iv })
[ep, iv]
end
end
def perform(start_id, stop_id)
batch_query = Integration.with_properties.not_already_encrypted.for_batch(start_id..stop_id)
encrypt_batch(batch_query)
mark_job_as_succeeded(start_id, stop_id)
end
private
def mark_job_as_succeeded(*arguments)
Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
self.class.name.demodulize,
arguments
)
end
# represent binary string as a PSQL binary literal:
# https://www.postgresql.org/docs/9.4/datatype-binary.html
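# e.g. bytea("ab") #=> "'\x6162'::bytea"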
def bytea(value)
"'\\x#{value.unpack1('H*')}'::bytea"
end
def encrypt_batch(batch_query)
values = batch_query.select(:id, :properties).map do |record|
encrypted_properties, encrypted_properties_iv = record.encrypt_properties
"(#{record.id}, #{bytea(encrypted_properties)}, #{bytea(encrypted_properties_iv)})"
end
return if values.empty?
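# Update the whole batch in a single statement: the VALUES list is exposed
# through a CTE and joined to integrations on id.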
Integration.connection.execute(<<~SQL.squish)
WITH cte(cte_id, cte_encrypted_properties, cte_encrypted_properties_iv)
AS #{::Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
SELECT *
FROM (VALUES #{values.join(',')}) AS t (id, encrypted_properties, encrypted_properties_iv)
)
UPDATE #{Integration.table_name}
SET encrypted_properties = cte_encrypted_properties
, encrypted_properties_iv = cte_encrypted_properties_iv
FROM cte
WHERE cte_id = id
SQL
end
end
end
end
@@ -16,6 +16,9 @@ module Gitlab
# @return [Boolean, String, Array, Hash]
# @raise [JSON::ParserError] raised if parsing fails
def parse(string, opts = {})
# Parse nil as nil
return if string.nil?
# First we should ensure this really is a string, not some other
# type which purports to be a string. This handles some legacy
# usage of the JSON class.
@@ -30,6 +33,7 @@ module Gitlab
end
alias_method :parse!, :parse
alias_method :load, :parse
# Restricted method for converting a Ruby object to JSON. If you
# need to pass options to this, you should use `.generate` instead,
@@ -67,6 +71,14 @@ module Gitlab
::JSON.pretty_generate(object, opts)
end
# The standard parser error we should be returning. Defined in a method
# so we can potentially override it later.
#
# @return [JSON::ParserError]
def parser_error
::JSON::ParserError
end
private
# Convert JSON string into Ruby through toggleable adapters.
@@ -134,14 +146,6 @@ module Gitlab
opts
end
# The standard parser error we should be returning. Defined in a method
# so we can potentially override it later.
#
# @return [JSON::ParserError]
def parser_error
::JSON::ParserError
end
# @param [Nil, Boolean] an extracted :legacy_mode key from the opts hash
# @return [Boolean]
def legacy_mode_enabled?(arg_value)
......
@@ -25,8 +25,8 @@ module Gitlab
log_queries(id, data, 'active-record')
log_queries(id, data, 'gitaly')
log_queries(id, data, 'redis')
rescue StandardError => e
logger.error(message: "failed to process request id #{id}: #{e.message}")
end
private
@@ -34,6 +34,8 @@ module Gitlab
def request(id)
# Peek gem stores request data under peek:requests:request_id key
json_data = @redis.get("peek:requests:#{id}")
raise "No data for #{id}" if json_data.nil?
Gitlab::Json.parse(json_data)
end
......
@@ -353,7 +353,16 @@ RSpec.describe Projects::ServicesController do
it 'does not modify integration' do
expect { put :update, params: project_params.merge(service: integration_params) }
.not_to change { prometheus_integration_as_data }
end
def prometheus_integration_as_data
pi = project.prometheus_integration.reload
attrs = pi.attributes.except('encrypted_properties',
'encrypted_properties_iv',
'encrypted_properties_tmp')
[attrs, pi.encrypted_properties_tmp]
end
end
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::EncryptIntegrationProperties do
let(:integrations) do
table(:integrations) do |integrations|
integrations.send :attr_encrypted, :encrypted_properties_tmp,
attribute: :encrypted_properties,
mode: :per_attribute_iv,
key: ::Settings.attr_encrypted_db_key_base_32,
algorithm: 'aes-256-gcm',
marshal: true,
marshaler: ::Gitlab::Json,
encode: false,
encode_iv: false
end
end
let!(:no_properties) { integrations.create! }
let!(:with_plaintext_1) { integrations.create!(properties: json_props(1)) }
let!(:with_plaintext_2) { integrations.create!(properties: json_props(2)) }
let!(:with_encrypted) do
x = integrations.new
x.properties = nil
x.encrypted_properties_tmp = some_props(3)
x.save!
x
end
let(:start_id) { integrations.minimum(:id) }
let(:end_id) { integrations.maximum(:id) }
it 'ensures all properties are encrypted', :aggregate_failures do
described_class.new.perform(start_id, end_id)
props = integrations.all.to_h do |record|
[record.id, [Gitlab::Json.parse(record.properties), record.encrypted_properties_tmp]]
end
expect(integrations.count).to eq(4)
expect(props).to match(
no_properties.id => both(be_nil),
with_plaintext_1.id => both(eq some_props(1)),
with_plaintext_2.id => both(eq some_props(2)),
with_encrypted.id => match([be_nil, eq(some_props(3))])
)
end
private
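# Matches the [parsed plaintext, decrypted ciphertext] pairs built above when
# both elements satisfy the given matcher.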
def both(obj)
match [obj, obj]
end
def some_props(id)
HashWithIndifferentAccess.new({ id: id, foo: 1, bar: true, baz: %w[a string array] })
end
def json_props(id)
some_props(id).to_json
end
end
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe EncryptIntegrationProperties, :migration, schema: 20220204193000 do
subject(:migration) { described_class.new }
let(:integrations) { table(:integrations) }
before do
stub_const("#{described_class.name}::BATCH_SIZE", 2)
end
it 'correctly schedules background migrations', :aggregate_failures do
# update required
record1 = integrations.create!(properties: some_props)
record2 = integrations.create!(properties: some_props)
record3 = integrations.create!(properties: some_props)
record4 = integrations.create!(properties: nil)
record5 = integrations.create!(properties: nil)
Sidekiq::Testing.fake! do
freeze_time do
migrate!
expect(described_class::MIGRATION).to be_scheduled_migration(record1.id, record2.id)
expect(described_class::MIGRATION).to be_scheduled_migration(record3.id, record4.id)
expect(described_class::MIGRATION).to be_scheduled_migration(record5.id, record5.id)
expect(BackgroundMigrationWorker.jobs.size).to eq(3)
end
end
end
def some_props
{ iid: generate(:iid), url: generate(:url), username: generate(:username) }.to_json
end
end
@@ -843,4 +843,82 @@ RSpec.describe Integration do
expect(subject.password_fields).to eq([])
end
end
describe 'encrypted_properties' do
let(:properties) { { foo: 1, bar: true } }
let(:db_props) { properties.stringify_keys }
let(:record) { create(:integration, :instance, properties: properties) }
it 'contains the same data as properties' do
expect(record).to have_attributes(
properties: db_props,
encrypted_properties_tmp: db_props
)
end
it 'is persisted' do
encrypted_properties = described_class.id_in(record.id)
expect(encrypted_properties).to contain_exactly have_attributes(encrypted_properties_tmp: db_props)
end
it 'is updated when using prop_accessors' do
some_integration = Class.new(described_class) do
prop_accessor :foo
end
record = some_integration.new
record.foo = 'the foo'
expect(record.encrypted_properties_tmp).to eq({ 'foo' => 'the foo' })
end
it 'saves correctly using insert_all' do
hash = record.to_integration_hash
hash[:project_id] = project.id
expect do
described_class.insert_all([hash])
end.to change(described_class, :count).by(1)
expect(described_class.last).to have_attributes(encrypted_properties_tmp: db_props)
end
it 'is part of the to_integration_hash' do
hash = record.to_integration_hash
expect(hash).to include('encrypted_properties' => be_present, 'encrypted_properties_iv' => be_present)
expect(hash['encrypted_properties']).not_to eq(record.encrypted_properties)
expect(hash['encrypted_properties_iv']).not_to eq(record.encrypted_properties_iv)
decrypted = described_class.decrypt(:encrypted_properties_tmp,
hash['encrypted_properties'],
{ iv: hash['encrypted_properties_iv'] })
expect(decrypted).to eq db_props
end
context 'when the properties are empty' do
let(:properties) { {} }
it 'is part of the to_integration_hash' do
hash = record.to_integration_hash
expect(hash).to include('encrypted_properties' => be_nil, 'encrypted_properties_iv' => be_nil)
end
it 'saves correctly using insert_all' do
hash = record.to_integration_hash
hash[:project_id] = project.id
expect do
described_class.insert_all([hash])
end.to change(described_class, :count).by(1)
expect(described_class.last).not_to eq record
expect(described_class.last).to have_attributes(encrypted_properties_tmp: db_props)
end
end
end
end
@@ -13,14 +13,23 @@ RSpec.describe BulkCreateIntegrationService do
let_it_be(:excluded_project) { create(:project, group: excluded_group) }
let(:instance_integration) { create(:jira_integration, :instance) }
let(:excluded_attributes) do
%w[
id project_id group_id inherit_from_id instance template
created_at updated_at
encrypted_properties encrypted_properties_iv
]
end
shared_examples 'creates integration from batch ids' do
def attributes(record)
record.reload.attributes.except(*excluded_attributes)
end
it 'updates the inherited integrations' do
described_class.new(integration, batch, association).execute
expect(attributes(created_integration)).to eq attributes(integration)
end
context 'integration with data fields' do
@@ -29,8 +38,8 @@ RSpec.describe BulkCreateIntegrationService do
it 'updates the data fields from inherited integrations' do
described_class.new(integration, batch, association).execute
expect(attributes(created_integration.data_fields))
.to eq attributes(integration.data_fields)
end
end
end
......
@@ -13,6 +13,8 @@ module MigrationsHelpers
def self.name
table_name.singularize.camelcase
end
yield self if block_given?
end
end
......