Commit f3e69da7 authored by James Fargher

Merge branch 'ak/update-es' into 'master'

Update Elastic Stack chart to 2.0.0 to support Kubernetes 1.16

See merge request gitlab-org/gitlab!29601
parents e7e3fcef 3f7ff559
@@ -3,7 +3,7 @@
module Clusters
module Applications
class ElasticStack < ApplicationRecord
VERSION = '1.9.0'
VERSION = '2.0.0'
ELASTICSEARCH_PORT = 9200
@@ -28,6 +28,7 @@ module Clusters
rbac: cluster.platform_kubernetes_rbac?,
chart: chart,
files: files,
preinstall: migrate_to_2_script,
postinstall: post_install_script
)
end
@@ -69,6 +70,10 @@ module Clusters
end
end
def filebeat7?
Gem::Version.new(version) >= Gem::Version.new('2.0.0')
end
private
def post_install_script
@@ -86,6 +91,27 @@ module Clusters
def kube_client
cluster&.kubeclient&.core_client
end
def migrate_to_2_script
# Updating the chart to 2.0.0 includes an update of the filebeat chart from 1.7.0 to 3.1.1: https://github.com/helm/charts/pull/21640
# That update includes a commit that changes labels on the filebeat deployment: https://github.com/helm/charts/commit/9b009170686c6f4b202c36ceb1da4bb9ba15ddd0
# Unfortunately those label fields are immutable, so we can't use `helm upgrade` to change them; we first have to delete the associated filebeat resources.
# The following pre-install command runs before updating to 2.0.0 and sets filebeat.enabled=false, so the existing filebeat deployment is deleted.
# The main install command then re-creates the filebeat resources with the new labels.
if updating? && !filebeat7?
[
Gitlab::Kubernetes::Helm::InstallCommand.new(
name: 'elastic-stack',
version: version,
rbac: cluster.platform_kubernetes_rbac?,
chart: chart,
files: files
).install_command + ' --set filebeat.enabled\\=false'
]
else
[]
end
end
end
end
end
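For context on how the new preinstall path is exercised, here is a rough sketch that mirrors the spec changes further down. It assumes a GitLab Rails console and an existing clusters_applications_elastic_stack record, and the helm string in the last comment is an approximation, not verbatim output:

# Illustrative sketch only, not part of this merge request.
app = Clusters::Applications::ElasticStack.last
app.version = '1.9.0'                      # a chart version that still ships filebeat 6
app.status = app.status_states[:updating]  # pretend an upgrade is in flight
app.filebeat7?                             # => false, 2.0.0 is the first chart with filebeat 7

command = app.install_command
command.preinstall.first
# => roughly "helm upgrade elastic-stack stable/elastic-stack ... --set filebeat.enabled\=false"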
@@ -65,6 +65,8 @@ module PodLogs
client = cluster&.application_elastic_stack&.elasticsearch_client
return error(_('Unable to connect to Elasticsearch')) unless client
filebeat7 = cluster.application_elastic_stack.filebeat7?
response = ::Gitlab::Elasticsearch::Logs::Lines.new(client).pod_logs(
namespace,
pod_name: result[:pod_name],
@@ -72,7 +74,8 @@ module PodLogs
search: result[:search],
start_time: result[:start_time],
end_time: result[:end_time],
cursor: result[:cursor]
cursor: result[:cursor],
filebeat7: filebeat7
)
result.merge!(response)
......
---
title: Update Elastic Stack chart to 2.0.0 to support Kubernetes 1.16
merge_request: 29601
author:
type: fixed
@@ -13,7 +13,7 @@ module Gitlab
@client = client
end
def pod_logs(namespace, pod_name: nil, container_name: nil, search: nil, start_time: nil, end_time: nil, cursor: nil)
def pod_logs(namespace, pod_name: nil, container_name: nil, search: nil, start_time: nil, end_time: nil, cursor: nil, filebeat7: true)
query = { bool: { must: [] } }.tap do |q|
filter_pod_name(q, pod_name)
filter_namespace(q, namespace)
@@ -22,7 +22,7 @@ module Gitlab
filter_times(q, start_time, end_time)
end
body = build_body(query, cursor)
body = build_body(query, cursor, filebeat7)
response = @client.search body: body
format_response(response)
@@ -30,13 +30,14 @@ module Gitlab
private
def build_body(query, cursor = nil)
def build_body(query, cursor = nil, filebeat7 = true)
offset_field = filebeat7 ? "log.offset" : "offset"
body = {
query: query,
# reverse order so we can query N-most recent records
sort: [
{ "@timestamp": { order: :desc } },
{ "offset": { order: :desc } }
{ "#{offset_field}": { order: :desc } }
],
# only return these fields in the response
_source: ["@timestamp", "message", "kubernetes.pod.name"],
......
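The only behavioural difference the new filebeat7 flag introduces is the name of the secondary sort field. A minimal illustration of the two sort clauses build_body can emit, mirroring the JSON fixtures below (plain Ruby literals, not GitLab code):

# filebeat 7 stores the file offset under the ECS-style "log.offset" field,
# while filebeat 6 indices expose a top-level "offset" field.
sort_for_filebeat7 = [{ "@timestamp": { order: :desc } }, { "log.offset": { order: :desc } }]
sort_for_filebeat6 = [{ "@timestamp": { order: :desc } }, { "offset": { order: :desc } }]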
@@ -37,8 +37,6 @@ module Gitlab
@rbac
end
private
# Uses `helm upgrade --install`, which means we can use this for both
# installation and upgrade of applications
def install_command
@@ -55,6 +53,8 @@ module Gitlab
command.shelljoin
end
private
def install_flag
['--install']
end
......
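The `--install` flag is what lets one code path handle both cases: `helm upgrade --install` upgrades a release if it already exists and creates it otherwise. A small standalone illustration of the core of the command assembled above (plain Ruby; the release and chart names are the ones this application uses, and the real command carries many more flags):

require 'shellwords'

# Only the idempotent core of the command is shown here.
['helm', 'upgrade', 'elastic-stack', 'stable/elastic-stack', '--install'].shelljoin
# => "helm upgrade elastic-stack stable/elastic-stack --install"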
@@ -26,7 +26,7 @@
}
},
{
"offset": {
"log.offset": {
"order": "desc"
}
}
......
@@ -33,7 +33,7 @@
}
},
{
"offset": {
"log.offset": {
"order": "desc"
}
}
......
@@ -26,7 +26,7 @@
}
},
{
"offset": {
"log.offset": {
"order": "desc"
}
}
......
@@ -35,7 +35,7 @@
}
},
{
"offset": {
"log.offset": {
"order": "desc"
}
}
......
{
"query": {
"bool": {
"must": [
{
"match_phrase": {
"kubernetes.pod.name": {
"query": "production-6866bc8974-m4sk4"
}
}
},
{
"match_phrase": {
"kubernetes.namespace": {
"query": "autodevops-deploy-9-production"
}
}
}
]
}
},
"sort": [
{
"@timestamp": {
"order": "desc"
}
},
{
"offset": {
"order": "desc"
}
}
],
"_source": [
"@timestamp",
"message",
"kubernetes.pod.name"
],
"size": 500
}
@@ -35,7 +35,7 @@
}
},
{
"offset": {
"log.offset": {
"order": "desc"
}
}
......
@@ -35,7 +35,7 @@
}
},
{
"offset": {
"log.offset": {
"order": "desc"
}
}
......
@@ -36,7 +36,7 @@
}
},
{
"offset": {
"log.offset": {
"order": "desc"
}
}
......
@@ -29,6 +29,7 @@ describe Gitlab::Elasticsearch::Logs::Lines do
let(:body_with_start_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_start_time.json')) }
let(:body_with_end_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_end_time.json')) }
let(:body_with_cursor) { JSON.parse(fixture_file('lib/elasticsearch/query_with_cursor.json')) }
let(:body_with_filebeat_6) { JSON.parse(fixture_file('lib/elasticsearch/query_with_filebeat_6.json')) }
RSpec::Matchers.define :a_hash_equal_to_json do |expected|
match do |actual|
@@ -85,5 +86,12 @@ describe Gitlab::Elasticsearch::Logs::Lines do
result = subject.pod_logs(namespace, pod_name: pod_name, cursor: cursor)
expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end
it 'can search on filebeat 6' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_filebeat_6)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name: pod_name, filebeat7: false)
expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end
end
end
@@ -20,9 +20,10 @@ describe Clusters::Applications::ElasticStack do
it 'is initialized with elastic stack arguments' do
expect(subject.name).to eq('elastic-stack')
expect(subject.chart).to eq('stable/elastic-stack')
expect(subject.version).to eq('1.9.0')
expect(subject.version).to eq('2.0.0')
expect(subject).to be_rbac
expect(subject.files).to eq(elastic_stack.files)
expect(subject.preinstall).to be_empty
end
context 'on a non rbac enabled cluster' do
@@ -33,11 +34,23 @@ describe Clusters::Applications::ElasticStack do
it { is_expected.not_to be_rbac }
end
context 'on versions older than 2' do
before do
elastic_stack.status = elastic_stack.status_states[:updating]
elastic_stack.version = "1.9.0"
end
it 'includes a preinstall script' do
expect(subject.preinstall).not_to be_empty
expect(subject.preinstall.first).to include("filebeat.enable")
end
end
context 'application failed to install previously' do
let(:elastic_stack) { create(:clusters_applications_elastic_stack, :errored, version: '0.0.1') }
it 'is initialized with the locked version' do
expect(subject.version).to eq('1.9.0')
expect(subject.version).to eq('2.0.0')
end
end
end
......
@@ -225,7 +225,7 @@ describe ::PodLogs::ElasticsearchService do
.and_return(Elasticsearch::Transport::Client.new)
allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
.to receive(:pod_logs)
.with(namespace, pod_name: pod_name, container_name: container_name, search: search, start_time: start_time, end_time: end_time, cursor: cursor)
.with(namespace, pod_name: pod_name, container_name: container_name, search: search, start_time: start_time, end_time: end_time, cursor: cursor, filebeat7: true)
.and_return({ logs: expected_logs, cursor: expected_cursor })
result = subject.send(:pod_logs, result_arg)
......
@@ -23,7 +23,7 @@ filebeat:
output.elasticsearch:
enabled: true
hosts: ["http://elastic-stack-elasticsearch-client:9200"]
filebeat.prospectors:
filebeat.inputs:
- type: log
enabled: true
paths:
@@ -86,6 +86,17 @@ elasticsearch-curator:
timestring: '%Y.%m.%d'
unit: days
unit_count: 30
2:
action: delete_indices
description: >-
Indices created by filebeat 6.7.0 are incompatible with filebeat 7,
so they will be deleted.
options:
ignore_empty_list: True
filters:
- filtertype: pattern
kind: prefix
value: filebeat-6.7.0-
elasticsearch-exporter:
enabled: false
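The new curator rule works because filebeat names its daily indices after its own version, so a "filebeat-6.7.0-" prefix selects exactly the indices written by the old chart. A tiny illustration of the prefix match (the filebeat 7 index name is a hypothetical example, not taken from this merge request):

# Filebeat's default index pattern is roughly "filebeat-<beat version>-<date>".
old_index = 'filebeat-6.7.0-2020.04.27'  # written by the 1.9.0 chart
new_index = 'filebeat-7.6.1-2020.04.27'  # hypothetical name under the 2.0.0 chart
[old_index, new_index].select { |name| name.start_with?('filebeat-6.7.0-') }
# => ["filebeat-6.7.0-2020.04.27"]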