Commit ed6884bf authored by Jérome Perrin

software/grafana: WIP generate telegraf and loki config

parent 6966a04f
@@ -15,7 +15,7 @@
[instance-profile]
filename = instance.cfg.in
-md5sum = 94674d597e3ea7e9eca3637a737765ff
md5sum = 33e6510d84c83a46a2edf217b4f1beb5

[influxdb-config-file]
filename = influxdb-config-file.cfg.in
@@ -23,20 +23,24 @@ md5sum = a28972ced3e0f4aa776e43a9c44717c0

[telegraf-config-file]
filename = telegraf-config-file.cfg.in
-md5sum = a1a9c22c2a7829c66a49fc2504604d21
md5sum = 6de1faa34842e1eda095a51edecc2083

[grafana-config-file]
filename = grafana-config-file.cfg.in
-md5sum = e255dcca466f5de51698d24cbd114577
md5sum = 83a8445858eab21a12f1769c23424bea

-[grafana-provisioning-config-file]
-filename = grafana-provisioning-config-file.cfg.in
[grafana-provisioning-datasources-config-file]
filename = grafana-provisioning-datasources-config-file.cfg.in
md5sum = 3aa0f1ed752b2a59ea2b5e7c1733daf3

[grafana-provisioning-dashboards-config-file]
filename = grafana-provisioning-dashboards-config-file.cfg.in
md5sum = 5616679a9c5c2757540175ead3f5500a

[loki-config-file]
filename = loki-config-file.cfg.in
-md5sum = ad2baf4599a937d7352034a41fa24814
md5sum = 19a7f5cb904b3287b0bc7cb3e8a27429

-[promtail-config-file]
-filename = promtail-config-file.cfg.in
-md5sum = 5f1b3a1a3d3f98daeab4780106452d71
[loki-nginx-config-file]
filename = loki-nginx-config-file.cfg.in
md5sum = b08ce1e4abb34eb79e26133459c27c3a
@@ -154,7 +154,7 @@ reporting_enabled = true
# in some UI views to notify that grafana or plugin update exists
# This option does not cause any auto updates, nor send any information
# only a GET request to https://grafana.com to get latest versions
-check_for_updates = true
check_for_updates = false
# Google Analytics universal tracking code, only enabled if you specify an id here
google_analytics_ua_id =
@@ -345,11 +345,8 @@ user = {{ slapparameter_dict.get('smtp-username', '') }}
password = {{ slapparameter_dict.get('smtp-password', '') and '"""%s"""' % slapparameter_dict['smtp-password'] or ""}}
cert_file =
key_file =
-#skip_verify = false
-skip_verify = {{ slapparameter_dict.get('smtp-verify-ssl', 'true').lower() == 'true' and 'false' or 'true' }}
skip_verify = {{ slapparameter_dict.get('smtp-verify-ssl') and 'true' or 'false' }}
-#from_address = admin@grafana.localhost
from_address = {{ slapparameter_dict.get('email-from-address', '') }}
-#from_name = Grafana
from_name = {{ slapparameter_dict.get('email-from-name', 'Grafana') }}
ehlo_identity =
...
# https://grafana.com/docs/grafana/latest/administration/provisioning/#dashboards
apiVersion: 1
providers:
- name: SlapOS
folder: ''
updateIntervalSeconds: 10
allowUiUpdates: false
options:
path: {{ dashboards_dir }}
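
Aside, not part of the commit: with this provider Grafana rescans {{ dashboards_dir }} every 10 seconds, so provisioning a dashboard amounts to dropping a JSON file into that directory. A minimal sketch, where the file path, uid, title and schemaVersion are all invented for illustration:

import json

# hypothetical minimal dashboard written into the provisioned directory
dashboard = {
    "uid": "slapos-overview",   # arbitrary but stable identifier
    "title": "SlapOS overview",
    "schemaVersion": 36,        # assumption: schema version of recent Grafana 9.x
    "panels": [],               # panels omitted in this sketch
}
with open("srv/grafana/dashboards/slapos-overview.json", "w") as f:
    json.dump(dashboard, f, indent=2)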
{
-  "$schema": "http://json-schema.org/draft-04/schema#",
  "$schema": "http://json-schema.org/draft-07/schema#",
  "description": "Parameters to instantiate Grafana",
  "type": "object",
  "additionalProperties": false,
@@ -18,11 +18,7 @@
    },
    "smtp-verify-ssl": {
      "description": "Verify SSL certificate of SMTP server",
-      "type": "string",
-      "enum": [
-        "true",
-        "false"
-      ]
      "type": "boolean"
    },
    "email-from-address": {
      "description": "Email address used in From: header of emails",
@@ -33,6 +29,133 @@
      "default": "Grafana",
      "type": "string"
    },
"applications": {
"description": "Applications to monitor",
"type": "array",
"items": {
"type": "object",
"required": [
"name",
"instance-root",
"partitions"
],
"properties": {
"name": {
"description": "Name of this application",
"type": "string"
},
"instance-root": {
"description": "Directory containing SlapOS partitions.",
"type": "string"
},
"urls": {
"description": "URLs to monitor for availability and certificate lifetime",
"type": "array",
"items": {
"type": "string"
}
},
"partitions": {
"description": "SlapOS partitions to monitor",
"type": "array",
"items": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "Friendly name of the partition",
"examples": [
"mariadb",
"zope-activity"
]
},
"reference": {
"type": "string",
"description": "Reference of the partition",
"examples": [
"slappart1",
"slappart2"
]
},
"type": {
"type": "string",
"description": "Type of the partition. Known types have extra metrics and logs collected",
"enum": [
"erp5/mariadb",
"erp5/balancer",
"erp5/zope-activity",
"erp5/zope-front",
"erp5/zeo",
"mariadb",
"default"
]
},
"file-path": {
"type": "string",
"description": "Glob for the files to watch. This mostly makes sense for `default` type"
},
"static-tags": {
"type": "object",
"description": "Static tags for this partition",
"examples": [
{
"region": "eu",
"data-center": "abc123"
}
]
}
},
"anyOf": [
{
"properties": {
"type": {
"const": "default"
}
},
"required": [
"name",
"file-path"
]
},
{
"properties": {
"type": {
"not": {
"const": "default"
}
}
},
"required": [
"name",
"reference"
]
}
],
"examples": [
{
"name": "zope-backoffice",
"type": "erp5/zope-front",
"reference": "slappart1",
"static-tags": {
"instance": "instance-name"
}
},
{
"name": "mariadb",
"type": "erp5/mariadb",
"reference": "slappart2"
},
{
"name": "syslog",
"type": "default",
"file-path": "/var/log/syslog"
}
]
}
}
}
}
},
"promtail-extra-scrape-config": { "promtail-extra-scrape-config": {
"description": "Raw promtail config (experimental parameter, see https://github.com/grafana/loki/blob/v0.3.0/docs/promtail.md#scrape-configs for detail)", "description": "Raw promtail config (experimental parameter, see https://github.com/grafana/loki/blob/v0.3.0/docs/promtail.md#scrape-configs for detail)",
"default": "", "default": "",
......
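
For reference, not part of the diff: a parameter dict that validates against the extended schema above, built from the examples embedded in the schema (the instance-root path is illustrative). With the "json-in-xml" serialisation introduced further down, the whole dict is posted under the "_" key, as the test code at the end does:

import json

parameters = {
    "smtp-verify-ssl": True,  # now a real boolean instead of the "true"/"false" enum
    "applications": [
        {
            "name": "ERP5",
            "instance-root": "/srv/slapgrid/slappart15/srv/runner/instance/",
            "partitions": [
                {"name": "mariadb", "type": "erp5/mariadb", "reference": "slappart2"},
                {"name": "syslog", "type": "default", "file-path": "/var/log/syslog"},
            ],
        },
    ],
}
request_parameters = {"_": json.dumps(parameters)}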
@@ -8,7 +8,6 @@ develop-eggs-directory = {{ buildout['develop-eggs-directory'] }}
offline = true

[instance-parameter]
recipe = slapos.cookbook:slapconfiguration
computer = ${slap-connection:computer-id}
@@ -42,11 +41,17 @@ grafana-plugins-dir = ${:grafana-dir}/plugins
grafana-provisioning-config-dir = ${:grafana-dir}/provisioning-config
grafana-provisioning-datasources-dir = ${:grafana-provisioning-config-dir}/datasources
grafana-provisioning-dashboards-dir = ${:grafana-provisioning-config-dir}/dashboards
grafana-dashboards-dir = ${:grafana-dir}/dashboards
telegraf-dir = ${:srv}/telegraf
telegraf-extra-config-dir = ${:telegraf-dir}/extra-config
loki-dir = ${:srv}/loki
-loki-storage-boltdb-dir = ${:loki-dir}/index/
-loki-storage-filesystem-dir = ${:loki-dir}/chunks/
loki-boltdb-shipper-active-index-directory = ${:loki-dir}/index
loki-boltdb-shipper-cache-location = ${:loki-dir}/index-cache
loki-compactor-working-directory = ${:loki-dir}/compactor
loki-storage-filesystem-directory = ${:loki-dir}/chunks
loki-nginx-dir = ${:srv}/loki-nginx
loki-nginx-logs-dir = ${:loki-nginx-dir}/logs
promtail-dir = ${:srv}/promtail

# macros
@@ -149,6 +154,7 @@ logs-dir = ${directory:grafana-logs-dir}
plugins-dir = ${directory:grafana-plugins-dir}
provisioning-config-dir = ${directory:grafana-provisioning-config-dir}
provisioning-datasources-dir = ${directory:grafana-provisioning-datasources-dir}
provisioning-dashboards-dir = ${directory:grafana-provisioning-dashboards-dir}
admin-user = ${grafana-password:username}
admin-password = ${grafana-password:passwd}
secret-key = ${grafana-secret-key:passwd}
@@ -164,8 +170,10 @@ wrapper-path = ${directory:service}/grafana
<= generate-certificate

[grafana-password]
-recipe = slapos.cookbook:generate.password
# TODO
#recipe = slapos.cookbook:generate.password
username = admin
passwd = admin

[grafana-secret-key]
recipe = slapos.cookbook:generate.password
@@ -177,22 +185,27 @@ context =
  section apache_frontend apache-frontend
  key slapparameter_dict slap-configuration:configuration
depends =
-  ${grafana-provisioning-config-file:output}
  ${grafana-provisioning-datasources-config-file:output}
  ${grafana-provisioning-dashboards-config-file:output}

-[grafana-provisioning-config-file]
[grafana-provisioning-datasources-config-file]
<= config-file
output = ${grafana:provisioning-datasources-dir}/datasource.yaml
context =
  section influxdb influxdb
  section loki loki
[grafana-provisioning-dashboards-config-file]
<= config-file
rendered = ${grafana:provisioning-dashboards-dir}/dashboard.yaml
context =
key dashboards_dir directory:grafana-dashboards-dir
[grafana-listen-promise]
<= check-port-listening-promise
hostname= ${grafana:ipv6}
port = ${grafana:port}

[telegraf]
recipe = slapos.cookbook:wrapper
extra-config-dir = ${directory:telegraf-extra-config-dir}
@@ -206,20 +219,368 @@ wrapper-path = ${directory:service}/telegraf
context =
  section influxdb influxdb
  section telegraf telegraf
section extra telegraf-config-file-extra
[telegraf-config-file-extra]
recipe = slapos.recipe.build
telegraf-input-slapos-bin = {{ telegraf_input_slapos_bin }}
slapparameter-dict = ${slap-configuration:configuration}
init =
import zc.buildout
import pkg_resources
buildout_options = self.buildout["buildout"]
zc.buildout.easy_install.install(
["toml"],
dest=None,
working_set=pkg_resources.working_set,
path=[
buildout_options["develop-eggs-directory"],
buildout_options["eggs-directory"],
],
)
import collections
import os.path
import urllib.parse
import toml
# files to create during install step
self._config_files = {}
inputs = collections.defaultdict(list)
processors = collections.defaultdict(list)
slapparameter_dict = self.options["slapparameter-dict"]
for application in slapparameter_dict.get('applications', []):
partition_mapping = {}
for partition in application.get("partitions", []):
partition.setdefault("type", "default")
if "reference" in partition:
partition_mapping[partition["reference"]] = partition["name"]
partition_directory = os.path.join(application["instance-root"], partition['reference'])
if partition["type"] in ("erp5/mariadb", "mariadb"):
partition.setdefault("username", "root")
partition.setdefault("dbname", "erp5")
dsn = f"{partition['username']}@unix({partition_directory}/var/run/mariadb.sock)/{partition['dbname']}"
inputs["mysql"].append(
{
"name_override": f"{partition['name']}-mysql",
"servers": [dsn],
"gather_innodb_metrics": True,
"tags": dict(partition.get("static-tags", {}), app=application["name"]),
}
)
if partition["type"] == "erp5/mariadb":
inputs["sql"].append(
{
"name_override": f"{partition['name']}-activities",
"driver": "mysql",
"dsn": dsn,
"query": [
{
"query": "select count(*) as message_count from message",
"field_columns_include": ["message_count"],
},
{
"query": "select count(*) as message_queue_count from message_queue",
"field_columns_include": ["message_queue_count"],
},
{
"query": "select count(*) as message_failed_count from message where processing_node=-2",
"field_columns_include": ["message_failed_count"],
},
{
"query": "select count(*) as message_queue_failed_count from message_queue where processing_node=-2",
"field_columns_include": ["message_queue_failed_count"],
},
{
"query": """
select cast(coalesce(max(UNIX_TIMESTAMP(now()) - UNIX_TIMESTAMP(message.date)), 0) as int)
as message_waiting_time from message
where processing_node in (-1, 0) and message not like '%after_tag%'
""",
"field_columns_include": ["message_waiting_time"],
},
{
"query": """
select cast(coalesce(max(UNIX_TIMESTAMP(now()) - UNIX_TIMESTAMP(message_queue.date)), 0) as int)
as message_queue_waiting_time from message_queue
where processing_node in (-1, 0) and message not like '%after_tag%'
""",
"field_columns_include": ["message_queue_waiting_time"],
}
],
"tags": dict(partition.get("static-tags", {}), app=application["name"]),
}
)
if partition["type"] == "erp5/balancer":
inputs["tail"].append(
{
"data_format": "grok",
"files": [f"{partition_directory}/var/log/apache-access.log"],
"grok_custom_pattern_files": [],
"grok_custom_patterns": "",
"grok_patterns": [
'%{IPORHOST:client_ip} %{NOTSPACE:ident} %{NOTSPACE:auth} \\[%{HTTPDATE:timestamp}\\] "(?:%{WORD:verb:tag} %{NOTSPACE:request}(?: HTTP/%{NUMBER:http_version:float})?|%{DATA})" %{NUMBER:resp_code:tag} (?:%{NUMBER:resp_bytes:int}|-) %{QS:referrer} %{QS:agent} %{NUMBER:response_time:int}'
],
"grok_timezone": "Local",
"name_override": f"{partition['name']}",
"tags": dict(partition.get("static-tags", {}), app=application["name"]),
}
)
urls = application.get("urls", [])
if urls:
inputs["http_response"].append({
"interval": "5m",
"urls": urls,
"tags": {"app": application["name"]},
})
for url in urls:
x509_url = url
parsed_url = urllib.parse.urlparse(url)
if parsed_url.scheme == 'https':
# x509_cert wants a port
if not parsed_url.port:
x509_url = parsed_url._replace(netloc=parsed_url.hostname+':443').geturl()
inputs["x509_cert"].append({
"sources": [x509_url],
"tags": {"url": url},
"interval": "5h",
"tags": {"app": application["name"]},
})
# TODO: don't run more than one per instance_root
telegraf_slapos_input_config_file = os.path.join(
self.options['location'],
f"telegraf-input-slapos-{application['name']}.cfg")
self._config_files[telegraf_slapos_input_config_file] = toml.dumps({
"inputs": {
"slapos": [{
"instance_root": application['instance-root']}]}})
# TODO: supervisor process finder for
# https://github.com/influxdata/telegraf/tree/master/plugins/inputs/procstat ?
telegraf_slapos_input_command = self.options['telegraf-input-slapos-bin']
inputs["execd"].append({
"name_override": f"{application['name']}-processes",
"command": [telegraf_slapos_input_command, '-config', telegraf_slapos_input_config_file],
"tags": {"app": application["name"]},
})
# "cleanup" slapos process names, remove hash from wrappers and -on-watch suffix
processors["regex"].append({
"namepass": [f"{application['name']}-processes"],
"order": 1,
"tags": [{
"key": "name",
"pattern": "^(.*)-.{32}",
# XXX we concatenate strings so that we don't have to escape them for buildout
"replacement": "$" + "{1}",
}]})
processors["regex"].append({
"namepass": [f"{application['name']}-processes"],
"order": 2,
"tags": [{
"key": "name",
"pattern": "^(.*)-on-watch$",
"replacement": "$" + "{1}",
}]})
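# e.g. the two regex passes above turn a wrapper name like
# 'apache-0123456789abcdef0123456789abcdef-on-watch' (hash invented)
# first into 'apache-on-watch' and then into 'apache'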
processors["enum"].append({
"namepass": [ f"{application['name']}-processes"],
"mapping": [{
# "tag": "group", # TODO: rename this in input plugin
"tag": "slappart",
"dest": "partition",
"value_mappings": partition_mapping,
}]})
# TODOs:
# - [ ] slapos input
# - [x] friendly name of slappart
# - [x] strip hashes from -on-watch
# - [x] activity metrics
# - [ ] alert dashboard
# - [ ] include "jerome-dev" everywhere ???
# - [ ] apdex
options["extra-config"] = toml.dumps({
"inputs": inputs,
"processors": processors})
# import pdb; pdb.set_trace()
# apdex
# SELECT sum("success") / sum("all") FROM
# (SELECT count("duration") AS "all" FROM "jerome-dev-balancer" WHERE $timeFilter GROUP BY time($__interval) fill(null)),
# (SELECT count("duration") AS "success" FROM "jerome-dev-balancer" WHERE ("resp_code" = '200' ) AND $timeFilter GROUP BY time($__interval) fill(null))
#SELECT sum("success") + sum("all") FROM
# (SELECT count("duration") AS "all" FROM "jerome-dev-balancer" WHERE $timeFilter GROUP BY time($__interval) fill(0)),
# (SELECT count("duration") AS "success" FROM "jerome-dev-balancer" WHERE ("resp_code" = '200' ) AND $timeFilter GROUP BY time($__interval) fill(0))
install =
import os
os.mkdir(self.options['location'])
for fname, content in self._config_files.items():
with open(fname, 'w') as f:
f.write(content)
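
Aside, not part of the commit: for a single `mariadb` partition, the `init` code above assembles an `extra-config` roughly like the following (socket path and names invented), which `toml.dumps` renders as `[[inputs.mysql]]` tables appended to the telegraf config:

import toml  # the same library the recipe installs at runtime

extra = {
    "inputs": {
        "mysql": [{
            "name_override": "mariadb-mysql",
            "servers": ["root@unix(/instance-root/slappart2/var/run/mariadb.sock)/erp5"],
            "gather_innodb_metrics": True,
            "tags": {"app": "ERP5"},
        }],
    },
}
print(toml.dumps(extra))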
[loki]
-recipe = slapos.cookbook:wrapper
-command-line =
-  bash -c 'nice -19 chrt --idle 0 ionice -c3 {{ loki_bin }} -config.file=${loki-config-file:output}'
-wrapper-path = ${directory:service}/loki
-storage-boltdb-dir = ${directory:loki-storage-boltdb-dir}
-storage-filesystem-dir = ${directory:loki-storage-filesystem-dir}
boltdb-shipper-active-index-directory = ${directory:loki-boltdb-shipper-active-index-directory}
boltdb-shipper-cache-location = ${directory:loki-boltdb-shipper-cache-location}
compactor-working-directory = ${directory:loki-compactor-working-directory}
storage-filesystem-directory = ${directory:loki-storage-filesystem-directory}
ip = ${instance-parameter:ipv4-random}
-port = 3100
-grpc-port = 9095
-url = http://${:ip}:${:port}
read-1-http-port = 3101
read-1-grpc-port = 9096
read-1-memberlist-port = 7947
read-2-http-port = 3102
read-2-grpc-port = 9097
read-2-memberlist-port = 7948
write-http-port = 3103
write-grpc-port = 9098
write-memberlist-port = 7949
query-frontend-http-port = 3104
query-frontend-grpc-port = 9099
query-frontend-memberlist-port = 7950
querier-http-port = 3105
querier-grpc-port = 9100
querier-memberlist-port = 7951
index-gateway-http-port = 3106
index-gateway-grpc-port = 9101
index-gateway-memberlist-port = 7952
query-scheduler-http-port = 3107
query-scheduler-grpc-port = 9102
query-scheduler-memberlist-port = 7953
# compactor
nginx-port = 3100
url = http://${:ip}:${:nginx-port}
ipv6 = ${instance-parameter:ipv6-random}
[loki-service-macro]
recipe = slapos.cookbook:wrapper
command-line =
bash -c 'nice -19 chrt --idle 0 ionice -c3 {{ loki_bin }} \
-config.file=${loki-config-file:output} \
\
-boltdb.shipper.compactor.ring.instance-addr=${loki:ip} \
-boltdb.shipper.compactor.ring.instance-id=${:_buildout_section_name_} \
-common.embedded-cachering.instance-addr=${loki:ip} \
-common.embedded-cachering.instance-id=${:_buildout_section_name_} \
-distributor.ring.instance-addr=${loki:ip} \
-distributor.ring.instance-id=${:_buildout_section_name_} \
-frontend.instance-addr=${loki:ip} \
-frontend.instance-port=${loki:query-frontend-grpc-port} \
-index-gateway.ring.instance-addr=${loki:ip} \
-index-gateway.ring.instance-id=${:_buildout_section_name_} \
-memberlist.advertise-port=${:memberlist-port} \
-memberlist.bind-port=${:memberlist-port} \
-memberlist.nodename=${:_buildout_section_name_} \
-query-scheduler.ring.instance-addr=${loki:ip} \
-query-scheduler.ring.instance-id=${:_buildout_section_name_} \
-ruler.ring.instance-addr=${loki:ip} \
-ruler.ring.instance-id=${:_buildout_section_name_} \
-server.grpc-listen-port=${:grpc-port} \
-server.http-listen-port=${:http-port} \
${:extra-command-line}'
wrapper-path = ${directory:service}/${:_buildout_section_name_}
extra-command-line =
# level=error ts=2022-09-24T14:40:13.636615531Z caller=scheduler_processor.go:182 org_id=fake msg="error notifying frontend about finished query" err="rpc error: code = ResourceExhausted desc = grpc: received message larger than max (4200411 vs. 4194304)" frontend=10.0.44.65:9099
[loki-listen-promise-macro]
<= check-url-available-promise
url = http://${loki:ip}:${:port}/ready
[loki-read-1-service]
<= loki-service-macro
extra-command-line = -target=read -querier.scheduler-address=${loki:ip}:${loki:read-2-grpc-port} -query-scheduler.ring.instance-port=${loki:read-1-grpc-port}
http-port = ${loki:read-1-http-port}
grpc-port = ${loki:read-1-grpc-port}
memberlist-port = ${loki:read-1-memberlist-port}
[loki-read-1-listen-promise]
<= loki-listen-promise-macro
port = ${loki-read-1-service:http-port}
[loki-read-2-service]
<= loki-service-macro
extra-command-line = -target=read -querier.scheduler-address=${loki:ip}:${loki:read-1-grpc-port} -query-scheduler.ring.instance-port=${loki:read-2-grpc-port}
http-port = ${loki:read-2-http-port}
grpc-port = ${loki:read-2-grpc-port}
memberlist-port = ${loki:read-2-memberlist-port}
[loki-read-2-listen-promise]
<= loki-listen-promise-macro
port = ${loki-read-2-service:http-port}
[loki-write-service]
<= loki-service-macro
extra-command-line = -target=write
http-port = ${loki:write-http-port}
grpc-port = ${loki:write-grpc-port}
memberlist-port = ${loki:write-memberlist-port}
[loki-write-listen-promise]
<= loki-listen-promise-macro
port = ${loki-write-service:http-port}
[loki-querier-service]
<= loki-service-macro
extra-command-line = -target=querier -querier.scheduler-address=${loki:ip}:${loki:query-scheduler-grpc-port} -query-scheduler.ring.instance-port=${loki:querier-grpc-port}
http-port = ${loki:querier-http-port}
grpc-port = ${loki:querier-grpc-port}
memberlist-port = ${loki:querier-memberlist-port}
[loki-querier-listen-promise]
<= loki-listen-promise-macro
port = ${loki-querier-service:http-port}
[loki-index-gateway-service]
<= loki-service-macro
extra-command-line = -target=index-gateway -boltdb.shipper.query-ready-num-days=30
# XXX -boltdb.shipper.query-ready-num-days=30 useful ?
http-port = ${loki:index-gateway-http-port}
grpc-port = ${loki:index-gateway-grpc-port}
memberlist-port = ${loki:index-gateway-memberlist-port}
[loki-index-gateway-listen-promise]
<= loki-listen-promise-macro
port = ${loki-index-gateway-service:http-port}
[loki-query-frontend-service]
<= loki-service-macro
extra-command-line = -target=query-frontend -frontend.scheduler-address=${loki:ip}:${loki:query-scheduler-grpc-port}
http-port = ${loki:query-frontend-http-port}
grpc-port = ${loki:query-frontend-grpc-port}
memberlist-port = ${loki:query-frontend-memberlist-port}
[loki-query-frontend-listen-promise]
<= loki-listen-promise-macro
port = ${loki-query-frontend-service:http-port}
[loki-query-scheduler-service]
<= loki-service-macro
extra-command-line = -target=query-scheduler
http-port = ${loki:query-scheduler-http-port}
grpc-port = ${loki:query-scheduler-grpc-port}
memberlist-port = ${loki:query-scheduler-memberlist-port}
[loki-query-scheduler-listen-promise]
<= loki-listen-promise-macro
port = ${loki-query-scheduler-service:http-port}
[loki-config-file]
@@ -227,14 +588,26 @@ url = http://${:ip}:${:port}
context =
  section loki loki

-[loki-listen-promise]
[loki-nginx-service]
recipe = slapos.cookbook:wrapper
command-line =
  {{ nginx_bin }} -p ${directory:loki-nginx-dir} -c ${loki-nginx-config-file:output}
wrapper-path = ${directory:service}/${:_buildout_section_name_}
url = http://${loki:ip}:${loki:nginx-port}

[loki-nginx-listen-promise]
<= check-url-available-promise
-url = ${loki:url}/ready
url = ${loki-nginx-service:url}

[loki-nginx-config-file]
<= config-file
context =
  section loki loki
[promtail]
recipe = slapos.cookbook:wrapper
command-line =
-  bash -c 'nice -19 chrt --idle 0 ionice -c3 {{ promtail_bin }} -config.file=${promtail-config-file:output}'
  bash -c 'nice -19 chrt --idle 0 ionice -c3 {{ promtail_bin }} -config.file=${promtail-config-file:location}'
wrapper-path = ${directory:service}/promtail
dir = ${directory:promtail-dir}
@@ -244,11 +617,310 @@ ip = ${instance-parameter:ipv4-random}
url = http://${:ip}:${:http-port}

[promtail-config-file]
-<= config-file
-context =
-  section promtail promtail
-  section loki loki
-  key slapparameter_dict slap-configuration:configuration
recipe = slapos.recipe.build
location = ${directory:etc}/${:_buildout_section_name_}.cfg
slapparameter-dict = ${slap-configuration:configuration}
install =
  {% raw %}
import os
# XXX make extra eggs available to buildout
import zc.buildout
import pkg_resources
buildout_options = self.buildout['buildout']
zc.buildout.easy_install.install(
['pyyaml'],
dest=None,
working_set=pkg_resources.working_set,
path=[
buildout_options['develop-eggs-directory'],
buildout_options['eggs-directory']])
import yaml
slapparameter_dict = self.options['slapparameter-dict']
cfg = {
"server": {
"http_listen_address": self.buildout['promtail']['ip'],
"http_listen_port": int(self.buildout['promtail']['http-port']),
"grpc_listen_address": self.buildout['promtail']['ip'],
"grpc_listen_port": int(self.buildout['promtail']['grpc-port']),
"graceful_shutdown_timeout": 5,
"external_url": self.buildout['promtail']['url'],
},
"positions": {
"filename": "{}/positions.yaml".format(self.buildout['promtail']['dir']),
},
"clients": [
{
"url": "{}/loki/api/v1/push".format(self.buildout['loki']['url']),
"batchwait": "5s"
}
],
"scrape_configs": []
}
def get_job_selector(partition, job_name, application_name):
# make a selector in LogQL, like '{job="job_name",key="value"}'
selector_parts = [f'app="{application_name}"']
for k, v in dict(partition.get('static-tags', {}), job=job_name).items():
selector_parts.append(f'{k}="{v}"')
return "{%s}" % ",".join(selector_parts)
def get_static_configs(partition, job_name, path, application):
directory = ''
if partition.get('reference'):
directory = os.path.join(application['instance-root'], partition['reference'])
return [
{
"targets": [
"localhost"
],
"labels": dict(
partition.get('static-tags', {}),
job=job_name,
partition=partition['name'],
app=application['name'],
__path__=path.format(directory=directory),
)
}
]
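# e.g. for partition {'name': 'mariadb', 'reference': 'slappart2'} of an application
# named 'ERP5' with instance-root '/srv/runner/instance' (values invented), the path
# template '{directory}/var/log/mariadb_slowquery.log' expands to
# __path__ = '/srv/runner/instance/slappart2/var/log/mariadb_slowquery.log'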
for application in slapparameter_dict.get('applications', []):
for partition in application.get('partitions', []):
partition.setdefault("type", "default")
if partition['type'] in ('erp5/zope-activity', 'erp5/zope-front'):
job_name = f"{partition['name']}-event-log"
cfg['scrape_configs'].append({
"job_name": job_name,
"pipeline_stages": [
{
"match": {
"selector": get_job_selector(partition, job_name, application['name']),
"stages": [
{
"multiline": {
"firstline": "^------",
"max_wait_time": "3s"
}
},
{
"regex": {
"expression": "^------\\n(?P<timestamp>\\d{4}-\\d{2}-\\d{2}\\s\\d{1,2}\\:\\d{2}\\:\\d{2}\\,\\d{3}) (?P<level>\\S+) (?P<component>\\S+) (?P<message>.*)"
}
},
{
"timestamp": {
"format": "2021-04-04 03:57:11,242",
"source": "timestamp"
}
},
{
"labels": {
"level": None,
# XXX do we really want `component` ? it may cause lots of cardinality
# "component": None
}
}
]
}
}
],
"static_configs": get_static_configs(
partition,
job_name,
"{directory}/var/log/zope-*-event.log",
application,
)})
if partition['type'] == 'erp5/zope-front':
job_name = f"{partition['name']}-access-log"
cfg['scrape_configs'].append({
"job_name": job_name,
# drop requests for haproxy health check
"pipeline_stages": [
{
"drop": {
"expression": '.* "GET / HTTP/1.0" 200 .*'
}
}
],
"static_configs": get_static_configs(
partition,
job_name,
"{directory}/var/log/zope-*-Z2.log",
application,
)})
job_name = f"{partition['name']}-long-request-log"
cfg['scrape_configs'].append({
"job_name": job_name,
"pipeline_stages": [
{
"match": {
"selector": get_job_selector(partition, job_name, application['name']),
"stages": [
{
"multiline": {
"firstline": "^\\d{4}-\\d{2}-\\d{2}\\s\\d{1,2}\\:\\d{2}\\:\\d{2}\\,\\d{3}",
"max_wait_time": "3s"
}
},
{
"regex": {
"expression": "^(?P<timestamp>.*) .*"
}
},
{
"timestamp": {
"format": "2021-04-04 03:57:11,242",
"source": "timestamp"
}
}
]
}
}
],
"static_configs": get_static_configs(
partition,
job_name,
"{directory}/var/log/zope-*-longrequest.log",
application,
)})
if partition['type'] in ('erp5/mariadb', 'mariadb'):
job_name = f"{partition['name']}-mariadb-slow-queries"
cfg['scrape_configs'].append({
"job_name": job_name,
"pipeline_stages": [
{
"match": {
"selector": get_job_selector(partition, job_name, application['name']),
"stages": [
{
"multiline": {
# TODO
#"firstline": "^# Time: \\d{2}\\d{2}\\d{2}\\s\\d{1,2}\\:\\d{2}\\:\\d{2}",
"firstline": r"^# Time: \d{2}.*",
"max_wait_time": "3s"
}
},
{
"regex": {
"expression": ".*SET timestamp=(?P<timestamp>\\d+);.*"
}
},
{
"timestamp": {
"format": "Unix",
"source": "timestamp"
}
}
]
}
}
],
"static_configs": get_static_configs(
partition,
job_name,
"{directory}/var/log/mariadb_slowquery.log",
application,
)})
job_name = f"{partition['name']}-mariadb-error-log"
cfg['scrape_configs'].append({
"job_name": job_name,
"pipeline_stages": [
{
"match": {
"selector": get_job_selector(partition, job_name, application['name']),
"stages": [
{
"timestamp": {
"format": "2021-06-05 3:55:31",
"source": "timestamp"
}
}
]
}
}
],
"static_configs": get_static_configs(
partition,
job_name,
"{directory}/var/log/mariadb_error.log",
application,
)})
if partition['type'] == 'erp5/zeo':
job_name = f"{partition['name']}-zeo-log"
cfg['scrape_configs'].append({
"job_name": job_name,
"pipeline_stages": [
{
"match": {
"selector": get_job_selector(partition, job_name, application['name']),
"stages": [
{
"multiline": {
"firstline": "^------",
"max_wait_time": "3s"
}
},
{
"regex": {
"expression": "^------\\n(?P<timestamp>\\d{4}-\\d{2}-\\d{2}\\s\\d{1,2}\\:\\d{2}\\:\\d{2}\\,\\d{3}) (?P<level>\\S+) (?P<component>\\S+) (?P<message>.*)"
}
},
{
"timestamp": {
"format": "2021-04-04 03:57:11,242",
"source": "timestamp"
}
},
{
"labels": {
"level": None,
"component": None
}
}
]
}
}
],
"static_configs": get_static_configs(
partition,
job_name,
"{directory}/var/log/zeo-*.log",
application,
)})
if partition['type'] == 'erp5/balancer':
job_name = f"{partition['name']}-balancer-access-log"
cfg['scrape_configs'].append({
"job_name": job_name,
"static_configs": get_static_configs(
partition,
job_name,
"{directory}/var/log/apache-access.log",
application,
)})
job_name = f"{partition['name']}-balancer-error-log"
cfg['scrape_configs'].append({
"job_name": job_name,
"static_configs": get_static_configs(
partition,
job_name,
"{directory}/var/log/apache-error.log",
application,
)})
if partition.get('file-path'):
job_name = partition['name']
cfg['scrape_configs'].append({
"job_name": job_name,
"static_configs": get_static_configs(
partition,
job_name,
f"{partition['file-path']}",
application,
)})
with open(self.options['location'], 'w') as f:
yaml.dump(cfg, f)
{% endraw %}
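
Aside, not part of the commit: for an `erp5/balancer` partition the loop above appends a scrape_config shaped like this (names and paths invented); promtail then tails the matched file and pushes each line to Loki with these labels:

# sketch of one generated entry in cfg['scrape_configs']
example_scrape_config = {
    "job_name": "balancer-balancer-access-log",
    "static_configs": [{
        "targets": ["localhost"],
        "labels": {
            "job": "balancer-balancer-access-log",
            "partition": "balancer",
            "app": "ERP5",
            "__path__": "/srv/runner/instance/slappart6/var/log/apache-access.log",
        },
    }],
}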
[promtail-listen-promise]
<= check-port-listening-promise
@@ -256,7 +928,6 @@ hostname= ${promtail:ip}
port = ${promtail:http-port}

[apache-frontend]
<= slap-connection
recipe = slapos.cookbook:requestoptional
@@ -280,8 +951,14 @@ instance-promises =
  ${influxdb-password-promise:wrapper-path}
  ${influxdb-database-ready-promise:wrapper-path}
  ${grafana-listen-promise:path}
-  ${loki-listen-promise:path}
  ${loki-query-frontend-listen-promise:path}
  ${loki-query-scheduler-listen-promise:path}
  # ${loki-index-gateway-listen-promise:path}
  ${loki-querier-listen-promise:path}
  # ${loki-read-1-listen-promise:path}
  # ${loki-read-2-listen-promise:path}
  ${loki-write-listen-promise:path}
  ${loki-nginx-listen-promise:path}
  ${promtail-listen-promise:path}
  ${apache-frontend-available-promise:path}
...
# inspired by
# https://github.com/grafana/loki/blob/1489c1731277c327e3661da182bfc6c90d4559f4/tools/dev/loki-boltdb-storage-s3/docker-compose.yml
# and other configuration examples with microservices, because the single binary
# mode assumes running on 127.0.0.1, but in slapos we want to bind on partition's
# addresses
auth_enabled: false
-http_prefix:

server:
  http_listen_address: {{ loki['ip'] }}
-  http_listen_port: {{ loki['port'] }}
  grpc_listen_address: {{ loki['ip'] }}
-  grpc_listen_port: {{ loki['grpc-port'] }}
  grpc_server_max_recv_msg_size: 1.048576e+08
  grpc_server_max_send_msg_size: 1.048576e+08

-ingester:
-  lifecycler:
-    address: {{ loki['ip'] }}
-    ring:
-      kvstore:
-        store: inmemory
-      replication_factor: 1
-  chunk_idle_period: 15m
# # TODO ?
# wal:
#   enabled: true
#   dir: /loki/wal

common:
  compactor_address: http://{{ loki['ip'] }}:{{ loki['write-http-port'] }}

schema_config:
  configs:
-  - from: 2018-04-15
-    store: boltdb
  - from: 2020-05-15
    store: boltdb-shipper
    object_store: filesystem
-    schema: v9
    schema: v11
    index:
      prefix: index_
-      period: 168h
      period: 24h

storage_config:
-  boltdb:
-    directory: {{ loki['storage-boltdb-dir'] }}
  boltdb_shipper:
    active_index_directory: {{ loki['boltdb-shipper-active-index-directory'] }}
    cache_location: {{ loki['boltdb-shipper-cache-location'] }}
  filesystem:
-    directory: {{ loki['storage-filesystem-dir'] }}
    directory: {{ loki['storage-filesystem-directory'] }}

limits_config:
  reject_old_samples: false
  enforce_metric_name: false
-  reject_old_samples: true
-  reject_old_samples_max_age: 168h
  ingestion_rate_mb: 1024
  ingestion_burst_size_mb: 1024

-chunk_store_config:
-  max_look_back_period: 0
-
-table_manager:
-  chunk_tables_provisioning:
-    inactive_read_throughput: 0
-    inactive_write_throughput: 0
-    provisioned_read_throughput: 0
-    provisioned_write_throughput: 0
-  index_tables_provisioning:
-    inactive_read_throughput: 0
-    inactive_write_throughput: 0
-    provisioned_read_throughput: 0
-    provisioned_write_throughput: 0
-  retention_deletes_enabled: false
-  retention_period: 0
ingester:
  lifecycler:
    address: {{ loki['ip'] }}
    ring:
      kvstore:
        store: memberlist
      replication_factor: 1

compactor:
  compaction_interval: 1m
  retention_enabled: true
  working_directory: {{ loki['compactor-working-directory'] }}

frontend:
  log_queries_longer_than: 5s
  compress_responses: true
  max_outstanding_per_tenant: 2048
  tail_proxy_url: http://{{ loki['ip'] }}:{{ loki['querier-http-port'] }}

frontend_worker:
  scheduler_address: {{ loki['ip'] }}:{{ loki['query-scheduler-grpc-port'] }}

memberlist:
  bind_addr:
  - {{ loki['ip'] }}
  join_members:
  # - {{ loki['ip'] }}:{{ loki['read-1-memberlist-port'] }}
  - {{ loki['ip'] }}:{{ loki['querier-memberlist-port'] }}
  # - {{ loki['ip'] }}:{{ loki['write-memberlist-port'] }}

query_scheduler:
  max_outstanding_requests_per_tenant: 1024

querier:
  query_ingesters_within: 2h
daemon off;
events {
worker_connections 1024;
}
error_log /dev/stdout;
http {
default_type application/octet-stream;
access_log /dev/stdout;
sendfile on;
tcp_nopush on;
upstream read {
server {{ loki['ip'] }}:{{ loki['query-frontend-http-port'] }};
}
upstream write {
server {{ loki['ip'] }}:{{ loki['write-http-port'] }};
}
upstream cluster {
server {{ loki['ip'] }}:{{ loki['write-http-port'] }};
server {{ loki['ip'] }}:{{ loki['query-frontend-http-port'] }};
server {{ loki['ip'] }}:{{ loki['querier-http-port'] }};
}
upstream query-frontend {
server {{ loki['ip'] }}:{{ loki['query-frontend-http-port'] }};
}
server {
listen {{ loki['ip'] }}:{{ loki['nginx-port'] }};
# XXX while debugging
listen [{{ loki['ipv6'] }}]:{{ loki['nginx-port'] }};
location / {
return 200 'OK';
}
location = /ring {
proxy_pass http://cluster$request_uri;
}
location = /memberlist {
proxy_pass http://cluster$request_uri;
}
location = /config {
proxy_pass http://cluster$request_uri;
}
location = /metrics {
proxy_pass http://cluster$request_uri;
}
location = /ready {
proxy_pass http://cluster$request_uri;
}
location = /loki/api/v1/push {
proxy_pass http://write$request_uri;
}
location = /loki/api/v1/tail {
proxy_pass http://read$request_uri;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
}
location ~ /loki/api/.* {
proxy_pass http://query-frontend$request_uri;
}
}
}
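
Aside, not part of the commit: a quick way to exercise the routing above once the partition is up; the base address is hypothetical (the partition's IPv4 plus the nginx-port, 3100):

import requests

base = "http://10.0.0.2:3100"

print(requests.get(base + "/").text)                      # 'OK', answered by nginx itself
print(requests.get(base + "/ready").status_code)          # proxied to the cluster upstream
print(requests.get(base + "/loki/api/v1/labels").json())  # proxied to the query-frontend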
-server:
-  http_listen_address: {{ promtail['ip'] }}
-  http_listen_port: {{ promtail['http-port'] }}
-  grpc_listen_address: {{ promtail['ip'] }}
-  grpc_listen_port: {{ promtail['grpc-port'] }}
-  external_url: {{ promtail['url'] }}
-
-positions:
-  filename: {{ promtail['dir'] }}/positions.yaml
-
-clients:
-  - url: {{ loki['url'] }}/api/prom/push
-
-scrape_configs:
-- job_name: test
-  static_configs:
-  - targets:
-      - localhost
-    labels:
-      job: grafanalogs
-      __path__: ./var/log/*log
-{{ slapparameter_dict.get('promtail-extra-scrape-config', '') }}
@@ -7,9 +7,9 @@ extends =
  ../../component/openssl/buildout.cfg
  ../../component/curl/buildout.cfg
  ../../component/dash/buildout.cfg
  ../../component/nginx/buildout.cfg
  buildout.hash.cfg

versions = versions

parts =
  slapos-cookbook
  instance-profile
@@ -17,24 +17,33 @@ parts =
  influxdb-config-file
  telegraf-config-file
  grafana-config-file
-  grafana-provisioning-config-file
  grafana-provisioning-datasources-config-file
  grafana-provisioning-dashboards-config-file
  loki-config-file
-  promtail-config-file
  loki-nginx-config-file

[nodejs]
-<= nodejs-14.16.0
<= nodejs-16.14.0
[gowork]
golang = ${golang1.19:location}
# XXX speed up development cycle by not rebuilding workspace on every software run
# XXX does not work ?
update-command =
[go_github.com_grafana_grafana]
<= go-git-package
go.importpath = github.com/grafana/grafana
repository = https://github.com/grafana/grafana
-revision = v7.5.2-0-gca413c612f
revision = v9.1.5-0-gdf015a9301

[go_github.com_grafana_loki]
<= go-git-package
go.importpath = github.com/grafana/loki
-repository = https://github.com/perrinjerome/loki
-revision = v2.2.1-1-gda6d45f2
repository = https://github.com/grafana/loki
revision = v2.1.0-2075-gafd63c598
# tag helm-loki-3.1.0 which supports golang 1.19

[go_github.com_influxdata_influxdb]
<= go-git-package
@@ -46,7 +55,7 @@ revision = v1.8.4-0-gbc8ec4384e
<= go-git-package
go.importpath = github.com/influxdata/telegraf
repository = https://github.com/influxdata/telegraf
-revision = v1.20.2-0-gf721f53d
revision = v1.24.0-0-g3c4a6516e

[go_github.com_perrinjerome_slapos_telegraf_input]
<= go-git-package
@@ -54,12 +63,18 @@ go.importpath = github.com/perrinjerome/telegraf-input-slapos
repository = https://github.com/perrinjerome/telegraf-input-slapos
revision = v0.0.1-0-gf8981f3
# [go_github.com_jaegertracking_jaeger]
# <= go-git-package
# go.importpath = github.com/jaegertracking/jaeger
# repository = https://github.com/jaegertracking/jaeger
# revision = v1.20.0-623-gcac21f82
[gowork]
# Fails with current default golang1.18
golang = ${golang1.17:location}
install =
  ${go_github.com_grafana_loki:location}:./cmd/loki
-  ${go_github.com_grafana_loki:location}:./cmd/promtail
  ${go_github.com_grafana_loki:location}:./clients/cmd/promtail
  ${go_github.com_grafana_loki:location}:./cmd/logcli
  ${go_github.com_influxdata_telegraf:location}:./cmd/...
  ${go_github.com_influxdata_influxdb:location}:./cmd/...
@@ -70,6 +85,7 @@ environment =
CGO_ENABLED = 0

telegraf-bin = ${:bin}/telegraf
telegraf-input-slapos-bin = ${:bin}/telegraf-input-slapos
influx-bin = ${:bin}/influx
influxd-bin = ${:bin}/influxd
grafana-bin = ${:bin}/grafana-server
@@ -80,14 +96,18 @@ promtail-bin = ${:bin}/promtail

[grafana]
recipe = plone.recipe.command
-command = bash -c "
-  cd ${:homepath} &&
-  . ${gowork:env.sh} &&
command = bash -ce "
  cd ${:homepath} && \
  . ${gowork:env.sh} && \
  go install github.com/google/wire/cmd/wire@v0.5.0 && \
  wire gen -tags oss ./pkg/server ./pkg/cmd/grafana-cli/runner && \
  # Unlike the loki, grafana _needs_ CGO_ENABLED, so we override here
-  export CGO_ENABLED=1 &&
  export CGO_ENABLED=1 && \
  go run build.go setup && \
  go run build.go build && \
-  ${yarn:location}/bin/yarn install --pure-lockfile && \
  export NODE_OPTIONS=--max_old_space_size=8192 && \
  ${yarn:location}/bin/yarn install --immutable && \
  ${yarn:location}/bin/yarn run themes:generate && \
  ${yarn:location}/bin/yarn run build && \
  ${yarn:location}/bin/yarn run plugins:build-bundled && \
  # Cleanup yarn and Cypress caches
@@ -110,15 +130,24 @@ url = ${:_profile_base_location_}/${:filename}

[grafana-config-file]
<= download-file-base
-[grafana-provisioning-config-file]
[grafana-provisioning-datasources-config-file]
<= download-file-base

[grafana-provisioning-dashboards-config-file]
<= download-file-base

[loki-config-file]
<= download-file-base

-[promtail-config-file]
[loki-nginx-config-file]
<= download-file-base
[instance-eggs]
recipe = zc.recipe.egg
eggs =
${python-PyYAML:egg}
toml
[instance-profile]
recipe = slapos.recipe.template:jinja2
url = ${:_profile_base_location_}/${:filename}
@@ -128,16 +157,20 @@ context =
  section buildout buildout
  key openssl_bin openssl-output:openssl
  key telegraf_bin gowork:telegraf-bin
key telegraf_input_slapos_bin gowork:telegraf-input-slapos-bin
  key influxd_bin gowork:influxd-bin
  key influx_bin gowork:influx-bin
  key grafana_bin gowork:grafana-bin
  key grafana_homepath gowork:grafana-homepath
  key loki_bin gowork:loki-bin
raw nginx_bin ${nginx:location}/sbin/nginx
  key promtail_bin gowork:promtail-bin
  key curl_bin :curl-bin
  key dash_bin :dash-bin
curl-bin = ${curl:location}/bin/curl
dash-bin = ${dash:location}/bin/dash
depends = ${instance-eggs:eggs}

[versions]
inotifyx = 0.2.2
toml = 0.10.2
{
  "name": "Grafana",
  "description": "Grafana, Telegraf and Influxdb",
-  "serialisation": "xml",
  "serialisation": "json-in-xml",
  "software-type": {
    "default": {
      "title": "Default",
...
@@ -55,9 +55,6 @@
[outputs.influxdb]
# The full HTTP or UDP endpoint URL for your InfluxDB instance
# Multiple urls can be specified for InfluxDB cluster support.
-# urls = ["udp://localhost:8089"] # UDP endpoint example
-# XXX XXX XXX
-#urls = ["http://localhost:8086"] # required
urls = ["{{ influxdb['url'] }}"]
insecure_skip_verify = true # because we are using a self signed certificate
# The target database for metrics (telegraf will create it if not exists)
@@ -100,32 +97,9 @@
[system]

{{ extra['extra-config'] }}

###############################################################################
-# ERP5 - PLUGINS                                                              #
-###############################################################################
-#
-# Left here as example, don't edit this file directly, but place your config
# To add ad-hoc config, don't edit this file directly, but place your config
# files in {{ telegraf['extra-config-dir'] }}
-#
-#[mysql]
-#  servers = ["root@unix(/srv/slapgrid/slappart12/srv/runner/instance/slappart1/var/run/mariadb.sock)/erp5"]
-#[memcached]
-#  # XXX kumofs does not support memcached's stat command
-#  servers = ["10.0.248.233:2013", "10.0.248.233:2003"]
-#[haproxy]
-#  servers = ["http://10.0.121.162:2150/haproxy", "http://10.0.121.162:2152/haproxy"]
-#[[inputs.exec]]
-#  commands = ["/srv/slapgrid/slappart0/bin/slapsensor /srv/slapgrid/slappart0/srv/runner/instance/etc/supervisord.conf"]
-#  name_suffix = "_slapos"
-#  interval = "5s"
-
-###############################################################################
-# SERVICE PLUGINS                                                             #
###############################################################################
@@ -32,6 +32,7 @@ import os
import tempfile
import textwrap
import time
import json

import psutil
import requests
@@ -102,7 +103,7 @@ class TestGrafana(GrafanaTestCase):
    with open(
        os.path.join(self.computer_partition_root_path, 'etc',
                     'grafana-config-file.cfg')) as f:
-      config.readfp(io.StringIO('[default]\n' + f.read()))
      config.read_file(io.StringIO('[default]\n' + f.read()))

    self.assertEqual(config.get('smtp', 'enabled'), 'false')
@@ -185,8 +186,109 @@ class TestTelegraf(GrafanaTestCase):

class TestLoki(GrafanaTestCase):
  instance_max_retry = 2

  @classmethod
  def getInstanceParameterDict(cls):
cls._logfile = tempfile.NamedTemporaryFile(suffix='log')
parameter_dict = {
"applications": [
{
"name": "System",
"instance-root": "/",
"partitions": [
{
# no slapos for system application
# XXX example
"name": "syslog",
"reference": "syslog",
"files": [
"/srv/slapgrid/slappart15/grosgzip/bench.log",
]
},
]
},
{
"name": "ERP5",
"instance-root": "/srv/slapgrid/slappart15/srv/runner/instance/",
"urls": [
# TODO
# "https://XXX.host.vifib.net/erp5/",
],
"partitions": [
{
"name": "jerome-dev-mariadb",
"reference": "slappart3",
"type": "erp5/mariadb",
#"static-tags": {
# "XXX": "needed?"
#}
},
{
"name": "jerome-dev-zodb",
"reference": "slappart4",
"type": "erp5/zeo",
#"static-tags": {
# "XXX": "needed?"
#}
},
{
"name": "jerome-dev-balancer",
"reference": "slappart6",
"type": "erp5/balancer",
#"static-tags": {
# "XXX": "needed?"
#}
},
{
"name": "jerome-dev-zope-front",
"reference": "slappart5",
"type": "erp5/zope-front",
#"static-tags": {
# "XXX": "needed?"
#}
},
# {
# "name": "jerome-dev-zope-front",
# "reference": "slappart13",
# "type": "erp5/zope-activity",
# #"static-tags": {
# # "XXX": "needed?"
# #}
# }
]
}
],
# TODO: drop this
'promtail-extra-scrape-config':
textwrap.dedent(r'''
- job_name: {cls.__name__}
pipeline_stages:
- match:
selector: '{{job="{cls.__name__}"}}'
stages:
- multiline:
firstline: '^\d{{4}}-\d{{2}}-\d{{2}}\s\d{{1,2}}\:\d{{2}}\:\d{{2}}\,\d{{3}}'
max_wait_time: 3s
- regex:
expression: '^(?P<timestamp>.*) - (?P<name>\S+) - (?P<level>\S+) - (?P<message>.*)'
- timestamp:
format: 2006-01-02T15:04:05Z00:00
source: timestamp
- labels:
level:
name:
static_configs:
- targets:
- localhost
labels:
job: {cls.__name__}
__path__: {cls._logfile.name}
''').format(**locals())
}
return {'_': json.dumps(parameter_dict)}
  def xgetInstanceParameterDict(cls):
    cls._logfile = tempfile.NamedTemporaryFile(suffix='log')
    return {
      'promtail-extra-scrape-config':
@@ -227,9 +329,10 @@ class TestLoki(GrafanaTestCase):
    )['loki-url']

  def test_loki_available(self):
    import pdb; pdb.set_trace()
    self.assertEqual(
      requests.codes.ok,
-      requests.get('{self.loki_url}/ready'.format(**locals()),
      requests.get(f'{self.loki_url}/ready',
                   verify=False).status_code)

  def test_log_ingested(self):
...
@@ -26,7 +26,7 @@ md5sum = d10b8e35b02b5391cf46bf0c7dbb1196

[template-mariadb]
filename = instance-mariadb.cfg.in
-md5sum = 93b2277185e4949a3d17be79d3710d2d
md5sum = 257ea9d3c76ea563430c24f5724b8ac9

[template-kumofs]
filename = instance-kumofs.cfg.in
...
@@ -6,6 +6,7 @@
{% for database_count in range(slapparameter_dict.get('test-database-amount', 1)) -%}
{% do test_database_list.append({'name': 'erp5_test_' ~ database_count, 'user': 'testuser_' ~ database_count, 'password': 'testpassword' ~ database_count}) -%}
{% endfor -%}
{% set character_set_server = slapparameter_dict.get('character-set-server', 'utf8mb4') -%}
{% set catalog_backup = slapparameter_dict.get('catalog-backup', {}) -%}
{% set backup_periodicity = slapparameter_dict.get('backup-periodicity', 'daily') -%}
{% set full_backup_retention_days = catalog_backup.get('full-retention-days', 7) -%}
@@ -99,7 +100,8 @@ time = {{ dumps(backup_periodicity) }}
# can be fully restored.
# master-data: use value "2" as we are not in a replication case
#}
-command = "${binary-wrap-mysqldump:wrapper-path}" --all-databases --flush-privileges --single-transaction --max-allowed-packet=128M {% if incremental_backup_retention_days > -1 %}--flush-logs --master-data=2 {% endif %}| {{ parameter_dict['gzip-location'] }}/bin/gzip > "${directory:mariadb-backup-full}/$({{ parameter_dict['coreutils-location'] }}/bin/date "+%Y%m%d%H%M%S").sql.gz"
command = "${binary-wrap-mysqldump:wrapper-path}" --all-databases --default-character-set={{ character_set_server }} --flush-privileges --single-transaction --max-allowed-packet=128M {% if incremental_backup_retention_days > -1 %}--flush-logs --master-data=2 {% endif %}| {{ parameter_dict['gzip-location'] }}/bin/gzip > "${directory:mariadb-backup-full}/$({{ parameter_dict['coreutils-location'] }}/bin/date "+%Y%m%d%H%M%S").sql.gz"
xcommand = "${binary-wrap-mysqldump:wrapper-path}" --all-databases --flush-privileges --single-transaction --max-allowed-packet=128M {% if incremental_backup_retention_days > -1 %}--flush-logs --master-data=2 {% endif %}| {{ parameter_dict['gzip-location'] }}/bin/gzip > "${directory:mariadb-backup-full}/$({{ parameter_dict['coreutils-location'] }}/bin/date "+%Y%m%d%H%M%S").sql.gz"
{# KEEP GLOB PATTERN IN SYNC with generated filenames above
# YYYYmmddHHMMSS -#}
file-glob = ??????????????.sql.gz
...