Commit aa4541d4 authored by Thomas Gambier 🚴🏼

Update Release Candidate

parents 326822a0 9ae8e791
......@@ -78,11 +78,11 @@ md5sum = f525f0c3f1c4ca184a604a75dabf4f71
# XXX: This is not the latest version because
# Debian does not provide a stable URL for it.
<= debian-amd64-netinst-base
version = 10.0.0
md5sum = f31779fcca35f5ce9833a9661a9bd5bd
version = 10.1.0
md5sum = b931ef8736c98704bcf519160b50fd83
[debian-amd64-testing-netinst.iso]
<= debian-amd64-netinst-base
release = daily/20190923-1
release = daily-builds/daily/20191119-3
version = testing
md5sum = 00eda4218c401c46c15f491add41cd4e
md5sum = db9e0131f6cfd3eb0aa2b76fddb2f942
......@@ -46,7 +46,7 @@ md5sum = 80b3746568bc8e308a1f337bdaa2441c
[template-pullrdiffbackup]
filename = instance-pullrdiffbackup.cfg.in
md5sum = 537ab12f3885660e59d280ee40e379ac
md5sum = c1f00c251298c2ab7fd095e7c4571d3b
[template]
filename = instance.cfg.in
......
......@@ -31,7 +31,6 @@ statistic = $${:srv}/$${variables:statistic_dirbasename}
backupscript = $${:etc}/backup
www = $${:srv}/www
home = $${:etc}/home
promises = $${:etc}/promise
ssl = $${:etc}/ssl
ssh = $${:home}/.ssh
plugin = $${:etc}/plugin
......@@ -194,10 +193,11 @@ virtual-depends =
$${nginx-configuration:ip}
[nginx-listen-promise]
recipe = slapos.cookbook:check_port_listening
hostname = $${nginx-configuration:ip}
port = $${nginx-configuration:port}
path = $${directory:promises}/nginx_listen
<= monitor-promise-base
module = check_port_listening
name = nginx_listen.py
config-hostname = $${nginx-configuration:ip}
config-port = $${nginx-configuration:port}
[nginx-configuration]
recipe = slapos.recipe.template
......
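
Most hunks in this merge follow the same pattern as [nginx-listen-promise] above: the slapos.cookbook:check_port_listening recipe, which wrote an executable check into etc/promise, is replaced by extending monitor-promise-base, which renders a Python promise plugin (a *.py name) driven by config-* options. As a rough illustration of what such a port-listening check verifies, here is a minimal standalone sketch; the real check lives in slapos.toolbox (slapos.promise.plugin.check_port_listening), and the host/port values below are hypothetical stand-ins for ${nginx-configuration:ip} and ${nginx-configuration:port}.

```python
# Standalone sketch of a "port listening" check: succeed iff a TCP
# connection to (hostname, port) can be established. Illustration only,
# not the slapos.toolbox implementation.
import socket

def port_is_listening(hostname, port, timeout=5):
    try:
        with socket.create_connection((hostname, int(port)), timeout=timeout):
            return True
    except OSError:
        return False

if __name__ == '__main__':
    print(port_is_listening('::1', 9443))  # hypothetical ip/port values
```
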
......@@ -22,15 +22,15 @@ md5sum = c801b7f9f11f0965677c22e6bbe9281b
[template-apache-frontend]
filename = instance-apache-frontend.cfg.in
md5sum = 7f2f7b40e3639c50cf7dd929f4425f20
md5sum = c6d78b2856d9d0ec63728e668e3395d8
[template-caddy-replicate]
filename = instance-apache-replicate.cfg.in
md5sum = bd73ad5ef84fe6bf0f479ee45da3139d
md5sum = a34ab1970f91731c32ed7f2471632c86
[template-slave-list]
filename = templates/apache-custom-slave-list.cfg.in
md5sum = 5913a176c025cdadee6782f8ad285dc4
md5sum = 46e0ccb54c2406b8a44cdada73c061a6
[template-slave-configuration]
filename = templates/custom-virtualhost.conf.in
......@@ -118,4 +118,4 @@ md5sum = 38792c2dceae38ab411592ec36fff6a8
[template-kedifa]
filename = instance-kedifa.cfg.in
md5sum = bb1eb172e65d8ffe5b161eae191947e1
md5sum = d5efd74d80a1df3d5386758c9f13e190
......@@ -59,7 +59,6 @@ log = ${:var}/log
run = ${:var}/run
service = ${:etc}/service
etc-run = ${:etc}/run
plugin = ${:etc}/plugin
ca-dir = ${:srv}/ssl
# BBB: SlapOS Master non-zero knowledge BEGIN
......@@ -139,6 +138,7 @@ context =
import json_module json
raw common_profile {{ parameter_dict['common_profile'] }}
raw logrotate_base_instance {{ parameter_dict['logrotate_base_instance'] }}
raw monitor_template {{ parameter_dict['monitor_template'] }}
key slap_software_type :slap_software_type
key slapparameter_dict :slapparameter_dict
section directory directory
......@@ -257,7 +257,6 @@ extra-context =
section frontend_configuration frontend-configuration
section caddy_configuration caddy-configuration
key monitor_base_url monitor-instance-parameter:monitor-base-url
key plugin_directory directory:plugin
key report_directory directory:bin
key bin_directory :bin_directory
key enable_http2_by_default configuration:enable-http2-by-default
......@@ -510,17 +509,8 @@ filename = ip_allow.config
context =
key content trafficserver-variable:ip-allow-config
[promise-plugin-base]
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.toolbox
content =
from slapos.promise.plugin.${:module} import RunPromise
output = ${directory:plugin}/${:name}
[trafficserver-promise-listen-port]
<= promise-plugin-base
<= monitor-promise-base
module = check_port_listening
name = trafficserver-port-listening.py
config-hostname = ${trafficserver-variable:local-ip}
......@@ -533,7 +523,7 @@ wrapper-path = ${directory:bin}/traffic_ctl
environment = TS_ROOT=${buildout:directory}
[trafficserver-promise-cache-availability]
<= promise-plugin-base
<= monitor-promise-base
module = trafficserver_cache_availability
name = trafficserver-cache-availability.py
config-wrapper-path = ${trafficserver-ctl:wrapper-path}
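
Both the removed [promise-plugin-base] and the new [monitor-promise-base] ultimately materialize a small Python file in the plugin directory; the removed section above shows the import it generated (`from slapos.promise.plugin.${:module} import RunPromise`). The sketch below shows roughly what such a generated file (e.g. trafficserver-port-listening.py) could look like; the `extra_config_dict` name and the exact layout are assumptions about slapos.cookbook:promise.plugin, and the values are hypothetical.

```python
# Approximate shape of a generated promise plugin file (assumption: the
# recipe passes the config-* options through a module-level dictionary).
from slapos.promise.plugin.check_port_listening import RunPromise

extra_config_dict = {
    'hostname': '10.0.0.2',  # from config-hostname (hypothetical value)
    'port': '8083',          # from config-port (hypothetical value)
}
```
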
......@@ -633,48 +623,48 @@ context =
key content :content
[promise-frontend-caddy-configuration]
<= promise-plugin-base
<= monitor-promise-base
module = validate_frontend_configuration
name = frontend-caddy-configuration-promise.py
config-verification-script = ${promise-helper-last-configuration-state:rendered}
[promise-caddy-frontend-v4-https]
<= promise-plugin-base
<= monitor-promise-base
module = check_port_listening
name = caddy_frontend_ipv4_https.py
config-hostname = {{ instance_parameter['ipv4-random'] }}
config-port = ${configuration:port}
[promise-caddy-frontend-v4-http]
<= promise-plugin-base
<= monitor-promise-base
module = check_port_listening
name = caddy_frontend_ipv4_http.py
config-hostname = {{ instance_parameter['ipv4-random'] }}
config-port = ${configuration:plain_http_port}
[promise-caddy-frontend-v6-https]
<= promise-plugin-base
<= monitor-promise-base
module = check_port_listening
name = caddy_frontend_ipv6_https.py
config-hostname = {{ instance_parameter['ipv6-random'] }}
config-port = ${configuration:port}
[promise-caddy-frontend-v6-http]
<= promise-plugin-base
<= monitor-promise-base
module = check_port_listening
name = caddy_frontend_ipv6_http.py
config-hostname = {{ instance_parameter['ipv6-random'] }}
config-port = ${configuration:plain_http_port}
[promise-caddy-frontend-cached]
<= promise-plugin-base
<= monitor-promise-base
module = check_port_listening
name = caddy_cached.py
config-hostname = {{ instance_parameter['ipv4-random'] }}
config-port = ${caddy-configuration:cache-through-port}
[promise-caddy-frontend-ssl-cached]
<= promise-plugin-base
<= monitor-promise-base
module = check_port_listening
name = caddy_ssl_cached.py
config-hostname = {{ instance_parameter['ipv4-random'] }}
......@@ -735,7 +725,7 @@ context =
raw content show:cache-stats
[monitor-verify-re6st-connectivity]
<= promise-plugin-base
<= monitor-promise-base
module = check_url_available
name = re6st-connectivity.py
config-url = ${configuration:re6st-verification-url}
......
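
[monitor-verify-re6st-connectivity] switches to the check_url_available module, configured only through config-url (and, in the GitLab profile further down, config-http_code). Conceptually the promise fetches the URL and compares the HTTP status against the expected one; the sketch below only illustrates that idea and is not the slapos.toolbox implementation, which also handles timeouts, TLS options and logging.

```python
# Sketch of a URL-availability check: fetch config-url and compare the
# status code with the expected one (200 unless overridden).
import urllib.error
import urllib.request

def url_is_available(url, expected_code=200, timeout=10):
    try:
        with urllib.request.urlopen(url, timeout=timeout) as response:
            return response.status == expected_code
    except urllib.error.HTTPError as error:
        # A non-2xx reply still passes if it matches the expected code.
        return error.code == expected_code
    except (urllib.error.URLError, OSError):
        return False

print(url_is_available('https://example.com/', 200))  # hypothetical URL
```
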
......@@ -512,7 +512,6 @@ context =
key content :content
[directory]
plugin = ${:etc}/plugin
service = ${:etc}/service
promise-output = ${:srv}/promise-output
......@@ -573,23 +572,15 @@ template = inline:
rendered = ${directory:etc}/Caddyfile-rejected-slave
[promise-plugin-base]
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.toolbox
content =
from slapos.promise.plugin.${:module} import RunPromise
output = ${directory:plugin}/${:name}
[promise-rejected-slave-publish-ip-port]
<= promise-plugin-base
<= monitor-promise-base
module = check_port_listening
name = rejected-slave-publish-ip-port-listening.py
config-hostname = ${rejected-slave-publish-configuration:ip}
config-port = ${rejected-slave-publish-configuration:port}
[rejected-slave-promise]
<= promise-plugin-base
<= monitor-promise-base
module = check_port_listening
module = check_file_state
name = rejected-slave.py
......
......@@ -36,7 +36,6 @@ hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
service_auto_approve_count=0,
user_auto_approve_count=1,
key_len=2048,
promise='${directory:promise}/caucased',
)}}
# Create all needed directories
......@@ -54,8 +53,6 @@ log = ${:var}/log
run = ${:var}/run
service = ${:etc}/service
etc-run = ${:etc}/run
promise = ${:etc}/promise
plugin = ${:etc}/plugin
# KeDiFa directories
kedifa = ${:srv}/kedifa
......@@ -157,7 +154,7 @@ template = inline:
rendered = ${directory:caddy-csr_id}/Caddyfile
[promise-expose-csr_id-ip-port]
<= promise-plugin-base
<= monitor-promise-base
module = check_port_listening
name = expose-csr_id-ip-port-listening.py
config-hostname = ${expose-csr_id-configuration:ip}
......@@ -196,14 +193,6 @@ context =
section directory directory
${:extra-context}
[promise-plugin-base]
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.toolbox
content =
from slapos.promise.plugin.${:module} import RunPromise
output = ${directory:plugin}/${:name}
[kedifa-config]
ip = {{ instance_parameter['ipv6-random'] }}
port = {{ instance_parameter['configuration.kedifa_port'] }}
......@@ -222,7 +211,7 @@ command-line = kill -SIGHUP `cat ${kedifa-config:pidfile}`
wrapper-path = ${directory:etc-run}/kedifa-reloader
[promise-kedifa-http-reply]
<= promise-plugin-base
<= monitor-promise-base
module = check_url_available
name = kedifa-http-reply.py
# Kedifa replies 400 on /, so use it to be sure that Kedifa replied
......
......@@ -42,14 +42,6 @@ create = true
{% set slave_kedifa_information = {} %}
{% endif %}
[promise-plugin-base]
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.toolbox
content =
from slapos.promise.plugin.${:module} import RunPromise
output = {{ plugin_directory }}/${:name}
# empty section if no cached slaves are available
[slave-log-cache-direct-directory-dict]
......@@ -288,7 +280,7 @@ filename = {{ '%s.conf' % slave_reference }}
{% set monitor_ipv6_section_title = 'check-%s-ipv6-packet-list-test' % slave_instance.get('slave_reference') %}
{% do part_list.append(monitor_ipv6_section_title) %}
[{{ monitor_ipv6_section_title }}]
<= promise-plugin-base
<= monitor-promise-base
module = check_icmp_packet_lost
name = {{ monitor_ipv6_section_title }}.py
config-address = {{ dumps(monitor_ipv6_test) }}
......@@ -301,7 +293,7 @@ config-frequency = 720
{% set monitor_ipv4_section_title = 'check-%s-ipv4-packet-list-test' % slave_instance.get('slave_reference') %}
{% do part_list.append(monitor_ipv4_section_title) %}
[{{ monitor_ipv4_section_title }}]
<= promise-plugin-base
<= monitor-promise-base
module = check_icmp_packet_lost
name = {{ monitor_ipv4_section_title }}.py
config-address = {{ dumps(monitor_ipv4_test) }}
......@@ -321,7 +313,7 @@ config-frequency = 720
{% do part_list.append(re6st_optimal_test_section_title) %}
[{{ re6st_optimal_test_section_title }}]
<= promise-plugin-base
<= monitor-promise-base
module = check_re6st_optimal_status
name = {{ re6st_optimal_test_section_title }}.py
config-ipv4 = {{ dumps(re6st_ipv4) }}
......@@ -493,6 +485,7 @@ rendered = {{frontend_configuration.get('log-access-configuration')}}
extends =
{{ common_profile }}
{{ logrotate_base_instance }}
{{ monitor_template }}
parts +=
kedifa-updater
......@@ -561,7 +554,7 @@ template = inline:
rendered = {{ directory_caddy_csr_id }}/Caddyfile
[promise-expose-csr_id-ip-port]
<= promise-plugin-base
<= monitor-promise-base
module = check_port_listening
name = expose-csr_id-ip-port-listening.py
config-hostname = ${expose-csr_id-configuration:ip}
......
......@@ -360,6 +360,8 @@ class TestDataMixin(object):
'validate_configuration_state_signature',
# run by cron from time to time
'monitor/monitor-collect.pid',
# no control over whether it would be running or not

'monitor/monitor-bootstrap.pid',
])
def test_file_list_etc_cron_d(self):
......
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
\ No newline at end of file
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
\ No newline at end of file
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
\ No newline at end of file
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
\ No newline at end of file
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
\ No newline at end of file
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
\ No newline at end of file
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
\ No newline at end of file
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
\ No newline at end of file
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
\ No newline at end of file
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
\ No newline at end of file
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
\ No newline at end of file
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
\ No newline at end of file
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
\ No newline at end of file
T-0/var/run/monitor-httpd.pid
T-0/var/run/monitor/monitor-bootstrap.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor/monitor-bootstrap.pid
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/monitor/monitor-bootstrap.pid
......@@ -15,7 +15,7 @@
# not need these here).
[instance-caucased]
filename = instance-caucased.cfg.jinja2
md5sum = 442e17a66b6f8d39f87903e147e3edaf
md5sum = 2277c891a71534e00487468f5048d196
[instance]
filename = instance.cfg.jinja2
......
......@@ -4,7 +4,6 @@
[directory]
recipe = slapos.cookbook:mkdirectory
etc = ${buildout:directory}/etc
promise = ${:etc}/promise
service-on-watch = ${:etc}/service
srv = ${buildout:directory}/srv
......@@ -17,7 +16,6 @@ srv = ${buildout:directory}/srv
service_auto_approve_count=slapparameter_dict.get('service-auto-approve-amount', 0),
user_auto_approve_count=slapparameter_dict.get('user-auto-approve-amount', 1),
key_len=slapparameter_dict.get('key-length', 2048),
promise='${directory:promise}/caucased',
) }}
[publish]
......
......@@ -18,4 +18,4 @@ md5sum = 6e4431cf4b0a0d034402604b1e2844c0
[template-cloudooo-instance]
filename = instance-cloudooo.cfg.in
md5sum = 31ed5c856ce2dff0305d7029caefc3f6
md5sum = 79f2740f7e28ad9fcd2f823d076e790e
......@@ -68,7 +68,9 @@ mode = {{ mode }}
[buildout]
extends =
{{ parameter_dict['template-logrotate-base'] }}
{{ parameter_dict['template-monitor'] }}
parts =
monitor-base
publish
apache
apache-conf
......@@ -114,10 +116,11 @@ crl = ${apache-ssl-client:crl}
[apache-promise]
# Check any apache port in ipv4, expect other ports and ipv6 to behave consistently
recipe = slapos.cookbook:check_port_listening
path = ${directory:promise}/apache
hostname = {{ ipv4 }}
port = {{ apache_dict.values()[0][0] }}
<= monitor-promise-base
module = check_port_listening
name = apache.py
config-hostname = {{ ipv4 }}
config-port = {{ apache_dict.values()[0][0] }}
[apache-conf-ssl]
cert = ${directory:apache-conf}/apache.crt
......@@ -262,7 +265,6 @@ font = ${:srv}/font
log = ${:var}/log
newcerts = ${:ca-dir}/newcerts
private = ${:ca-dir}/private
promise = ${directory:etc}/promise
requests = ${:ca-dir}/requests
run = ${:var}/run
services = ${:etc}/run
......
......@@ -3,6 +3,7 @@ extends =
buildout.hash.cfg
../../stack/cloudooo.cfg
../../stack/logrotate/buildout.cfg
../../stack/monitor/buildout.cfg
parts =
${cloudooo-buildout:parts}
......@@ -78,6 +79,7 @@ xserver = ${xserver:location}
zlib = ${zlib:location}
template-apache-conf = ${template-apache-backend-conf:target}
template-logrotate-base = ${template-logrotate-base:rendered}
template-monitor = ${monitor2-template:rendered}
[template-cloudooo-instance]
recipe = slapos.recipe.build:download
......
......@@ -18,4 +18,4 @@ md5sum = 307663d73ef3ef94b02567ecd322252e
[template-default]
filename = instance-default.cfg
md5sum = 4abfdf82f90a39a36de338f36b5eeb3e
md5sum = a76803eedf0c66fe7649b62efc86c8fe
......@@ -183,7 +183,6 @@ recipe = slapos.cookbook:mkdirectory
log = $${rootdirectory:var}/log
services = $${rootdirectory:etc}/service
run = $${rootdirectory:var}/run
promises = $${rootdirectory:etc}/promise
[directory]
recipe = slapos.cookbook:mkdirectory
......@@ -217,17 +216,14 @@ return = domain secure_access
[promises]
recipe =
instance-promises =
$${shellinabox-frontend-listen-promise:path}
[check-port-listening-promise]
recipe = slapos.cookbook:check_port_listening
path = $${directory:promises}/$${:_buildout_section_name_}
$${shellinabox-frontend-listen-promise:name}
[shellinabox-frontend-listen-promise]
<= check-port-listening-promise
hostname= $${shellinabox-frontend:hostname}
port = $${shellinabox-frontend:port}
<= monitor-promise-base
module = check_port_listening
name = $${:_buildout_section_name_}.py
config-hostname = $${shellinabox-frontend:hostname}
config-port = $${shellinabox-frontend:port}
[slap-parameter]
node-quantity = 1
......
......@@ -14,7 +14,7 @@
# not need these here).
[instance.cfg]
filename = instance.cfg.in
md5sum = ddadc1eda2b474d6f2349409c05476eb
md5sum = 36252abb4d857da08d62bf3eb26faae1
[watcher]
_update_hash_filename_ = watcher.in
......@@ -50,7 +50,7 @@ md5sum = 0ddf4093dcf4427e5a160707e6017950
[instance-gitlab.cfg.in]
_update_hash_filename_ = instance-gitlab.cfg.in
md5sum = bd8bccad43e7cf294af2fc9b60fb3b46
md5sum = d794631233626d03b04894ca6b6d8496
[instance-gitlab-export.cfg.in]
_update_hash_filename_ = instance-gitlab-export.cfg.in
......
......@@ -4,7 +4,9 @@
# security and performance reasons (unix has 2x less latency and more
# throughput compared to tcp over loopback).
[buildout]
extends = {{ gitlab_parameters_cfg }}
extends =
{{ gitlab_parameters_cfg }}
{{ monitor_template }}
parts =
directory
publish-instance-info
......@@ -105,7 +107,6 @@ srv = ${:home}/srv
# slapos startup/service/promise scripts live here:
startup = ${:etc}/run
service = ${:etc}/service
promise = ${:etc}/promise
promise.slow = ${:promise}.slow
# gitlab: etc/ log/ ...
......@@ -401,18 +402,12 @@ tune-command =
# 5. services
# [promise-<something>] to generate promise wrapper <something>
[promise-wrapper]
recipe = slapos.cookbook:wrapper
wrapper-path = !py! '${directory:promise}/' + '${:_buildout_section_name_}'[8:]
# [promise-<something>] to check <something> by url
[promise-byurl]
recipe = slapos.cookbook:check_url_available
path = !py! '${directory:promise}/' + '${:_buildout_section_name_}'[8:]
dash_path = {{ bash_bin }}
curl_path = {{ curl_bin }}
http_code = 200
<= monitor-promise-base
module = check_url_available
name = !py! '${:_buildout_section_name_}'[8:] + '.py'
config-http_code = 200
......@@ -452,8 +447,10 @@ depend =
${promise-postgresql:recipe}
[promise-postgresql]
<= promise-wrapper
command-line =
<= monitor-promise-base
module = check_command_execute
name = promise-postgresql.py
config-command =
{{ postgresql_location }}/bin/psql
-h ${service-postgresql:pgdata-directory}
-U ${service-postgresql:superuser}
......@@ -539,7 +536,7 @@ depend =
<= promise-byurl
# http://localhost/users/statics.css will not redirect to /users/sign_in anymore because of this commit:
# https://lab.nexedi.com/nexedi/gitlab-workhorse/commit/c81f109a62fecf2a847fb17ceed012b380dab49f#c1215002e6d745f05eaaf9ee1dad7752e85d866f_318_331
url = --unix-socket ${gitlab-workhorse:socket} http://localhost/users/sign_in
config-url = --unix-socket ${gitlab-workhorse:socket} http://localhost/users/sign_in
# gitlab-workhorse logs to stdout/stderr - logs are handled by slapos not us
......@@ -581,7 +578,7 @@ depend =
[promise-unicorn]
<= promise-byurl
url = --unix-socket ${unicorn:socket} http://localhost/
config-url = --unix-socket ${unicorn:socket} http://localhost/
[promise-rakebase]
recipe = slapos.cookbook:wrapper
......@@ -728,7 +725,7 @@ depend =
<= promise-byurl
# XXX this depends on gitlab-workhorse being up
# (nginx is configured to proxy all requests to gitlab-workhorse)
url = ${backend-info:url}/users/sign_in
config-url = ${backend-info:url}/users/sign_in
[logrotate-entry-nginx]
<= logrotate-entry
......
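
In the GitLab profile the promise-byurl base keeps curl-style arguments in config-url (e.g. `--unix-socket ${gitlab-workhorse:socket} http://localhost/users/sign_in`), mirroring the removed recipe that was given curl_path and dash_path. Assuming the check ends up invoking curl with that argument string, a rough equivalent is sketched below; the curl and socket paths are hypothetical placeholders.

```python
# Rough equivalent of probing gitlab-workhorse through its unix socket with
# curl and comparing the HTTP status against config-http_code (200 here).
import subprocess

def unix_socket_url_ok(curl_bin, socket_path, url, expected_code='200'):
    result = subprocess.run(
        [curl_bin, '--unix-socket', socket_path,
         '-s', '-o', '/dev/null', '-w', '%{http_code}', url],
        capture_output=True, text=True)
    return result.returncode == 0 and result.stdout.strip() == expected_code

print(unix_socket_url_ok(
    '/usr/bin/curl',                 # hypothetical curl location
    '/tmp/gitlab-workhorse.socket',  # hypothetical socket path
    'http://localhost/users/sign_in'))
```
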
......@@ -64,6 +64,7 @@ context =
raw database_yml_in ${database.yml.in:target}
raw gitconfig_in ${gitconfig.in:target}
raw gitlab_parameters_cfg ${gitlab-parameters.cfg:target}
raw monitor_template ${monitor2-template:rendered}
raw gitlab_shell_config_yml_in ${gitlab-shell-config.yml.in:target}
raw gitlab_unicorn_startup_in ${gitlab-unicorn-startup.in:target}
raw gitlab_yml_in ${gitlab.yml.in:target}
......
......@@ -3,6 +3,7 @@
extends =
buildout.hash.cfg
../../stack/slapos.cfg
../../stack/monitor/buildout.cfg
../../component/ruby/buildout.cfg
../../component/golang/buildout.cfg
../../component/postgresql/buildout.cfg
......
......@@ -15,7 +15,7 @@
[instance-profile]
filename = instance.cfg.in
md5sum = 7fb6806b139b3a8d0054308397be1dd9
md5sum = 461d515da03de5e422e6f75189d09184
[influxdb-config-file]
filename = influxdb-config-file.cfg.in
......
......@@ -3,6 +3,8 @@ parts =
promises
publish-connection-parameter
extends = {{ monitor_template }}
eggs-directory = {{ buildout['eggs-directory'] }}
develop-eggs-directory = {{ buildout['develop-eggs-directory'] }}
offline = true
......@@ -33,7 +35,6 @@ etc = ${:home}/etc
var = ${:home}/var
srv = ${:home}/srv
service = ${:etc}/service
promise = ${:etc}/promise
influxdb-data-dir = ${:srv}/influxdb
grafana-dir = ${:srv}/grafana
grafana-data-dir = ${:grafana-dir}/data
......@@ -70,8 +71,9 @@ mode = 0644
extensions = jinja2.ext.do
[check-port-listening-promise]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promise}/${:_buildout_section_name_}
<= monitor-promise-base
module = check_port_listening
name = ${:_buildout_section_name_}.py
......@@ -110,14 +112,15 @@ username = influxdb
[influxdb-listen-promise]
<= check-port-listening-promise
hostname = ${influxdb:ipv6}
port = ${influxdb:http-port}
config-hostname = ${influxdb:ipv6}
config-port = ${influxdb:http-port}
[influxdb-password-promise]
recipe = slapos.cookbook:wrapper
command-line =
<= monitor-promise-base
module = check_command_execute
name = ${:_buildout_section_name_}.py
config-command =
{{ influx_bin }} -username ${influxdb:auth-username} -password ${influxdb:auth-password} -socket ${influxdb:unix-socket} -execute "CREATE USER ${influxdb:auth-username} WITH PASSWORD '${influxdb:auth-password}' WITH ALL PRIVILEGES"
wrapper-path = ${directory:promise}/${:_buildout_section_name_}
......@@ -160,8 +163,8 @@ context =
[grafana-listen-promise]
<= check-port-listening-promise
hostname= ${grafana:ipv6}
port = ${grafana:port}
config-hostname= ${grafana:ipv6}
config-port = ${grafana:port}
......
......@@ -2,6 +2,7 @@
extends =
../../stack/slapos.cfg
../../stack/nodejs.cfg
../../stack/monitor/buildout.cfg
../../component/make/buildout.cfg
../../component/golang/buildout.cfg
../../component/openssl/buildout.cfg
......@@ -105,6 +106,7 @@ context =
key influx_bin gowork:influx-bin
key grafana_bin gowork:grafana-bin
key grafana_homepath gowork:grafana-homepath
key monitor_template monitor2-template:rendered
[versions]
......
......@@ -14,4 +14,4 @@
# not need these here).
[instance-profile]
filename = instance.cfg.in
md5sum = 6567f8dedb5cdd93542dc29e96edb547
md5sum = cc8902e44c1d50804b570775633b8c2a
......@@ -15,6 +15,7 @@ eggs-directory = {{ buildout['eggs-directory'] }}
develop-eggs-directory = {{ buildout['develop-eggs-directory'] }}
offline = true
extends = {{ template_monitor }}
[instance-parameter]
# Fetch arbitrary parameters defined by the user in SlapOS Master for his instance.
......@@ -51,9 +52,6 @@ var = ${:home}/var
script = ${:etc}/run/
# Executables put here will be started and monitored (for daemons)
service = ${:etc}/service
# Executables put here will be launched after buildout has completed to see
# if instance is running
promise = ${:etc}/promise/
# Path of the log directory used by our service (see [helloweb])
log = ${:var}/log
......@@ -88,8 +86,9 @@ wrapper-path = ${directory:service}/helloweb-${:kind}
# promise, that checks that helloweb service is alive
[helloweb-promise]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promise}/helloweb-${:kind}
<= monitor-promise-base
module = check_port_listening
name = helloweb-${:kind}.py
{# macro to instantiate service of `kind` to listen on `port` #}
{% set service_list = [] %}
......@@ -103,8 +102,8 @@ port = {{ port }}
[helloweb-{{ kind }}-promise]
<= helloweb-promise
kind = {{ kind }}
hostname= ${helloweb-{{ kind }}:ipv6}
port = {{ port }}
config-hostname= ${helloweb-{{ kind }}:ipv6}
config-port = {{ port }}
{% endmacro %}
# services instantiation
......
......@@ -7,6 +7,7 @@ extends =
# "slapos" stack describes basic things needed for 99.9% of SlapOS Software
# Releases
../../stack/slapos.cfg
../../stack/monitor/buildout.cfg
# Extend here component profiles, like openssl, apache, mariadb, curl...
# Or/and extend a stack (lamp, tomcat) that does most of the work for you
# In this example we extend from helloweb component.
......@@ -46,6 +47,7 @@ rendered = ${buildout:directory}/instance.cfg
extensions = jinja2.ext.do
context =
section buildout buildout
raw template_monitor ${monitor2-template:rendered}
# md5sum is fetched from buildout.hash.cfg and can be recalculated automatically by
# calling update-hash
......
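
Many of the remaining hunks only bump md5sum values in buildout.hash.cfg after a template changed; as the comment above notes, they can be recalculated automatically by calling update-hash. The snippet below shows the underlying computation for a single file; the filename is a hypothetical example and the exact update-hash invocation is not shown here.

```python
# Compute the md5 checksum that buildout.hash.cfg records for a template file.
import hashlib

def file_md5(path, chunk_size=65536):
    digest = hashlib.md5()
    with open(path, 'rb') as handle:
        for chunk in iter(lambda: handle.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()

print(file_md5('instance.cfg.in'))  # hypothetical template path
```
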
......@@ -22,7 +22,7 @@ md5sum = 09803fb71404edbccb32c44a0040dae4
[template-validator]
filename = instance-validator.cfg.in
md5sum = 0275d7a8a021f84a1303e5c8933c07c3
md5sum = f2609d1f526ba6349b1374e3631a9f7c
[template]
filename = instance.cfg.in
......
......@@ -11,7 +11,6 @@ run = $${:var}/run
log = $${:var}/log
# scripts = $${:etc}/run
services = $${:etc}/service
promises = $${:etc}/promise
# tomcat directories
catalina_base = $${:var}/vnu
catalina_logs = $${:catalina_base}/logs
......@@ -55,10 +54,11 @@ port = 8899
scheme = https
[tomcat-listen-promise]
recipe = slapos.cookbook:check_port_listening
hostname = $${tomcat-configuration:ip}
port = $${tomcat-configuration:port}
path = $${basedirectory:promises}/tomcat_listen
<= monitor-promise-base
module = check_port_listening
name = tomcat_listen.py
config-hostname = $${tomcat-configuration:ip}
config-port = $${tomcat-configuration:port}
#################################
# Slapos publish
......
......@@ -15,7 +15,7 @@
[instance]
filename = instance.cfg.in
md5sum = 2a79bb6c4f593d7c4c7f4e0de97d9803
md5sum = 1d6bb3263f642a115982ddf245cb1cf0
[template-nginx-service]
filename = template-nginx-service.sh.in
......
......@@ -22,7 +22,6 @@ log = $${:var}/log
varnginx = $${:var}/nginx
scripts = $${:etc}/run
services = $${:etc}/service
promise = $${:etc}/promise/
www = $${:srv}/www
home = $${:etc}/home
ssl = $${:etc}/ssl
......
......@@ -15,7 +15,7 @@
[instance-jupyter-notebook]
filename = instance.cfg.in
md5sum = 78625cff193d7fdadd57670d30bc9a0d
md5sum = ba4e4c582b81deeb7339a13c82bbf34b
[jupyter-notebook-config]
filename = jupyter_notebook_config.py.jinja
......
......@@ -90,7 +90,6 @@ etc = ${:home}/etc
var = ${:home}/var
script = ${:etc}/run/
service = ${:etc}/service
promise = ${:etc}/promise/
log = ${:var}/log
notebook_dir = ${:var}/notebooks
# Add folders to explicitly define jupyter directory
......
......@@ -19,19 +19,19 @@ md5sum = 2cbfd6b08c65369c1d45cf3ba2ff335a
[template-kvm]
filename = instance-kvm.cfg.jinja2
md5sum = bcd3705e510da52e7ddf1e1527839642
md5sum = 36e7a8656e52d3aace8d9e52dcb3864e
[template-kvm-cluster]
filename = instance-kvm-cluster.cfg.jinja2.in
md5sum = 352f18067714501c8ccee55f8930d245
md5sum = 2e743132ba4e001f784791311df9ba6a
[template-kvm-resilient]
filename = instance-kvm-resilient.cfg.jinja2
md5sum = 7d4b14660b7a30084aa7e023b0db50be
md5sum = e50e45c3097ed5a7115817fbc967f173
[template-kvm-import]
filename = instance-kvm-import.cfg.jinja2.in
md5sum = dc3f3ad9ebd8b3b5c3ded57b91cee9c7
md5sum = 97a8ff8a5891678274b14481dfc5214c
[template-kvm-import-script]
filename = template/kvm-import.sh.jinja2
......@@ -47,7 +47,7 @@ md5sum = b617d64de73de1eed518185f310bbc82
[template-nbd]
filename = instance-nbd.cfg.in
md5sum = f634a5249b773658b7a7bc9fa9bb0368
md5sum = a05b581d65768ac55faf3b06d4aec447
[template-ansible-promise]
filename = template/ansible-promise.in
......@@ -79,4 +79,4 @@ md5sum = 599dbbbd438fe7801e3f8642ae9e9a78
[template-httpd]
filename = instance-kvm-http.cfg.in
md5sum = 26a181a48046ce88570adb32334747ef
md5sum = d657884d02105deffddee0edae4b50a6
......@@ -229,7 +229,6 @@ log = ${:var}/log
scripts = ${:etc}/run
services = ${:etc}/service
webroot = ${:srv}/document
promises = ${:etc}/promise
ssl = ${:etc}/ssl
[directory-doc]
......
......@@ -14,7 +14,6 @@ public = ${:srv}/public/
log = ${:var}/log
services = ${:etc}/service
scripts = ${:etc}/run
promises = ${:etc}/promise
run = ${:var}/run
document = ${:srv}/document
ssl = ${:etc}/ssl
......@@ -65,7 +64,8 @@ update-command =
stop-on-error = true
[httpd-promise]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promises}/apache-httpd
hostname = ${apache-conf:ip}
port = ${apache-conf:port}
\ No newline at end of file
<= monitor-promise-base
module = check_port_listening
name = apache-httpd.py
config-hostname = ${apache-conf:ip}
config-port = ${apache-conf:port}
......@@ -41,7 +41,6 @@ var = ${buildout:directory}/var
log = ${:var}/log
scripts = ${:etc}/run
services = ${:etc}/service
promises = ${:etc}/promise
novnc-conf = ${:etc}/novnc
run = ${:var}/run
ca-dir = ${:srv}/ssl
......@@ -63,8 +62,7 @@ context =
backup-disk-path = ${directory:backup}/virtual.qcow2
[kvm-disk-image-corruption-promise]
# Check that disk image is not corrupted
[kvm-disk-image-corruption-bin]
recipe = collective.recipe.template
input = inline:#!/bin/sh
QEMU_IMAGE="${directory:srv}/virtual.qcow2"
......@@ -78,5 +76,12 @@ input = inline:#!/bin/sh
else
exit 1
fi
output = ${directory:promises}/kvm-disk-image-corruption
mode = 700
\ No newline at end of file
output = ${directory:bin}/kvm-disk-image-corruption
mode = 700
[kvm-disk-image-corruption-promise]
# Check that disk image is not corrupted
<= monitor-promise-base
module = check_command_execute
name = kvm-disk-image-corruption.py
config-command = ${kvm-disk-image-corruption-bin:output}
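
Here the corruption check itself stays a shell script; it only moves from the promises directory to bin and is now invoked through check_command_execute, whose contract is essentially "run config-command and succeed iff it exits 0". A minimal sketch of that idea follows (not the slapos.toolbox plugin, which also handles logging and scheduling); the script path is hypothetical.

```python
# Sketch of the check_command_execute pattern: run the wrapped script and
# report success only on a zero exit status.
import subprocess

def command_promise(command_path, timeout=300):
    try:
        completed = subprocess.run([command_path], timeout=timeout)
    except (OSError, subprocess.TimeoutExpired):
        return False
    return completed.returncode == 0

# hypothetical rendered script location
print(command_promise('/srv/slapgrid/partition/bin/kvm-disk-image-corruption'))
```
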
......@@ -57,7 +57,6 @@ cors-domains = {{ monitor_parameter.get('monitor-cors-domains', '') }}
[directory]
recipe = slapos.cookbook:mkdirectory
etc = ${buildout:directory}/etc
promises = ${:etc}/promise
# Bubble down the parameters of the requested instance to the user
[request-kvm]
......@@ -83,21 +82,26 @@ monitor-base-url = ${monitor-publish-parameters:monitor-base-url}
monitor-setup-url = {{ monitor_interface_url }}/#page=settings_configurator&url=${monitor-publish-parameters:monitor-url}&username=${monitor-publish-parameters:monitor-user}&password=${publish-early:monitor-password}
{% endif -%}
[kvm-frontend-url-promise]
# Check that url parameter is complete
[kvm-frontend-url-bin]
recipe = collective.recipe.template
input = inline:#!/bin/sh
URL="${request-kvm:connection-url}"
if [[ ! "$URL" == https://* ]]; then
exit 1
fi
output = ${resilient-directory:promise}/kvm-frontend-url
output = ${directory:bin}/kvm-frontend-url
mode = 700
[kvm-frontend-url-promise]
# Check that url parameter is complete
<= monitor-promise-base
module = check_command_execute
name = kvm-frontend-url.py
config-command = ${kvm-frontend-url-bin:output}
[kvm-backend-url-promise]
# Check that backend url is reachable
recipe = slapos.cookbook:check_url_available
path = ${directory:promises}/frontend_promise
url = ${publish-connection-information:url}
dash_path = /bin/sh
curl_path = {{ curl_executable_location }}
<= monitor-promise-base
module = check_url_available
name = frontend_promise.py
config-url = ${publish-connection-information:url}
......@@ -40,7 +40,6 @@ var = ${buildout:directory}/var
log = ${:var}/log
scripts = ${:etc}/run
services = ${:etc}/service
promises = ${:etc}/promise
novnc-conf = ${:etc}/novnc
run = ${:var}/run
prerm = ${:etc}/prerm
......@@ -216,13 +215,13 @@ command-line = ${kvm-controller:rendered}
[kvm-vnc-promise]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promises}/vnc_promise
hostname = ${kvm-parameter-dict:vnc-ip}
port = ${kvm-parameter-dict:vnc-port}
<= monitor-promise-base
module = check_port_listening
name = vnc_promise.py
config-hostname = ${kvm-parameter-dict:vnc-ip}
config-port = ${kvm-parameter-dict:vnc-port}
[kvm-disk-image-corruption-promise]
# Check that disk image is not corrupted
[kvm-disk-image-corruption-bin]
recipe = collective.recipe.template
input = inline:#!/bin/sh
# Return code 0 is "OK"
......@@ -239,9 +238,16 @@ input = inline:#!/bin/sh
else
exit 1
fi
output = ${directory:promises}/kvm-disk-image-corruption
output = ${directory:bin}/kvm-disk-image-corruption
mode = 700
[kvm-disk-image-corruption-promise]
# Check that disk image is not corrupted
<= monitor-promise-base
module = check_command_execute
name = kvm-disk-image-corruption.py
config-command = ${kvm-disk-image-corruption-bin:output}
{% if wipe_disk == 'true' -%}
{% do part_list.append('wipe-disk-wrapper') -%}
{% set wipe_file_list = '${kvm-parameter-dict:disk-path}' -%}
......@@ -255,16 +261,22 @@ command-line =
{{ wipe_disk_wrapper }} -n {{ slapparameter_dict.get('wipe-disk-iterations', 1) }} -suz --check-pid-file ${kvm-parameter-dict:pid-file-path} --file {{ wipe_file_list }}
{% endif -%}
[kvm-started-promise]
[kvm-started-bin]
recipe = slapos.recipe.template:jinja2
template = {{ qemu_start_promise_tpl }}
rendered = ${directory:promises}/qemu-virtual-machine-is-ready
rendered = ${directory:bin}/qemu-virtual-machine-is-ready
mode = 700
context =
raw dash {{ dash_executable_location }}
raw qemu_ready_path ${kvm-controller-parameter-dict:kvm-status-path}
raw qemu_service_log_file ${buildout:directory}/.${slap-connection:partition-id}_kvm.log
[kvm-started-promise]
<= monitor-promise-base
module = check_command_execute
name = qemu-virtual-machine-is-ready.py
config-command = ${kvm-started-bin:rendered}
[novnc-instance]
recipe = slapos.cookbook:novnc
path = ${ca-novnc:executable}
......@@ -325,10 +337,11 @@ executable = ${directory:bin}/novnc
wrapper = ${directory:bin}/websockify
[novnc-promise]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promises}/novnc_promise
hostname = ${novnc-instance:ip}
port = ${novnc-instance:port}
<= monitor-promise-base
module = check_port_listening
name = novnc_promise.py
config-hostname = ${novnc-instance:ip}
config-port = ${novnc-instance:port}
#----------------
......@@ -380,11 +393,10 @@ name = ${slap-parameter:frontend-instance-name}
sla-instance_guid = ${slap-parameter:frontend-instance-guid}
[frontend-promise]
recipe = slapos.cookbook:check_url_available
path = ${directory:promises}/frontend_promise
url = ${publish-connection-information:url}
dash_path = {{ dash_executable_location }}
curl_path = {{ curl_executable_location }}
<= monitor-promise-base
module = check_url_available
name = frontend_promise.py
config-url = ${publish-connection-information:url}
{% if additional_frontend %}
[request-slave-frontend-additional]
......@@ -395,11 +407,10 @@ name = ${slap-parameter:frontend-additional-instance-name}
sla-instance_guid = ${slap-parameter:frontend-additional-instance-guid}
[frontend-additional-promise]
recipe = slapos.cookbook:check_url_available
path = ${directory:promises}/frontend_additional_promise
url = ${publish-connection-information:url-additional}
dash_path = {{ dash_executable_location }}
curl_path = {{ curl_executable_location }}
<= monitor-promise-base
module = check_url_available
name = frontend_additional_promise.py
config-url = ${publish-connection-information:url-additional}
{% endif %}
{% if enable_http == 'true' %}
......@@ -419,10 +430,11 @@ wrapper-path = ${directory:services}/http-server
hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
[httpd-promise]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promises}/httpd
hostname = ${httpd:host}
port = ${httpd:port}
<= monitor-promise-base
module = check_port_listening
name = httpd.py
config-hostname = ${httpd:host}
config-port = ${httpd:port}
{% endif %}
[monitor-instance-parameter]
......@@ -679,15 +691,21 @@ command-line = ${tunnel-cluster-url:runner-path}
wrapper-path = ${directory:services}/6tunnel-cluster
hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
[ansible-vm-promise]
[ansible-vm-bin]
recipe = slapos.recipe.template:jinja2
template = {{ ansible_promise_tpl }}
rendered = ${directory:promises}/ansible_{{ name }}
rendered = ${directory:bin}/ansible_{{ name }}
extensions = jinja2.ext.do
context =
raw logs ${directory:public}/ansible
raw name {{ name }}
[ansible-vm-promise]
<= monitor-promise-base
module = check_command_execute
name = ansible_{{ name }}.py
config-command = ${ansible-vm-bin:rendered}
[download-bootstrap-script]
recipe = plone.recipe.command
file-location = ${directory:public}/vm-bootstrap
......
......@@ -23,7 +23,6 @@ log = $${buildout:directory}/log
recipe = slapos.cookbook:mkdirectory
services = $${rootdirectory:etc}/run
watched-services = $${rootdirectory:etc}/service
promises = $${rootdirectory:etc}/promise
[nbd-instance]
recipe = slapos.cookbook:nbdserver
......@@ -36,10 +35,11 @@ shell-path = ${dash:location}/bin/dash
path = $${basedirectory:services}/nbdserver
[nbd-promise]
recipe = slapos.cookbook:check_port_listening
path = $${basedirectory:promises}/nbd_promise
hostname = $${nbd-instance:ip}
port = $${nbd-instance:port}
<= monitor-promise-base
module = check_port_listening
name = nbd_promise.py
config-hostname = $${nbd-instance:ip}
config-port = $${nbd-instance:port}
[gen-passwd]
recipe = slapos.cookbook:generate.password
......@@ -58,10 +58,11 @@ path = $${basedirectory:watched-services}/onetimeupload
key = $${gen-passwd:passwd}
[onetimeupload-promise]
recipe = slapos.cookbook:check_port_listening
path = $${basedirectory:promises}/onetimeupload_promise
hostname = $${onetimeupload-instance:ip}
port = $${onetimeupload-instance:port}
<= monitor-promise-base
module = check_port_listening
name = onetimeupload_promise.py
config-hostname = $${onetimeupload-instance:ip}
config-port = $${onetimeupload-instance:port}
[publish-connection-information]
recipe = slapos.cookbook:publish
......
......@@ -28,22 +28,46 @@
import httplib
import json
import os
import re
import requests
import slapos.util
import subprocess
import sqlite3
import urlparse
import unittest
from slapos.recipe.librecipe import generateHashFromFiles
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
setUpModule, InstanceTestCase = makeModuleSetUpAndTestCaseClass(
def sanityCheck():
try:
output = subprocess.check_output("lsmod | grep kvm_intel", shell=True)
except subprocess.CalledProcessError as e:
state = False
output = e.output
else:
state = True
if state is True and re.search(r'kvm.*kvm_intel', output):
return True
if sanityCheck():
setUpModule, InstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'software.cfg')))
else:
setUpModule, InstanceTestCase = None, unittest.TestCase
class SanityCheckTestCase(unittest.TestCase):
def test_kvm_sanity_check(self):
if not(sanityCheck()):
self.fail('This environment is not usable for kvm testing, as it '
'lacks kvm_intel kernel module')
class ServicesTestCase(InstanceTestCase):
@unittest.skipIf(not sanityCheck(), 'missing kvm_intel module')
class ServicesTestCase(InstanceTestCase):
def test_hashes(self):
hash_files = [
'software_release/buildout.cfg',
......@@ -138,6 +162,7 @@ class MonitorAccessMixin(object):
)
@unittest.skipIf(not sanityCheck(), 'missing kvm_intel module')
class TestAccessDefault(MonitorAccessMixin, InstanceTestCase):
__partition_reference__ = 'ad'
expected_partition_with_monitor_base_url_count = 1
......@@ -154,6 +179,7 @@ class TestAccessDefault(MonitorAccessMixin, InstanceTestCase):
self.assertFalse('url-additional' in connection_parameter_dict)
@unittest.skipIf(not sanityCheck(), 'missing kvm_intel module')
class TestAccessDefaultAdditional(MonitorAccessMixin, InstanceTestCase):
__partition_reference__ = 'ada'
expected_partition_with_monitor_base_url_count = 1
......@@ -184,6 +210,7 @@ class TestAccessDefaultAdditional(MonitorAccessMixin, InstanceTestCase):
self.assertTrue('<title>noVNC</title>' in result.text)
@unittest.skipIf(not sanityCheck(), 'missing kvm_intel module')
class TestAccessKvmCluster(MonitorAccessMixin, InstanceTestCase):
__partition_reference__ = 'akc'
expected_partition_with_monitor_base_url_count = 2
......@@ -214,6 +241,7 @@ class TestAccessKvmCluster(MonitorAccessMixin, InstanceTestCase):
self.assertFalse('kvm0-url-additional' in connection_parameter_dict)
@unittest.skipIf(not sanityCheck(), 'missing kvm_intel module')
class TestAccessKvmClusterAdditional(MonitorAccessMixin, InstanceTestCase):
__partition_reference__ = 'akca'
expected_partition_with_monitor_base_url_count = 2
......
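
The new sanityCheck() gates the whole KVM test module on the kvm_intel kernel module: when it is absent, only SanityCheckTestCase runs (and fails with an explicit message) while the heavy test classes are skipped. The check shells out to lsmod; an equivalent without a subprocess, shown only as an illustration, is to read /proc/modules directly (lsmod's own data source):

```python
# Illustration only: the committed test keeps using "lsmod | grep kvm_intel".
import re

def kvm_intel_loaded(modules_path='/proc/modules'):
    with open(modules_path) as modules:
        return any(re.match(r'kvm_intel\s', line) for line in modules)

print(kvm_intel_loaded())
```
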
......@@ -18,7 +18,7 @@ md5sum = 1b7d2d097f208f6641bf98a17df079c8
[template-monitor]
_update_hash_filename_ = instance-monitor.cfg.jinja2
md5sum = dffe797be3a66d6838ffc70ee8f5f3ff
md5sum = 373c79480e6425c20480fc911a56c3fd
[template-monitor-distributor]
_update_hash_filename_ = instance-monitor-distributor.cfg.jinja2
......
......@@ -80,12 +80,10 @@ command-line =
wrapper-path = ${monitor-directory:bin}/monitor-collect-csv-dump
[monitor-check-memory-usage]
recipe = slapos.cookbook:wrapper
command-line = {{ buildout_bin}}/check-computer-memory
-db ${monitor-instance-parameter:collector-db}
--threshold ${slap-parameter:memory-percent-threshold}
--unit percent
wrapper-path = ${directory:promises}/check-computer-memory-usage
<= monitor-promise-base
module = check_command_execute
name = check-computer-memory-usage.py
config-command = {{ buildout_bin}}/check-computer-memory -db ${monitor-instance-parameter:collector-db} --threshold ${slap-parameter:memory-percent-threshold} --unit percent
[monitor-check-cpu-usage]
recipe = slapos.cookbook:promise.plugin
......
......@@ -14,4 +14,4 @@
# not need these here).
[template-instance]
filename = instance.cfg
md5sum = 7e9deae78923ab7805c0d10d0d6fe336
md5sum = ae26da39d6a156a6164782086d6aa00d
......@@ -6,12 +6,14 @@
[buildout]
parts =
monitor-base
parameters
promise-sudo-on-host
template-full-build-script
template-sudo-wrapper
template-cros-sdk-wrapper
extends = {{ monitor_template }}
# standard declaration of eggs directories
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
......@@ -54,7 +56,6 @@ cert = ${slap_connection:cert_file}
recipe = slapos.cookbook:mkdirectory
log = ${buildout:directory}/var/log
run = ${buildout:directory}/etc/run
promise = ${buildout:directory}/etc/promise
wrapper_dir = ${buildout:directory}/wrapper_bin
cros_location = ${buildout:directory}/parts/chromiumos
......@@ -64,12 +65,18 @@ wrapper_cros_sdk=${directory:wrapper_dir}/wrapper_cros_sdk
bash=/bin/bash
sudo=/usr/bin/sudo
[promise-sudo-on-host]
# assert sudo is installed, as it is required to enter the chroot 'cros_sdk'
[promise-sudo-on-host-bin]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:promise}/${:_buildout_section_name_}
wrapper-path = ${directory:bin}/${:_buildout_section_name_}
command-line = sudo -V
[promise-sudo-on-host]
# assert sudo is installed, as it is required to enter the chroot 'cros_sdk'
<= monitor-promise-base
module = check_command_execute
name = ${:_buildout_section_name_}.py
config-command = ${promise-sudo-on-host-bin:wrapper-path}
[template-sudo-wrapper]
recipe = slapos.recipe.template:jinja2
template = {{ software_dir }}/scripts/wrapper_sudo.in
......
......@@ -3,6 +3,7 @@ extends =
buildout.hash.cfg
# basic needs
../../stack/slapos.cfg
../../stack/monitor/buildout.cfg
../../component/automake/buildout.cfg
../../component/libexpat/buildout.cfg
../../component/libaio/buildout.cfg
......@@ -35,6 +36,7 @@ context =
key eggs_directory buildout:eggs-directory
key develop_eggs_directory buildout:develop-eggs-directory
key thin_provisioning_tools thin-provisioning-tools:location
key monitor_template monitor2-template:rendered
[thin-provisioning-tools]
recipe = slapos.recipe.cmmi
......
......@@ -26,4 +26,4 @@ md5sum = fbc5eacda192cb02c8d9861cb628fe6f
[template-nextcloud-instance]
filename = nextcloud-instance.cfg.in
md5sum = 59e9c65e655cf9cf144d97dd36863ede
md5sum = 426df55e81725cd7faa408d3cd93558e
......@@ -12,7 +12,7 @@ backup = ${directory:backup}/nextcloud
[service-redis]
recipe = slapos.cookbook:redis.server
wrapper = ${directory:services}/redis
promise_wrapper = ${directory:promises}/redis
promise_wrapper = ${directory:bin}/redis-promise
server_dir = ${nc-directory:redis}
config_file = ${directory:etc}/redis.conf
log_file = ${nc-directory:redis-log}/redis.log
......@@ -26,6 +26,12 @@ server_bin = {{ redis_bin }}
depend =
${logrotate-entry-redis:recipe}
[redis-promise]
<= monitor-promise-base
module = check_command_execute
name = redis.py
config-command = ${service-redis:promise_wrapper}
[logrotate-entry-redis]
<= logrotate-entry-base
name = redis
......@@ -83,6 +89,7 @@ context =
mode = 744
depends =
${service-redis:recipe}
${redis-promise:recipe}
${nextcloud-cron:recipe}
${nextcloud-optimize:recipe}
${nextcloud-apache-httpd:recipe}
......
......@@ -15,7 +15,7 @@
[instance]
filename = instance.cfg.in
md5sum = 8ed7b3f59d94869a328c72fa9bbfd7d5
md5sum = f35aa36b41cd197ab3d763dcb884e96a
[tomcat-server-xml]
filename = server.xml.in
......
......@@ -7,6 +7,8 @@ eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
extends = ${monitor2-template:rendered}
[fontconfig-conf]
recipe = slapos.recipe.template
url = ${font.conf:output}
......@@ -78,16 +80,14 @@ needs = $${tomcat-web-xml:location}
[promises]
recipe =
instance-promises =
$${tomcat-listen-promise:path}
[check-port-listening-promise]
recipe = slapos.cookbook:check_port_listening
path = $${directory:promises}/$${:_buildout_section_name_}
$${tomcat-listen-promise:name}
[tomcat-listen-promise]
<= check-port-listening-promise
hostname= $${tomcat-instance:ip}
port = $${tomcat-instance:port}
<= monitor-promise-base
module = check_port_listening
name = $${:_buildout_section_name_}.py
config-hostname= $${tomcat-instance:ip}
config-port = $${tomcat-instance:port}
[publish-connection-parameter]
recipe = slapos.cookbook:publish
......@@ -110,7 +110,6 @@ srv = $${buildout:directory}/srv
bin = $${buildout:directory}/bin
tmp = $${buildout:directory}/tmp
services = $${:etc}/service
promises = $${:etc}/promise
fonts = $${:srv}/fonts/
fontcache = $${buildout:directory}/.fontcache/
......
[buildout]
extends =
../../stack/slapos.cfg
../../stack/monitor/buildout.cfg
../../stack/nodejs.cfg
../../component/fontconfig/buildout.cfg
../../component/freetype/buildout.cfg
......
......@@ -18,7 +18,7 @@ md5sum = da8be58db4255c07750f7a7583eab3ca
[template-powerdns]
filename = instance-powerdns.cfg
md5sum = 0275136daa72e1e3ee135cf0c1fa8af3
md5sum = 681cd9564e491d1f7b7ccb810f8ca7df
[template-pdns-configuration]
_update_hash_filename_ = template/pdns.conf.jinja2
......
......@@ -16,7 +16,6 @@ log = $${:var}/log
run = $${:var}/run
service = $${:etc}/service
etc-run = $${:etc}/run
promise = $${:etc}/promise
logrotate-backup = $${:backup}/logrotate
logrotate-entries = $${:etc}/logrotate.d
......@@ -106,10 +105,11 @@ extra-context =
# Promises
[pdns-promise-listen-port]
recipe = slapos.cookbook:check_port_listening
path = $${directory:promise}/pdns-port-listening
hostname = $${pdns:local-ipv4}
port = $${pdns:port}
<= monitor-promise-base
module = check_port_listening
name = pdns-port-listening.py
config-hostname = $${pdns:local-ipv4}
config-port = $${pdns:port}
[publish-connection-informations]
recipe = slapos.cookbook:publish
......
......@@ -15,11 +15,11 @@
[instance-profile]
filename = instance.cfg.in
md5sum = 8ed5b4a7940db47ccb386c2f4e3e7273
md5sum = efb4238229681447aa7fe73898dffad4
[instance-default]
filename = instance-default.cfg.in
md5sum = 4cba10d2e9fc4b17c73c78bde0bcb86c
md5sum = 2a2c066d7d40dd8545f3008f434ee842
[proftpd-config-file]
filename = proftpd-config-file.cfg.in
......
......@@ -3,6 +3,7 @@ parts =
promises
publish-connection-parameter
extends = {{ template_monitor }}
[instance-parameter]
# TODO: this is not needed
......@@ -29,7 +30,6 @@ var = ${:home}/var
log = ${:var}/log
srv = ${:home}/srv
service = ${:etc}/service
promise = ${:etc}/promise
proftpd-dir = ${:srv}/proftpd/
ssh-authorized-keys-dir = ${:etc}/authorized_keys/
......@@ -40,10 +40,6 @@ rendered = ${directory:etc}/${:_buildout_section_name_}.cfg
mode = 0644
extensions = jinja2.ext.do
[check-port-listening-promise]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promise}/${:_buildout_section_name_}
[proftpd-userinfo]
recipe = slapos.cookbook:userinfo
......@@ -81,9 +77,11 @@ command-line =
wrapper-path = ${directory:service}/proftpd
[proftpd-listen-promise]
<= check-port-listening-promise
hostname = ${proftpd:ipv6}
port = ${proftpd:sftp-port}
<= monitor-promise-base
module = check_port_listening
name = ${:_buildout_section_name_}.py
config-hostname = ${proftpd:ipv6}
config-port = ${proftpd:sftp-port}
[ftpasswd]
......@@ -129,7 +127,7 @@ context =
[promises]
recipe =
instance-promises =
${proftpd-listen-promise:path}
${proftpd-listen-promise:name}
[publish-connection-parameter]
......
......@@ -25,6 +25,7 @@ context =
raw ftpdctl_bin {{ ftpdctl_bin }}
raw ssh_keygen_bin {{ ssh_keygen_bin }}
raw perl_bin {{ perl_bin }}
raw template_monitor {{ template_monitor }}
[instance-default]
<= instance-template
......
[buildout]
extends =
../../stack/slapos.cfg
../../stack/monitor/buildout.cfg
../../component/openssh/buildout.cfg
../../component/proftpd/buildout.cfg
buildout.hash.cfg
......@@ -38,7 +39,7 @@ context =
key ftpdctl_bin proftpd-output:ftpdctl
key ssh_keygen_bin openssh-output:keygen
key perl_bin proftpd-output:perl
raw template_monitor ${monitor2-template:rendered}
[versions]
collective.recipe.environment = 1.1.0
......
......@@ -18,7 +18,7 @@ md5sum = 610fc6fd0444d3bab3fca4478572749a
[template-re6stnet]
filename = instance-re6stnet.cfg.in
md5sum = 73c8fad6db7c711b0822891aba574564
md5sum = ee582dbab92c1c452874ddca3831aef6
[template-apache-conf]
filename = apache.conf.in
......
......@@ -18,7 +18,6 @@ var = ${buildout:directory}/var
log = ${:var}/log
services = ${:etc}/service
script = ${:etc}/run
promises = ${:etc}/promise
run = ${:var}/run
ca-dir = ${:etc}/ssl
requests = ${:ca-dir}/requests
......@@ -202,16 +201,18 @@ context =
section parameter_dict re6st-registry-conf-dict
[re6st-registry-promise]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promises}/re6st-registry
hostname = ${re6st-registry:ipv4}
port = ${re6st-registry:port}
<= monitor-promise-base
module = check_port_listening
name = re6st-registry.py
config-hostname = ${re6st-registry:ipv4}
config-port = ${re6st-registry:port}
[apache-registry-promise]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promises}/apache-re6st-registry
hostname = ${apache-conf:ipv6}
port = ${apache-conf:port}
<= monitor-promise-base
module = check_port_listening
name = apache-re6st-registry.py
config-hostname = ${apache-conf:ipv6}
config-port = ${apache-conf:port}
{% do publish_dict.__setitem__('re6stry-url', uri_scheme ~ '://[${apache-conf:ipv6}]:${apache-conf:port}') -%}
{% do publish_dict.__setitem__('re6stry-local-url', 'http://${re6st-registry:ipv4}:${re6st-registry:port}/') -%}
......
......@@ -19,4 +19,4 @@ md5sum = c4ac5de141ae6a64848309af03e51d88
[template-selenium]
filename = instance-selenium.cfg.in
md5sum = 7c1a5a4f1a3916ddf3ec0db7b22ef820
md5sum = 4f557a7b3aa9b4df1ca1fa6a754ca657
[buildout]
parts =
monitor-base
promises
publish-connection-parameter
extends = ${monitor-template:rendered}
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
......@@ -237,53 +239,54 @@ command = mkdir -p $${:location} && echo '$${instance-parameter:configuration.ss
[promises]
recipe =
instance-promises =
$${sshd-listen-promise:path}
$${selenium-server-frontend-listen-promise:path}
$${selenium-server-hub-listen-promise:path}
$${selenium-server-node-firefox-52-listen-promise:path}
$${selenium-server-node-firefox-60-listen-promise:path}
$${selenium-server-node-firefox-68-listen-promise:path}
$${selenium-server-node-instance-chromium-69-listen-promise:path}
$${sshd-listen-promise:name}
$${selenium-server-frontend-listen-promise:name}
$${selenium-server-hub-listen-promise:name}
$${selenium-server-node-firefox-52-listen-promise:name}
$${selenium-server-node-firefox-60-listen-promise:name}
$${selenium-server-node-firefox-68-listen-promise:name}
$${selenium-server-node-instance-chromium-69-listen-promise:name}
[check-port-listening-promise]
recipe = slapos.cookbook:check_port_listening
path = $${directory:promises}/$${:_buildout_section_name_}
<= monitor-promise-base
module = check_port_listening
name = $${:_buildout_section_name_}.py
[sshd-listen-promise]
<= check-port-listening-promise
hostname = $${sshd-address:hostname}
port = $${sshd-address:port}
config-hostname = $${sshd-address:hostname}
config-port = $${sshd-address:port}
[selenium-server-frontend-listen-promise]
<= check-port-listening-promise
hostname = $${selenium-server-frontend-instance:ip}
port = $${selenium-server-frontend-instance:port}
config-hostname = $${selenium-server-frontend-instance:ip}
config-port = $${selenium-server-frontend-instance:port}
[selenium-server-hub-listen-promise]
<= check-port-listening-promise
hostname = $${selenium-server-hub-instance:hostname}
port = $${selenium-server-hub-instance:port}
config-hostname = $${selenium-server-hub-instance:hostname}
config-port = $${selenium-server-hub-instance:port}
[selenium-server-node-firefox-52-listen-promise]
<= check-port-listening-promise
hostname = $${selenium-server-node-instance-firefox-52:hostname}
port = $${selenium-server-node-instance-firefox-52:port}
config-hostname = $${selenium-server-node-instance-firefox-52:hostname}
config-port = $${selenium-server-node-instance-firefox-52:port}
[selenium-server-node-firefox-60-listen-promise]
<= check-port-listening-promise
hostname = $${selenium-server-node-instance-firefox-60:hostname}
port = $${selenium-server-node-instance-firefox-60:port}
config-hostname = $${selenium-server-node-instance-firefox-60:hostname}
config-port = $${selenium-server-node-instance-firefox-60:port}
[selenium-server-node-firefox-68-listen-promise]
<= check-port-listening-promise
hostname = $${selenium-server-node-instance-firefox-68:hostname}
port = $${selenium-server-node-instance-firefox-68:port}
config-hostname = $${selenium-server-node-instance-firefox-68:hostname}
config-port = $${selenium-server-node-instance-firefox-68:port}
[selenium-server-node-instance-chromium-69-listen-promise]
<= check-port-listening-promise
hostname = $${selenium-server-node-instance-chromium-69:hostname}
port = $${selenium-server-node-instance-chromium-69:port}
config-hostname = $${selenium-server-node-instance-chromium-69:hostname}
config-port = $${selenium-server-node-instance-chromium-69:port}
[publish-connection-parameter]
......@@ -321,7 +324,6 @@ bin = $${buildout:directory}/bin
tmp = $${buildout:directory}/tmp
services = $${:etc}/service
promises = $${:etc}/promise
framebuffer = $${:srv}/framebuffer
fonts = $${:srv}/fonts/
ssh = $${:etc}/ssh
......
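Since the shared [check-port-listening-promise] base above now derives the plugin filename from $${:_buildout_section_name_}.py, each extending section only supplies its config-* values, and the [promises] part collects the resulting :name options. A hypothetical extra promise following the same pattern:

[my-service-listen-promise]
<= check-port-listening-promise
config-hostname = $${my-service:ip}
config-port = $${my-service:port}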
......@@ -14,6 +14,7 @@ extends =
../../component/openssh/buildout.cfg
../../stack/slapos.cfg
./buildout.hash.cfg
../../stack/monitor/buildout.cfg
parts =
slapos-cookbook
......
......@@ -14,11 +14,11 @@
# not need these here).
[template-erp5]
filename = instance-erp5.cfg.in
md5sum = 70c8d3e4414f6f9f969c9641e840b52f
md5sum = 2ef0ddc206c6b0982a37cfc21f23e423
[template-balancer]
filename = instance-balancer.cfg.in
md5sum = e8033d4fd7b6348b525a6148762ccdb4
md5sum = ef86e09e44ac67a9b15939df0ab4a466
[template-apache-backend-conf]
filename = apache-backend.conf.in
......
......@@ -187,10 +187,11 @@ input = inline:
[{{ section('apache-promise') }}]
# Check any apache port in ipv4, expect other ports and ipv6 to behave consistently
recipe = slapos.cookbook:check_port_listening
path = ${directory:promise}/apache
hostname = {{ ipv4 }}
port = {{ apache_dict.values()[0][0] }}
<= monitor-promise-base
module = check_port_listening
name = apache.py
config-hostname = {{ ipv4 }}
config-port = {{ apache_dict.values()[0][0] }}
[{{ section('publish') }}]
recipe = slapos.cookbook:publish.serialised
......@@ -229,7 +230,6 @@ recipe = slapos.cookbook:mkdirectory
apache-conf = ${:etc}/apache
bin = ${buildout:directory}/bin
etc = ${buildout:directory}/etc
promise = ${:etc}/promise
services = ${:etc}/run
services-on-watch = ${:etc}/service
var = ${buildout:directory}/var
......@@ -273,14 +273,16 @@ configuration = {{ slapparameter_dict['apachedex-configuration'] }}
promise-threshold = {{ slapparameter_dict['apachedex-promise-threshold'] }}
[{{ section('monitor-promise-apachedex-result') }}]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:promise}/check-apachedex-result
command-line = "{{ parameter_dict['promise-check-apachedex-result'] }}" --apachedex_path "${directory:apachedex}" --status_file ${monitor-directory:private}/apachedex.report.json --threshold "${apachedex-parameters:promise-threshold}"
<= monitor-promise-base
module = check_command_execute
name = check-apachedex-result.py
config-command = "{{ parameter_dict['promise-check-apachedex-result'] }}" --apachedex_path "${directory:apachedex}" --status_file ${monitor-directory:private}/apachedex.report.json --threshold "${apachedex-parameters:promise-threshold}"
[{{ section('promise-check-computer-memory') }}]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:promise}/check-computer-memory
command-line = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
<= monitor-promise-base
module = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
[monitor-instance-parameter]
monitor-httpd-ipv6 = {{ (ipv6_set | list)[0] }}
......
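Command-based promises are migrated the same way: the slapos.cookbook:wrapper recipe and its wrapper-path disappear, and the former command-line value moves to config-command under the check_command_execute module. A condensed sketch with a hypothetical check command:

[example-command-promise]
<= monitor-promise-base
module = check_command_execute
name = example-command.py
config-command = "${example-check:binary}" --threshold "${example-check:threshold}"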
......@@ -65,7 +65,6 @@ config-name = {{ name }}
[directory]
recipe = slapos.cookbook:mkdirectory
etc = ${buildout:directory}/etc
promise = ${:etc}/promise
service-on-watch = ${:etc}/service
srv = ${buildout:directory}/srv
backup-caucased = ${:srv}/backup/caucased
......@@ -91,7 +90,6 @@ backup-caucased = ${:srv}/backup/caucased
service_auto_approve_count=caucase_dict.get('service-auto-approve-amount', 1),
user_auto_approve_count=caucase_dict.get('user-auto-approve-amount', 0),
key_len=caucase_dict.get('key-length', 2048),
promise='${directory:promise}/caucased',
)}}
{% do root_common.section('caucased') -%}
{% do root_common.section('caucased-promise') -%}
......
......@@ -9,10 +9,10 @@ parts +=
[local-bt5-repository]
# Same as bt5-repository, but only local repository.
# Used to generate bt5lists.
list = ${erp5:location}/bt5 ${erp5:location}/product/ERP5/bootstrap ${vifib:location}/master/bt5
list = ${erp5:location}/bt5 ${erp5:location}/product/ERP5/bootstrap ${erp5-bin:location}/bt5 ${erp5-doc:location}/bt5 ${vifib:location}/master/bt5
[erp5_repository_list]
repository_id_list = erp5 vifib/master
repository_id_list = erp5 erp5-bin erp5-doc vifib/master
[erp5]
branch = erp5-vifib
......
......@@ -14,7 +14,7 @@
# not need these here).
[template]
filename = instance.cfg
md5sum = c44a7481bb85e3258128afe3fcf23f44
md5sum = 317c49bf451e80bf0f9d44baa603861e
[template-runner]
filename = instance-runner.cfg
......@@ -22,11 +22,11 @@ md5sum = bacb2d1a38d3a512025e861debdc75b2
[template-runner-import-script]
filename = template/runner-import.sh.jinja2
md5sum = 3c80e21dda146b4cbee103b0a3f6935b
md5sum = fc22e2d2f03ce58631f157a5b4943e15
[instance-runner-import]
filename = instance-runner-import.cfg.in
md5sum = 238466e8045096c09534e1d01b1befd6
md5sum = 1f1c62f2bc09a6ab3a2f96eacdf99492
[instance-runner-export]
filename = instance-runner-export.cfg.in
......
......@@ -107,6 +107,7 @@ context =
section supervisord supervisord
raw output_log_file ${directory:log}/${:resilient-log-basename}
raw shell_binary {{ bash_executable_location }}
raw sqlite3_binary {{ sqlite3_executable_location }}
raw rsync_binary {{ rsync_executable_location }}
raw restore_exit_code_file ${:restore-exit-code-file}
raw restore_error_message_file ${:restore-error-message-file}
......
......@@ -76,6 +76,7 @@ context =
raw importer_script_path ${template-runner-import-script:location}/${template-runner-import-script:filename}
raw buildout_executable_location ${buildout:executable}
raw bash_executable_location ${bash:location}/bin/bash
raw sqlite3_executable_location ${sqlite3:location}/bin/sqlite3
raw rsync_executable_location ${rsync:location}/bin/rsync
raw pbsready_import_template_path ${pbsready-import:output}
raw software_release_information_template ${template-resilient-software-release-information:destination}/${template-resilient-software-release-information:filename}
......
......@@ -134,8 +134,7 @@ HOME='{{ directory["home"] }}'
export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
export MAKEFLAGS=-j4
SLAPOS='{{ directory["bin"] }}'/slapos
# XXX hardcoded
SQLITE3="$HOME/software_release/parts/sqlite3/bin/sqlite3"
SQLITE3="{{ sqlite3_binary }}"
DATABASE="$HOME/srv/runner/proxy.db"
db_query () {
# Try opening locked tables for 5 seconds to prevent "database is locked" error
......
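The sqlite3 path is no longer hardcoded in the import script; it is threaded through the templates instead. A condensed sketch of the chain taken from the hunks above, with the file attributions inferred from the surrounding md5sum entries:

# software-side profile: expose the binary location of the [sqlite3] component
raw sqlite3_executable_location ${sqlite3:location}/bin/sqlite3
# instance-runner-import.cfg.in: forward it to the script template context
raw sqlite3_binary {{ sqlite3_executable_location }}
# template/runner-import.sh.jinja2: consume it
SQLITE3="{{ sqlite3_binary }}"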
......@@ -15,7 +15,7 @@
[instance]
filename = instance.cfg.in
md5sum = c35e6baca37c690bce39ef43825ec42b
md5sum = 6a0a65ff9f81f907d53c42c65b54442a
[yarn.lock]
filename = yarn.lock
......
[buildout]
parts =
monitor-base
promises
.bashrc
frontend-reload
publish-connection-parameter
extends = ${monitor-template:rendered}
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
......@@ -111,23 +114,22 @@ template = inline:
[promises]
recipe =
instance-promises =
$${theia-listen-promise:path}
$${frontend-listen-promise:path}
[check-port-listening-promise]
recipe = slapos.cookbook:check_port_listening
path = $${directory:promises}/$${:_buildout_section_name_}
$${theia-listen-promise:name}
$${frontend-listen-promise:name}
[theia-listen-promise]
<= check-port-listening-promise
hostname= $${theia-instance:ip}
port = $${theia-instance:port}
<= monitor-promise-base
module = check_port_listening
name = $${:_buildout_section_name_}.py
config-hostname = $${theia-instance:ip}
config-port = $${theia-instance:port}
[frontend-listen-promise]
<= check-port-listening-promise
hostname= $${frontend-instance:ip}
port = $${frontend-instance:port}
<= monitor-promise-base
module = check_port_listening
name = $${:_buildout_section_name_}.py
config-hostname = $${frontend-instance:ip}
config-port = $${frontend-instance:port}
[publish-connection-parameter]
recipe = slapos.cookbook:publish
......@@ -152,7 +154,6 @@ tmp = $${buildout:directory}/tmp
pidfiles = $${:var}/run
services = $${:etc}/service
promises = $${:etc}/promise
framebuffer = $${:srv}/framebuffer
fonts = $${:srv}/fonts
home = $${:srv}/home
\ No newline at end of file
......@@ -8,6 +8,7 @@ extends =
../../component/bash/buildout.cfg
../../component/coreutils/buildout.cfg
../../stack/slapos.cfg
../../stack/monitor/buildout.cfg
./gowork.cfg
./buildout.hash.cfg
# this gowork.cfg includes the one from caddy, because they share the only gowork
......
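Both software profiles touched by this commit gain the monitor stack in their extends, which is expected to provide the monitor-template that the instance profiles pull in via extends = ${monitor-template:rendered} and, through it, the monitor-promise-base section and monitor-base part used above. The software-side addition is just:

[buildout]
extends =
    ../../stack/monitor/buildout.cfg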
......@@ -19,4 +19,4 @@ md5sum = d027a2dccaf15ae6e7d3a28cc02d70c3
[template-turnserver]
filename = instance-turnserver.cfg.jinja2.in
md5sum = 6ba54fb299e1fd59617e5a6a9545e36e
md5sum = 62382da42ea56e9fd0124caeca42bcf3
......@@ -11,7 +11,6 @@ run = ${:var}/run
log = ${:var}/log
scripts = ${:etc}/run
services = ${:etc}/service
promises = ${:etc}/promise
plugins = ${:etc}/plugin
ssl = ${:etc}/ssl
......
......@@ -15,4 +15,4 @@
[caucase-jinja2-library]
filename = caucase.jinja2.library
md5sum = 5cf324ea1beb593738339f3d20d60a44
md5sum = 6394dadda5726571149ec1b21207311b
......@@ -8,7 +8,7 @@
user_auto_approve_count=1,
key_len=None,
backup_dir=None,
promise=None
promise=True
) -%}
[{{ prefix }}-directory]
recipe = slapos.cookbook:mkdirectory
......@@ -30,9 +30,10 @@ command-line = '{{ buildout_bin_directory }}/caucased'
{% if promise -%}
[{{ prefix }}-promise]
recipe = slapos.cookbook:wrapper
wrapper-path = {{ promise }}
command-line = '{{ buildout_bin_directory }}/caucase-probe' 'http://{{ netloc }}'
<= monitor-promise-base
module = check_command_execute
name = {{ prefix }}.py
config-command = '{{ buildout_bin_directory }}/caucase-probe' 'http://{{ netloc }}'
{%- endif %}
{%- endmacro %}
......
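On the caller side, the promise argument of the caucase macro changes from a wrapper path to a plain flag defaulting to True, and the probe is published as a {{ prefix }}.py monitor plugin running caucase-probe. A condensed sketch of a call site after this change, assuming the library is imported as caucase and the macro is named after the caucased prefix it generates, and keeping only the arguments visible in this diff:

{{ caucase.caucased(
    service_auto_approve_count=caucase_dict.get('service-auto-approve-amount', 1),
    user_auto_approve_count=caucase_dict.get('user-auto-approve-amount', 0),
    key_len=caucase_dict.get('key-length', 2048),
) }}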
......@@ -365,7 +365,7 @@ list = ${local-bt5-repository:list}
[local-bt5-repository]
# Same as bt5-repository, but only local repository.
# Used to generate bt5lists.
list = ${erp5:location}/bt5 ${erp5:location}/product/ERP5/bootstrap ${erp5-bin:location}/bt5 ${erp5-doc:location}
list = ${erp5:location}/bt5 ${erp5:location}/product/ERP5/bootstrap ${erp5-bin:location}/bt5 ${erp5-doc:location}/bt5
[genbt5list]
recipe = plone.recipe.command
......
......@@ -26,11 +26,11 @@ md5sum = d95e8500bdc72d1f40b97cc414656e7e
[template-mariadb]
filename = instance-mariadb.cfg.in
md5sum = 591fe60e2f615c7690fa5078473b1b0a
md5sum = b29449652ef996c267518e4dcfd8e9e1
[template-kumofs]
filename = instance-kumofs.cfg.in
md5sum = 13315c109deab534b81e7a45e7320eea
md5sum = e91c0fbd0df441884f7422fa7976053c
[template-zope-conf]
filename = zope.conf.in
......@@ -50,7 +50,7 @@ md5sum = dec33a617fa1b307c8ddb883efcfe3ce
[template-postfix]
filename = instance-postfix.cfg.in
md5sum = cbcb5f4c2885e3f2589770e76a422be7
md5sum = bbf84495576a3dbc522524895e9640ff
[template-postfix-master-cf]
filename = postfix_master.cf.in
......@@ -78,19 +78,19 @@ md5sum = d41d8cd98f00b204e9800998ecf8427e
[template-erp5]
filename = instance-erp5.cfg.in
md5sum = 4fe42a1fe78ce9531d8e9c9837f4a784
md5sum = 939522bb26bf2109d7db818585a46f7a
[template-zeo]
filename = instance-zeo.cfg.in
md5sum = 10a01b85c966ad9fe13bc981f1ddabe8
md5sum = 0648e38bd5d3a15bb9f93264932740b9
[template-zope]
filename = instance-zope.cfg.in
md5sum = 5eba2aa53af7ee74959745e6225ea0a7
md5sum = b1685783f4c93da918ccc83702559e6f
[template-balancer]
filename = instance-balancer.cfg.in
md5sum = 10c620e934397390dc9b737453aab387
md5sum = cfc9c1ad40bf215771f4902b2ea3fe5b
[template-haproxy-cfg]
filename = haproxy.cfg.in
......
......@@ -180,11 +180,12 @@ input = inline:
kill -USR1 "$(cat '${apache-conf-parameter-dict:pid-file}')"
[{{ section('apache-promise') }}]
<= monitor-promise-base
# Check any apache port in ipv4, expect other ports and ipv6 to behave consistently
recipe = slapos.cookbook:check_port_listening
path = ${directory:promise}/apache
hostname = {{ ipv4 }}
port = {{ apache_dict.values()[0][0] }}
module = check_port_listening
name = apache.py
config-hostname = {{ ipv4 }}
config-port = {{ apache_dict.values()[0][0] }}
[{{ section('publish') }}]
recipe = slapos.cookbook:publish.serialised
......@@ -209,7 +210,6 @@ recipe = slapos.cookbook:mkdirectory
apache-conf = ${:etc}/apache
bin = ${buildout:directory}/bin
etc = ${buildout:directory}/etc
promise = ${:etc}/promise
services = ${:etc}/run
services-on-watch = ${:etc}/service
var = ${buildout:directory}/var
......@@ -247,14 +247,16 @@ configuration = {{ slapparameter_dict['apachedex-configuration'] }}
promise-threshold = {{ slapparameter_dict['apachedex-promise-threshold'] }}
[{{ section('monitor-promise-apachedex-result') }}]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:promise}/check-apachedex-result
command-line = "{{ parameter_dict['promise-check-apachedex-result'] }}" --apachedex_path "${directory:apachedex}" --status_file ${monitor-directory:private}/apachedex.report.json --threshold "${apachedex-parameters:promise-threshold}"
<= monitor-promise-base
module = check_command_execute
name = check-apachedex-result.py
config-command = "{{ parameter_dict['promise-check-apachedex-result'] }}" --apachedex_path "${directory:apachedex}" --status_file ${monitor-directory:private}/apachedex.report.json --threshold "${apachedex-parameters:promise-threshold}"
[{{ section('promise-check-computer-memory') }}]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:promise}/check-computer-memory
command-line = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
<= monitor-promise-base
module = check_command_execute
name = check-computer-memory.py
config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${monitor-instance-parameter:collector-db} --threshold "{{ slapparameter_dict["computer-memory-percent-threshold"] }}" --unit percent
[monitor-instance-parameter]
monitor-httpd-ipv6 = {{ (ipv6_set | list)[0] }}
......
......@@ -65,7 +65,7 @@ config-name = {{ name }}
[directory]
recipe = slapos.cookbook:mkdirectory
etc = ${buildout:directory}/etc
promise = ${:etc}/promise
plugin = ${:etc}/plugin
service-on-watch = ${:etc}/service
srv = ${buildout:directory}/srv
backup-caucased = ${:srv}/backup/caucased
......@@ -91,7 +91,6 @@ backup-caucased = ${:srv}/backup/caucased
service_auto_approve_count=caucase_dict.get('service-auto-approve-amount', 1),
user_auto_approve_count=caucase_dict.get('user-auto-approve-amount', 0),
key_len=caucase_dict.get('key-length', 2048),
promise='${directory:promise}/caucased',
)}}
{% do root_common.section('caucased') -%}
{% do root_common.section('caucased-promise') -%}
......