Commit 7ff43824 authored by Jérome Perrin's avatar Jérome Perrin

ERP5: Move frontend virtualhost logic on backend

 - use caucase for the balancer certificate
 - move the virtual host logic to the backend
 - change the "frontend" parameter to request type "" (instead of "zope")

See merge request nexedi/slapos!1504
parents cb78214e 6e735808
From 8e7c9a6a86104e306aee2224ff5e517ee201b28f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Tue, 9 Jan 2024 17:15:11 +0900
Subject: [PATCH] Fix redirections to URLS with host given as IP-litteral
(#1192)
When redirecting to a URL whose host is an IPv6 literal with surrounding brackets,
we should not escape the surrounding brackets.
The patch updates the referenced RFC from 2396 to 3986, which obsoletes it,
and changes the safe characters for the netloc part to allow [ and ].
The RFC specifies that [ and ] are only allowed as the first and last
characters of the host, but we don't need to be more specific here, because
using [ or ] elsewhere in the host is rejected by urlparse above.
Fixes #1191
---
src/ZPublisher/HTTPResponse.py | 14 +++++++-------
src/ZPublisher/tests/testHTTPResponse.py | 8 ++++++--
2 files changed, 13 insertions(+), 9 deletions(-)
diff --git a/src/ZPublisher/HTTPResponse.py b/src/ZPublisher/HTTPResponse.py
index b0b4ca2b1..b1a824151 100644
--- a/src/ZPublisher/HTTPResponse.py
+++ b/src/ZPublisher/HTTPResponse.py
@@ -230,24 +230,24 @@ class HTTPBaseResponse(BaseResponse):
# To be entirely correct, we must make sure that all non-ASCII
# characters are quoted correctly.
parsed = list(urlparse(location))
- rfc2396_unreserved = "-_.!~*'()" # RFC 2396 section 2.3
+ rfc3986_unreserved = "-_.!~*'()" # RFC 3986 section 2.3
for idx, idx_safe in (
# authority
- (1, ";:@?/&=+$,"), # RFC 2396 section 3.2, 3.2.1, 3.2.3
+ (1, "[];:@?/&=+$,"), # RFC 3986 section 3.2, 3.2.1, 3.2.3
# path
- (2, "/;:@&=+$,"), # RFC 2396 section 3.3
+ (2, "/;:@&=+$,"), # RFC 3986 section 3.3
# params - actually part of path; empty in Python 3
- (3, "/;:@&=+$,"), # RFC 2396 section 3.3
+ (3, "/;:@&=+$,"), # RFC 3986 section 3.3
# query
- (4, ";/?:@&=+,$"), # RFC 2396 section 3.4
+ (4, ";/?:@&=+,$"), # RFC 3986 section 3.4
# fragment
- (5, ";/?:@&=+$,"), # RFC 2396 section 4
+ (5, ";/?:@&=+$,"), # RFC 3986 section 4
):
# Make a hacky guess whether the component is already
# URL-encoded by checking for %. If it is, we don't touch it.
if '%' not in parsed[idx]:
parsed[idx] = quote(parsed[idx],
- safe=rfc2396_unreserved + idx_safe)
+ safe=rfc3986_unreserved + idx_safe)
location = urlunparse(parsed)
self.setStatus(status, lock=lock)
diff --git a/src/ZPublisher/tests/testHTTPResponse.py b/src/ZPublisher/tests/testHTTPResponse.py
index a7f816c04..08a1674ba 100644
--- a/src/ZPublisher/tests/testHTTPResponse.py
+++ b/src/ZPublisher/tests/testHTTPResponse.py
@@ -767,15 +767,19 @@ class HTTPResponseTests(unittest.TestCase):
self._redirectURLCheck(ENC_URL)
def test_redirect_unreserved_chars(self):
- # RFC 2396 section 2.3, characters that should not be encoded
+ # RFC 3986 section 2.3, characters that should not be encoded
url = "http://example.com/-_.!~*'()"
self._redirectURLCheck(url)
def test_redirect_reserved_chars(self):
- # RFC 2396 section 3.3, characters with reserved meaning in a path
+ # RFC 3986 section 3.3, characters with reserved meaning in a path
url = 'http://example.com/+/$/;/,/=/?/&/@@index.html'
self._redirectURLCheck(url)
+ def test_redirect_ipv6(self):
+ url = "http://[fe80::1ff:fe23:4567:890a]:1234"
+ self._redirectURLCheck(url)
+
def test__encode_unicode_no_content_type_uses_default_encoding(self):
UNICODE = u'<h1>Tr\u0039s Bien</h1>'
response = self._makeOne()
--
2.42.0
......@@ -90,7 +90,6 @@ This software release assigns the following port ranges by default:
balancer 2150-2199
zope 2200-*
jupyter 8888
caucase 8890,8891
==================== ==========
Non-zope partitions are unique in an ERP5 cluster, so you shouldn't have to
......
......@@ -5,7 +5,7 @@
"additionalProperties": false,
"definitions": {
"routing-rule-list": {
"description": "Maps the path received in requests to given zope path. Rules are applied in the order they are given. This requires the path received from the outside world (typically: frontend) to have its root correspond to Zope's root (for frontend: 'path' parameter must be empty), with the customary VirtualHostMonster construct (for frontend: 'type' must be 'zope').",
"description": "Maps the path received in requests to given zope path. Rules are applied in the order they are given, after 'internal-path' from 'frontend' parameter. This also supports legacy frontends, using Rapid CDN with \"zope\" type.",
"type": "array",
"default": [
[
......@@ -111,7 +111,7 @@
"description": "Family-wide options, possibly overriding global options",
"default": {},
"patternProperties": {
".*": {
"^[a-zA-Z0-9_-]+$": {
"default": {},
"properties": {
"webdav": {
......@@ -164,7 +164,7 @@
"default": {}
},
"patternProperties": {
".*": {
"^[a-zA-Z0-9_-]+$": {
"required": [
"zope-family"
],
......@@ -204,13 +204,14 @@
"1": {}
},
"patternProperties": {
".*": {
"^[a-zA-Z0-9_-]+$": {
"additionalProperties": false,
"properties": {
"family": {
"description": "The family this partition is part of. For example: 'public', 'admin', 'backoffice', 'web-service'... Each family gets its own balancer entry. It has no special meaning for the system.",
"default": "default",
"type": "string"
"type": "string",
"pattern": "^[a-zA-Z0-9_-]+$"
},
"instance-count": {
"description": "Number of Zopes to setup on this partition",
......@@ -513,7 +514,7 @@
"properties": {
"url": {
"title": "Caucase URL",
"description": "URL of existing caucase instance to use. If empty, a new caucase instance will be deployed. If not empty, other properties in this section will be ignored.",
"description": "URL of existing caucase instance to use. If empty, caucase instances will be deployed inside partitions.",
"default": "",
"type": "string",
"format": "uri"
......@@ -720,15 +721,9 @@
},
"uniqueItems": true
},
"caucase-url": {
"title": "Caucase URL",
"description": "URL of caucase service to use. If not set, global setting will be used.",
"type": "string",
"format": "uri"
},
"csr": {
"title": "csr",
"description": "PEM-encoded certificate signature request to request server certificate with. If not provided, HTTPS will be disabled.",
"description": "PEM-encoded certificate signature request to request server certificate with.",
"type": "string"
},
"max-crl-update-delay": {
......
......@@ -69,7 +69,7 @@
"type": "string"
},
"caucase-http-url": {
"description": "Caucase url on HTTP. For HTTPS URL, uses https scheme, if port is explicitely specified in http URL, take that port and add 1 and use it as https port. If it is not specified.",
"description": "Caucase url on HTTP. For HTTPS URL, uses https scheme, if port is explicitely specified in http URL, take that port and add 1 and use it as https port.",
"pattern": "^http://",
"type": "string"
}
......
......@@ -314,7 +314,7 @@ class CaucaseCertificate(ManagedResource):
)
return os.path.join(software_release_root_path, 'bin', 'caucase')
def request(self, common_name: str, caucase: CaucaseService) -> None:
def request(self, common_name: str, caucase: CaucaseService, san: x509.SubjectAlternativeName=None) -> None:
"""Generate certificate and request signature to the caucase service.
This overwrites any previously requested certificate for this instance.
......@@ -345,11 +345,10 @@ class CaucaseCertificate(ManagedResource):
NameOID.COMMON_NAME,
common_name,
),
])).sign(
key,
hashes.SHA256(),
default_backend(),
)
]))
if san:
csr = csr.add_extension(san, critical=True)
csr = csr.sign(key, hashes.SHA256(), default_backend())
with open(self.csr_file, 'wb') as f:
f.write(csr.public_bytes(serialization.Encoding.PEM))
......
import glob
import ipaddress
import json
import logging
import os
import re
import socket
import subprocess
import sqlite3
import tempfile
import time
import urllib.parse
from http.server import BaseHTTPRequestHandler
from unittest import mock
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID
import OpenSSL.SSL
import pexpect
import psutil
import requests
from slapos.proxy.db_version import DB_VERSION
from slapos.testing.utils import CrontabMixin, ManagedHTTPServer
from . import CaucaseCertificate, CaucaseService, ERP5InstanceTestCase, default, matrix, setUpModule
......@@ -99,14 +110,15 @@ class BalancerTestCase(ERP5InstanceTestCase):
'default': ['dummy_http_server'],
},
'dummy_http_server': [[cls.getManagedResource("backend_web_server", EchoHTTPServer).netloc, 1, False]],
'backend-path-dict': {
'default': '',
},
'ssl-authentication-dict': {'default': False},
'ssl': {
'caucase-url': cls.getManagedResource("caucase", CaucaseService).url,
},
'ssl': {},
'timeout-dict': {'default': None},
'frontend-parameter-dict': {
'default': {
'internal-path': '',
'zope-family': 'default',
},
},
'family-path-routing-dict': {},
'path-routing-list': [],
}
......@@ -116,24 +128,72 @@ class BalancerTestCase(ERP5InstanceTestCase):
return {'_': json.dumps(cls._getInstanceParameterDict())}
def setUp(self) -> None:
self.default_balancer_url = json.loads(
self.default_balancer_direct_url = json.loads(
self.computer_partition.getConnectionParameterDict()['_'])['default']
self.default_balancer_zope_url = json.loads(
self.computer_partition.getConnectionParameterDict()['_'])['url-backend-default']
class TestURLRewrite(BalancerTestCase):
__partition_reference__ = 'ur'
def test_direct(self):
self.assertEqual(requests.get(self.default_balancer_direct_url, verify=False).json()['Path'], '/')
self.assertEqual(
requests.get(
urllib.parse.urljoin(
self.default_balancer_direct_url,
'/VirtualHostBase/https/example.com:443/VirtualHostRoot/path'),
verify=False
).json()['Path'],
'/VirtualHostBase/https/example.com:443/VirtualHostRoot/path')
def test_zope(self):
netloc = urllib.parse.urlparse(self.default_balancer_zope_url).netloc
self.assertEqual(
requests.get(self.default_balancer_zope_url, verify=False).json()['Path'],
f'/VirtualHostBase/https/{netloc}/VirtualHostRoot/')
self.assertEqual(
requests.get(urllib.parse.urljoin(
self.default_balancer_zope_url, 'path'), verify=False).json()['Path'],
f'/VirtualHostBase/https/{netloc}/VirtualHostRoot/path')
self.assertEqual(
requests.get(
urllib.parse.urljoin(
self.default_balancer_zope_url,
'/VirtualHostBase/https/example.com:443/VirtualHostRoot/path'),
verify=False
).json()['Path'],
f'/VirtualHostBase/https/{netloc}/VirtualHostRoot/VirtualHostBase/https/example.com:443/VirtualHostRoot/path')
def test_bad_host(self):
self.assertEqual(
requests.get(self.default_balancer_zope_url, headers={'Host': 'a/b'}, verify=False).status_code,
requests.codes.bad_request)
class SlowHTTPServer(ManagedHTTPServer):
"""An HTTP Server which reply after a timeout.
Timeout is 2 seconds by default, and can be specified in the path of the URL
Timeout is 2 seconds by default, and can be specified in the path of the URL:
GET /{timeout}
but because the balancer rewrites the URL, the actual URL used by this server is:
GET /VirtualHostBase/https/{host}/VirtualHostRoot/{timeout}
"""
class RequestHandler(BaseHTTPRequestHandler):
def do_GET(self) -> None:
self.send_response(200)
self.send_header("Content-Type", "text/plain")
timeout = 2
if self.path == '/': # for health checks
timeout = 0
try:
timeout = int(self.path[1:])
except ValueError:
timeout = int(self.path.split('/')[5])
except (ValueError, IndexError):
pass
self.send_response(200)
self.send_header("Content-Type", "text/plain")
time.sleep(timeout)
self.end_headers()
self.wfile.write(b"OK\n")
......@@ -155,12 +215,12 @@ class TestTimeout(BalancerTestCase, CrontabMixin):
def test_timeout(self) -> None:
self.assertEqual(
requests.get(
urllib.parse.urljoin(self.default_balancer_url, '/1'),
urllib.parse.urljoin(self.default_balancer_zope_url, '/1'),
verify=False).status_code,
requests.codes.ok)
self.assertEqual(
requests.get(
urllib.parse.urljoin(self.default_balancer_url, '/5'),
urllib.parse.urljoin(self.default_balancer_zope_url, '/5'),
verify=False).status_code,
requests.codes.gateway_timeout)
......@@ -172,13 +232,13 @@ class TestLog(BalancerTestCase, CrontabMixin):
@classmethod
def _getInstanceParameterDict(cls) -> dict:
parameter_dict = super()._getInstanceParameterDict()
# use a slow server instead
# use a slow server instead, so that we can test logs with slow requests
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("slow_web_server", SlowHTTPServer).netloc, 1, False]]
return parameter_dict
def test_access_log_format(self) -> None:
requests.get(
urllib.parse.urljoin(self.default_balancer_url, '/url_path'),
urllib.parse.urljoin(self.default_balancer_zope_url, '/url_path'),
verify=False,
)
time.sleep(.5) # wait a bit more until access is logged
......@@ -191,7 +251,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
# the request - but our test machines can be slow sometimes, so we tolerate
# it can take up to 20 seconds.
match = re.match(
r'([(\d\.)]+) - - \[(.*?)\] "(.*?)" (\d+) (\d+) "(.*?)" "(.*?)" (\d+)',
r'([(\da-fA-F:\.)]+) - - \[(.*?)\] "(.*?)" (\d+) (\d+) "(.*?)" "(.*?)" (\d+)',
access_line
)
self.assertTrue(match)
......@@ -202,7 +262,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
def test_access_log_apachedex_report(self) -> None:
# make a request so that we have something in the logs
requests.get(self.default_balancer_url, verify=False)
requests.get(self.default_balancer_zope_url, verify=False)
# crontab for apachedex is executed
self._executeCrontabAtDate('generate-apachedex-report', '23:59')
......@@ -227,7 +287,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
self._executeCrontabAtDate('logrotate', '2000-01-01')
# make a request so that we have something in the logs
requests.get(self.default_balancer_url, verify=False).raise_for_status()
requests.get(self.default_balancer_zope_url, verify=False).raise_for_status()
# slow query crontab depends on crontab for log rotation
# to be executed first.
......@@ -242,7 +302,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
)
self.assertTrue(os.path.exists(rotated_log_file))
requests.get(self.default_balancer_url, verify=False).raise_for_status()
requests.get(self.default_balancer_zope_url, verify=False).raise_for_status()
# on next day execution of logrotate, log files are compressed
self._executeCrontabAtDate('logrotate', '2050-01-02')
self.assertTrue(os.path.exists(rotated_log_file + '.xz'))
......@@ -256,11 +316,11 @@ class TestLog(BalancerTestCase, CrontabMixin):
# after a while, balancer should detect and log this event in error log
time.sleep(5)
self.assertEqual(
requests.get(self.default_balancer_url, verify=False).status_code,
requests.get(self.default_balancer_zope_url, verify=False).status_code,
requests.codes.service_unavailable)
with open(os.path.join(self.computer_partition_root_path, 'var', 'log', 'apache-error.log')) as error_log_file:
error_line = error_log_file.read().splitlines()[-1]
self.assertIn('proxy family_default has no server available!', error_line)
self.assertIn('backend default has no server available!', error_line)
# this log also include a timestamp
self.assertRegex(error_line, r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}')
......@@ -268,7 +328,9 @@ class TestLog(BalancerTestCase, CrontabMixin):
class BalancerCookieHTTPServer(ManagedHTTPServer):
"""An HTTP Server which can set balancer cookie.
This server set cookie when requested /set-cookie path.
This server sets a cookie when the /set_cookie path is requested (actually
/VirtualHostBase/https/{host}/VirtualHostRoot/set_cookie, which is
added by the balancer proxy)
The reply body is the name used when registering this resource
using getManagedResource. This way we can assert which
......@@ -282,7 +344,8 @@ class BalancerCookieHTTPServer(ManagedHTTPServer):
def do_GET(self) -> None:
self.send_response(200)
self.send_header("Content-Type", "text/plain")
if self.path == '/set_cookie':
if self.path != '/' and self.path.split('/')[5] == 'set_cookie':
# the balancer tells the backend what's the name of the balancer cookie with
# the X-Balancer-Current-Cookie header.
self.send_header('Set-Cookie', '%s=anything' % self.headers['X-Balancer-Current-Cookie'])
......@@ -313,7 +376,7 @@ class TestBalancer(BalancerTestCase):
def test_balancer_round_robin(self) -> None:
# requests are by default balanced to both servers
self.assertEqual(
{requests.get(self.default_balancer_url, verify=False).text for _ in range(10)},
{requests.get(self.default_balancer_zope_url, verify=False).text for _ in range(10)},
{'backend_web_server1', 'backend_web_server2'}
)
......@@ -322,7 +385,7 @@ class TestBalancer(BalancerTestCase):
self.getManagedResource("backend_web_server2", BalancerCookieHTTPServer).close()
self.addCleanup(self.getManagedResource("backend_web_server2", BalancerCookieHTTPServer).open)
self.assertEqual(
{requests.get(self.default_balancer_url, verify=False).text for _ in range(10)},
{requests.get(self.default_balancer_zope_url, verify=False).text for _ in range(10)},
{'backend_web_server1',}
)
......@@ -330,7 +393,7 @@ class TestBalancer(BalancerTestCase):
# if backend provides a "SERVERID" cookie, balancer will overwrite it with the
# backend selected by balancing algorithm
self.assertIn(
requests.get(urllib.parse.urljoin(self.default_balancer_url, '/set_cookie'), verify=False).cookies['SERVERID'],
requests.get(urllib.parse.urljoin(self.default_balancer_zope_url, '/set_cookie'), verify=False).cookies['SERVERID'],
('default-0', 'default-1'),
)
......@@ -338,7 +401,7 @@ class TestBalancer(BalancerTestCase):
# if request is made with the sticky cookie, the client stick on one balancer
cookies = dict(SERVERID='default-1')
self.assertEqual(
{requests.get(self.default_balancer_url, verify=False, cookies=cookies).text for _ in range(10)},
{requests.get(self.default_balancer_zope_url, verify=False, cookies=cookies).text for _ in range(10)},
{'backend_web_server2',}
)
......@@ -346,7 +409,7 @@ class TestBalancer(BalancerTestCase):
self.getManagedResource("backend_web_server2", BalancerCookieHTTPServer).close()
self.addCleanup(self.getManagedResource("backend_web_server2", BalancerCookieHTTPServer).open)
self.assertEqual(
requests.get(self.default_balancer_url, verify=False, cookies=cookies).text,
requests.get(self.default_balancer_zope_url, verify=False, cookies=cookies).text,
'backend_web_server1')
def test_balancer_stats_socket(self) -> None:
......@@ -366,7 +429,7 @@ class TestBalancer(BalancerTestCase):
raise
self.assertEqual(socat_process.poll(), 0)
# output is a csv
self.assertIn(b'family_default,FRONTEND,', output)
self.assertIn(b'\ndefault,BACKEND,', output)
class TestTestRunnerEntryPoints(BalancerTestCase):
......@@ -465,7 +528,7 @@ class TestHTTP(BalancerTestCase):
'--insecure',
'--write-out',
'%{http_version}',
self.default_balancer_url,
self.default_balancer_zope_url,
]),
b'2',
)
......@@ -476,16 +539,16 @@ class TestHTTP(BalancerTestCase):
session.verify = False
# do a first request, which establish a first connection
session.get(self.default_balancer_url).raise_for_status()
session.get(self.default_balancer_zope_url).raise_for_status()
# "break" new connection method and check we can make another request
with mock.patch(
"requests.packages.urllib3.connectionpool.HTTPSConnectionPool._new_conn",
) as new_conn:
session.get(self.default_balancer_url).raise_for_status()
session.get(self.default_balancer_zope_url).raise_for_status()
new_conn.assert_not_called()
parsed_url = urllib.parse.urlparse(self.default_balancer_url)
parsed_url = urllib.parse.urlparse(self.default_balancer_zope_url)
# check that we have an open file for the ip connection
self.assertTrue([
......@@ -495,11 +558,162 @@ class TestHTTP(BalancerTestCase):
])
class TestServerTLSEmbeddedCaucase(BalancerTestCase):
"""Check Server TLS with embedded caucase
"""
__partition_reference__ = 's'
def _getCaucaseCACertificatePath(self) -> str:
"""Returns the path of the caucase certificate on file system.
"""
ca_cert = tempfile.NamedTemporaryFile(
prefix="ca.crt.pem",
mode="w",
delete=False,
)
ca_cert.write(
requests.get(
urllib.parse.urljoin(
self.getRootPartitionConnectionParameterDict()['caucase-http-url'],
'/cas/crt/ca.crt.pem',
)).text)
ca_cert.flush()
self.addCleanup(os.unlink, ca_cert.name)
return ca_cert.name
def _getServerCertificate(self, hostname: str, port: int) -> x509.base.Certificate:
sock = socket.socket(socket.AF_INET6 if ':' in hostname else socket.AF_INET)
sock.connect((hostname, port))
ctx = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD)
sock_ssl = OpenSSL.SSL.Connection(ctx, sock)
sock_ssl.set_connect_state()
sock_ssl.set_tlsext_host_name(hostname.encode())
sock_ssl.do_handshake()
cert = sock_ssl.get_peer_certificate()
crypto_cert = cert.to_cryptography()
sock_ssl.close()
sock.close()
return crypto_cert
def test_certificate_validates_with_caucase_ca(self) -> None:
requests.get(self.default_balancer_zope_url, verify=self._getCaucaseCACertificatePath())
def test_certificate_renewal(self) -> None:
balancer_parsed_url = urllib.parse.urlparse(self.default_balancer_zope_url)
certificate_before_renewal = self._getServerCertificate(
balancer_parsed_url.hostname,
balancer_parsed_url.port)
# run caucase updater in the future, so that certificate is renewed
caucase_updater, = glob.glob(
os.path.join(
self.computer_partition_root_path,
'etc',
'service',
'caucase-updater-haproxy-certificate-*',
))
process = pexpect.spawnu("faketime +90days " + caucase_updater)
logger = self.logger
class DebugLogFile:
def write(self, msg):
logger.info("output from caucase_updater: %s", msg)
def flush(self):
pass
process.logfile = DebugLogFile()
process.expect("Renewing .*\nNext wake-up.*")
process.terminate()
process.wait()
# wait for server to use new certificate
for _ in range(30):
certificate_after_renewal = self._getServerCertificate(
balancer_parsed_url.hostname,
balancer_parsed_url.port)
if certificate_after_renewal.not_valid_before > certificate_before_renewal.not_valid_before:
break
time.sleep(.5)
self.assertGreater(
certificate_after_renewal.not_valid_before,
certificate_before_renewal.not_valid_before,
)
# requests are served properly after certificate renewal
self.test_certificate_validates_with_caucase_ca()
class TestServerTLSExternalCaucase(TestServerTLSEmbeddedCaucase):
"""Check Server TLS with external caucase
"""
@classmethod
def _getInstanceParameterDict(cls) -> dict:
parameter_dict = super()._getInstanceParameterDict()
parameter_dict['ssl']['caucase-url'] = cls.getManagedResource(
"caucase", CaucaseService).url
return parameter_dict
def test_published_caucase_http_url_parameter(self) -> None:
self.assertEqual(
self.getRootPartitionConnectionParameterDict()['caucase-http-url'],
self.getManagedResource("caucase", CaucaseService).url,
)
class TestServerTLSCSRTemplateParameter(TestServerTLSExternalCaucase):
"""Check Server TLS with a CSR template passed as parameter
"""
@classmethod
def _getInstanceParameterDict(cls) -> dict:
# use a CSR template with this subject; we'll assert that the
# certificate used by haproxy has the same subject.
cls.csr_subject = subject = x509.Name(
[x509.NameAttribute(NameOID.COMMON_NAME, cls.__name__)])
# Add all IPs of the computer in SubjectAlternativeName, because we don't
# know what the IP of the balancer partition will be.
with sqlite3.connect(cls.slap._proxy_database) as db:
ip_address_list = [
x509.IPAddress(ipaddress.ip_address(r)) for (r, ) in db.execute(
f"SELECT address FROM partition_network{DB_VERSION}").fetchall()
]
assert ip_address_list
csr = x509.CertificateSigningRequestBuilder().subject_name(
subject).add_extension(
x509.SubjectAlternativeName(ip_address_list),
critical=True,
).sign(
rsa.generate_private_key(
public_exponent=65537,
key_size=2048,
backend=default_backend(),
),
hashes.SHA256(),
default_backend(),
)
parameter_dict = super()._getInstanceParameterDict()
parameter_dict['ssl']['csr'] = csr.public_bytes(serialization.Encoding.PEM).decode()
return parameter_dict
def test_certificate_validates_with_caucase_ca(self) -> None:
super().test_certificate_validates_with_caucase_ca()
balancer_parsed_url = urllib.parse.urlparse(self.default_balancer_zope_url)
cert = self._getServerCertificate(
balancer_parsed_url.hostname,
balancer_parsed_url.port,
)
self.assertEqual(
cert.subject.rfc4514_string(),
self.csr_subject.rfc4514_string())
class ContentTypeHTTPServer(ManagedHTTPServer):
"""An HTTP/1.1 Server which reply with content type from path.
For example when requested http://host/text/plain it will reply
with Content-Type: text/plain header.
This actually uses a URL like the following to support Zope-style virtual hosting:
GET /VirtualHostBase/https/{host}/VirtualHostRoot/text/plain
The body is always "OK"
"""
......@@ -510,7 +724,7 @@ class ContentTypeHTTPServer(ManagedHTTPServer):
if self.path == '/':
self.send_header("Content-Length", '0')
return self.end_headers()
content_type = self.path[1:]
content_type = '/'.join(self.path.split('/')[5:])
body = b"OK"
self.send_header("Content-Type", content_type)
self.send_header("Content-Length", str(len(body)))
......@@ -552,7 +766,7 @@ class TestContentEncoding(BalancerTestCase):
'application/font-woff2',
'application/x-font-opentype',
'application/wasm',):
resp = requests.get(urllib.parse.urljoin(self.default_balancer_url, content_type), verify=False)
resp = requests.get(urllib.parse.urljoin(self.default_balancer_zope_url, content_type), verify=False)
self.assertEqual(resp.headers['Content-Type'], content_type)
self.assertEqual(
resp.headers.get('Content-Encoding'),
......@@ -561,7 +775,7 @@ class TestContentEncoding(BalancerTestCase):
self.assertEqual(resp.text, 'OK')
def test_no_gzip_encoding(self) -> None:
resp = requests.get(urllib.parse.urljoin(self.default_balancer_url, '/image/png'), verify=False)
resp = requests.get(urllib.parse.urljoin(self.default_balancer_zope_url, '/image/png'), verify=False)
self.assertNotIn('Content-Encoding', resp.headers)
self.assertEqual(resp.text, 'OK')
......@@ -579,7 +793,6 @@ class TestFrontendXForwardedFor(BalancerTestCase):
parameter_dict = super()._getInstanceParameterDict()
# add another "-auth" backend, that will have ssl-authentication enabled
parameter_dict['zope-family-dict']['default-auth'] = ['dummy_http_server']
parameter_dict['backend-path-dict']['default-auth'] = '/'
parameter_dict['ssl-authentication-dict'] = {
'default': False,
'default-auth': True,
......@@ -652,7 +865,19 @@ class TestServerTLSProvidedCertificate(BalancerTestCase):
def _getInstanceParameterDict(cls) -> dict:
server_caucase = cls.getManagedResource('server_caucase', CaucaseService)
server_certificate = cls.getManagedResource('server_certificate', CaucaseCertificate)
server_certificate.request(cls._ipv4_address, server_caucase)
# Add all IPs of the computer in SubjectAlternativeName, because we don't
# know what the IP of the balancer partition will be.
with sqlite3.connect(cls.slap._proxy_database) as db:
ip_address_list = [
x509.IPAddress(ipaddress.ip_address(r)) for (r, ) in db.execute(
f"SELECT address FROM partition_network{DB_VERSION}").fetchall()
]
assert ip_address_list
server_certificate.request(
cls.__name__,
server_caucase,
x509.SubjectAlternativeName(ip_address_list))
parameter_dict = super()._getInstanceParameterDict()
with open(server_certificate.cert_file) as f:
parameter_dict['ssl']['cert'] = f.read()
......@@ -662,7 +887,7 @@ class TestServerTLSProvidedCertificate(BalancerTestCase):
def test_certificate_validates_with_provided_ca(self) -> None:
server_certificate = self.getManagedResource("server_certificate", CaucaseCertificate)
requests.get(self.default_balancer_url, verify=server_certificate.ca_crt_file)
requests.get(self.default_balancer_zope_url, verify=server_certificate.ca_crt_file)
class TestClientTLS(BalancerTestCase):
......@@ -708,7 +933,7 @@ class TestClientTLS(BalancerTestCase):
# the client certificate in "remote-user" header
def _make_request() -> dict:
return requests.get(
self.default_balancer_url,
self.default_balancer_zope_url,
cert=(client_certificate.cert_file, client_certificate.key_file),
verify=False,
).json()
......@@ -726,8 +951,8 @@ class TestClientTLS(BalancerTestCase):
self.assertEqual(_make_request()['Incoming Headers'].get('remote-user'),
client_certificate_name)
# We have two services, in charge of updating CRL and CA certificates for
# each frontend CA
# We have two services in charge of updating CRL and CA certificates for
# each frontend CA, plus the one for the balancer's own certificate
caucase_updater_list = glob.glob(
os.path.join(
self.computer_partition_root_path,
......@@ -735,7 +960,7 @@ class TestClientTLS(BalancerTestCase):
'service',
'caucase-updater-*',
))
self.assertEqual(len(caucase_updater_list), 2)
self.assertEqual(len(caucase_updater_list), 3)
# find the one corresponding to this caucase
for caucase_updater_candidate in caucase_updater_list:
......
......@@ -51,7 +51,7 @@ import urllib3
from slapos.testing.utils import CrontabMixin
import zc.buildout.configparser
from . import ERP5InstanceTestCase, default, matrix, neo, setUpModule
from . import CaucaseService, ERP5InstanceTestCase, default, matrix, neo, setUpModule
setUpModule # pyflakes
......@@ -60,15 +60,8 @@ class TestPublishedURLIsReachableMixin:
"""Mixin that checks that default page of ERP5 is reachable.
"""
def _checkERP5IsReachable(self, base_url, site_id, verify):
# We access ERP5 trough a "virtual host", which should make
# ERP5 produce URLs using https://virtual-host-name:1234/virtual_host_root
# as base.
virtual_host_url = urllib.parse.urljoin(
base_url,
'/VirtualHostBase/https/virtual-host-name:1234/{}/VirtualHostRoot/_vh_virtual_host_root/'
.format(site_id))
@contextlib.contextmanager
def requestSession(self, base_url):
# What happens is that instantiation just creates the services, but does not
# wait for ERP5 to be initialized. When this test runs, the ERP5 instance is
# instantiated, but zope is still busy creating the site and haproxy replies
......@@ -84,7 +77,32 @@ class TestPublishedURLIsReachableMixin:
total=20,
backoff_factor=.5,
status_forcelist=(404, 500, 503))))
yield session
def _checkERP5IsReachableWithVirtualHost(self, url, verify):
with self.requestSession(urllib.parse.urljoin(url, '/')) as session:
r = session.get(url, verify=verify, allow_redirects=True)
# access on / are redirected to login form
self.assertTrue(r.url.endswith('/login_form'))
self.assertEqual(r.status_code, requests.codes.ok)
self.assertIn("ERP5", r.text)
# the Host header is used in the redirected URL. The URL is always https
r = session.get(url, verify=verify, allow_redirects=False, headers={'Host': 'www.example.com'})
self.assertEqual(r.headers.get('Location'), 'https://www.example.com/login_form')
r = session.get(url, verify=verify, allow_redirects=False, headers={'Host': 'www.example.com:1234'})
self.assertEqual(r.headers.get('Location'), 'https://www.example.com:1234/login_form')
def _checkERP5IsReachableWithoutVirtualHost(self, base_url, site_id, verify):
# We access ERP5 through a "virtual host", which should make
# ERP5 produce URLs using https://virtual-host-name:1234/virtual_host_root
# as base.
virtual_host_url = urllib.parse.urljoin(
base_url,
'/VirtualHostBase/https/virtual-host-name:1234/{}/VirtualHostRoot/_vh_virtual_host_root/'
.format(site_id))
with self.requestSession(base_url) as session:
r = session.get(virtual_host_url, verify=verify, allow_redirects=False)
self.assertEqual(r.status_code, requests.codes.found)
# access on / are redirected to login form, with virtual host preserved
......@@ -99,34 +117,49 @@ class TestPublishedURLIsReachableMixin:
self.assertEqual(r.status_code, requests.codes.ok)
self.assertIn("ERP5", r.text)
def _getCaucaseServiceCACertificate(self):
ca_cert = tempfile.NamedTemporaryFile(
prefix="ca.crt.pem",
mode="w",
delete=False,
)
ca_cert.write(
requests.get(
urllib.parse.urljoin(
self.getRootPartitionConnectionParameterDict()['caucase-http-url'],
'/cas/crt/ca.crt.pem',
)).text)
ca_cert.flush()
self.addCleanup(os.unlink, ca_cert.name)
return ca_cert.name
def test_published_family_default_v6_is_reachable(self):
"""Tests the IPv6 URL published by the root partition is reachable.
"""
param_dict = self.getRootPartitionConnectionParameterDict()
self._checkERP5IsReachable(
self._checkERP5IsReachableWithoutVirtualHost(
param_dict['family-default-v6'],
param_dict['site-id'],
verify=False,
self._getCaucaseServiceCACertificate(),
)
def test_published_family_default_v4_is_reachable(self):
"""Tests the IPv4 URL published by the root partition is reachable.
"""
param_dict = self.getRootPartitionConnectionParameterDict()
self._checkERP5IsReachable(
self._checkERP5IsReachableWithoutVirtualHost(
param_dict['family-default'],
param_dict['site-id'],
verify=False,
self._getCaucaseServiceCACertificate(),
)
def test_published_frontend_default_is_reachable(self):
"""Tests the frontend URL published by the root partition is reachable.
"""
param_dict = self.getRootPartitionConnectionParameterDict()
self._checkERP5IsReachable(
self._checkERP5IsReachableWithVirtualHost(
param_dict['url-frontend-default'],
param_dict['site-id'],
verify=False,
self._getCaucaseServiceCACertificate(),
)
......@@ -141,9 +174,8 @@ class TestDefaultParameters(ERP5InstanceTestCase, TestPublishedURLIsReachableMix
'.installed-switch-softwaretype.cfg')) as f:
installed = zc.buildout.configparser.parse(f, 'installed')
self.assertEqual(
installed['request-frontend-default']['config-type'], 'zope')
self.assertEqual(
installed['request-frontend-default']['config-path'], '/erp5')
installed['request-frontend-default']['config-type'], '')
self.assertNotIn('config-path', installed['request-frontend-default'])
self.assertEqual(
installed['request-frontend-default']['config-authenticate-to-backend'], 'true')
self.assertEqual(installed['request-frontend-default']['shared'], 'true')
......@@ -159,6 +191,70 @@ class TestDefaultParameters(ERP5InstanceTestCase, TestPublishedURLIsReachableMix
self.getRootPartitionConnectionParameterDict()['url-frontend-default'])
class TestExternalCaucase(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
"""Test providing the URL of an external caucase in parameters.
"""
__partition_reference__ = 'ec'
@classmethod
def getInstanceParameterDict(cls) -> dict:
caucase_url = cls.getManagedResource("caucase", CaucaseService).url
return {'_': json.dumps({'caucase': {'url': caucase_url}})}
def test_published_caucase_http_url_parameter(self) -> None:
self.assertEqual(
self.getRootPartitionConnectionParameterDict()['caucase-http-url'],
self.getManagedResource("caucase", CaucaseService).url,
)
class TestReinstantiateWithExternalCaucase(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
"""Test providing the URL of an external caucase in parameters after
the initial instantiation.
"""
__partition_reference__ = 'sc'
def test_switch_to_external_caucase(self) -> None:
# this also waits that ERP5 is fully ready
self.test_published_frontend_default_is_reachable()
external_caucase_url = self.getManagedResource("caucase", CaucaseService).url
partition_parameter_kw = {
'_':
json.dumps(
dict(
json.loads(self.getInstanceParameterDict()['_']),
caucase={'url': external_caucase_url}))
}
def rerequest():
return self.slap.request(
software_release=self.getSoftwareURL(),
software_type=self.getInstanceSoftwareType(),
partition_reference=self.default_partition_reference,
partition_parameter_kw=partition_parameter_kw,
state='started')
rerequest()
self.slap.waitForInstance(max_retry=10)
self.assertEqual(
json.loads(rerequest().getConnectionParameterDict()['_'])['caucase-http-url'],
external_caucase_url)
with tempfile.NamedTemporaryFile(mode="w") as ca_cert:
ca_cert.write(
requests.get(
urllib.parse.urljoin(
external_caucase_url,
'/cas/crt/ca.crt.pem',
)).text)
ca_cert.flush()
requests.get(
self.getRootPartitionConnectionParameterDict()['url-frontend-default'],
verify=ca_cert.name).raise_for_status()
class TestJupyter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
"""Test ERP5 Jupyter notebook
"""
......@@ -225,6 +321,13 @@ class TestBalancerPorts(ERP5InstanceTestCase):
param_dict[f'family-{family_name}'])
self.checkValidHTTPSURL(
param_dict[f'family-{family_name}-v6'])
# ports are allocated in alphabetical order and are "stable", i.e. they are not supposed
# to change after updating the software release, because there is typically a rapid-cdn
# frontend pointing to this port.
self.assertEqual(urllib.parse.urlparse(param_dict['family-family1']).port, 2152)
self.assertEqual(urllib.parse.urlparse(param_dict['family-family1-v6']).port, 2152)
self.assertEqual(urllib.parse.urlparse(param_dict['family-family2']).port, 2155)
self.assertEqual(urllib.parse.urlparse(param_dict['family-family2-v6']).port, 2155)
def test_published_test_runner_url(self):
# each family also has a list of test runner URLs, by default 3 per family
......@@ -233,6 +336,7 @@ class TestBalancerPorts(ERP5InstanceTestCase):
family_test_runner_url_list = param_dict[
f'{family_name}-test-runner-url-list']
self.assertEqual(3, len(family_test_runner_url_list))
self.assertEqual(3, len(set(family_test_runner_url_list)))
for url in family_test_runner_url_list:
self.checkValidHTTPSURL(url)
......@@ -273,6 +377,68 @@ class TestBalancerPorts(ERP5InstanceTestCase):
])
class TestBalancerPortsStable(ERP5InstanceTestCase):
"""Instantiate with two one families and a frontend, then
re-request with one more family and one more frontend, the ports
should not change
"""
__partition_reference__ = 'ap'
@classmethod
def getInstanceParameterDict(cls):
return {
'_':
json.dumps(
{
"frontend": {
"zzz": {
"zope-family": "zzz"
}
},
"zope-partition-dict": {
"zzz": {
"instance-count": 1,
"family": "zzz"
},
},
})
}
def test_same_balancer_ports_when_adding_zopes_or_frontends(self):
param_dict_before = self.getRootPartitionConnectionParameterDict()
balancer_param_dict_before = json.loads(
self.getComputerPartition('balancer').getConnectionParameter('_'))
# re-request with one more frontend and one more backend, that are before
# the existing ones when sorting alphabetically
instance_parameter_dict = json.loads(self.getInstanceParameterDict()['_'])
instance_parameter_dict['frontend']['aaa'] = {"zope-family": "aaa"}
instance_parameter_dict['zope-partition-dict']['aaa'] = {
"instance-count": 2,
"family": "aaa"
}
def rerequest():
return self.slap.request(
software_release=self.getSoftwareURL(),
software_type=self.getInstanceSoftwareType(),
partition_reference=self.default_partition_reference,
partition_parameter_kw={'_': json.dumps(instance_parameter_dict)},
state='started')
rerequest()
self.slap.waitForInstance(max_retry=10)
param_dict_after = json.loads(rerequest().getConnectionParameterDict()['_'])
balancer_param_dict_after = json.loads(
self.getComputerPartition('balancer').getConnectionParameter('_'))
self.assertEqual(param_dict_before['family-zzz-v6'], param_dict_after['family-zzz-v6'])
self.assertEqual(param_dict_before['url-frontend-zzz'], param_dict_after['url-frontend-zzz'])
self.assertEqual(balancer_param_dict_before['url-backend-zzz'], balancer_param_dict_after['url-backend-zzz'])
self.assertNotEqual(param_dict_before['family-zzz-v6'], param_dict_after['family-aaa-v6'])
self.assertNotEqual(param_dict_before['url-frontend-zzz'], param_dict_after['url-frontend-aaa'])
self.assertNotEqual(balancer_param_dict_before['url-backend-zzz'], balancer_param_dict_after['url-backend-aaa'])
class TestSeleniumTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
"""Test ERP5 can be instantiated with selenium server for test runner.
"""
......@@ -333,14 +499,15 @@ class TestDisableTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMix
self.assertNotIn('runTestSuite', bin_programs)
def test_no_haproxy_testrunner_port(self):
# Haproxy only listen on two ports, there is no haproxy ports allocated for test runner
# Haproxy only listens on two ports for the frontend, two ports for the legacy entry points,
# and there are no haproxy ports allocated for the test runner
with self.slap.instance_supervisor_rpc as supervisor:
all_process_info = supervisor.getAllProcessInfo()
process_info, = (p for p in all_process_info if p['name'].startswith('haproxy'))
haproxy_master_process = psutil.Process(process_info['pid'])
haproxy_worker_process, = haproxy_master_process.children()
self.assertEqual(
sorted([socket.AF_INET, socket.AF_INET6]),
sorted([socket.AF_INET, socket.AF_INET6, socket.AF_INET, socket.AF_INET6]),
sorted(
c.family
for c in haproxy_worker_process.connections()
......@@ -1213,10 +1380,13 @@ class TestFrontend(ERP5InstanceTestCase):
},
"web": {
"family": "web",
"port-base": 2300,
},
"activities": {
# this family will not have frontend
"family": "activities"
"family": "activities",
"port-base": 2400,
},
},
"frontend": {
......@@ -1225,7 +1395,7 @@ class TestFrontend(ERP5InstanceTestCase):
},
"website": {
"zope-family": "web",
"internal-path": "/%(site-id)s/web_site_module/my_website/",
"internal-path": "/%(site-id)s/web_site_module/my_website",
"instance-parameters": {
# some extra frontend parameters
"enable_cache": "true",
......@@ -1253,19 +1423,20 @@ class TestFrontend(ERP5InstanceTestCase):
def test_request_parameters(self):
param_dict = self.getRootPartitionConnectionParameterDict()
balancer_param_dict = json.loads(
self.getComputerPartition('balancer').getConnectionParameter('_'))
with open(os.path.join(self.computer_partition_root_path,
'.installed-switch-softwaretype.cfg')) as f:
installed = zc.buildout.configparser.parse(f, 'installed')
self.assertEqual(
installed['request-frontend-backoffice']['config-type'], 'zope')
installed['request-frontend-backoffice']['config-type'], '')
self.assertEqual(
installed['request-frontend-backoffice']['shared'], 'true')
self.assertEqual(
installed['request-frontend-backoffice']['config-url'],
param_dict['family-default-v6'])
self.assertEqual(
installed['request-frontend-backoffice']['config-path'], '/erp5')
balancer_param_dict['url-backend-backoffice'])
self.assertNotIn('config-path', installed['request-frontend-backoffice'])
self.assertEqual(
installed['request-frontend-backoffice']['sla-computer_guid'],
'COMP-1234')
......@@ -1278,7 +1449,7 @@ class TestFrontend(ERP5InstanceTestCase):
param_dict['url-frontend-backoffice'])
self.assertEqual(
installed['request-frontend-website']['config-type'], 'zope')
installed['request-frontend-website']['config-type'], '')
# no SLA by default
self.assertFalse([k for k in installed['request-frontend-website'] if k.startswith('sla-')])
# instance parameters are propagated
......@@ -1286,10 +1457,8 @@ class TestFrontend(ERP5InstanceTestCase):
installed['request-frontend-website']['config-enable_cache'], 'true')
self.assertEqual(
installed['request-frontend-website']['config-url'],
param_dict['family-web-v6'])
self.assertEqual(
installed['request-frontend-website']['config-path'],
'/erp5/web_site_module/my_website/')
balancer_param_dict['url-backend-website'])
self.assertNotIn('config-path', installed['request-frontend-website'])
self.assertEqual(
installed['request-frontend-website']['connection-secure_access'],
param_dict['url-frontend-website'])
......@@ -1297,6 +1466,32 @@ class TestFrontend(ERP5InstanceTestCase):
# no frontend was requested for activities family
self.assertNotIn('request-frontend-activities', installed)
self.assertNotIn('url-frontend-activities', param_dict)
self.assertNotIn('url-backend-activities', balancer_param_dict)
def test_path_virtualhost(self):
balancer_param_dict = json.loads(
self.getComputerPartition('balancer').getConnectionParameter('_'))
found_line = False
retries = 10
while retries:
requests.get(balancer_param_dict['url-backend-website'], verify=False)
for logfile in glob.glob(os.path.join(self.getComputerPartitionPath('zope-web'), 'var/log/*Z2.log')):
with open(logfile) as f:
for line in f:
if 'GET /VirtualHost' in line:
found_line = True
break
if found_line:
break
time.sleep(1)
retries = retries - 1
self.assertTrue(found_line)
percent_encoded_netloc = urllib.parse.quote(
urllib.parse.urlparse(
balancer_param_dict['url-backend-website']).netloc)
self.assertIn(
f'/VirtualHostBase/https/{percent_encoded_netloc}/erp5/web_site_module/my_website/VirtualHostRoot/ HTTP', line)
class TestDefaultFrontendWithZopePartitionDict(ERP5InstanceTestCase):
......@@ -1325,12 +1520,15 @@ class TestDefaultFrontendWithZopePartitionDict(ERP5InstanceTestCase):
def test_frontend_requested(self):
param_dict = self.getRootPartitionConnectionParameterDict()
balancer_param_dict = json.loads(
self.getComputerPartition('balancer').getConnectionParameter('_'))
with open(os.path.join(self.computer_partition_root_path,
'.installed-switch-softwaretype.cfg')) as f:
installed = zc.buildout.configparser.parse(f, 'installed')
self.assertEqual(
installed['request-frontend-default']['config-url'],
param_dict['family-backoffice-v6'])
balancer_param_dict['url-backend-default'])
requests.get(
param_dict['url-frontend-default'],
......
......@@ -14,7 +14,7 @@
# not need these here).
[template-erp5]
filename = instance-erp5.cfg.in
md5sum = ba46a66da1c834df14a80a20b21e4a96
md5sum = 6db19ee819a960a34012308e29c5bbfb
[template-balancer]
filename = instance-balancer.cfg.in
......
......@@ -435,6 +435,7 @@ return =
{% endfor -%}
{% do monitor_base_url_dict.__setitem__('request-balancer', '${' ~ 'request-balancer' ~ ':connection-monitor-base-url}') -%}
config-zope-family-dict = {{ dumps(zope_family_parameter_dict) }}
config-frontend-parameter-dict = {{ dumps({}) }}
config-tcpv4-port = {{ dumps(balancer_dict.get('tcpv4-port', 2150)) }}
{% for zope_section_id, name in zope_address_list_id_dict.items() -%}
config-{{ name }} = {{ ' ${' ~ zope_section_id ~ ':connection-zope-address-list}' }}
......
......@@ -716,6 +716,7 @@ waitress-patches =
waitress-patch-options = -p1
Zope-patches =
${:_profile_base_location_}/../../component/egg-patch/Zope/0001-WSGIPublisher-set-REMOTE_USER-even-in-case-of-error-.patch#a437f4da28975f94dd07db0b02954111
${:_profile_base_location_}/../../component/egg-patch/Zope/0001-Fix-redirections-to-URLS-with-host-given-as-IP-litte.patch#093ad5755094d537c6a4deadc959ade0
Zope-patch-options = -p1
# neoppod installs bin/coverage so we inject erp5 plugin here so that coverage script can use it in report
......@@ -757,7 +758,7 @@ pysvn = 1.9.15+SlapOSPatched001
python-ldap = 2.4.32+SlapOSPatched001
python-magic = 0.4.12+SlapOSPatched001
waitress = 1.4.4+SlapOSPatched006
Zope = 4.8.9+SlapOSPatched001
Zope = 4.8.9+SlapOSPatched002
## https://lab.nexedi.com/nexedi/slapos/merge_requests/648
pylint = 1.4.4+SlapOSPatched002
# astroid 1.4.1 breaks testDynamicClassGeneration
......
......@@ -70,11 +70,11 @@ md5sum = b95084ae9eed95a68eada45e28ef0c04
[template]
filename = instance.cfg.in
md5sum = 5e0e9565227fe190c420a7bbcd0f7b93
md5sum = 55463b0abdbe0118ef1c27e6b71c3324
[template-erp5]
filename = instance-erp5.cfg.in
md5sum = 46c5d2173ab91f56b32e6eb2b544bc26
md5sum = ae9c380ae04dde4f20e139c66ef7c22a
[template-zeo]
filename = instance-zeo.cfg.in
......@@ -86,15 +86,15 @@ md5sum = 0ac4b74436f554cd677f19275d18d880
[template-zope]
filename = instance-zope.cfg.in
md5sum = 41709f47e5a9051ca4a9c943859f589b
md5sum = 6178ba7b42848f9e2412ab898a7b026c
[template-balancer]
filename = instance-balancer.cfg.in
md5sum = b0751d3d12cfcc8934cb1027190f5e5e
md5sum = 0fad9497da12ed0186dca5236c23f3a7
[template-haproxy-cfg]
filename = haproxy.cfg.in
md5sum = 85a8c0dadf7b648ef9748b6199dcfeb6
md5sum = 2cd76971b64b0bf7771978ad07bfc2e5
[template-rsyslogd-cfg]
filename = rsyslogd.cfg.in
......
{# This file configures haproxy to redirect requests from ports to specific urls.
# It provides TLS support for the server and optionally for the client.
#
......@@ -19,11 +18,11 @@
# "stats-socket": "<file_path>",
#
# # IPv4 to listen on
# # All backends from `backend-dict` will listen on this IP.
# # All frontends from `frontend-dict` will listen on this IP.
# "ipv4": "0.0.0.0",
#
# # IPv6 to listen on
# # All backends from `backend-dict` will listen on this IP.
# # All frontends from `frontend-dict` will listen on this IP.
# "ipv6": "::1",
#
# # Certificate and key in PEM format. All ports will serve TLS using
......@@ -42,36 +41,59 @@
# # Path to use for HTTP health check on backends from `backend-dict`.
# "server-check-path": "/",
#
# # The mapping of frontend, keyed by frontend name
# "frontend-dict": {
# "frontend-default": {
# "port": 8080,
# "client-cert-required": False,
# "backend-name": "family-default",
# "request-path-prepend": "/erp5",
# }
# "legacy-frontend-family-secure": {
# "port": 8000,
# "client-cert-required": False,
# "backend-name": "family-secure",
# "request-path-prepend": None, # None means do not rewrite the request path
# }
# "legacy-frontend-family-default": {
# "port": 8002,
# "client-cert-required": False,
# "backend-name": "family-default",
# "request-path-prepend": None, # None means do not rewrite the request path
# }
# }
# # The mapping of backends, keyed by family name
# "backend-dict": {
# "family-secure": {
# ( 8000, # port int
# 'https', # proto str
# True, # ssl_required bool
# None, # timeout (in seconds) int | None
# [ # backends
# '10.0.0.10:8001', # netloc str
# 1, # max_connection_count int
# False, # is_web_dav bool
# ],
# ),
# },
# "family-default": {
# ( 8002, # port int
# 'https', # proto str
# False, # ssl_required bool
# None, # timeout (in seconds) int | None
# [ # backends
# '10.0.0.10:8003', # netloc str
# 1, # max_connection_count int
# False, # is_web_dav bool
# ],
# ),
# "family-secure": {
# "timeout": None, # in seconds
# "backend-list": [
# [
# '10.0.0.10:8001', # netloc str
# 1, # max_connection_count int
# False, # is_web_dav bool
# ]
# ]
# },
# "family-default": {
# "timeout": None, # in seconds
# "backend-list": [
# [
# '10.0.0.10:8003', # netloc str
# 1, # max_connection_count int
# False, # is_web_dav bool
# ],
# [
# '10.0.0.10:8004', # netloc str
# 1, # max_connection_count int
# False, # is_web_dav bool
# ],
# ]
# },
#
# # The mapping of zope paths.
# # This is a Zope specific feature.
# # `enable_authentication` has same meaning as for `backend-list`.
# # This is a Zope specific feature used only to provide https while running
# # the ERP5 "unit test" suite.
# # `enable_authentication` has the same meaning as for `backend-dict`.
# "zope-virtualhost-monster-backend-dict": {
# # {(ip, port): ( enable_authentication, {frontend_path: ( internal_url ) }, ) }
# ('[::1]', 8004): (
......@@ -84,15 +106,20 @@
# }
#
# This sample of `parameter_dict` will make haproxy listening to :
# From to `backend-list`:
# For "family-secure":
# For "frontend-default":
# - 0.0.0.0:8080 redirecting internally to http://10.0.0.10:8003 or http://10.0.0.10:8004
# - [::1]:8080 redirecting internally to http://10.0.0.10:8003 or http://10.0.0.10:8004
# accepting requests from any client and rewriting the path to add a Zope rewrite rule
# so that a request on https://0.0.0.0:8080/path is rewritten to serve a Zope object at
# path /erp5/path, visible as /path.
# For "legacy-frontend-family-secure":
# - 0.0.0.0:8000 redirecting internally to http://10.0.0.10:8001 and
# - [::1]:8000 redirecting internally to http://10.0.0.10:8001
# only accepting requests from clients providing a verified TLS certificate
# emitted by a CA from `ca-cert` and not revoked in `crl`.
# For "family-default":
# - 0.0.0.0:8002 redirecting internally to http://10.0.0.10:8003
# - [::1]:8002 redirecting internally to http://10.0.0.10:8003
# For "legacy-frontend-family-default":
# - 0.0.0.0:8002 redirecting internally to http://10.0.0.10:8003 or http://10.0.0.10:8004
# - [::1]:8002 redirecting internally to http://10.0.0.10:8003 or http://10.0.0.10:8004
# accepting requests from any client.
#
# For both families, X-Forwarded-For header will be stripped unless
......@@ -105,7 +132,7 @@
# with some VirtualHostMonster rewrite rules so zope writes URLs with
# [::1]:8004 as server name.
# For more details, refer to
# https://docs.zope.org/zope2/zope2book/VirtualHosting.html#using-virtualhostroot-and-virtualhostbase-together
# https://zope.readthedocs.io/en/latest/zopebook/VirtualHosting.html#using-virtualhostroot-and-virtualhostbase-together
-#}
{% set server_check_path = parameter_dict['server-check-path'] -%}
......@@ -151,26 +178,17 @@ defaults
{% set family_path_routing_dict = parameter_dict['family-path-routing-dict'] %}
{% set path_routing_list = parameter_dict['path-routing-list'] %}
{% for name, (port, _, certificate_authentication, timeout, backend_list) in sorted(six.iteritems(parameter_dict['backend-dict'])) -%}
listen family_{{ name }}
{% for name, frontend in sorted(six.iteritems(parameter_dict['frontend-dict'])) %}
listen {{ name }}
{%- if parameter_dict.get('ca-cert') -%}
{%- set ssl_auth = ' ca-file ' ~ parameter_dict['ca-cert'] ~ ' verify' ~ ( ' required' if certificate_authentication else ' optional crt-ignore-err all' ) ~ ' crl-file ' ~ parameter_dict['crl'] %}
{%- set ssl_auth = ' ca-file ' ~ parameter_dict['ca-cert'] ~ ' verify' ~ ( ' required' if frontend['client-cert-required'] else ' optional crt-ignore-err all' ) ~ ' crl-file ' ~ parameter_dict['crl'] %}
{%- else %}
{%- set ssl_auth = '' %}
{%- endif %}
bind {{ parameter_dict['ipv4'] }}:{{ port }} {{ bind_ssl_crt }} {{ ssl_auth }}
bind {{ parameter_dict['ipv6'] }}:{{ port }} {{ bind_ssl_crt }} {{ ssl_auth }}
cookie SERVERID rewrite
http-request set-header X-Balancer-Current-Cookie SERVERID
{% if timeout %}
{#
Apply a slightly longer timeout than the zope timeout so that clients can see the
TimeoutReachedError from zope, that is a bit more informative than the 504 error
page from haproxy.
#}
timeout server {{ timeout + 3 }}s
{%- endif %}
bind {{ parameter_dict['ipv4'] }}:{{ frontend['port'] }} {{ bind_ssl_crt }} {{ ssl_auth }}
bind {{ parameter_dict['ipv6'] }}:{{ frontend['port'] }} {{ bind_ssl_crt }} {{ ssl_auth }}
# remove X-Forwarded-For unless client presented a verified certificate
http-request del-header X-Forwarded-For unless { ssl_c_verify 0 } { ssl_c_used 1 }
......@@ -178,18 +196,43 @@ listen family_{{ name }}
http-request del-header Remote-User
http-request set-header Remote-User %{+Q}[ssl_c_s_dn(cn)] if { ssl_c_verify 0 } { ssl_c_used 1 }
# reject invalid host header before using it in path
http-request deny deny_status 400 if { req.hdr(host) -m sub / }
# logs
capture request header Referer len 512
capture request header User-Agent len 512
log-format "%{+Q}o %{-Q}ci - - [%trg] %r %ST %B %{+Q}[capture.req.hdr(0)] %{+Q}[capture.req.hdr(1)] %Ta"
{% for outer_prefix, inner_prefix in family_path_routing_dict.get(name, []) + path_routing_list %}
{% if frontend['request-path-prepend'] is not none %}
http-request replace-path ^/(.*) /VirtualHostBase/https/%[req.hdr(Host)]{{ frontend['request-path-prepend'] }}/VirtualHostRoot/\1
{% endif %}
{% for outer_prefix, inner_prefix in family_path_routing_dict.get(frontend['backend-name'], []) + path_routing_list %}
{% set outer_prefix = outer_prefix.strip('/') -%}
http-request replace-path ^(/+VirtualHostBase/+[^/]+/+[^/]+)/+VirtualHostRoot/+{% if outer_prefix %}{{ outer_prefix }}($|/.*){% else %}(.*){% endif %} \1/{{ inner_prefix.strip('/') }}/VirtualHostRoot/{% if outer_prefix %}_vh_{{ outer_prefix.replace('/', '/_vh_') }}{% endif %}\2
{% endfor %}
use_backend {{ frontend['backend-name'] }}
{% endfor %}
{% for name, backend in sorted(six.iteritems(parameter_dict['backend-dict'])) %}
backend {{ name }}
cookie SERVERID rewrite
http-request set-header X-Balancer-Current-Cookie SERVERID
{% if backend['timeout'] %}
{#
Apply a slightly longer timeout than the zope timeout so that clients can see the
TimeoutReachedError from zope, that is a bit more informative than the 504 error
page from haproxy.
#}
timeout server {{ backend['timeout'] + 3 }}s
{%- endif %}
{% set has_webdav = [] -%}
{% for address, connection_count, webdav in backend_list -%}
{% for address, connection_count, webdav in backend['backend-list'] -%}
{% if webdav %}{% do has_webdav.append(None) %}{% endif -%}
{% set server_name = name ~ '-' ~ loop.index0 %}
server {{ server_name }} {{ address }} cookie {{ server_name }} check inter 3s rise 1 fall 2 maxqueue 5 maxconn {{ connection_count }}
......
......@@ -2,6 +2,20 @@
{% set part_list = [] -%}
{% macro section(name) %}{% do part_list.append(name) %}{{ name }}{% endmacro -%}
{% set ssl_parameter_dict = slapparameter_dict['ssl'] -%}
{% set caucase_url = ssl_parameter_dict.get('caucase-url') %}
{% set with_embedded_caucased = caucase_url is none %}
{% if not caucase_url -%}
{% if ipv6_set -%}
{% set caucase_host = '[' ~ (ipv6_set | list)[0] ~ ']' %}
{%- else -%}
{% set caucase_host = (ipv4_set | list)[0] %}
{%- endif %}
{% set caucase_http_port = 2198 -%}{# caucase_https_port is implicitly 2199 #}
{% set caucase_netloc = caucase_host ~ ':' ~ caucase_http_port -%}
{% set caucase_url = 'http://' ~ caucase_netloc %}
{% endif %}
{# Caucase related parts include a hash of the caucase_url, to make it possible to re-request with another caucase URL. #}
{% set caucase_haproxy_certificate_suffix = '-' + hashlib.md5(six.ensure_binary(caucase_url)).hexdigest()[:6] -%}
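When no caucase-url parameter is provided, the embedded caucased is reachable over plain HTTP on port 2198 of the partition address (IPv6 literals are bracketed), and the caucase related section names carry a short hash of that URL. A minimal Python sketch of both derivations, using placeholder addresses:

import hashlib

def caucase_url_and_suffix(host, port=2198, ipv6=False):
    # IPv6 literals must be bracketed inside a URL netloc.
    netloc = "%s:%s" % ("[%s]" % host if ipv6 else host, port)
    url = "http://" + netloc
    # Short, stable hash of the URL: the sections are renamed (and therefore
    # re-requested) whenever the caucase URL changes.
    return url, "-" + hashlib.md5(url.encode()).hexdigest()[:6]

print(caucase_url_and_suffix("2001:db8::1", ipv6=True))
print(caucase_url_and_suffix("10.0.0.2"))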
{% set frontend_caucase_url_list = ssl_parameter_dict.get('frontend-caucase-url-list', []) -%}
{#
XXX: This template only supports exactly one IPv4 and (if ipv6 is used) one IPv6
@@ -15,25 +29,103 @@ per partition. No more (undefined result), no less (IndexError).
[jinja2-template-base]
recipe = slapos.recipe.template:jinja2
[simplefile]
< = jinja2-template-base
inline = {{ '{{ content }}' }}
{% macro simplefile(section_name, file_path, content, mode='') -%}
{% set content_section_name = section_name ~ '-content' -%}
[{{ content_section_name }}]
content = {{ dumps(content) }}
[{{ section(section_name) }}]
< = simplefile
output = {{ file_path }}
context = key content {{content_section_name}}:content
mode = {{ mode }}
{%- endmacro %}
{% if with_embedded_caucased -%}
{{ caucase.caucased(
prefix='caucased-haproxy-certificate',
buildout_bin_directory=bin_directory,
caucased_path='${directory:services-on-watch}/caucased' + caucase_haproxy_certificate_suffix,
backup_dir='${directory:backup-caucased-haproxy-certificate}',
data_dir='${directory:srv-caucased-haproxy-certificate}',
netloc=caucase_netloc,
tmp='${directory:tmp}',
service_auto_approve_count=ssl_parameter_dict.get('service-auto-approve-amount', 1),
user_auto_approve_count=ssl_parameter_dict.get('user-auto-approve-amount', 0),
key_len=ssl_parameter_dict.get('key-length', 2048),
)}}
{% do section('caucased-haproxy-certificate') -%}
{% do section('caucased-haproxy-certificate-promise') -%}
{% endif -%}
[haproxy-certificate]
cert-and-key-file = ${directory:etc}/${:_buildout_section_name_}{{ caucase_haproxy_certificate_suffix }}-cert-and-key.pem
ca-file = ${directory:etc}/${:_buildout_section_name_}{{ caucase_haproxy_certificate_suffix }}.ca.crt
crl-file = ${directory:etc}/${:_buildout_section_name_}{{ caucase_haproxy_certificate_suffix }}.crl
[haproxy-certificate-csr-config]
recipe = slapos.recipe.template
inline =
[ req ]
prompt = no
req_extensions = req_ext
distinguished_name = dn
[ dn ]
CN = haproxy
[ req_ext ]
subjectAltName = @alt_names
[ alt_names ]
IP.1 = {{ ipv4 }}
{% if ipv6_set %}
IP.2 = {{ ipv6 }}
{% endif %}
output = ${buildout:parts-directory}/${:_buildout_section_name_}/${:_buildout_section_name_}.txt
[haproxy-certificate-csr]
recipe = plone.recipe.command
command =
if [ ! -f '${:csr}' ] ; then
{{ parameter_dict['openssl'] }}/bin/openssl req \
-newkey rsa \
-batch \
-new \
-nodes \
-keyout /dev/null \
-config '${haproxy-certificate-csr-config:output}' \
-out '${:csr}'
fi
stop-on-error = true
csr = ${directory:srv}/${:_buildout_section_name_}{{ caucase_haproxy_certificate_suffix }}.csr.pem
{% if ssl_parameter_dict.get('csr') %}
{{ simplefile(
'haproxy-certificate-csr-from-parameters',
'${directory:etc}/haproxy-certificate-csr-from-parameters.pem',
ssl_parameter_dict['csr']) }}
[haproxy-certificate-csr]
csr = ${haproxy-certificate-csr-from-parameters:output}
{% endif %}
{{ caucase.updater(
prefix='caucase-updater',
prefix='caucase-updater-haproxy-certificate',
buildout_bin_directory=parameter_dict['bin-directory'],
updater_path='${directory:services-on-watch}/caucase-updater',
url=ssl_parameter_dict['caucase-url'],
data_dir='${directory:srv}/caucase-updater',
crt_path='${apache-conf-ssl:caucase-cert}',
ca_path='${directory:srv}/caucase-updater/ca.crt',
crl_path='${directory:srv}/caucase-updater/crl.pem',
key_path='${apache-conf-ssl:caucase-key}',
updater_path='${directory:services-on-watch}/caucase-updater-haproxy-certificate' + caucase_haproxy_certificate_suffix,
url=caucase_url,
data_dir='${directory:caucase-updater-haproxy-certificate}',
crt_path='${haproxy-certificate:cert-and-key-file}',
ca_path='${haproxy-certificate:ca-file}',
crl_path='${haproxy-certificate:crl-file}',
key_path='${haproxy-certificate:cert-and-key-file}',
on_renew='${haproxy-reload:output}',
max_sleep=ssl_parameter_dict.get('max-crl-update-delay', 1.0),
template_csr_pem=ssl_parameter_dict.get('csr'),
template_csr='${haproxy-certificate-csr:csr}',
openssl=parameter_dict['openssl'] ~ '/bin/openssl',
)}}
{# XXX we don't use caucase yet.
{% do section('caucase-updater') -%}
{% do section('caucase-updater-promise') -%}
#}
{% do section('caucase-updater-haproxy-certificate') -%}
{% set frontend_caucase_url_hash_list = [] -%}
{% for frontend_caucase_url in frontend_caucase_url_list -%}
@@ -116,71 +208,8 @@ command = ${caucase-updater-housekeeper:output}
update-command = ${:command}
{% endif -%}
{% set haproxy_dict = {} -%}
{% set zope_virtualhost_monster_backend_dict = {} %}
{% set test_runner_url_dict = {} %} {# family_name => list of URLs #}
{% set next_port = functools.partial(next, itertools.count(slapparameter_dict['tcpv4-port'])) -%}
{% for family_name, parameter_id_list in sorted(
six.iteritems(slapparameter_dict['zope-family-dict'])) -%}
{% set zope_family_address_list = [] -%}
{% set ssl_authentication = slapparameter_dict['ssl-authentication-dict'][family_name] -%}
{% set has_webdav = [] -%}
{% for parameter_id in parameter_id_list -%}
{% set zope_address_list = slapparameter_dict[parameter_id] -%}
{% for zope_address, maxconn, webdav in zope_address_list -%}
{% if webdav -%}
{% do has_webdav.append(None) %}
{% endif -%}
{% set zope_effective_address = zope_address -%}
{% do zope_family_address_list.append((zope_effective_address, maxconn, webdav)) -%}
{% endfor -%}
{# # Generate entries with rewrite rule for test runners #}
{% set test_runner_address_list = slapparameter_dict.get(parameter_id ~ '-test-runner-address-list', []) %}
{% if test_runner_address_list -%}
{% set test_runner_backend_mapping = {} %}
{% set test_runner_balancer_url_list = [] %}
{% set test_runner_external_port = next_port() %}
{% for i, (test_runner_internal_ip, test_runner_internal_port) in enumerate(test_runner_address_list) %}
{% do test_runner_backend_mapping.__setitem__(
'unit_test_' ~ i,
'http://' ~ test_runner_internal_ip ~ ':' ~ test_runner_internal_port ) %}
{% do test_runner_balancer_url_list.append(
'https://' ~ ipv4 ~ ':' ~ test_runner_external_port ~ '/unit_test_' ~ i ~ '/' ) %}
{% endfor %}
{% do zope_virtualhost_monster_backend_dict.__setitem__(
(ipv4, test_runner_external_port),
( ssl_authentication, test_runner_backend_mapping ) ) -%}
{% do test_runner_url_dict.__setitem__(family_name, test_runner_balancer_url_list) -%}
{% endif -%}
{% endfor -%}
{# Make rendering fail artificially if any family has no known backend.
# This is useful as haproxy's hot-reconfiguration mechanism is
# supervisord-incompatible.
# As jinja2 postpones KeyError until place-holder value is actually used,
# do a no-op getitem.
-#}
{% do zope_family_address_list[0][0] -%}
{#
# We used to have haproxy then apache; now haproxy is playing apache's role.
# To keep the port stable, we consume one port so that haproxy uses the same
# port that apache was using before.
-#}
{% set _ = next_port() -%}
{% set haproxy_port = next_port() -%}
{% set backend_path = slapparameter_dict['backend-path-dict'][family_name] -%}
{% if has_webdav -%}
{% set external_scheme = 'webdavs' -%}
{% else %}
{% set external_scheme = 'https' -%}
{% endif -%}
{% do haproxy_dict.__setitem__(family_name, (haproxy_port, external_scheme, slapparameter_dict['ssl-authentication-dict'][family_name], slapparameter_dict['timeout-dict'][family_name], zope_family_address_list)) -%}
{% endfor -%}
[haproxy-cfg-parameter-dict]
recipe = slapos.recipe.build
ipv4 = {{ ipv4 }}
ipv6 = {{ ipv6 }}
cert = ${haproxy-conf-ssl:certificate}
@@ -194,9 +223,107 @@ family-path-routing-dict = {{ dumps(slapparameter_dict['family-path-routing-dict
pidfile = ${directory:run}/haproxy.pid
log-socket = ${rsyslogd-cfg-parameter-dict:log-socket}
server-check-path = {{ dumps(slapparameter_dict['haproxy-server-check-path']) }}
backend-dict = {{ dumps(haproxy_dict) }}
zope-virtualhost-monster-backend-dict = {{ dumps(zope_virtualhost_monster_backend_dict) }}
slapparameter-dict = {{ dumps(slapparameter_dict) }}
ports-state-file = ${buildout:directory}/.${:_buildout_section_name_}-ports.json
init =
import functools
import itertools
import json
import os
import shutil
import six
from zc.buildout import UserError
slapparameter_dict = options['slapparameter-dict']
ipv4 = options['ipv4']
ipv6 = options['ipv6']
# read port state file
port_dict = {}
previous_port_dict = None
if os.path.exists(options['ports-state-file']):
with open(options['ports-state-file']) as f:
port_dict = json.load(f)
previous_port_dict = dict(port_dict)
_next_port = functools.partial(next, itertools.count(slapparameter_dict['tcpv4-port']))
def get_port(name):
if name in port_dict:
return port_dict[name]
port = _next_port()
while port in port_dict.values():
port = _next_port()
port_dict[name] = port
return port
backend_dict = {}
frontend_dict = {}
zope_virtualhost_monster_backend_dict = {}
for family_name, parameter_id_list in sorted(
six.iteritems(slapparameter_dict['zope-family-dict'])):
zope_family_address_list = []
ssl_authentication = slapparameter_dict['ssl-authentication-dict'][family_name]
for parameter_id in parameter_id_list:
zope_family_address_list.extend(slapparameter_dict[parameter_id])
# Generate entries with rewrite rule for test runners
test_runner_address_list = slapparameter_dict.get(parameter_id + '-test-runner-address-list', [])
if test_runner_address_list:
test_runner_backend_mapping = {}
test_runner_balancer_url_list = []
for i, (test_runner_internal_ip, test_runner_internal_port) in enumerate(test_runner_address_list):
test_runner_backend_mapping['unit_test_%s' % i] = \
'http://%s:%s' % (test_runner_internal_ip, test_runner_internal_port)
test_runner_balancer_url_list.append(
'https://%s:%s/unit_test_%s/' % (ipv4, get_port('test-runner-' + family_name), i))
zope_virtualhost_monster_backend_dict[(ipv4, get_port('test-runner-' + family_name))] =\
( ssl_authentication, test_runner_backend_mapping )
self.buildout['publish'][family_name + '-test-runner-url-list'] = test_runner_balancer_url_list
if not zope_family_address_list:
raise UserError('No zope defined for family %s (maybe not ready)' % family_name)
# consume a port for compatibility with the time when we were using apache + haproxy
get_port('apache-compatibility-' + family_name)
legacy_port = get_port('legacy-' + family_name)
# a port for the monitoring promise (which port is not important; the promise only
# checks that haproxy is healthy enough to listen on a port)
options['haproxy-promise-port'] = legacy_port
frontend_dict['legacy-frontend-' + family_name] = {
'port': legacy_port,
'client-cert-required': ssl_authentication,
'backend-name': family_name,
'request-path-prepend': None,
}
backend_dict[family_name] = {
'timeout': slapparameter_dict['timeout-dict'][family_name],
'backend-list': zope_family_address_list,
}
external_scheme = 'webdavs' if any(a[2] for a in zope_family_address_list) else 'https'
self.buildout['publish'][family_name] = "{external_scheme}://{ipv4}:{legacy_port}".format(**locals())
self.buildout['publish'][family_name + "-v6"] = "{external_scheme}://[{ipv6}]:{legacy_port}".format(**locals())
for frontend_name, frontend in six.iteritems(slapparameter_dict['frontend-parameter-dict']):
frontend_port = get_port('frontend-' + frontend_name)
family_name = frontend['zope-family']
frontend_dict['frontend-' + frontend_name] = {
'port': frontend_port,
'client-cert-required': slapparameter_dict['ssl-authentication-dict'][family_name],
'backend-name': family_name,
'request-path-prepend': frontend['internal-path'],
}
self.buildout['publish']['url-backend-' + frontend_name] = "https://[{ipv6}]:{frontend_port}".format(**locals())
options['backend-dict'] = backend_dict
options['frontend-dict'] = frontend_dict
options['zope-virtualhost-monster-backend-dict'] = zope_virtualhost_monster_backend_dict
if port_dict != previous_port_dict:
with open(options['ports-state-file'] + '.tmp', 'w') as f:
json.dump(port_dict, f, indent=True)
shutil.move(options['ports-state-file'] + '.tmp', options['ports-state-file'])
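The init code above keeps a JSON state file so that a given logical name always maps to the same TCP port across reconfigurations; only unknown names consume the next free port, and the file is replaced through a temporary file (the real code also skips the rewrite when nothing changed). A standalone sketch of the same idea, with an arbitrary file name and starting port:

import itertools
import json
import os
import shutil

def allocate_ports(names, state_file, first_port=2152):
    # Reload previously assigned ports so existing services keep their port.
    port_dict = {}
    if os.path.exists(state_file):
        with open(state_file) as f:
            port_dict = json.load(f)
    counter = itertools.count(first_port)
    for name in names:
        if name not in port_dict:
            port = next(counter)
            while port in port_dict.values():
                port = next(counter)
            port_dict[name] = port
    # Write to a temporary file first, then move it into place.
    with open(state_file + ".tmp", "w") as f:
        json.dump(port_dict, f, indent=True)
    shutil.move(state_file + ".tmp", state_file)
    return port_dict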
[haproxy-cfg]
< = jinja2-template-base
@@ -222,28 +349,6 @@ wrapper-path = ${directory:services-on-watch}/haproxy
command-line = "{{ parameter_dict['haproxy'] }}/sbin/haproxy" -f "${haproxy-cfg:output}"
hash-files = ${haproxy-cfg:output}
[apache-conf-ssl]
# XXX caucase is/was buggy and this certificate does not match key for instances
# that were updated, so don't use it yet.
caucase-cert = ${directory:apache-conf}/apache-caucase.crt
caucase-key = ${directory:apache-conf}/apache-caucase.pem
[simplefile]
< = jinja2-template-base
inline = {{ '{{ content }}' }}
{% macro simplefile(section_name, file_path, content, mode='') -%}
{% set content_section_name = section_name ~ '-content' -%}
[{{ content_section_name }}]
content = {{ dumps(content) }}
[{{ section(section_name) }}]
< = simplefile
output = {{ file_path }}
context = key content {{content_section_name}}:content
mode = {{ mode }}
{%- endmacro %}
[{{ section('haproxy-socat-stats')}}]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:bin}/${:_buildout_section_name_}
@@ -277,7 +382,7 @@ config-command = test -S ${rsyslogd-cfg-parameter-dict:log-socket}
[haproxy-conf-ssl]
certificate = ${build-certificate-and-key:certificate-and-key}
certificate = ${haproxy-certificate:cert-and-key-file}
{% if frontend_caucase_url_list -%}
ca-cert = ${directory:etc}/frontend-ca.pem
ca-cert-dir = ${directory:ca-cert}
@@ -286,18 +391,15 @@ crl-dir = ${directory:crl}
depends = ${caucase-updater-housekeeper-run:recipe}
{%- endif %}
[build-certificate-and-key]
# BBB cert and key as ssl parameters. Use caucase instead.
{% if ssl_parameter_dict.get('key') -%}
certificate-and-key = ${tls-certificate-and-key-from-parameters:output}
{{ simplefile(
'tls-certificate-and-key-from-parameters',
'haproxy-conf-ssl-certificate-and-key-from-parameters',
'${directory:etc}/certificate-and-key-from-parameters.pem',
ssl_parameter_dict['cert'] ~ "\n" ~ ssl_parameter_dict['key']) }}
{% else %}
recipe = plone.recipe.command
command = "{{ parameter_dict['openssl'] }}/bin/openssl" req -newkey rsa -batch -new -x509 -days 3650 -nodes -keyout "${:certificate-and-key}" -out "${:certificate-and-key}"
certificate-and-key = ${directory:etc}/certificate-and-key-generated.pem
{%- endif %}
[haproxy-conf-ssl]
certificate = ${haproxy-conf-ssl-certificate-and-key-from-parameters:output}
{% endif %}
[{{ section('haproxy-promise') }}]
<= monitor-promise-base
@@ -305,17 +407,12 @@ certificate-and-key = ${directory:etc}/certificate-and-key-generated.pem
promise = check_socket_listening
name = haproxy.py
config-host = {{ ipv4 }}
config-port = {{ next(six.itervalues(haproxy_dict))[0] }}
config-port = ${haproxy-cfg-parameter-dict:haproxy-promise-port}
[{{ section('publish') }}]
recipe = slapos.cookbook:publish.serialised
{% for family_name, (port, scheme, _, _, _) in haproxy_dict.items() -%}
{{ family_name ~ '-v6' }} = {% if ipv6_set %}{{ scheme ~ '://[' ~ ipv6 ~ ']:' ~ port }}{% endif %}
{{ family_name }} = {{ scheme ~ '://' ~ ipv4 ~ ':' ~ port }}
{% endfor -%}
{% for family_name, test_runner_url_list in test_runner_url_dict.items() -%}
{{ family_name ~ '-test-runner-url-list' }} = {{ dumps(test_runner_url_list) }}
{% endfor -%}
# note: some values are pushed by haproxy-cfg-parameter-dict
caucase-http-url = {{ caucase_url }}
monitor-base-url = ${monitor-publish-parameters:monitor-base-url}
[{{ section('logrotate-rsyslogd') }}]
@@ -335,6 +432,12 @@ var = ${buildout:directory}/var
run = ${:var}/run
log = ${:var}/log
srv = ${buildout:directory}/srv
{% if with_embedded_caucased %}
srv-caucased-haproxy-certificate = ${:srv}/caucased-haproxy-certificate{{ caucase_haproxy_certificate_suffix }}/
backup-caucased-haproxy-certificate = ${:srv}/backup/caucased{{ caucase_haproxy_certificate_suffix }}
{% endif %}
caucase-updater-haproxy-certificate = ${:srv}/caucase-updater-haproxy-certificate
tmp = ${buildout:directory}/tmp
apachedex = ${monitor-directory:private}/apachedex
rsyslogd-spool = ${:run}/rsyslogd-spool
{% if frontend_caucase_url_list -%}
@@ -393,7 +496,7 @@ config-command = "{{ parameter_dict["check-computer-memory-binary"] }}" -db ${mo
[monitor-instance-parameter]
monitor-httpd-ipv6 = {{ (ipv6_set | list)[0] }}
monitor-httpd-port = {{ next_port() }}
monitor-httpd-port = 2197
monitor-title = {{ slapparameter_dict['name'] }}
password = {{ slapparameter_dict['monitor-passwd'] }}
@@ -79,35 +79,30 @@ bin = ${buildout:directory}/bin
service-on-watch = ${buildout:directory}/etc/service
srv = ${buildout:directory}/srv
tmp = ${buildout:directory}/tmp
backup-caucased = ${:srv}/backup/caucased
{% if not caucase_url -%}
{% if use_ipv6 -%}
{% set caucase_host = '[' ~ (ipv6_set | list)[0] ~ ']' %}
{%- else -%}
{% set caucase_host = (ipv4_set | list)[0] %}
{%- endif %}
{% set caucase_port = caucase_dict.get('base-port', 8890) -%}
{% set caucase_netloc = caucase_host ~ ':' ~ caucase_port -%}
{% set caucase_url = 'http://' ~ caucase_netloc -%}
{{ caucase.caucased(
prefix='caucased',
buildout_bin_directory=bin_directory,
caucased_path='${directory:service-on-watch}/caucased',
backup_dir='${directory:backup-caucased}',
data_dir='${directory:srv}/caucased',
netloc=caucase_netloc,
tmp='${directory:tmp}',
service_auto_approve_count=caucase_dict.get('service-auto-approve-amount', 1),
user_auto_approve_count=caucase_dict.get('user-auto-approve-amount', 0),
key_len=caucase_dict.get('key-length', 2048),
)}}
{% do root_common.section('caucased') -%}
{% do root_common.section('caucased-promise') -%}
{% endif -%}
{% do publish_dict.__setitem__('caucase-http-url', caucase_url) -%}
{% set balancer_dict = slapparameter_dict.setdefault('balancer', {}) -%}
{% do balancer_dict.setdefault('ssl', {}).setdefault('caucase-url', caucase_url) -%}
{% do balancer_dict.setdefault('ssl', {}) %}
{% if caucase_url %}
{% do balancer_dict['ssl'].setdefault('caucase-url', caucase_url) -%}
[balancer-updated-caucase-url]
# Promise to wait for the balancer partition to use the provided URL, for cases
# where the caucase URL is changed.
recipe = slapos.cookbook:check_parameter
value = ${publish:caucase-http-url}
expected-not-value =
expected-value = {{ caucase_url }}
path = ${directory:bin}/${:_buildout_section_name_}
[balancer-updated-caucase-url-promise]
<= monitor-promise-base
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command = ${balancer-updated-caucase-url:path}
{% do root_common.section("balancer-updated-caucase-url-promise") -%}
{% endif %}
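When a caucase-url is passed as a parameter, this promise keeps failing until the balancer partition publishes exactly that URL, giving a visible signal while the balancer still points at an older caucase. In plain Python the check amounts to something like the following sketch (not the recipe's actual code):

def check_balancer_uses_expected_caucase(published_url, expected_url):
    # The promise only succeeds once the published value matches the requested one.
    if published_url != expected_url:
        raise SystemExit("balancer still publishes %r, expected %r"
                         % (published_url, expected_url))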
{% do balancer_dict.setdefault('tcpv4-port', 2150) -%}
{% do balancer_dict.__setitem__('haproxy-server-check-path', balancer_dict.get('haproxy-server-check-path', '/') % {'site-id': site_id}) -%}
{% set routing_path_template_field_dict = {"site-id": site_id} -%}
@@ -241,7 +236,6 @@ return =
config-bt5 = {{ dumps(slapparameter_dict.get('bt5', ' '.join(bt5_default_list))) }}
config-bt5-repository-url = {{ dumps(slapparameter_dict.get('bt5-repository-url', local_bt5_repository)) }}
config-cloudooo-url-list = {{ dumps(slapparameter_dict.get('cloudooo-url-list', default_cloudooo_url_list)) }}
config-caucase-url = {{ dumps(caucase_url) }}
config-deadlock-debugger-password = ${publish-early:deadlock-debugger-password}
config-developer-list = {{ dumps(slapparameter_dict.get('developer-list', [inituser_login])) }}
config-selenium-server-configuration-dict = {{ dumps(slapparameter_dict.get('selenium-server-configuration-dict', {})) }}
@@ -275,7 +269,6 @@ software-type = zope
{% set global_activity_timeout = slapparameter_dict.get('activity-timeout') -%}
{% set zope_family_dict = {} -%}
{% set zope_family_name_list = [] -%}
{% set zope_backend_path_dict = {} -%}
{% set ssl_authentication_dict = {} -%}
{% set balancer_timeout_dict = {} -%}
{% set jupyter_zope_family_default = [] -%}
@@ -288,14 +281,12 @@ software-type = zope
{% set promise_test_runner_url_section_name = 'promise-test-runner-url' ~ partition_name -%}
{% set zope_family = zope_parameter_dict.get('family', 'default') -%}
{% do zope_family_name_list.append(zope_family) %}
{% set backend_path = zope_parameter_dict.get('backend-path', '') % {'site-id': site_id} %}
{# # default jupyter zope family is first zope family. -#}
{# # use list.append() to update it, because in jinja2 set changes only local scope. -#}
{% if not jupyter_zope_family_default -%}
{% do jupyter_zope_family_default.append(zope_family) -%}
{% endif -%}
{% do zope_family_dict.setdefault(zope_family, []).append(section_name) -%}
{% do zope_backend_path_dict.__setitem__(zope_family, backend_path) -%}
{% do ssl_authentication_dict.__setitem__(zope_family, zope_parameter_dict.get('ssl-authentication', False)) -%}
{% set current_zope_family_override_dict = zope_family_override_dict.get(zope_family, {}) -%}
{% do balancer_timeout_dict.__setitem__(zope_family, current_zope_family_override_dict.get('publisher-timeout', global_publisher_timeout)) -%}
@@ -397,13 +388,13 @@ return =
{% set request_frontend_name = 'request-frontend-' ~ frontend_name -%}
{% set frontend_software_url = frontend_parameters.get('software-url', 'http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg') -%}
{% set frontend_software_type = frontend_parameters.get('software-type', '') -%}
{% do frontend_parameters.__setitem__('internal-path', frontend_parameters.get('internal-path', '/%(site-id)s') % {'site-id': site_id}) %}
{% set frontend_instance_parameters = frontend_parameters.get('instance-parameters', {}) -%}
{% if frontend_instance_parameters.setdefault('type', 'zope') == 'zope' -%}
{% if frontend_instance_parameters.setdefault('type', '') == '' -%}
{% do frontend_instance_parameters.setdefault('authenticate-to-backend', 'true') -%}
{% set zope_family_name = frontend_parameters['zope-family'] -%}
{% do assert(zope_family_name in zope_family_dict, 'Unknown family %s for frontend %s' % (zope_family_name, frontend_name)) -%}
{% do frontend_instance_parameters.setdefault('url', '${request-balancer:connection-' ~ zope_family_name ~ '-v6}') -%}
{% do frontend_instance_parameters.setdefault('path', frontend_parameters.get('internal-path', '/%(site-id)s') % {'site-id': site_id}) -%}
{% do frontend_instance_parameters.setdefault('url', '${request-balancer:connection-url-backend-' ~ frontend_name ~ '}') -%}
{% endif %}
[{{ request_frontend_name }}]
<= request-frontend-base
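With this change the shared frontend is requested with the default ("") software type and only receives a per-frontend backend URL; the path prepend that the "zope" type used to perform is now applied by haproxy itself. A hypothetical before/after view of the instance parameters sent to the frontend (family and frontend names are illustrative):

# Before: "zope" type, pointed at the family URL plus a path to prepend.
before = {
    "type": "zope",
    "url": "${request-balancer:connection-default-v6}",
    "path": "/erp5",
    "authenticate-to-backend": "true",
}
# After: default ("") type, pointed at the dedicated per-frontend backend URL.
after = {
    "type": "",
    "url": "${request-balancer:connection-url-backend-default}",
    "authenticate-to-backend": "true",
}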
@@ -447,7 +438,7 @@ config-allow-redirects = 0
{%- endif %}
{% set balancer_ret_dict = {'monitor-base-url': False} -%}
{% set balancer_ret_dict = {'monitor-base-url': False, 'caucase-http-url': False} -%}
{% for family in zope_family_dict -%}
{% do balancer_ret_dict.__setitem__(family, False) -%}
{% do balancer_ret_dict.__setitem__(family + '-v6', False) -%}
@@ -455,6 +446,9 @@ config-allow-redirects = 0
{% do balancer_ret_dict.__setitem__(family + '-test-runner-url-list', False) -%}
{% endif -%}
{% endfor -%}
{% for frontend_name in frontend_parameter_dict -%}
{% do balancer_ret_dict.__setitem__('url-backend-' ~ frontend_name, False) -%}
{% endfor -%}
{% set balancer_key_config_dict = {
'monitor-passwd': 'monitor-htpasswd:passwd',
} -%}
@@ -476,7 +470,7 @@ config-allow-redirects = 0
config_key='balancer',
config={
'zope-family-dict': zope_family_parameter_dict,
'backend-path-dict': zope_backend_path_dict,
'frontend-parameter-dict': frontend_parameter_dict,
'ssl-authentication-dict': ssl_authentication_dict,
'timeout-dict': balancer_timeout_dict,
'apachedex-promise-threshold': monitor_dict.get('apachedex-promise-threshold', 70),
@@ -496,7 +490,7 @@ config-allow-redirects = 0
ret=balancer_ret_dict,
key_config=balancer_key_config_dict,
) }}
{% do publish_dict.__setitem__('caucase-http-url', '${request-balancer:connection-caucase-http-url}' ) -%}
{% endif -%}{# if zope_partition_dict -#}
@@ -81,7 +81,6 @@ environment +=
MATPLOTLIBRC={{ parameter_dict['matplotlibrc'] }}
PYTHONUNBUFFERED=1
INSTANCE_HOME=${:instance-home}
CAUCASE={{ slapparameter_dict['caucase-url'] }}
FONTCONFIG_FILE=${fontconfig-conf:output}
JUPYTER_PATH=${directory:jupyter-dir}
JUPYTER_CONFIG_DIR=${directory:jupyter-config-dir}
@@ -91,8 +91,9 @@ url = {{ template_balancer }}
filename = instance-balancer.cfg
extra-context =
section parameter_dict dynamic-template-balancer-parameters
import itertools itertools
import hashlib hashlib
# XXX: only used in software/slapos-master:
import itertools itertools
import functools functools
import-list =
file caucase context:caucase-jinja2-library