Commit 1348e674 authored by Xavier Thompson

Update Release Candidate

parents 29d51ff6 36dd4da0
......@@ -69,6 +69,7 @@ url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/gobject-introspect
md5sum = 5af8d724f25d0c9cfbe6df41b77e5dc0
configure-options =
--disable-static
--with-python=${buildout:executable}
environment =
PATH=${pkgconfig:location}/bin:${gettext:location}/bin:${glib:location}/bin:${xz-utils:location}/bin:${flex:location}/bin:${bison:location}/bin:%(PATH)s
......
......@@ -14,8 +14,8 @@ extends =
[fish-shell]
recipe = slapos.recipe.cmmi
shared = true
url = https://github.com/fish-shell/fish-shell/releases/download/3.4.1/fish-3.4.1.tar.xz
md5sum = 80733d30a14ffa50bf48cce96296aa7a
url = https://github.com/fish-shell/fish-shell/releases/download/3.5.0/fish-3.5.0.tar.xz
md5sum = aea858def631da76e1313936e79651fd
configure-command = ${cmake:location}/bin/cmake
configure-options =
-DCMAKE_INSTALL_PREFIX=${:location}
......
......@@ -15,12 +15,12 @@ extends =
[groonga]
recipe = slapos.recipe.cmmi
shared = true
url = https://packages.groonga.org/source/groonga/groonga-11.0.9.tar.gz
md5sum = 9c66445d92c8b7536f1b28119ac1855b
url = https://packages.groonga.org/source/groonga/groonga-12.0.4.tar.gz
md5sum = a7c6416301e5b5899ef517193c510bd0
groonga-plugin-dir = @@LOCATION@@/lib/groonga/plugins/
# temporary patch to respect more tokens in natural language mode.
patches =
${:_profile_base_location_}/groonga.patch#5d831331ddfdcd87e91b68949c339a1d
${:_profile_base_location_}/groonga.patch#cc6a678acd478fc074e678c7b7dd09d8
patch-options = -p1
configure-options =
--disable-static
......@@ -48,8 +48,8 @@ environment =
[groonga-normalizer-mysql]
recipe = slapos.recipe.cmmi
shared = true
url = https://packages.groonga.org/source/groonga-normalizer-mysql/groonga-normalizer-mysql-1.1.5.tar.gz
md5sum = 842d02becc6dcc25a02fa7e789c2cba7
url = https://packages.groonga.org/source/groonga-normalizer-mysql/groonga-normalizer-mysql-1.1.8.tar.gz
md5sum = a1520691da3083e14bdc65a9ec57a620
groonga-plugin-dir = @@LOCATION@@/lib/groonga/plugins/
pre-configure = mkdir -p ${:groonga-plugin-dir}
make-targets = GROONGA_PLUGINS_DIR=${:groonga-plugin-dir} install
......
--- a/lib/ii.c
+++ b/lib/ii.c
@@ -10387,7 +10387,9 @@
? (data->optarg->similarity_threshold > GRN_HASH_SIZE(h)
@@ -10874,7 +10874,9 @@
? ((uint32_t)(data->optarg->similarity_threshold) > GRN_HASH_SIZE(h)
? GRN_HASH_SIZE(h)
: data->optarg->similarity_threshold)
:(uint32_t)(data->optarg->similarity_threshold))
- : (GRN_HASH_SIZE(h) >> 3) + 1;
+ : (GRN_HASH_SIZE(h) < 8
+ ? GRN_HASH_SIZE(h)
+ : ((GRN_HASH_SIZE(h) - 8) >> 3) + 8);
if (GRN_HASH_SIZE(h)) {
grn_id j, id;
int w2, rep;
float w2;
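A quick illustration of the groonga.patch change above: the fallback similarity threshold used to be (GRN_HASH_SIZE(h) >> 3) + 1, which cuts small token sets down to one or two tokens; the patched expression keeps every token when there are fewer than 8 and then grows more slowly, which is what "respect more tokens in natural language mode" refers to. A minimal Python sketch of the arithmetic (illustration only, not part of the commit):

def old_threshold(hash_size):
    # previous fallback: (GRN_HASH_SIZE(h) >> 3) + 1
    return (hash_size >> 3) + 1

def new_threshold(hash_size):
    # patched fallback: keep all tokens below 8, then add one per extra 8
    return hash_size if hash_size < 8 else ((hash_size - 8) >> 3) + 8

for n in (4, 8, 16, 64):
    print(n, old_threshold(n), new_threshold(n))
# 4 -> 1 vs 4, 8 -> 2 vs 8, 16 -> 3 vs 9, 64 -> 9 vs 15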
......@@ -30,8 +30,8 @@ parts =
recipe = slapos.recipe.cmmi
shared = true
url = https://archive.mariadb.org//mariadb-${:version}/source/mariadb-${:version}.tar.gz
version = 10.4.22
md5sum = 0d5e1b9e3694322e18819811a2bf81fa
version = 10.4.25
md5sum = 76e6ee973adb7deb15d7936f710eb5a4
pre-configure =
set '\bSET(PLUGIN_AUTH_PAM YES CACHE BOOL "")' cmake/build_configurations/mysql_release.cmake
grep -q "$@"
......@@ -99,8 +99,8 @@ post-install =
# as plugin-dir ( https://mariadb.com/kb/en/server-system-variables/#plugin_dir )
recipe = slapos.recipe.cmmi
shared = true
url = https://packages.groonga.org/source/mroonga/mroonga-11.09.tar.gz
md5sum = 8b1786332edc61c41a769f225e6063b2
url = https://packages.groonga.org/source/mroonga/mroonga-12.04.tar.gz
md5sum = 1d154e6cb4540f7be5791b5fb376a5de
pre-configure =
rm -rf fake_mariadb_source
mkdir -p fake_mariadb_source
......@@ -117,8 +117,9 @@ configure-options =
--disable-document
post-install =
cp -rs ${mariadb:location}/lib/plugin/* ${:plugin-dir}
cat @@LOCATION@@/share/mroonga/install.sql @@LOCATION@@/share/mroonga/update.sql > ${:install-sql}
plugin-dir = @@LOCATION@@/lib/plugin
install-sql = @@LOCATION@@/share/mroonga/install.sql
install-sql = @@LOCATION@@/share/mroonga/install-and-update.sql
make-targets = plugindir=${:plugin-dir} install
patch-options = -p1
patches =
......@@ -134,8 +135,8 @@ environment =
### (we just override here for easier revert)
[mariadb-10.3]
<= mariadb-10.4
version = 10.3.32
md5sum = 12341dc150c810c0072a40e55825ca57
version = 10.3.35
md5sum = b7a2e69d103eda3dd61c8bad8775c7bd
post-install =
ldd=`ldd %(location)s/lib/plugin/ha_rocksdb.so`
for x in ${lz4:location} ${snappy:location} ${zstd:location}
......
......@@ -12,4 +12,5 @@ url = http://www.dest-unreach.org/socat/download/Archive/socat-${:version}.tar.g
version = 1.7.3.2
md5sum = aec3154f7854580cfab0c2d81e910519
environment =
CPPFLAGS=-I${openssl:location}/include
LDFLAGS=-L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib
......@@ -97,7 +97,7 @@
},
"publisher-timeout": {
"description": "How long a publisher-initiated transaction may last, in seconds",
"default": null,
"default": 300,
"type": [
"number",
"null"
......@@ -339,6 +339,7 @@
},
"zodb-zeo": {
"description": "Common settings ZEO servers",
"additionalProperties": false,
"properties": {
"tcpv4-port": {
"allOf": [
......@@ -464,6 +465,7 @@
},
"jupyter": {
"description": "Jupyter subinstance parameters",
"additionalProperties": false,
"properties": {
"enable": {
"description": "Whether to enable creation of associated Jupyter subinstance",
......@@ -480,6 +482,7 @@
},
"wcfs": {
"description": "Parameters for wendelin.core filesystem",
"additionalProperties": false,
"properties": {
"enable": {
"description": "Whether to enable WCFS filesystem and use it to access ZBigArray/ZBigFile data. In WCFS mode wendelin.core clients (Zope/ERP5 processes) share in-RAM cache for in-ZODB data without duplicating it for every client. This cache sharing does not affect correctness as isolation property is continued to be provided to every client.",
......@@ -495,6 +498,8 @@
},
"caucase": {
"description": "Caucase certificate authority parameters",
"allOf": [
{
"properties": {
"url": {
"title": "Caucase URL",
......@@ -503,14 +508,17 @@
"type": "string",
"format": "uri"
}
}
},
"additionalProperties": {
{
"$ref": "../caucase/instance-caucase-input-schema.json"
},
}
],
"type": "object"
},
"test-runner": {
"description": "Test runner parameters.",
"additionalProperties": false,
"properties": {
"enabled": {
"description": "Generate helper scripts to run test suite.",
......@@ -650,6 +658,7 @@
},
"ssl": {
"description": "HTTPS certificate generation parameters",
"additionalProperties": false,
"properties": {
"frontend-caucase-url-list": {
"title": "Frontend Caucase URL List",
......
{
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"additionalProperties": false,
"properties": {
"tcpv4-port": {
"allOf": [
......
{
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"additionalProperties": false,
"properties": {
"tcpv4-port": {
"allOf": [
......
{
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"additionalProperties": false,
"properties": {
"tcpv4-port": {
"allOf": [
......
......@@ -6,7 +6,6 @@
"default": {
"title": "Default",
"software-type": "default",
"description": "No automated database modification (ERP5Site is not automatically created).",
"request": "instance-erp5-input-schema.json",
"response": "instance-erp5-output-schema.json",
"index": 0
......
......@@ -166,10 +166,11 @@ class BalancerTestCase(ERP5InstanceTestCase):
'backend-path-dict': {
'default': '',
},
'ssl-authentication-dict': {},
'ssl-authentication-dict': {'default': False},
'ssl': {
'caucase-url': cls.getManagedResource("caucase", CaucaseService).url,
},
'timeout-dict': {'default': None},
'family-path-routing-dict': {},
'path-routing-list': [],
}
......@@ -186,18 +187,51 @@ class BalancerTestCase(ERP5InstanceTestCase):
class SlowHTTPServer(ManagedHTTPServer):
"""An HTTP Server which reply after 2 seconds.
"""An HTTP Server which reply after a timeout.
Timeout is 2 seconds by default, and can be specified in the path of the URL
"""
class RequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
# type: () -> None
self.send_response(200)
self.send_header("Content-Type", "text/plain")
time.sleep(2)
timeout = 2
try:
timeout = int(self.path[1:])
except ValueError:
pass
time.sleep(timeout)
self.end_headers()
self.wfile.write(b"OK\n")
log_message = logging.getLogger(__name__ + '.SlowHandler').info
log_message = logging.getLogger(__name__ + '.SlowHTTPServer').info
class TestTimeout(BalancerTestCase, CrontabMixin):
__partition_reference__ = 't'
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> dict
parameter_dict = super(TestTimeout, cls)._getInstanceParameterDict()
# use a slow server instead
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("slow_web_server", SlowHTTPServer).netloc, 1, False]]
# and set timeout of 1 second
parameter_dict['timeout-dict'] = {'default': 1}
return parameter_dict
def test_timeout(self):
# type: () -> None
self.assertEqual(
requests.get(
six.moves.urllib.parse.urljoin(self.default_balancer_url, '/1'),
verify=False).status_code,
requests.codes.ok)
self.assertEqual(
requests.get(
six.moves.urllib.parse.urljoin(self.default_balancer_url, '/5'),
verify=False).status_code,
requests.codes.gateway_timeout)
class TestLog(BalancerTestCase, CrontabMixin):
......@@ -753,6 +787,7 @@ class TestFrontendXForwardedFor(BalancerTestCase):
'default': False,
'default-auth': True,
}
parameter_dict['timeout-dict']['default-auth'] = None
parameter_dict['ssl']['frontend-caucase-url-list'] = [frontend_caucase.url]
return parameter_dict
......@@ -926,6 +961,8 @@ class TestPathBasedRouting(BalancerTestCase):
] = parameter_dict['zope-family-dict'][
'default'
]
parameter_dict['timeout-dict']['second'] = None
parameter_dict['ssl-authentication-dict']['second'] = False
# Routing rules outermost slashes mean nothing. They are internally
# stripped and rebuilt in order to correctly represent the request's URL.
parameter_dict['family-path-routing-dict'] = {
......
......@@ -406,7 +406,7 @@ class TestWatchActivities(ERP5InstanceTestCase):
__partition_reference__ = 'wa'
def test(self):
# "watch_activites" scripts use watch command. We'll fake a watch command
# "watch_activities" scripts use watch command. We'll fake a watch command
# that executes the actual command only once to check the output.
tmpdir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, tmpdir)
......@@ -443,37 +443,35 @@ class TestWatchActivities(ERP5InstanceTestCase):
self.assertIn(' dict ', output)
class ZopeTestMixin(CrontabMixin):
"""Mixin class for zope features.
class ZopeSkinsMixin(object):
"""Mixins with utility methods to test zope behaviors.
"""
wsgi = NotImplemented # type: bool
__partition_reference__ = 'z'
@classmethod
def getInstanceParameterDict(cls):
return {
'_':
json.dumps({
"zope-partition-dict": {
"default": {
"longrequest-logger-interval": 1,
"longrequest-logger-timeout": 1,
},
},
"wsgi": cls.wsgi,
})
}
def _setUpClass(cls):
super(ZopeSkinsMixin, cls)._setUpClass()
param_dict = cls.getRootPartitionConnectionParameterDict()
with cls.getXMLRPCClient() as erp5_xmlrpc_client:
# wait for ERP5 to be ready (TODO: this should probably be a promise)
for _ in range(120):
time.sleep(1)
try:
erp5_xmlrpc_client.getTitle()
except (six.moves.xmlrpc_client.ProtocolError,
six.moves.xmlrpc_client.Fault):
pass
else:
break
@classmethod
def _setUpClass(cls):
super(ZopeTestMixin, cls)._setUpClass()
def _getAuthenticatedZopeUrl(cls, path, family_name='default'):
"""Returns a URL to access a zope family through balancer,
with credentials in the URL.
path is joined with urllib.parse.urljoin to the URL of the portal.
"""
param_dict = cls.getRootPartitionConnectionParameterDict()
# rebuild an url with user and password
parsed = six.moves.urllib.parse.urlparse(param_dict['family-default'])
cls.zope_base_url = parsed._replace(
parsed = six.moves.urllib.parse.urlparse(param_dict['family-' + family_name])
base_url = parsed._replace(
netloc='{}:{}@{}:{}'.format(
param_dict['inituser-login'],
param_dict['inituser-password'],
......@@ -482,22 +480,17 @@ class ZopeTestMixin(CrontabMixin):
),
path=param_dict['site-id'] + '/',
).geturl()
return six.moves.urllib_parse.urljoin(base_url, path)
cls.zope_deadlock_debugger_url = six.moves.urllib_parse.urljoin(
cls.zope_base_url,
'/manage_debug_threads?{deadlock-debugger-password}'.format(
**param_dict),
)
@classmethod
@contextlib.contextmanager
def getXMLRPCClient():
def getXMLRPCClient(cls):
# don't verify certificate
ssl_context = ssl.create_default_context()
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
erp5_xmlrpc_client = six.moves.xmlrpc_client.ServerProxy(
cls.zope_base_url,
cls._getAuthenticatedZopeUrl(''),
context=ssl_context,
)
# BBB use as a context manager only on python3
......@@ -507,20 +500,9 @@ class ZopeTestMixin(CrontabMixin):
with erp5_xmlrpc_client:
yield erp5_xmlrpc_client
with getXMLRPCClient() as erp5_xmlrpc_client:
# wait for ERP5 to be ready (TODO: this should probably be a promise)
for _ in range(120):
time.sleep(1)
try:
erp5_xmlrpc_client.getTitle()
except (six.moves.xmlrpc_client.ProtocolError,
six.moves.xmlrpc_client.Fault):
pass
else:
break
def addPythonScript(script_id, params, body):
with getXMLRPCClient() as erp5_xmlrpc_client:
@classmethod
def _addPythonScript(cls, script_id, params, body):
with cls.getXMLRPCClient() as erp5_xmlrpc_client:
custom = erp5_xmlrpc_client.portal_skins.custom
try:
custom.manage_addProduct.PythonScripts.manage_addPythonScript(
......@@ -535,8 +517,40 @@ class ZopeTestMixin(CrontabMixin):
body,
)
class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin):
"""Mixin class for zope features.
"""
wsgi = NotImplemented # type: bool
__partition_reference__ = 'z'
@classmethod
def getInstanceParameterDict(cls):
return {
'_':
json.dumps({
"zope-partition-dict": {
"default": {
"longrequest-logger-interval": 1,
"longrequest-logger-timeout": 1,
},
},
"wsgi": cls.wsgi,
})
}
@classmethod
def _setUpClass(cls):
super(ZopeTestMixin, cls)._setUpClass()
cls.zope_base_url = cls._getAuthenticatedZopeUrl('')
param_dict = cls.getRootPartitionConnectionParameterDict()
cls.zope_deadlock_debugger_url = cls._getAuthenticatedZopeUrl(
'/manage_debug_threads?{deadlock-debugger-password}'.format(
**param_dict))
# a python script to verify activity processing
addPythonScript(
cls._addPythonScript(
script_id='ERP5Site_verifyActivityProcessing',
params='mode',
body='''if 1:
......@@ -556,7 +570,7 @@ class ZopeTestMixin(CrontabMixin):
'ERP5Site_verifyActivityProcessing',
)
# a python script logging to event log
addPythonScript(
cls._addPythonScript(
script_id='ERP5Site_logMessage',
params='name',
body='''if 1:
......@@ -569,7 +583,7 @@ class ZopeTestMixin(CrontabMixin):
'ERP5Site_logMessage',
)
# a python script issuing a long request
addPythonScript(
cls._addPythonScript(
script_id='ERP5Site_executeLongRequest',
params='',
body='''if 1:
......@@ -850,3 +864,67 @@ class TestZopeWSGI(ZopeTestMixin, ERP5InstanceTestCase):
@unittest.expectedFailure
def test_basic_authentication_user_in_access_log(self):
super(TestZopeWSGI, self).test_basic_authentication_user_in_access_log(self)
class TestZopePublisherTimeout(ZopeSkinsMixin, ERP5InstanceTestCase):
__partition_reference__ = 't'
@classmethod
def getInstanceParameterDict(cls):
return {
'_':
json.dumps({
# a default timeout of 3
"publisher-timeout": 3,
# and a family without timeout
"family-override": {
"no-timeout": {
"publisher-timeout": None,
},
},
"zope-partition-dict": {
# a family to process activities, so that our test
# does not hit a zope node processing activities
"activity": {
"family": "activity",
},
"default": {
"family": "default",
"port-base": 2210,
},
"no-timeout": {
"family": "no-timeout",
"port-base": 22220,
},
},
})
}
@classmethod
def _setUpClass(cls):
super(TestZopePublisherTimeout, cls)._setUpClass()
cls._addPythonScript(
'ERP5Site_doSlowRequest',
'',
'''if 1:
import time
def recurse(o):
time.sleep(0.1)
for sub in o.objectValues():
recurse(sub)
recurse(context.getPortalObject())
'''
)
def test_long_request_interupted_on_default_family(self):
ret = requests.get(self._getAuthenticatedZopeUrl(
'ERP5Site_doSlowRequest', family_name='default'), verify=False)
self.assertIn('TimeoutReachedError', ret.text)
self.assertEqual(ret.status_code, requests.codes.server_error)
def test_long_request_not_interupted_on_no_timeout_family(self):
with self.assertRaises(requests.exceptions.Timeout):
requests.get(
self._getAuthenticatedZopeUrl('ERP5Site_doSlowRequest', family_name='no-timeout'),
verify=False,
timeout=6)
......@@ -19,15 +19,15 @@ md5sum = 7e90da1f6dac4233e1aa3248f48e357c
[template-kvm]
filename = instance-kvm.cfg.jinja2
md5sum = fb8f9a962cc1c0d986305c6a6ce59082
md5sum = 4a3f999a8d1705c0cabf58d563953d4d
[template-kvm-cluster]
filename = instance-kvm-cluster.cfg.jinja2.in
md5sum = 8562882ac4a888d18db50e2a22e3ca86
md5sum = 406bf455afe1c71cb0e8cc04dd877f1d
[template-kvm-resilient]
filename = instance-kvm-resilient.cfg.jinja2
md5sum = a0fd4911401cbbda74323e8d1c7b18ad
md5sum = 839fc16c112d3b87e2dbd2e382e326de
[template-kvm-import]
filename = instance-kvm-import.cfg.jinja2.in
......@@ -71,7 +71,7 @@ md5sum = 752c91a4a6b72f5cf8226d6b940015f8
[template-qemu-ready]
filename = template/qemu-is-ready.in
md5sum = fb330a796fadb6cd5c85217f80a42af3
md5sum = a97ba5a5afcfd6f6bb9f4e77f37555dd
[template-httpd]
filename = instance-kvm-http.cfg.in
......
......@@ -32,7 +32,6 @@ global-ipv6 = {{ ipv6 }}
{% endfor -%}
[request-common]
recipe = slapos.cookbook:request
software-url = ${slap-connection:software-release-url}
server-url = ${slap-connection:server-url}
key-file = ${slap-connection:key-file}
......@@ -47,6 +46,7 @@ config-use-ipv6 = {{ dumps(slapparameter_dict.get('use-ipv6', False)) }}
{% set use_nat = kvm_parameter_dict.get('use-nat', True) -%}
[{{ section }}]
<= request-common
recipe = slapos.cookbook:request.serialised
software-type = kvm
name = {{ instance_name }}
{% if kvm_parameter_dict.get('sticky-computer', '') -%}
......@@ -319,7 +319,7 @@ private-path-list +=
[publish-connection-information]
<= monitor-publish
-extends = publish-early
recipe = slapos.cookbook:publish
recipe = slapos.cookbook:publish.serialised
{% for name, value in publish_dict.items() -%}
{{ name }} = {{ value }}
{% endfor %}
......
{
"type": "object",
"$schema": "http://json-schema.org/draft-04/schema",
"$schema": "http://json-schema.org/draft-06/schema",
"allOf": [
{
"$ref": "instance-kvm-input-schema.json#/"
"$ref": "./instance-kvm-input-schema.json#"
},
{
"properties": {
......
......@@ -61,6 +61,7 @@ etc = ${buildout:directory}/etc
# Bubble down the parameters of the requested instance to the user
[request-kvm]
recipe = slapos.cookbook:request.serialised
# Note: += doesn't work.
return =
# Resilient related parameters
......
......@@ -6,19 +6,19 @@
{% endif %}
{% endfor %}
{% set additional_frontend = slapparameter_dict.get('frontend-additional-instance-guid') %}
{% set enable_http = str(slapparameter_dict.get('enable-http-server', False)).lower() == 'true' -%}
{% set use_tap = str(slapparameter_dict.get('use-tap', True)).lower() == 'true' -%}
{% set use_nat = str(slapparameter_dict.get('use-nat', True)).lower() == 'true' -%}
{% set wipe_disk = str(slapparameter_dict.get('wipe-disk-ondestroy', False)).lower() == 'true' -%}
{% set nat_restrict = str(slapparameter_dict.get('nat-restrict-mode', False)).lower() == 'true' -%}
{% set enable_http = slapparameter_dict.get('enable-http-server', False) -%}
{% set use_tap = slapparameter_dict.get('use-tap', True) -%}
{% set use_nat = slapparameter_dict.get('use-nat', True) -%}
{% set wipe_disk = slapparameter_dict.get('wipe-disk-ondestroy', False) -%}
{% set nat_restrict = slapparameter_dict.get('nat-restrict-mode', False) -%}
{% set name = slapparameter_dict.get('name', 'localhost') -%}
{% set disable_ansible_promise = str(slapparameter_dict.get('disable-ansible-promise', True)).lower() == 'true' -%}
{% set disable_ansible_promise = slapparameter_dict.get('disable-ansible-promise', True) -%}
{% set instance_type = slapparameter_dict.get('type', 'standalone') -%}
{% set nat_rule_list = slapparameter_dict.get('nat-rules', '22 80 443') -%}
{% set disk_device_path = slapparameter_dict.get('disk-device-path', None) -%}
{% set whitelist_domains = slapparameter_dict.get('whitelist-domains', '') -%}
{% set virtual_hard_drive_url_enabled = 'virtual-hard-drive-url' in slapparameter_dict %}
{% set virtual_hard_drive_url_gzipped = str(slapparameter_dict.get('virtual-hard-drive-gzipped', False)).lower() == 'true' %}
{% set virtual_hard_drive_url_gzipped = slapparameter_dict.get('virtual-hard-drive-gzipped', False) %}
{% set boot_image_url_list_enabled = 'boot-image-url-list' in slapparameter_dict %}
{% set boot_image_url_select_enabled = 'boot-image-url-select' in slapparameter_dict %}
{% set bootstrap_script_url = slapparameter_dict.get('bootstrap-script-url') -%}
......@@ -434,7 +434,7 @@ network-adapter = ${slap-parameter:network-adapter}
pid-file-path = ${kvm-controller-parameter-dict:pid-file}
socket-path = ${kvm-controller-parameter-dict:socket-path}
{%- set enable_device_hotplug = slapparameter_dict.get('enable-device-hotplug', 'false').lower() == 'true' %}
{%- set enable_device_hotplug = slapparameter_dict.get('enable-device-hotplug', False) %}
smp-max-count = {{ cpu_max_count }}
ram-max-size = {{ ram_max_size }}
{%- if enable_device_hotplug %}
......@@ -819,7 +819,7 @@ blank-line =
[publish-connection-information]
<= monitor-publish
recipe = slapos.cookbook:publish
recipe = slapos.cookbook:publish.serialised
ipv6 = ${slap-network-information:global-ipv6}
backend-url = https://[${novnc-instance:ip}]:${novnc-instance:port}/vnc.html?auto=1&encrypt=1&password=${kvm-controller-parameter-dict:vnc-passwd}
url = ${request-slave-frontend:connection-secure_access}/vnc.html?auto=1&encrypt=1&password=${kvm-controller-parameter-dict:vnc-passwd}
......
......@@ -5,6 +5,7 @@
"software-type": {
"default": {
"title": "Default",
"serialisation": "json-in-xml",
"description": "Standalone KVM",
"request": "instance-kvm-input-schema.json",
"response": "instance-kvm-output-schema.json",
......
#!{{ dash }}
FILE="{{ qemu_ready_path }}"
# don't start checks too fast
sleep 2
if [ -f "$FILE" ]; then
if [ "$(cat $FILE)" = "" ]; then
echo "VM correctly started."
......
......@@ -118,6 +118,10 @@ bootstrap_machine_param_dict = {
class KvmMixin(object):
def getConnectionParameterDictJson(self):
return json.loads(
self.computer_partition.getConnectionParameterDict()['_'])
def getProcessInfo(self):
hash_value = generateHashFromFiles([
os.path.join(self.computer_partition_root_path, hash_file)
......@@ -160,13 +164,25 @@ class KvmMixin(object):
state=state)
class KvmMixinJson(object):
@classmethod
def getInstanceParameterDict(cls):
return {
'_': json.dumps(super(KvmMixinJson, cls).getInstanceParameterDict())}
def rerequestInstance(self, parameter_dict, *args, **kwargs):
return super(KvmMixinJson, self).rerequestInstance(
parameter_dict={'_': json.dumps(parameter_dict)},
*args, **kwargs
)
@skipUnlessKvm
class TestInstance(InstanceTestCase, KvmMixin):
__partition_reference__ = 'i'
def test(self):
connection_parameter_dict = self\
.computer_partition.getConnectionParameterDict()
connection_parameter_dict = self.getConnectionParameterDictJson()
present_key_list = []
assert_key_list = [
'backend-url', 'url', 'monitor-setup-url', 'ipv6-network-info',
......@@ -307,7 +323,12 @@ class TestMemoryManagement(InstanceTestCase, KvmMixin):
)
class MonitorAccessMixin(object):
@skipUnlessKvm
class TestMemoryManagementJson(KvmMixinJson, TestMemoryManagement):
pass
class MonitorAccessMixin(KvmMixin):
def sqlite3_connect(self):
sqlitedb_file = os.path.join(
os.path.abspath(
......@@ -334,8 +355,7 @@ class MonitorAccessMixin(object):
db.close()
def test_access_monitor(self):
connection_parameter_dict = self.computer_partition\
.getConnectionParameterDict()
connection_parameter_dict = self.getConnectionParameterDictJson()
monitor_setup_url = connection_parameter_dict['monitor-setup-url']
monitor_url_with_auth = 'https' + monitor_setup_url.split('https')[2]
......@@ -348,8 +368,8 @@ class MonitorAccessMixin(object):
connection_xml = partition_information.get('connection_xml')
if not connection_xml:
continue
connection_dict = slapos.util.xml2dict(
connection_xml if six.PY3 else connection_xml.encode('utf-8'))
connection_dict = json.loads(slapos.util.xml2dict(
connection_xml if six.PY3 else connection_xml.encode('utf-8'))['_'])
monitor_base_url = connection_dict.get('monitor-base-url')
if not monitor_base_url:
continue
......@@ -376,8 +396,7 @@ class TestAccessDefault(MonitorAccessMixin, InstanceTestCase):
expected_partition_with_monitor_base_url_count = 1
def test(self):
connection_parameter_dict = self.computer_partition\
.getConnectionParameterDict()
connection_parameter_dict = self.getConnectionParameterDictJson()
result = requests.get(connection_parameter_dict['url'], verify=False)
self.assertEqual(
httplib.OK,
......@@ -387,6 +406,11 @@ class TestAccessDefault(MonitorAccessMixin, InstanceTestCase):
self.assertNotIn('url-additional', connection_parameter_dict)
@skipUnlessKvm
class TestAccessDefaultJson(KvmMixinJson, TestAccessDefault):
pass
@skipUnlessKvm
class TestAccessDefaultAdditional(MonitorAccessMixin, InstanceTestCase):
__partition_reference__ = 'ada'
......@@ -399,8 +423,7 @@ class TestAccessDefaultAdditional(MonitorAccessMixin, InstanceTestCase):
}
def test(self):
connection_parameter_dict = self.computer_partition\
.getConnectionParameterDict()
connection_parameter_dict = self.getConnectionParameterDictJson()
result = requests.get(connection_parameter_dict['url'], verify=False)
self.assertEqual(
......@@ -418,6 +441,12 @@ class TestAccessDefaultAdditional(MonitorAccessMixin, InstanceTestCase):
self.assertIn('<title>noVNC</title>', result.text)
@skipUnlessKvm
class TestAccessDefaultAdditionalJson(
KvmMixinJson, TestAccessDefaultAdditional):
pass
@skipUnlessKvm
class TestAccessDefaultBootstrap(MonitorAccessMixin, InstanceTestCase):
__partition_reference__ = 'adb'
......@@ -455,8 +484,7 @@ class TestAccessDefaultBootstrap(MonitorAccessMixin, InstanceTestCase):
self.slap.waitForInstance(max_retry=10)
# END: mock .slapos-resource with tap.ipv4_addr
cp = self.computer_partition
connection_parameter_dict = cp.getConnectionParameterDict()
connection_parameter_dict = self.getConnectionParameterDictJson()
result = requests.get(connection_parameter_dict['url'], verify=False)
self.assertEqual(
......@@ -492,8 +520,7 @@ class TestAccessKvmCluster(MonitorAccessMixin, InstanceTestCase):
})}
def test(self):
connection_parameter_dict = self.computer_partition\
.getConnectionParameterDict()
connection_parameter_dict = self.getConnectionParameterDictJson()
result = requests.get(connection_parameter_dict['KVM0-url'], verify=False)
self.assertEqual(
httplib.OK,
......@@ -526,8 +553,7 @@ class TestAccessKvmClusterAdditional(MonitorAccessMixin, InstanceTestCase):
})}
def test(self):
connection_parameter_dict = self.computer_partition\
.getConnectionParameterDict()
connection_parameter_dict = self.getConnectionParameterDictJson()
result = requests.get(connection_parameter_dict['KVM0-url'], verify=False)
self.assertEqual(
httplib.OK,
......@@ -570,8 +596,7 @@ class TestAccessKvmClusterBootstrap(MonitorAccessMixin, InstanceTestCase):
}))}
def test(self):
connection_parameter_dict = self.computer_partition\
.getConnectionParameterDict()
connection_parameter_dict = self.getConnectionParameterDictJson()
result = requests.get(
connection_parameter_dict['test-machine1-url'], verify=False)
self.assertEqual(
......@@ -696,6 +721,12 @@ ir3:sshd-on-watch RUNNING""",
)
@skipUnlessKvm
class TestInstanceResilientJson(
KvmMixinJson, TestInstanceResilient):
pass
@skipUnlessKvm
class TestInstanceResilientDiskTypeIde(InstanceTestCase, KvmMixin):
@classmethod
......@@ -705,6 +736,12 @@ class TestInstanceResilientDiskTypeIde(InstanceTestCase, KvmMixin):
}
@skipUnlessKvm
class TestInstanceResilientDiskTypeIdeJson(
KvmMixinJson, TestInstanceResilientDiskTypeIde):
pass
@skipUnlessKvm
class TestAccessResilientAdditional(InstanceTestCase):
__partition_reference__ = 'ara'
......@@ -740,6 +777,12 @@ class TestAccessResilientAdditional(InstanceTestCase):
self.assertIn('<title>noVNC</title>', result.text)
@skipUnlessKvm
class TestAccessResilientAdditionalJson(
KvmMixinJson, TestAccessResilientAdditional):
pass
class TestInstanceNbdServer(InstanceTestCase):
__partition_reference__ = 'ins'
instance_max_retry = 5
......@@ -768,6 +811,12 @@ class TestInstanceNbdServer(InstanceTestCase):
self.assertIn("WARNING", connection_parameter_dict['status_message'])
@skipUnlessKvm
class TestInstanceNbdServerJson(
KvmMixinJson, TestInstanceNbdServer):
pass
class FakeImageHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def log_message(self, *args):
if os.environ.get('SLAPOS_TEST_DEBUG'):
......@@ -1046,6 +1095,12 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
self.assertPromiseFails(self.config_state_promise)
@skipUnlessKvm
class TestBootImageUrlListJson(
KvmMixinJson, TestBootImageUrlList):
pass
@skipUnlessKvm
class TestBootImageUrlListResilient(TestBootImageUrlList):
kvm_instance_partition_reference = 'biul2'
......@@ -1055,6 +1110,12 @@ class TestBootImageUrlListResilient(TestBootImageUrlList):
return 'kvm-resilient'
@skipUnlessKvm
class TestBootImageUrlListResilientJson(
KvmMixinJson, TestBootImageUrlListResilient):
pass
@skipUnlessKvm
class TestBootImageUrlSelect(TestBootImageUrlList):
__partition_reference__ = 'bius'
......@@ -1157,6 +1218,12 @@ class TestBootImageUrlSelect(TestBootImageUrlList):
)
@skipUnlessKvm
class TestBootImageUrlSelectJson(
KvmMixinJson, TestBootImageUrlSelect):
pass
@skipUnlessKvm
class TestBootImageUrlSelectResilient(TestBootImageUrlSelect):
kvm_instance_partition_reference = 'bius2'
......@@ -1166,6 +1233,12 @@ class TestBootImageUrlSelectResilient(TestBootImageUrlSelect):
return 'kvm-resilient'
@skipUnlessKvm
class TestBootImageUrlSelectResilientJson(
KvmMixinJson, TestBootImageUrlSelectResilient):
pass
@skipUnlessKvm
class TestBootImageUrlListKvmCluster(InstanceTestCase, FakeImageServerMixin):
__partition_reference__ = 'biulkc'
......@@ -1245,7 +1318,7 @@ class TestBootImageUrlSelectKvmCluster(TestBootImageUrlListKvmCluster):
@skipUnlessKvm
class TestNatRules(InstanceTestCase):
class TestNatRules(KvmMixin, InstanceTestCase):
__partition_reference__ = 'nr'
@classmethod
......@@ -1255,8 +1328,7 @@ class TestNatRules(InstanceTestCase):
}
def test(self):
connection_parameter_dict = self.computer_partition\
.getConnectionParameterDict()
connection_parameter_dict = self.getConnectionParameterDictJson()
self.assertIn('nat-rule-port-tcp-100', connection_parameter_dict)
self.assertIn('nat-rule-port-tcp-200', connection_parameter_dict)
......@@ -1271,6 +1343,12 @@ class TestNatRules(InstanceTestCase):
)
@skipUnlessKvm
class TestNatRulesJson(
KvmMixinJson, TestNatRules):
pass
@skipUnlessKvm
class TestNatRulesKvmCluster(InstanceTestCase):
__partition_reference__ = 'nrkc'
......@@ -1350,6 +1428,12 @@ class TestWhitelistFirewall(InstanceTestCase):
self.assertGreater(len(self.content_json), len(resolv_conf_ip_list))
@skipUnlessKvm
class TestWhitelistFirewallJson(
KvmMixinJson, TestWhitelistFirewall):
pass
@skipUnlessKvm
class TestWhitelistFirewallRequest(TestWhitelistFirewall):
whitelist_domains = '2.2.2.2 3.3.3.3\n4.4.4.4'
......@@ -1367,6 +1451,12 @@ class TestWhitelistFirewallRequest(TestWhitelistFirewall):
self.assertIn('4.4.4.4', self.content_json)
@skipUnlessKvm
class TestWhitelistFirewallRequestJson(
KvmMixinJson, TestWhitelistFirewallRequest):
pass
@skipUnlessKvm
class TestWhitelistFirewallResilient(TestWhitelistFirewall):
kvm_instance_partition_reference = 'wf2'
......@@ -1376,6 +1466,12 @@ class TestWhitelistFirewallResilient(TestWhitelistFirewall):
return 'kvm-resilient'
@skipUnlessKvm
class TestWhitelistFirewallResilientJson(
KvmMixinJson, TestWhitelistFirewallResilient):
pass
@skipUnlessKvm
class TestWhitelistFirewallRequestResilient(TestWhitelistFirewallRequest):
kvm_instance_partition_reference = 'wf2'
......@@ -1385,6 +1481,12 @@ class TestWhitelistFirewallRequestResilient(TestWhitelistFirewallRequest):
return 'kvm-resilient'
@skipUnlessKvm
class TestWhitelistFirewallRequestResilientJson(
KvmMixinJson, TestWhitelistFirewallRequestResilient):
pass
@skipUnlessKvm
class TestWhitelistFirewallCluster(TestWhitelistFirewall):
kvm_instance_partition_reference = 'wf1'
......@@ -1453,6 +1555,12 @@ dd if=/dev/zero of=/dev/virt1 bs=4096 count=500k"""
self.assertTrue(os.access(slapos_wipe_device_disk, os.X_OK))
@skipUnlessKvm
class TestDiskDevicePathWipeDiskOndestroyJson(
KvmMixinJson, TestDiskDevicePathWipeDiskOndestroy):
pass
@skipUnlessKvm
class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin):
__partition_reference__ = 'idc'
......@@ -1715,6 +1823,12 @@ class TestParameterDefault(InstanceTestCase, KvmMixin):
self._test({'ram-size': 2048}, "ram_max_size = '2560'")
@skipUnlessKvm
class TestParameterDefaultJson(
KvmMixinJson, TestParameterDefault):
pass
@skipUnlessKvm
class TestParameterResilient(TestParameterDefault):
__partition_reference__ = 'pr'
......@@ -1885,3 +1999,9 @@ class TestExternalDisk(InstanceTestCase, KvmMixin):
self.waitForInstance()
dropped_drive_list = self.getRunningDriveList(kvm_instance_partition)
self.assertEqual(drive_list, dropped_drive_list)
@skipUnlessKvm
class TestExternalDiskJson(
KvmMixinJson, TestExternalDisk):
pass
......@@ -13,13 +13,9 @@
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[template-apache-httpd]
filename = apache-httpd.conf.in
md5sum = 9940e05d5e624a7884f4e6e062355798
[template-matomo-instance]
filename = matomo-instance.cfg.in
md5sum = f82fd755c4ca5569836e0638a7502306
md5sum = 9ff98282480b9edf9af75fca5da5f349
[template-matomo-backup.sh]
filename = matomo-backup.sh.in
......
......@@ -5,22 +5,6 @@ dir-backup = ${directory:backup}
find-bin = {{ findutils_location }}
diff-bin = {{ diffutils_location }}
#php.ini parameters
php.memory_limit = 512M
php.date.timezone = Europe/Paris
php.upload_max_filesize = 10240M
php.post_max_size = 10240M
php.session.cookie_secure = True
php.max_execution_time = 1800
php.max_input_time = 3600
php.output_buffering = 'Off'
php.max_file_uploads = 100
[php-bin]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:bin}/php
command-line = ${instance-parameter:php-bin} -c ${php.ini-conf:output}
[matomo-backup-cron]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
......@@ -28,13 +12,6 @@ name = matomo-backup
frequency = 0 0 * * *
command = ${matomo-backup.sh:output}
[matomo-apache-httpd]
recipe = slapos.recipe.template:jinja2
url = {{ matomo_apache_httpd }}
output = ${directory:apache.d}/matomo.conf
context =
section parameter_dict apache-php-configuration
[matomo-backup.sh]
recipe = slapos.recipe.template:jinja2
url = {{ matomo_backup_sh }}
......@@ -42,6 +19,3 @@ output = ${directory:scripts}/matomo-backup
context =
section parameter_dict instance-parameter
key php_bin php-bin:wrapper-path
[slap-parameter]
instance.cli-url = ${apache-php-configuration:url}
......@@ -12,10 +12,8 @@ parts =
# Call installation of slapos.cookbook egg defined in stack/slapos.cfg (needed
# in 99,9% of Slapos Software Releases)
slapos-cookbook
# to create file instance-matomo.cfg in instance of apache-php
template-matomo-instance
# to create file instance.cfg of all instances
instance
lamp-instance
# download bas
# inherited by modules that need to download files
......@@ -28,12 +26,13 @@ url = ${:_profile_base_location_}/${:filename}
[application]
url = https://builds.matomo.org/matomo-4.7.1.zip
md5sum = 8d592676bc2c0d51363ad7b2caf171fe
archive-root = matomo
# give the location of the instance-matomo.cfg file
# Without it the instance-matomo.cfg file will not be executed
[custom-application-deployment]
path = ${template-matomo-instance:output}
part-list = matomo-backup.sh matomo-backup-cron matomo-apache-httpd
part-list = matomo-backup.sh matomo-backup-cron
db-name = matomo
db-user = matomo
db-password = 12345678
......@@ -46,16 +45,9 @@ extensions = jinja2.ext.do
context =
key findutils_location findutils:location
key diffutils_location diffutils:location
key gzip_location gzip:location
key python3_location python3:location
key php_location apache-php:location
key matomo_apache_httpd template-apache-httpd:target
key matomo_backup_sh template-matomo-backup.sh:target
# download apache-httpd.conf.in
[template-apache-httpd]
<= matomo-download
# download matomo-backup.sh.in
[template-matomo-backup.sh]
<= matomo-download
<VirtualHost *:{{ parameter_dict['port'] }}>
ServerAdmin admin@example.com
DocumentRoot {{ parameter_dict['document-root'] }}
SetEnvIf Origin "^http(s)?://(.+\.)?(app\.officejs\.com)$" ORIGIN_DOMAIN=$0
Header always set Access-Control-Allow-Origin "%{ORIGIN_DOMAIN}e" env=ORIGIN_DOMAIN
Header always set Access-Control-Allow-Credentials "true" env=ORIGIN_DOMAIN
Header always set Access-Control-Allow-Methods "PROPFIND, PROPPATCH, COPY, MOVE, DELETE, MKCOL, LOCK, UNLOCK, PUT, GETLIB, VERSION-CONTROL, CHECKIN, CHECKOUT, UNCHECKOUT, REPORT, UPDATE, CANCELUPLOAD, HEAD, OPTIONS, GET, POST" env=ORIGIN_DOMAIN
Header always set Access-Control-Allow-Headers "Overwrite, Destination, Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Authorization" env=ORIGIN_DOMAIN
<IfModule mod_headers.c>
Header always set Strict-Transport-Security "max-age=15552000; includeSubDomains"
</IfModule>
<Directory {{ parameter_dict['document-root'] }}>
Options +FollowSymlinks
AllowOverride All
Require all granted
SetEnv HOME {{ parameter_dict['document-root'] }}
SetEnv HTTP_HOME {{ parameter_dict['document-root'] }}
<IfModule mod_dav.c>
Dav off
</IfModule>
</Directory>
ErrorLog "{{ parameter_dict['log-dir'] }}/nextcloud-error.log"
CustomLog "{{ parameter_dict['log-dir'] }}/nextcloud-access.log" combined
</VirtualHost>
......@@ -16,14 +16,10 @@
filename = nextcloud-install.sh.in
md5sum = 094c26b177fdde69b41d81b89bab542b
[template-apache-httpd]
filename = apache-httpd.conf.in
md5sum = f7e8f6ea20f8685bb4e42cacaee4116f
[template-nextcloud-config.json]
filename = nextcloud-config.json.in
md5sum = 6f42f0a8c5e5c0c657541a65c4d9ee57
[template-nextcloud-instance]
filename = nextcloud-instance.cfg.in
md5sum = 4426b5e62c61efec2e68e7f548bc6463
md5sum = e144dc4cdc3bd0a9be81ac0cc2cfdbd4
......@@ -55,17 +55,6 @@ cli-url = ${slap-parameter:instance.cli-url}
data-dir = ${nc-directory:data}
redis-socket = ${service-redis:unixsocket}
#php.ini parameters
php.memory_limit = 512M
php.date.timezone = Europe/Paris
php.upload_max_filesize = 10240M
php.post_max_size = 10240M
php.session.cookie_secure = True
php.max_execution_time = 1800
php.max_input_time = 3600
php.output_buffering = 'Off'
php.max_file_uploads = 100
#SMTP settings
mail.from = ${slap-parameter:instance.mail-from}
mail.domain = ${slap-parameter:instance.mail-domain}
......@@ -93,7 +82,6 @@ depends =
${redis-promise:recipe}
${nextcloud-cron:recipe}
${nextcloud-optimize:recipe}
${nextcloud-apache-httpd:recipe}
${nextcloud-backup-cron:recipe}
${nextcloud-news-updater:recipe}
${nextcloud-config-update:recipe}
......@@ -120,13 +108,6 @@ input = inline:#!/bin/bash
output = ${directory:bin}/check-nc-install
mode = 744
[nextcloud-apache-httpd]
recipe = slapos.recipe.template:jinja2
url = {{ nextcloud_apache_httpd }}
output = ${directory:apache.d}/nextcloud.conf
context =
section parameter_dict apache-php-configuration
[nextcloud-cron]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
......@@ -199,11 +180,6 @@ input = inline:#!/bin/sh
${php-bin:wrapper-path} ${instance-parameter:nextcloud}/occ config:import ${nextcloud-config.json:output}
mode = 744
[php-bin]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:bin}/php
command-line = ${instance-parameter:php-bin} -c ${php.ini-conf:output}
[publish-connection-information]
admin-user = ${instance-parameter:admin-user}
admin-password = ${instance-parameter:admin-password}
......
......@@ -15,9 +15,6 @@ md5sum = d81902d2dec5d547779bec6336a438be
[template-nextcloud-install.sh]
<= nc-download
[template-apache-httpd]
<= nc-download
[template-nextcloud-config.json]
<= nc-download
......@@ -34,7 +31,6 @@ context =
raw redis_bin ${redis:location}/bin/redis-server
raw redis_cli ${redis:location}/bin/redis-cli
key nextcloud_install_sh template-nextcloud-install.sh:target
key nextcloud_apache_httpd template-apache-httpd:target
key nextcloud_parameter_json template-nextcloud-config.json:target
section nextcloud_apps nextcloud-apps
......
......@@ -15,7 +15,7 @@
[instance-theia]
_update_hash_filename_ = instance-theia.cfg.jinja.in
md5sum = bdf824a8e09899d6da27b26706d177c5
md5sum = f1b06742154b97d4173dd11e0949569d
[instance]
_update_hash_filename_ = instance.cfg.in
......
{
"$schema": "http://json-schema.org/draft-04/schema#",
"$schema": "http://json-schema.org/draft-06/schema",
"type": "object",
"description": "Parameters to instantiate Theia",
"additionalProperties": false,
"properties": {
"autorun": {
"title": "Automatically Run Sofware/Instance",
"title": "Automatically Run Software/Instance",
"description": "The option used to pilot automatic build and run of software and instances hosted inside Theia. When 'running', build and run is done automatically in the background. When 'stopped' build and run processes are stopped and need to be run manually. When 'user-controlled', the user can manage it directly in the local supervisord. ",
"type": "string",
"enum": [
......@@ -19,6 +19,7 @@
"title": "Initial Embedded Instance Configuration",
"description": "One-shot optional JSON preconfiguration for an embedded instance. Only applied once when Theia is instantiated. Changing this option afterward will have no effect.",
"type": "string",
"textarea": true,
"examples": [
"{\"software-url\": \"~/srv/project/slapos/software/html5as-base/software.cfg\"}",
"{\"software-url\": \"~/srv/project/slapos/software/html5as/software.cfg\", \"software-type\": \"replicate\", \"instance-parameters\": {\"replicate-quantity\": 3}}"
......
{
"$schema": "http://json-schema.org/draft-04/schema",
"$schema": "http://json-schema.org/draft-06/schema",
"type": "object",
"description": "Parameters to instantiate resilient Theia",
"allOf": [
{
"$ref": "instance-input-schema.json#/"
"$ref": "./instance-input-schema.json#"
},
{
"properties": {
......
......@@ -336,14 +336,14 @@ install =
self.buildout['buildout']['offline'] = 'false'
try:
gravatar_url = "https://www.gravatar.com/avatar/" + hashlib.md5(
b'''$${:seed}'''
options['seed'].encode()
).hexdigest() + "?s=256&d=retro"
shutil.copy(self.download(gravatar_url), '''$${:location}''')
shutil.copy(self.download(gravatar_url), location)
except Exception:
# Because installation should work offline, if we can't download a favicon,
# just ignore this step.
self.logger.exception("Error while downloading favicon, using empty one")
open('''$${:location}''', 'w').close()
open(location, 'w').close()
finally:
self.buildout['buildout']['offline'] = buildout_offline
......
......@@ -238,6 +238,10 @@ class TestTheia(TheiaTestCase):
self.assertIn(b'slaprunner', self.captureSlapos('computer', 'list'))
class TestTheiaWithNonAsciiInstanceName(TestTheia):
default_partition_reference = '💥'
class TestTheiaEmbeddedSlapOSShutdown(TheiaTestCase):
def test_stopping_instance_stops_embedded_slapos(self):
embedded_slapos_supervisord_socket = _getSupervisordSocketPath(
......
......@@ -74,7 +74,7 @@ md5sum = 9a7f7888ba4183c9d900e862074f3baf
[template-erp5]
filename = instance-erp5.cfg.in
md5sum = f5a1661449c9681b3de7d4af645124ba
md5sum = 3d8f3a440b7423c3b947c6ea4d775c6e
[template-zeo]
filename = instance-zeo.cfg.in
......@@ -90,11 +90,11 @@ md5sum = c3e3f8cd985407931b705d15bdedc8d9
[template-balancer]
filename = instance-balancer.cfg.in
md5sum = 4491b256241cb4bbdfafb22ae95a3eba
md5sum = b0751d3d12cfcc8934cb1027190f5e5e
[template-haproxy-cfg]
filename = haproxy.cfg.in
md5sum = d2d98ed3fafce764991b72371e3e09d5
md5sum = 1645ef8990ab2b50f91a4c02f0cf8882
[template-rsyslogd-cfg]
filename = rsyslogd.cfg.in
......
......@@ -48,6 +48,7 @@
# ( 8000, # port int
# 'https', # proto str
# True, # ssl_required bool
# None, # timeout (in seconds) int | None
# [ # backends
# '10.0.0.10:8001', # netloc str
# 1, # max_connection_count int
......@@ -59,6 +60,7 @@
# ( 8002, # port int
# 'https', # proto str
# False, # ssl_required bool
# None, # timeout (in seconds) int | None
# [ # backends
# '10.0.0.10:8003', # netloc str
# 1, # max_connection_count int
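These comments document the per-family tuple that haproxy.cfg.in receives from the balancer profile; this commit inserts the timeout slot. A small sketch of the shape, matching the unpacking done further down in the template (port, proto, certificate_authentication, timeout, backend_list); the backend entries are left empty here because the hunk truncates them (illustration only, not code from the commit):

# Illustration only: shape of one backend-dict entry after this change,
# with the new timeout slot in fourth position.
backend_dict = {
    'default': (
        8000,      # port int
        'https',   # proto str
        True,      # ssl_required bool
        None,      # timeout (in seconds) int | None  -- new in this commit
        [],        # backends, as documented in the comments above
    ),
}
port, proto, ssl_required, timeout, backend_list = backend_dict['default']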
......@@ -135,7 +137,6 @@ defaults
timeout connect 10s
timeout queue 60s
timeout server 305s
timeout client 305s
option http-server-close
......@@ -150,7 +151,7 @@ defaults
{% set family_path_routing_dict = parameter_dict['family-path-routing-dict'] %}
{% set path_routing_list = parameter_dict['path-routing-list'] %}
{% for name, (port, _, certificate_authentication, backend_list) in sorted(six.iteritems(parameter_dict['backend-dict'])) -%}
{% for name, (port, _, certificate_authentication, timeout, backend_list) in sorted(six.iteritems(parameter_dict['backend-dict'])) -%}
listen family_{{ name }}
{%- if parameter_dict.get('ca-cert') -%}
{%- set ssl_auth = ' ca-file ' ~ parameter_dict['ca-cert'] ~ ' verify' ~ ( ' required' if certificate_authentication else ' optional' ) ~ ' crl-file ' ~ parameter_dict['crl'] %}
......@@ -162,6 +163,15 @@ listen family_{{ name }}
cookie SERVERID rewrite
http-request set-header X-Balancer-Current-Cookie SERVERID
{% if timeout %}
{#
Apply a slightly longer timeout than the zope timeout so that clients can see the
TimeoutReachedError from zope, that is a bit more informative than the 504 error
page from haproxy.
#}
timeout server {{ timeout + 3 }}s
{%- endif %}
# remove X-Forwarded-For unless client presented a verified certificate
acl client_cert_verified ssl_c_used ssl_c_verify 0
http-request del-header X-Forwarded-For unless client_cert_verified
......
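To make the relationship explicit: the balancer receives one timeout-dict entry per zope family (the family's publisher-timeout, which now defaults to 300 seconds per the schema change earlier, or None for no timeout), and the template above sets haproxy's "timeout server" three seconds longer so that Zope's TimeoutReachedError reaches the client instead of haproxy's 504 page. A minimal sketch of that arithmetic (illustration only, not part of the commit):

def haproxy_server_timeout(publisher_timeout):
    # None (e.g. the "no-timeout" family override) keeps haproxy's default
    if publisher_timeout is None:
        return None
    # give zope 3 extra seconds to return TimeoutReachedError itself
    return publisher_timeout + 3

assert haproxy_server_timeout(300) == 303  # new default publisher-timeout
assert haproxy_server_timeout(1) == 4      # TestTimeout sets a 1 second timeout
assert haproxy_server_timeout(None) is None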
......@@ -123,7 +123,7 @@ update-command = ${:command}
{% for family_name, parameter_id_list in sorted(
six.iteritems(slapparameter_dict['zope-family-dict'])) -%}
{% set zope_family_address_list = [] -%}
{% set ssl_authentication = slapparameter_dict['ssl-authentication-dict'].get(family_name, False) -%}
{% set ssl_authentication = slapparameter_dict['ssl-authentication-dict'][family_name] -%}
{% set has_webdav = [] -%}
{% for parameter_id in parameter_id_list -%}
{% set zope_address_list = slapparameter_dict[parameter_id] -%}
......@@ -177,7 +177,7 @@ update-command = ${:command}
{% else %}
{% set external_scheme = 'https' -%}
{% endif -%}
{% do haproxy_dict.__setitem__(family_name, (haproxy_port, external_scheme, slapparameter_dict['ssl-authentication-dict'].get(family_name, False), zope_family_address_list)) -%}
{% do haproxy_dict.__setitem__(family_name, (haproxy_port, external_scheme, slapparameter_dict['ssl-authentication-dict'][family_name], slapparameter_dict['timeout-dict'][family_name], zope_family_address_list)) -%}
{% endfor -%}
[haproxy-cfg-parameter-dict]
......@@ -309,7 +309,7 @@ config-port = {{ next(six.itervalues(haproxy_dict))[0] }}
[{{ section('publish') }}]
recipe = slapos.cookbook:publish.serialised
{% for family_name, (port, scheme, _, _) in haproxy_dict.items() -%}
{% for family_name, (port, scheme, _, _, _) in haproxy_dict.items() -%}
{{ family_name ~ '-v6' }} = {% if ipv6_set %}{{ scheme ~ '://[' ~ ipv6 ~ ']:' ~ port }}{% endif %}
{{ family_name }} = {{ scheme ~ '://' ~ ipv4 ~ ':' ~ port }}
{% endfor -%}
......
......@@ -265,12 +265,13 @@ config-wcfs_enable = {{ dumps(wcfs_enable) }}
config-test-runner-configuration = {{ dumps(slapparameter_dict.get('test-runner', {})) }}
software-type = zope
{% set global_publisher_timeout = slapparameter_dict.get('publisher-timeout') -%}
{% set global_publisher_timeout = slapparameter_dict.get('publisher-timeout', 300) -%}
{% set global_activity_timeout = slapparameter_dict.get('activity-timeout') -%}
{% set zope_family_dict = {} -%}
{% set zope_family_name_list = [] -%}
{% set zope_backend_path_dict = {} -%}
{% set ssl_authentication_dict = {} -%}
{% set balancer_timeout_dict = {} -%}
{% set jupyter_zope_family_default = [] -%}
{% for custom_name, zope_parameter_dict in six.iteritems(zope_partition_dict) -%}
{% set partition_name = 'zope-' ~ custom_name -%}
......@@ -288,6 +289,7 @@ software-type = zope
{% do zope_backend_path_dict.__setitem__(zope_family, backend_path) -%}
{% do ssl_authentication_dict.__setitem__(zope_family, zope_parameter_dict.get('ssl-authentication', False)) -%}
{% set current_zope_family_override_dict = zope_family_override_dict.get(zope_family, {}) -%}
{% do balancer_timeout_dict.__setitem__(zope_family, current_zope_family_override_dict.get('publisher-timeout', global_publisher_timeout)) -%}
[{{ section_name }}]
<= request-zope-base
name = {{ partition_name }}
......@@ -406,6 +408,7 @@ config-url = ${request-jupyter:connection-url}
'zope-family-dict': zope_family_parameter_dict,
'backend-path-dict': zope_backend_path_dict,
'ssl-authentication-dict': ssl_authentication_dict,
'timeout-dict': balancer_timeout_dict,
'apachedex-promise-threshold': monitor_dict.get('apachedex-promise-threshold', 70),
'apachedex-configuration': monitor_dict.get(
'apachedex-configuration',
......
<VirtualHost *:{{ parameter_dict['port'] }}>
ServerAdmin admin@example.com
DocumentRoot {{ parameter_dict['document-root'] }}/matomo
DocumentRoot {{ parameter_dict['document-root'] }}
SetEnvIf Origin "^http(s)?://(.+\.)?(app\.officejs\.com)$" ORIGIN_DOMAIN=$0
Header always set Access-Control-Allow-Origin "%{ORIGIN_DOMAIN}e" env=ORIGIN_DOMAIN
......@@ -18,7 +18,7 @@
Dav off
</Directory>
ErrorLog "{{ parameter_dict['log-dir'] }}/matomo-error.log"
CustomLog "{{ parameter_dict['log-dir'] }}/matomo-access.log" combined
ErrorLog "{{ parameter_dict['log-dir'] }}/lamp-error.log"
CustomLog "{{ parameter_dict['log-dir'] }}/lamp-access.log" combined
</VirtualHost>
......@@ -9,7 +9,7 @@ parts =
php-apcu
dropbear
eggs
instance
lamp-instance
extends =
buildout.hash.cfg
......@@ -37,6 +37,14 @@ url = ${:_profile_base_location_}/${:filename}
[application]
recipe = slapos.recipe.build:download-unpacked
# software releases using this stack can extend this section to provide the url of
# an archive containing the application, using the url and md5sum options.
# The archive will be extracted directly into apache's DocumentRoot folder.
# In case the archive contains multiple files and folders and only a
# specific folder should be used as DocumentRoot, archive-root can be
# set to the relative path of this folder.
archive-root =
[python]
part = python3
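A minimal sketch of what the archive-root option described above does when the instance profile computes the DocumentRoot (illustration only; the actual logic is the Jinja if/else added to instance.cfg.in later in this commit, and the matomo software sets archive-root = matomo because matomo-4.7.1.zip unpacks into a top-level matomo folder):

def document_root(application_location, archive_root=""):
    # empty archive-root: serve the extracted archive directly
    if archive_root:
        return application_location + "/" + archive_root
    return application_location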
......@@ -61,12 +69,13 @@ db-password = insecure
#----------------
#-- Instance-level buildout profiles.
[instance]
[lamp-instance]
recipe = slapos.recipe.template:jinja2
output = ${buildout:directory}/instance.cfg
url = ${:_profile_base_location_}/${:filename}
context =
key application_location application:location
key application_archive_root application:archive-root
key application_deployment_part_list custom-application-deployment:part-list
key apache_location apache:location
key apache_php_location apache-php:location
......@@ -107,6 +116,7 @@ context =
key db_name custom-application-deployment:db-name
key db_user custom-application-deployment:db-user
key db_password custom-application-deployment:db-password
key lamp_apache_httpd template-apache-httpd:target
[instance-apache-php]
<= template-download-base
......@@ -120,6 +130,10 @@ context =
[template-php.ini]
<= template-download-base
# download apache-httpd.conf.in
[template-apache-httpd]
<= template-download-base
[erp5]
# lamp stack reuses erp5 stack to have mariadb, but we don't need to checkout erp5 here.
recipe =
......
......@@ -12,17 +12,17 @@
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[instance]
[lamp-instance]
filename = instance.cfg.in
md5sum = a5a630377bfb0421d6993c9c2c411a23
md5sum = 603d03f43ea04e95237c80d3b9e72ba7
[instance-apache-php]
filename = instance-apache-php.cfg.in
md5sum = 0952ef9f6cb5e259ad5519d2975d2f37
md5sum = 8506f245087dfdc02561e8739a567bd6
[instance-lamp]
filename = instance-lamp.cfg.jinja2.in
md5sum = b3d68a13d7a7ffcac774f51f02a68359
md5sum = 79343539dff96f2d4592a4358d469201
[template-apache.conf]
filename = apache.conf.in
......@@ -30,4 +30,8 @@ md5sum = 04080510698732d84122b464fdb08c6a
[template-php.ini]
filename = php.ini.in
md5sum = 599358f7df4c1e0de86270f5992ef904
md5sum = 980f603c34fcb008ce73a9dde2dbceb3
[template-apache-httpd]
filename = apache-httpd.conf.in
md5sum = 4ad906791f0d6fd3a4aef8f5a8018b45
......@@ -13,6 +13,7 @@ parts =
php.ini-conf
apache-php-service
publish-connection-information
lamp-apache-httpd
{{ parameter_dict['application-part-list'] }}
......@@ -133,10 +134,17 @@ cert-file = ${ca-directory:certs}/httpd.crt
key-file = ${ca-directory:certs}/httpd.key
apache-config-dir = ${directory:apache.d}
[lamp-apache-httpd]
recipe = slapos.recipe.template:jinja2
url = {{ lamp_apache_httpd }}
output = ${directory:apache.d}/lamp.conf
context =
section parameter_dict apache-php-configuration
[apache-php-conf]
recipe = slapos.recipe.template:jinja2
url = {{ parameter_dict['template-apache-conf'] }}
output = ${directory:etc}/apache.conf
output = ${directory:etc}/apache.confgraceful
context =
section parameter_dict apache-php-configuration
extensions = jinja2.ext.do
......@@ -188,6 +196,24 @@ backend-url = ${apache-php-configuration:url}
php-bin = {{ parameter_dict['apache-php-location'] }}/bin/php
php-ini = ${php.ini-conf:output}
# Default php.ini parameters.
# They are already set in php.ini.in;
# they are kept here only as an example.
# You can override them like this in your own software.
php.memory_limit = 512M
php.date.timezone = Europe/Paris
php.upload_max_filesize = 10240M
php.post_max_size = 10240M
php.session.cookie_secure = True
php.max_execution_time = 1800
php.max_input_time = 3600
php.output_buffering = 'Off'
php.max_file_uploads = 100
[php-bin]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:bin}/php
command-line = ${instance-parameter:php-bin} -c ${php.ini-conf:output}
#----------------
#--
......
......@@ -37,8 +37,7 @@ return =
{% do publish_dict.__setitem__('backend-url', '${request-apache:connection-backend-url}') -%}
{% do monitor_base_url_dict.__setitem__('apache', '${request-apache:connection-monitor-base-url}') -%}
{% do mariadb_dict.__setitem__('database-list', [{'name': db_name, 'user': db_user, 'password': db_password}]) -%}
{% do mariadb_dict.__setitem__('database-list', [{'name': db_name, 'user': db_user, 'password': db_password }]) -%}
{% do mariadb_dict.__setitem__('test-database-amount', 0) -%}
{% do mariadb_dict.__setitem__('tcpv4-port', 2099) -%}
{% do mariadb_dict.__setitem__('max-slowqueries-threshold', 1000) -%}
......
......@@ -54,7 +54,11 @@ extra-context =
raw db_password {{ db_password }}
[dynamic-template-apache-php-parameters]
{% if application_archive_root %}
application-location = {{ application_location }}/{{ application_archive_root }}
{% else %}
application-location = {{ application_location }}
{% endif %}
application-part-list = {{ application_deployment_part_list }}
template-apache-conf = {{ template_apache_conf }}
apache-location = {{ apache_location }}
......@@ -72,6 +76,7 @@ filename = template-apache-php.cfg
extra-context =
key custom_application_template application-parameters:custom-application-template
section parameter_dict dynamic-template-apache-php-parameters
raw lamp_apache_httpd {{ lamp_apache_httpd }}
[dynamic-template-mariadb-parameters]
bash = {{ bash_location }}
......
......@@ -13,15 +13,15 @@ session.save_path = "{{ parameter_dict['tmp-dir'] }}"
session.auto_start = 0
date.timezone = {{ instance_dict.get('php.date.timezone', 'Europe/Paris') }}
file_uploads = On
upload_max_filesize = {{ instance_dict.get('php.upload_max_filesize', '8M') }}
post_max_size = {{ instance_dict.get('php.post_max_size', '8M') }}
upload_max_filesize = {{ instance_dict.get('php.upload_max_filesize', '10240M') }}
post_max_size = {{ instance_dict.get('php.post_max_size', '10240M') }}
magic_quotes_gpc=Off
memory_limit = {{ instance_dict.get('php.memory_limit', '128M') }}
memory_limit = {{ instance_dict.get('php.memory_limit', '512M') }}
session.cookie_secure = True
max_execution_time = {{ instance_dict.get('php.max_execution_time', 60) }}
max_input_time = {{ instance_dict.get('php.max_input_time', 60) }}
max_execution_time = {{ instance_dict.get('php.max_execution_time', 1800) }}
max_input_time = {{ instance_dict.get('php.max_input_time', 3600) }}
output_buffering = {{ instance_dict.get('php.output_buffering', 4096) }}
max_file_uploads = {{ instance_dict.get('php.max_file_uploads', 20) }}
max_file_uploads = {{ instance_dict.get('php.max_file_uploads', 100) }}
#extension_dir="./"#
sys_temp_dir="{{ parameter_dict['tmp-dir'] }}"
......
......@@ -200,7 +200,7 @@ slapos.extension.strip = 0.4
slapos.extension.shared = 1.0
slapos.libnetworkcache = 0.25
slapos.rebootstrap = 4.5
slapos.recipe.build = 0.54
slapos.recipe.build = 0.55
slapos.recipe.cmmi = 0.19
slapos.recipe.template = 5.0
slapos.toolbox = 0.127
......