Commit b138ec22 authored by Jérome Perrin

restore to the state of 1.0.308

There has been a force push on the master branch, and 1.0.309 seems to be the
result of merging an old version of the master branch: the commits between
1.0.308 and 1.0.309 are the same as the commits on the master branch, but with
different hashes, and when we try to merge master into the 1.0 branch there are
conflicts.

This commit resets the 1.0 branch to the same state as 1.0.308. It was done with:

  git restore --source 1.0.308 :/
  git commit -a

so it reverts all these commits and we can merge master again.
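
(A simple way to double-check such a revert is to diff the result against the
tag it restores, for example "git diff 1.0.308" after committing: the restored
files should show no remaining differences.)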
parent 238bb076
@@ -15,8 +15,8 @@ extends =
 [groonga]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://packages.groonga.org/source/groonga/groonga-13.0.0.tar.gz
-md5sum = 76aae9bc04c4047cbb31cc543bde8540
+url = https://packages.groonga.org/source/groonga/groonga-12.0.7.tar.gz
+md5sum = 5ef412a6941994e623cb50d76a8be261
 groonga-plugin-dir = @@LOCATION@@/lib/groonga/plugins/
 # temporary patch to respect more tokens in natural language mode.
 patches =
@@ -48,8 +48,8 @@ environment =
 [groonga-normalizer-mysql]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://packages.groonga.org/source/groonga-normalizer-mysql/groonga-normalizer-mysql-1.2.1.tar.gz
-md5sum = d9b48b3ea9dc7423e7c40eb326d86395
+url = https://packages.groonga.org/source/groonga-normalizer-mysql/groonga-normalizer-mysql-1.1.8.tar.gz
+md5sum = a1520691da3083e14bdc65a9ec57a620
 groonga-plugin-dir = @@LOCATION@@/lib/groonga/plugins/
 pre-configure = mkdir -p ${:groonga-plugin-dir}
 make-targets = GROONGA_PLUGINS_DIR=${:groonga-plugin-dir} install
......
@@ -13,8 +13,8 @@ parts = haproxy
 [haproxy]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://www.haproxy.org/download/2.6/src/haproxy-2.6.9.tar.gz
-md5sum = 8c8fa1a2f9592efa2793bcc74e2ea501
+url = http://www.haproxy.org/download/2.6/src/haproxy-2.6.7.tar.gz
+md5sum = cfa36413f2bc5187ab34ffcdf71914d4
 configure-command = true
 # for Linux kernel 2.6.28 and above, we use "linux-glibc" as the TARGET,
 # otherwise use "generic".
......
@@ -30,8 +30,8 @@ parts =
 recipe = slapos.recipe.cmmi
 shared = true
 url = https://archive.mariadb.org//mariadb-${:version}/source/mariadb-${:version}.tar.gz
-version = 10.4.28
-md5sum = d098e9af77a26260a6a6c21a7ed0daf6
+version = 10.4.25
+md5sum = 76e6ee973adb7deb15d7936f710eb5a4
 pre-configure =
   set '\bSET(PLUGIN_AUTH_PAM YES CACHE BOOL "")' cmake/build_configurations/mysql_release.cmake
   grep -q "$@"
@@ -59,8 +59,6 @@ configure-options =
   -DWITH_ROCKSDB_snappy=ON
   -DWITH_ROCKSDB_ZSTD=ON
   -DWITH_SAFEMALLOC=OFF
-  # TokuDB is removed in 10.6
-  -DWITHOUT_TOKUDB=true
   -DPLUGIN_DAEMON_EXAMPLE=NO
   -DPLUGIN_EXAMPLE=NO
   -DPLUGIN_MROONGA=NO
@@ -101,8 +99,8 @@ post-install =
 # as plugin-dir ( https://mariadb.com/kb/en/server-system-variables/#plugin_dir )
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://packages.groonga.org/source/mroonga/mroonga-13.00.tar.gz
-md5sum = e7bda4edd284e788abf78e3a6f485fbe
+url = https://packages.groonga.org/source/mroonga/mroonga-12.09.tar.gz
+md5sum = 637d73b86239cc9c3758e9486746d430
 pre-configure =
   rm -rf fake_mariadb_source
   mkdir -p fake_mariadb_source
@@ -136,8 +134,8 @@ environment =
 ### (we just override here for easier revert)
 [mariadb-10.3]
 <= mariadb-10.4
-version = 10.3.38
-md5sum = 4cb91021431f514afbb1c1c2ab13944f
+version = 10.3.35
+md5sum = b7a2e69d103eda3dd61c8bad8775c7bd
 post-install =
   ldd=`ldd %(location)s/lib/plugin/ha_rocksdb.so`
   for x in ${lz4:location} ${snappy:location} ${zstd:location}
......
@@ -17,8 +17,8 @@ parts =
 [openssl]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://www.openssl.org/source/openssl-1.1.1t.tar.gz
-md5sum = 1cfee919e0eac6be62c88c5ae8bcd91e
+url = https://www.openssl.org/source/openssl-1.1.1o.tar.gz
+md5sum = d05e96e200d2ff0aef20c114cb5f17bf
 location = @@LOCATION@@
 # 'prefix' option to override --openssldir/--prefix (which is useful
 # when combined with DESTDIR). Used by slapos.package.git/obs
@@ -48,8 +48,8 @@ environment =
 [openssl-quictls]
 <= openssl
-url = https://github.com/quictls/openssl/archive/refs/tags/openssl-3.0.8-quic1.tar.gz
-md5sum = a203b9b158429ca75539b55a137d317b
+url = https://github.com/quictls/openssl/archive/refs/tags/openssl-3.0.7+quic1.tar.gz
+md5sum = 8e27cd201b554a33ed03a59f6c679c77
 [openssl-output]
 # Shared binary location to ease migration
......
@@ -8,15 +8,15 @@ parts =
 [pycurl-env]
 PATH = ${curl:location}/bin:${openssl:location}/bin:%(PATH)s
-PYCURL_SSL_LIBRARY = openssl
+PYCURL_SSL_LIBRARY=openssl
+CPPFLAGS=-I${openssl:location}/include
+CFLAGS=-I${openssl:location}/include
 [pycurl]
 recipe = zc.recipe.egg:custom
 egg = pycurl
-include-dirs =
-  ${openssl:location}/include
-library-dirs =
-  ${openssl:location}/lib
-rpath = ${:library-dirs}
-  ${curl:location}/lib
+rpath =
+  ${curl:location}/lib/
+  ${openssl:location}/lib/
 environment = pycurl-env
...@@ -6,93 +6,50 @@ extends = ...@@ -6,93 +6,50 @@ extends =
../libffi/buildout.cfg ../libffi/buildout.cfg
../ncurses/buildout.cfg ../ncurses/buildout.cfg
../openssl/buildout.cfg ../openssl/buildout.cfg
../patch/buildout.cfg
../pkgconfig/buildout.cfg ../pkgconfig/buildout.cfg
../python-2.7/buildout.cfg
../sqlite3/buildout.cfg ../sqlite3/buildout.cfg
../zlib/buildout.cfg ../zlib/buildout.cfg
[pycparser-shared]
# XXX:
recipe = slapos.recipe.cmmi
shared = true
url = https://github.com/eliben/pycparser/archive/release_v2.20.tar.gz
md5sum = a5d9ea5350a8edb8239af73913ea2858
configure-command = :
make-binary =
make-targets = python setup.py install --install-lib @@LOCATION@@
[pypy2] [pypy2]
recipe = slapos.recipe.build recipe = slapos.recipe.cmmi
shared = true shared = true
version = 2.7 url = https://downloads.python.org/pypy/pypy2.7-v7.3.1-src.tar.bz2
url = https://downloads.python.org/pypy/pypy${:version}-v7.3.11-src.tar.bz2 md5sum = 7608bd58940ffc5403632c2c786d83bb
md5sum = 249ad8b0ddffbb16a9d416c3ae85dd2c configure-command =
pycparser-url = https://github.com/eliben/pycparser/archive/release_v2.21.tar.gz sed -i '/"_tkinter":/s/^/#/' lib_pypy/tools/build_cffi_imports.py
pycparser-md5sum = cf4b60f0beca2a25bb599e9e858a8223 cat <<EOF > Makefile
patch-binary = ${patch:location}/bin/patch PREFIX = @@LOCATION@@
patches = ${python2.7-lib-patches:patches} export PYPY_USESSION_BASENAME=slapos
init = export TMPDIR=\$(realpath ..)
import os, sys all: pypy/goal/pypy-c lib_pypy/tools/build_cffi_imports.py
options['executable'] = os.path.join(location, 'bin', 'pypy') \$^
if sys.version_info.major == 2: c_src_dir:
self.python = sys.executable cd pypy/goal && PYTHONPATH=${pycparser-shared:location} $${PYTHON:-python2} ../../rpython/bin/rpython --batch --source --opt=jit --shared targetpypystandalone
else: ln -s ../usession-\$\$PYPY_USESSION_BASENAME-\$\$USER/testing_1 \$@
# XXX: We don't care which Python 2 we use to build PyPy pypy/goal/pypy-c: c_src_dir
# and we don't want to rebootstrap twice. \$(MAKE) -C \$<
depends = options.depends mv \$</libpypy-c.so \$</pypy-c pypy/goal
options.depends = set(depends) touch \$@
self.python = self.buildout['python2.7']['executable'] install:
options.depends = depends mkdir -p \$(PREFIX)/bin \$(PREFIX)/include
install = find lib_pypy lib-python/2.7 -type d '(' '(' -name __pycache__ -o -name _tkinter -o -name test -o -name tests ')' -prune -o -print ')' \
import os |while read d; do mkdir -p \$(PREFIX)/\$\$d && find \$\$d -maxdepth 1 -type f ! -name '*.o' ! -name '*.c' |xargs -r cp -t \$(PREFIX)/\$\$d; done
env = self.environ d=lib-python/2.7/test && mkdir -p \$(PREFIX)/\$\$d && for x in __init__ pystone regrtest test_support; do echo \$\$d/\$\$x.py; done |xargs -r cp -t \$(PREFIX)/\$\$d
pycparser = self.extract(self.download( cd lib-python && cp conftest.py stdlib-version.* \$(PREFIX)/lib-python
options['pycparser-url'], options['pycparser-md5sum'])) cp -r include/pypy_*.h include/PyPy.h pypy/module/cpyext/include/* pypy/module/cpyext/parse/* \$(PREFIX)/include
pycparser_lib = os.path.join(pycparser, 'lib') cd pypy/goal && cp libpypy-c.so \$(PREFIX)/bin && cp pypy-c \$(PREFIX)/bin/pypy
call((self.python, 'setup.py', 'install', '--install-lib', pycparser_lib),
cwd=guessworkdir(pycparser), env=env)
pypy_src = guessworkdir(self.extract(self.download()))
build_cffi_imports = 'lib_pypy/pypy_tools/build_cffi_imports.py'
version = options['version']
self.applyPatchList(options['patches'].replace('#',' '),
'--no-backup-if-mismatch -p2', options['patch-binary'],
os.path.join(pypy_src, 'lib-python', version))
with open(os.path.join(pypy_src, 'Makefile'), 'w') as f:
f.write(options['makefile'].replace('\n|','\n')[1:] % {
'build_cffi_imports': build_cffi_imports,
'location': location,
'pycparser': pycparser_lib,
'python': self.python,
'version': version,
})
for cmd in ( ('sed', '-i', '/"_tkinter",/s/^/#/', build_cffi_imports)
, ('make',)
, ('make', 'install')
):
call(cmd, cwd=pypy_src, env=env)
for x in '', version[0], version:
os.symlink('pypy', os.path.join(location, 'bin', 'python' + x))
# WKRD: Buildout does not preserve leading tabs in .installed.cfg
# so prefix with a dummy character.
makefile =
|PREFIX = %(location)s
|export PYPY_USESSION_BASENAME=slapos
|export TMPDIR=$(realpath ..)
|all: pypy/goal/pypy-c %(build_cffi_imports)s
| $^
|c_src_dir:
| cd pypy/goal && PYTHONPATH=%(pycparser)s %(python)s ../../rpython/bin/rpython --batch --source --opt=jit --shared targetpypystandalone
| ln -s ../usession-$$PYPY_USESSION_BASENAME-$$USER/testing_1 $@
|pypy/goal/pypy-c: c_src_dir
| $(MAKE) -C $<
| mv $</libpypy-c.so $</pypy-c pypy/goal
| touch $@
|install:
| mkdir -p $(PREFIX)/bin $(PREFIX)/include
| find lib_pypy lib-python/%(version)s -type d '(' '(' -name __pycache__ -o -name _tkinter -o -name test -o -name tests ')' -prune -o -print ')' \
| |while read d; do mkdir -p $(PREFIX)/$$d && find $$d -maxdepth 1 -type f ! -name '*.o' ! -name '*.c' ! -name '*.pyc' |xargs -r cp -t $(PREFIX)/$$d; done
| d=lib-python/%(version)s/test && mkdir -p $(PREFIX)/$$d && for x in __init__ pystone regrtest test_support; do echo $$d/$$x.py; done |xargs -r cp -t $(PREFIX)/$$d
| cd lib-python && cp conftest.py stdlib-version.* $(PREFIX)/lib-python
| cp -r include/pypy_*.h pypy/module/cpyext/include/* pypy/module/cpyext/parse/* $(PREFIX)/include
| cd pypy/goal && cp libpypy-c.so $(PREFIX)/bin && cp pypy-c $(PREFIX)/bin/pypy
| rm $(PREFIX)/lib_pypy/_cffi_ssl/tools/make_ssl_data.py # this is a Py3 script
| rmdir $(PREFIX)/lib_pypy/_cffi_ssl/tools
| cd $(PREFIX) && find lib_pypy lib-python/%(version)s -name '*.py' |bin/pypy -Bm py_compile -
# the entry "-Wl,-rpath=${file:location}/lib" below is needed by python-magic,
# which would otherwise load the system libmagic.so with ctypes
environment = environment =
C_INCLUDE_PATH=${bzip2:location}/include:${gdbm:location}/include:${libexpat:location}/include:${ncurses:location}/include:${ncurses:location}/include:${openssl:location}/include:${sqlite3:location}/include:${zlib:location}/include C_INCLUDE_PATH=${bzip2:location}/include:${gdbm:location}/include:${libexpat:location}/include:${ncurses:location}/include:${ncurses:location}/include:${openssl:location}/include:${sqlite3:location}/include:${zlib:location}/include
LDFLAGS=-L${bzip2:location}/lib -L${gdbm:location}/lib -L${libexpat:location}/lib -L${libffi:location}/lib -L${ncurses:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -L${zlib:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${file:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib LDFLAGS=-L${bzip2:location}/lib -L${gdbm:location}/lib -L${libexpat:location}/lib -L${libffi:location}/lib -L${ncurses:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -L${zlib:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib
PATH=${pkgconfig:location}/bin:%(PATH)s PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${libffi:location}/lib/pkgconfig PKG_CONFIG_PATH=${libffi:location}/lib/pkgconfig
@@ -26,13 +26,6 @@ scripts =
 arguments = sys.argv[1:] + ["bootstrap"]
 python = python2.7
-[python2.7-lib-patches]
-# Patches that apply to both CPython & PyPy.
-patches =
-  ${:_profile_base_location_}/asyncore_poll_insteadof_select.patch#ab6991c0ee6e25aeb8951e71f280a2f1
-  ${:_profile_base_location_}/fix_compiler_module_issue_20613.patch#94443a77f903e9de880a029967fa6aa7
-  ${:_profile_base_location_}/py27-subproc-closefds-fast.patch#e495e44491694a8972da11739206f2e6
 [python2.7]
 recipe = slapos.recipe.cmmi
 shared = true
@@ -44,9 +37,12 @@ version = 2.7
 executable = @@LOCATION@@/bin/python${:version}
 patch-options = -p1
-patches = ${python2.7-lib-patches:patches}
-  ${:_profile_base_location_}/disabled_module_list.patch#e038a8016475574c810cbaaf0e42f4ac
-  ${:_profile_base_location_}/pytracemalloc_pep445.patch#9f3145817afa2b7fad801fde8447e396
+patches =
+  ${:_profile_base_location_}/fix_compiler_module_issue_20613.patch#94443a77f903e9de880a029967fa6aa7
+  ${:_profile_base_location_}/pytracemalloc_pep445.patch#9f3145817afa2b7fad801fde8447e396
+  ${:_profile_base_location_}/disabled_module_list.patch#e038a8016475574c810cbaaf0e42f4ac
+  ${:_profile_base_location_}/asyncore_poll_insteadof_select.patch#ab6991c0ee6e25aeb8951e71f280a2f1
+  ${:_profile_base_location_}/py27-subproc-closefds-fast.patch#e495e44491694a8972da11739206f2e6
 url =
   http://www.python.org/ftp/python/${:package_version}/Python-${:package_version}${:package_version_suffix}.tar.xz
 configure-options =
@@ -74,4 +70,4 @@ post-install = cd '%(prefix)s' &&
 environment =
   PATH=${patch:location}/bin:${xz-utils:location}/bin:%(PATH)s
   CPPFLAGS=-I${zlib:location}/include -I${readline:location}/include -I${libexpat:location}/include -I${libffi:location}/include -I${ncurses:location}/include -I${ncurses:location}/include -I${bzip2:location}/include -I${gdbm:location}/include -I${openssl:location}/include -I${sqlite3:location}/include -I${gettext:location}/include -fPIC
-  LDFLAGS=-L${bzip2:location}/lib -L${gdbm:location}/lib -L${gettext:location}/lib -L${libexpat:location}/lib -L${libffi:location}/lib -L${ncurses:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -L${readline:location}/lib -L${zlib:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${file:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib
+  LDFLAGS=-L${zlib:location}/lib -L${readline:location}/lib -L${libexpat:location}/lib -L${libffi:location}/lib -L${ncurses:location}/lib -L${bzip2:location}/lib -L${gdbm:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${sqlite3:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${file:location}/lib
@@ -55,7 +55,7 @@ def setUpModule():
 class ERP5InstanceTestMeta(type):
   """Adjust ERP5InstanceTestCase instances to be run in several flavours (e.g. NEO/ZEO)
-  Adjustments can be declared via setting the '__test_matrix__' attribute
+  Adjustements can be declared via setting the '__test_matrix__' attribute
   of a test case.
   A test matrix is a dict which maps the flavoured class name suffix to
   a tuple of parameters.
@@ -181,7 +181,7 @@ class ERP5InstanceTestCase(SlapOSInstanceTestCase, metaclass=ERP5InstanceTestMet
   @classmethod
   def getRootPartitionConnectionParameterDict(cls):
-    """Return the output parameters from the root partition"""
+    """Return the output paramters from the root partition"""
     return json.loads(
       cls.computer_partition.getConnectionParameterDict()['_'])
......
##############################################################################
#
# Copyright (c) 2022 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import datetime
import json
import pathlib
import subprocess
import time
import typing
import urllib.parse
import psutil
import requests
from . import ERP5InstanceTestCase, default, matrix, setUpModule
from .test_erp5 import ZopeSkinsMixin
class TestOrderBuildPackingListSimulation(
ZopeSkinsMixin,
ERP5InstanceTestCase,
):
"""Create orders and build packing lists.
"""
__partition_reference__ = 's'
__test_matrix__ = matrix((default, ))
_start: datetime.datetime
_previous: datetime.datetime
@classmethod
def getInstanceParameterDict(cls) -> dict:
return {
'_':
json.dumps(
{
"bt5":
" ".join(
[
"erp5_full_text_mroonga_catalog",
"erp5_configurator_standard",
"erp5_scalability_test",
]),
"mariadb": {
# We use a large innodb-buffer-pool-size because the simulation
# select method used for sale packing list does not use index and
# cause slpow queries
"innodb-buffer-pool-size": 32 * 1024 * 1024 * 1024, # 32Go
},
"zope-partition-dict": {
"activities": {
"instance-count": 32,
"family": "activities",
"thread-amount": 2,
"port-base": 2300
},
"default": {
"instance-count": 1,
"family": "default",
"port-base": 2200
},
},
})
}
@classmethod
def _setUpClass(cls) -> None:
super()._setUpClass()
cls.zope_base_url = cls._getAuthenticatedZopeUrl('')
cls.create_sale_order_batch_url = urllib.parse.urljoin(
cls.zope_base_url, 'ERP5Site_createScalabilityTestSaleOrderBatch')
def setUp(self) -> None:
super().setUp()
self.measurement_file = open(f'measures{self.id()}.jsonl', 'w')
self.addCleanup(self.measurement_file.close)
def write_measurement(
self, measurement: dict[str, typing.Union[str, float]]) -> None:
json.dump(
measurement,
self.measurement_file,
)
self.measurement_file.write('\n')
self.measurement_file.flush()
def take_measurements(self, step: str) -> None:
# Time for this iteration
now = datetime.datetime.now()
elapsed = now - self._previous
self._previous = now
# Memory usage of all zopes
with self.slap.instance_supervisor_rpc as supervisor:
zope_memory_info_list = [
psutil.Process(process['pid']).memory_info()
for process in supervisor.getAllProcessInfo()
if process['name'].startswith('zope-') and process['pid']
]
zope_total_rss = sum(mem.rss for mem in zope_memory_info_list)
zope_count = len(zope_memory_info_list)
# Database size
root_fs = pathlib.Path(
self.getComputerPartitionPath('zodb')) / 'srv' / 'zodb' / 'root.fs'
root_fs_size = root_fs.stat().st_size
self.logger.info(
"Measurements for %s (after %s): "
"elapsed=%s zope_total_rss=%s / %s root_fs_size=%s",
step,
now - self._start,
elapsed,
zope_total_rss,
zope_count,
root_fs_size,
)
self.write_measurement(
{
'step': step,
'step_duration_seconds': elapsed.total_seconds(),
'step_duration': str(elapsed),
'zope_total_rss': zope_total_rss,
'zope_count': zope_count,
'root_fs_size': root_fs_size,
'now': str(now),
})
def test(self) -> None:
self._start = self._previous = datetime.datetime.now()
with requests.Session() as session:
ret = session.get(
urllib.parse.urljoin(
self.zope_base_url, 'ERP5Site_bootstrapScalabilityTest'),
verify=False,
params={'user_quantity:int': 1})
if not ret.ok:
self.logger.error(ret.text)
ret.raise_for_status()
self._waitForActivities(
timeout=datetime.timedelta(hours=2).total_seconds())
# XXX default reference generator for sale packing list cause
# many conflict errors, disable it.
self._addPythonScript(
script_id='Delivery_generateReference',
params='*args, **kw',
body='context.setReference("no reference for benchmark")',
)
self.take_measurements("setup")
# XXX now that we have installed business templates,
# restart all zopes to workaround a bug with accessors not
# working after some time (packing_list_line.getStartDate no longer
# acquire from parent's sale packing list)
with self.slap.instance_supervisor_rpc as supervisor:
supervisor.stopAllProcesses()
supervisor.startAllProcesses()
self.slap.waitForInstance()
self.take_measurements("restart")
with requests.Session() as session:
for i in range(100):
for j in range(5):
ret = session.get(
self.create_sale_order_batch_url,
verify=False,
params={
'random_seed': f'{i}.{j}',
'order_count:int': '50',
},
)
if not ret.ok:
self.logger.error(ret.text)
ret.raise_for_status()
self._waitForActivities(
timeout=datetime.timedelta(hours=2).total_seconds())
self.take_measurements(f"iteration_{i+1:03}")
# final measurements, take a "zodb analyze" snapshot
zodb_cmd = pathlib.Path(
self.computer_partition_root_path
) / 'software_release' / 'bin' / 'zodb'
root_fs = pathlib.Path(
self.getComputerPartitionPath('zodb')) / 'srv' / 'zodb' / 'root.fs'
self.write_measurement(
{
'zodb analyze':
subprocess.check_output((zodb_cmd, 'analyze', root_fs), text=True)
})
# and a pt-query-digest for slow log
pt_query_digest = pathlib.Path(
self.computer_partition_root_path
) / 'software_release' / 'parts' / 'percona-toolkit' / 'bin' / 'pt-query-digest'
mariadb_slowquery_log = pathlib.Path(
self.getComputerPartitionPath(
'mariadb')) / 'var' / 'log' / 'mariadb_slowquery.log'
self.write_measurement(
{
'pt-query-digest':
subprocess.check_output(
(pt_query_digest, mariadb_slowquery_log), text=True)
})
breakpoint()
@@ -27,7 +27,6 @@
 import contextlib
-import datetime
 import glob
 import http.client
 import json
@@ -451,13 +450,10 @@ class ZopeSkinsMixin:
   @classmethod
   def _setUpClass(cls):
     super()._setUpClass()
-    cls._waitForActivities()
-
-  @classmethod
-  def _waitForActivities(cls, timeout=datetime.timedelta(minutes=10).total_seconds()):
-    """Wait for ERP5 to be ready and have processed all activities.
-    """
-    for _ in range(int(timeout / 5)):
+    param_dict = cls.getRootPartitionConnectionParameterDict()
+    # wait for ERP5 to be ready and have processed all activities
+    # from initial setup
+    for _ in range(120):
       with cls.getXMLRPCClient() as erp5_xmlrpc_client:
         try:
           if erp5_xmlrpc_client.portal_activities.countMessage() == 0:
@@ -468,7 +464,7 @@ class ZopeSkinsMixin:
           pass
         time.sleep(5)
     else:
-      raise AssertionError("Timeout waiting for activities")
+      raise AssertionError("ERP5 is not ready")

   @classmethod
   def _getAuthenticatedZopeUrl(cls, path, family_name='default'):
@@ -539,11 +535,6 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin):
             "longrequest-logger-interval": 1,
             "longrequest-logger-timeout": 1,
           },
-          "multiple": {
-            "family": "multiple",
-            "instance-count": 3,
-            "port-base": 2210,
-          },
         },
         "wsgi": cls.wsgi,
       })
@@ -838,7 +829,7 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin):
       )
       if not resp.ok:
         # XXX we start by flushing existing activities from site creation
-        # and initial upgrader run. During this time it may happen that
+        # and inital upgrader run. During this time it may happen that
         # ERP5 replies with site errors, we tolerate these errors and only
         # check the final state.
         continue
@@ -858,31 +849,6 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin):
     ).raise_for_status()
     wait_for_activities(10)

-  def test_multiple_zope_family_log_files(self):
-    logfiles = [
-      os.path.basename(p) for p in glob.glob(
-        os.path.join(
-          self.getComputerPartitionPath('zope-multiple'), 'var', 'log', '*'))
-    ]
-    self.assertEqual(
-      sorted([l for l in logfiles if l.startswith('zope')]), [
-        'zope-0-Z2.log',
-        'zope-0-event.log',
-        'zope-0-neo-root.log',
-        'zope-1-Z2.log',
-        'zope-1-event.log',
-        'zope-1-neo-root.log',
-        'zope-2-Z2.log',
-        'zope-2-event.log',
-        'zope-2-neo-root.log',
-      ] if '_neo' in self.__class__.__name__ else [
-        'zope-0-Z2.log',
-        'zope-0-event.log',
-        'zope-1-Z2.log',
-        'zope-1-event.log',
-        'zope-2-Z2.log',
-        'zope-2-event.log',
-      ])
-
 class TestZopeMedusa(ZopeTestMixin, ERP5InstanceTestCase):
   wsgi = False
......
@@ -14,7 +14,7 @@
 # not need these here).
 [instance-common]
 filename = instance-common.cfg.in
-md5sum = e000e7134113b9d1c63d40861eaf0489
+md5sum = 402d09fbe2927f4f744ad6c0dc4329b9

 [root-common]
 filename = root-common.cfg.in
@@ -30,11 +30,11 @@ md5sum = 9f27195d770b2f57461c60a82c851ab9
 [instance-neo]
 filename = instance-neo.cfg.in
-md5sum = 504b021715566e69ad664101f1b12a5c
+md5sum = 03a6c72619b02f5100fa7ccc8dc0e5ea

 [template-neo-my-cnf]
 filename = my.cnf.in
-md5sum = 56ea8f452d9e1526157ab9d03e631e1a
+md5sum = a8939dcf440126a30db661b5a0a1bfb7

 [template-neo]
 filename = instance.cfg.in
@@ -46,4 +46,4 @@ md5sum = 5afd326de385563b5aeac81039f23341
 [runTestSuite.in]
 _update_hash_filename_ = runTestSuite.in
-md5sum = 1e2126d02f54daaa624cefeea004c2da
+md5sum = 4e7f5b5230800a65c71310a518225119
@@ -48,3 +48,6 @@ extra-context =
   raw template_mysqld_wrapper {{ template_mysqld_wrapper }}
   raw template_neo_my_cnf {{ template_neo_my_cnf }}
 {%- endif %}
+{%- if pypy_location is defined %}
+  raw pypy_location {{ pypy_location }}
+{%- endif %}
@@ -122,10 +122,6 @@
       "description": "Size of private tmpfs mount to store the database. See filesystems/tmpfs.txt in Linux documentation. Use only for testing.",
       "type": "string"
     },
-    "test-binding-list": {
-      "description": "List of bindings to test when running the test suite.",
-      "type": "array"
-    },
     "mysql": {
       "description": "Dictionary containing parameters for MySQL.",
       "default": {},
@@ -137,13 +133,13 @@
         }
       },
       "additionalProperties": {
-        "description": "To configure important parameters like innodb_buffer_pool_size, rocksdb_block_cache_size, etc.",
+        "description": "To configure important parameters like innodb_buffer_pool_size, tokudb_cache_size, etc.",
         "type": "string"
       },
       "type": "object"
     },
     "engine": {
-      "description": "Configures storage engine, currently only InnoDB and RocksDB are supported. Defaults to NEO's default.",
+      "description": "Configures storage engine, currently only InnoDB, RocksDB and TokuDB are supported. Defaults to NEO's default.",
       "type": "string"
     }
   },
......
@@ -5,7 +5,7 @@
 {% set private_tmpfs = slapparameter_dict.get('private-tmpfs') -%}
 {% set storage_type = slapparameter_dict.get('storage-type') or (
   'MySQL' if mariadb_location is defined else 'SQLite') -%}
-{% set mysql = storage_type != 'SQLite' -%}
+{% set mysql = storage_type == 'MySQL' -%}
 {% if mysql -%}
 [{{ section('mysqld') }}]
@@ -167,37 +167,33 @@ post = ${binary-wrap-mysql:command-line} -e "FLUSH LOGS"
 {% if runTestSuite_in is defined -%}
-# bin/runTestSuite to run NEO tests
-[runTestSuite-parameters]
-bin_directory = {{ bin_directory }}
-prepend_path = {{ mariadb_location }}/bin
-test_binding_list = {{ dumps(slapparameter_dict.get('test-binding-list', (
-  'MySQLdb', 'PyMySQL', 'SQLite',
-  ))) }}
 [{{ section('runTestSuite') }}]
 {%- if private_tmpfs %}
 recipe = slapos.cookbook:wrapper
 wrapper-path = ${directory:bin}/${:_buildout_section_name_}
 private-tmpfs = {{ private_tmpfs }} ${directory:tmp}
 command-line = ${runTestSuite-ns:output}
 [runTestSuite-ns]
 {%- endif %}
 recipe = slapos.recipe.template:jinja2
 output = ${directory:bin}/${:_buildout_section_name_}
 url = {{ runTestSuite_in }}
 context =
   section directory directory
   section my_cnf_parameters my-cnf-parameters
-  section parameters runTestSuite-parameters
-{%- if private_tmpfs %}
+  raw bin_directory {{ bin_directory }}
+  raw prepend_path {{ mariadb_location }}/bin
+{%- if pypy_location is defined %}
+  raw pypy_location {{ pypy_location }}/bin/pypy
+{%- endif %}
+{%- if private_tmpfs %}
   key datadir my-cnf-parameters:data-directory
   key results_directory directory:results
 [directory]
 results = ${directory:srv}/tests
 {%- endif %}
 {%- endif %}
 {%- endif %}
......
@@ -13,6 +13,10 @@
 {% do extra_dict.setdefault('rocksdb_max_log_file_size', '16M') -%}
 {% endif -%}
+{% if not engine or engine == 'TokuDB' -%}
+{% do plugins.append('ha_tokudb') -%}
+{% endif -%}
 [mysqld]
 skip_networking
 socket = {{ socket }}
......
-#!{{ parameters.bin_directory }}/runTestSuite_py
+#!{{ bin_directory }}/runTestSuite_py
 """
   Script to run NEO test suite using Nexedi's test node framework.
 """
@@ -13,12 +13,12 @@ SUMMARY_RE = re.compile(
   r' (.*) (?P<duration>\d+(\.\d*)?|\.\d+)s', re.MULTILINE)
 PATH = os.environ['PATH']
-PATH = {{ repr(parameters.prepend_path) }} + (PATH and ':' + PATH)
+PATH = {{ repr(prepend_path) }} + (PATH and ':' + PATH)
 # NEO specific environment
 TEMP_DIRECTORY = {{ repr(directory.tmp) }}
 NEO_DB_SOCKET = {{ repr(my_cnf_parameters.socket) }}
-RUN_NEO_TESTS_COMMAND = {{ repr(parameters.bin_directory + '/neotestrunner') }}
+RUN_NEO_TESTS_COMMAND = {{ repr(bin_directory + '/neotestrunner') }}
 def parseTestStdOut(data):
   """
@@ -90,7 +90,7 @@ def main():
   test_suite_title = args.test_suite_title or args.test_suite
-  test_name_list = {{ repr(parameters.test_binding_list) }}
+  test_name_list = 'SQLite', 'MySQL'
   if args.master_url:
     tool = taskdistribution.TaskDistributor(portal_url = args.master_url)
@@ -115,15 +115,24 @@ def main():
       shutil.rmtree(temp)
     os.mkdir(temp)
-    args = [RUN_NEO_TESTS_COMMAND, '-ufz']
+    args = [RUN_NEO_TESTS_COMMAND,
+{%- if pypy_location is defined -%}
+      '-fz'
+{%- else -%}
+      '-ufz'
+{%- endif -%}
+      ]
     command = ' '.join(args)
     env = {'PATH': PATH,
            'TEMP': temp,
+{%- if pypy_location is defined %}
+           'NEO_PYPY': {{ repr(pypy_location) }},
+{%- endif %}
            'NEO_TESTS_ADAPTER': adapter,
            'NEO_TEST_ZODB_FUNCTIONAL': '1',
            'NEO_DB_USER': 'root'}
     try:
-      if adapter != 'SQLite':
+      if adapter == 'MySQL':
         env['NEO_DB_SOCKET'] = NEO_DB_SOCKET
         timeout = time() + 60
         while not os.path.exists(NEO_DB_SOCKET):
......
@@ -57,9 +57,7 @@ eggs = neoppod[admin, ctl, master]
   zope.testing
   coverage
   setproctitle
-adapter-egg =
-  ${python-mysqlclient:egg}
-  PyMySQL
+adapter-egg = ${python-mysqlclient:egg}

 [download-base-neo]
 recipe = slapos.recipe.build:download
@@ -130,7 +128,6 @@ inline =
 coverage = 5.5
 ecdsa = 0.13
 mysqlclient = 1.3.12
-PyMySQL = 0.10.1
 pycrypto = 2.6.1
 cython-zstd = 0.2
 funcsigs = 1.0.2
......
[buildout]
extends = software.cfg
[mariadb]
location = ${mariadb-10.4:location}
@@ -3,5 +3,6 @@ extends =
   ../../component/pypy/buildout.cfg
   software.cfg

-[python]
-part = pypy2
+[instance-common]
+context +=
+  key pypy_location pypy2:location
 [instance-profile]
 filename = instance.cfg.in
-md5sum = b6a9ac23e2bd0779408464f6fbf4a321
+md5sum = fa8d1d0a44720e0ffa4f6a953b65eae4
@@ -62,11 +62,6 @@
       "description": "Network address URL type used for Pub/Sub.",
       "type": "string",
       "default": "opc.udp://224.0.0.22:4840/"
-    },
-    "network_interface": {
-      "description": "Network interface to be used for Pub/Sub.",
-      "type": "string",
-      "default": ""
     }
   }
 }
@@ -19,7 +19,7 @@ recipe = slapos.cookbook:wrapper
 environment =
   LD_LIBRARY_PATH=$LD_LIBRARY_PATH:{{ open62541_location }}/lib:{{ mbedtls_location }}/lib
 command-line =
-  {{ coupler_location }}server -d ${instance-parameter:configuration.coupler_block_device} -s ${instance-parameter:configuration.coupler_i2c_slave_list} -p ${instance-parameter:configuration.opc_ua_port} -u ${instance-parameter:configuration.username} -w ${instance-parameter:configuration.password} -b ${instance-parameter:configuration.heart_beat} -t ${instance-parameter:configuration.heart_beat_interval} -l ${instance-parameter:configuration.heart_beat_id_list} -n ${instance-parameter:configuration.network_address_url_data_type} -o ${instance-parameter:configuration.heart_beat_timeout_interval} -i ${instance-parameter:configuration.id} -m ${instance-parameter:configuration.mode} -j ${instance-parameter:configuration.network_interface}
+  {{ coupler_location }}/server -d ${instance-parameter:configuration.coupler_block_device} -s ${instance-parameter:configuration.coupler_i2c_slave_list} -p ${instance-parameter:configuration.opc_ua_port} -u ${instance-parameter:configuration.username} -w ${instance-parameter:configuration.password} -b ${instance-parameter:configuration.heart_beat} -t ${instance-parameter:configuration.heart_beat_interval} -l ${instance-parameter:configuration.heart_beat_id_list} -n ${instance-parameter:configuration.network_address_url_data_type} -o ${instance-parameter:configuration.heart_beat_timeout_interval} -i ${instance-parameter:configuration.id} -m ${instance-parameter:configuration.mode}
 wrapper-path = ${directory:service}/coupler-opc-ua
[instance-parameter] [instance-parameter]
@@ -42,7 +42,6 @@ configuration.heart_beat_interval = 500
 configuration.heart_beat_id_list =
 configuration.network_address_url_data_type = opc.udp://224.0.0.22:4840/
 configuration.heart_beat_timeout_interval = 2000
-configuration.network_interface =

 [directory]
 recipe = slapos.cookbook:mkdirectory
......
@@ -44,7 +44,7 @@ environment +=
 recipe = slapos.recipe.build:gitclone
 git-executable = ${git:location}/bin/git
 repository = https://lab.nexedi.com/nexedi/osie.git
-revision = 7c49d252659c3c1278591f8c789ab3dd4fa25142
+revision = dc273b64b594de90080afdef873403f953c989f5

 [compile-coupler]
 recipe = slapos.recipe.cmmi
......
@@ -22,15 +22,15 @@ md5sum = 5784bea3bd608913769ff9a8afcccb68
 [profile-frontend]
 filename = instance-frontend.cfg.in
-md5sum = 83dca4b9ba8d8beeed2d5d861dee0e3f
+md5sum = f6a7678e8dc18871d6b18e2138e95f7f

 [profile-master]
 filename = instance-master.cfg.in
-md5sum = 2ac67006dede70c905b0d32b4a995b9a
+md5sum = 2aaab85bad51136b38f6a16d662a7b3e

 [profile-slave-list]
 filename = instance-slave-list.cfg.in
-md5sum = 1e6f3c8eec64cb5c59b06c1a35502e6b
+md5sum = 939f475b5a1a67ade2617802e5dde3a9

 [profile-master-publish-slave-information]
 filename = instance-master-publish-slave-information.cfg.in
......
@@ -370,6 +370,7 @@ software_type = single-custom-personal
 organization = {{ slapparameter_dict['cluster-identification'] }}
 organizational-unit = {{ instance_parameter_dict['configuration.frontend-name'] }}
 backend-client-caucase-url = {{ slapparameter_dict['backend-client-caucase-url'] }}
+partition_ipv6 = ${slap-configuration:ipv6-random}
 url-ready-file = ${directory:var}/url-ready.txt
 quic = {{ FRONTEND_HAPROXY_QUIC }}
 extra-context =
......
@@ -74,6 +74,8 @@
   'https-only',
   'https-url',
   'https-url-netloc-list',
+  'monitor-ipv4-test',
+  'monitor-ipv6-test',
   'path',
   'prefer-gzip-encoding-to-backend',
   'request-timeout',
......
@@ -100,6 +100,18 @@
       "title": "HTTPS Backend URL",
       "type": "string"
     },
+    "monitor-ipv4-test": {
+      "default": "",
+      "description": "IPv4 Address for the frontend keep monitoring with ping",
+      "title": "IPv4 Address to Monitor Packet Lost",
+      "type": "string"
+    },
+    "monitor-ipv6-test": {
+      "default": "",
+      "description": "IPv6 Address for the frontend keep monitoring with ping6 (without brackets)",
+      "title": "IPv6 Address to Monitor Packet Lost",
+      "type": "string"
+    },
     "websocket-path-list": {
       "default": "",
       "description": "Space separated list of path to the websocket application. If not set the whole slave will be websocket, if set then / will be HTTP, and /<websocket-path> will be WSS. In order to have ' ' in the space use '%20'",
......
...@@ -371,6 +371,32 @@ local_ipv4 = {{ dumps('' ~ instance_parameter_dict['ipv4-random']) }} ...@@ -371,6 +371,32 @@ local_ipv4 = {{ dumps('' ~ instance_parameter_dict['ipv4-random']) }}
{%- endif %} {%- endif %}
{%- endfor %} {%- endfor %}
{%- set monitor_ipv6_test = slave_instance.get('monitor-ipv6-test', '') %}
{%- if monitor_ipv6_test %}
{%- set monitor_ipv6_section_title = 'check-%s-ipv6-packet-list-test' % slave_instance.get('slave_reference') %}
{%- do part_list.append(monitor_ipv6_section_title) %}
[{{ monitor_ipv6_section_title }}]
<= monitor-promise-base
promise = check_icmp_packet_lost
name = {{ monitor_ipv6_section_title }}.py
config-address = {{ dumps(monitor_ipv6_test) }}
# promise frequency in minutes (2 times/day)
config-frequency = 720
{%- endif %}
{%- set monitor_ipv4_test = slave_instance.get('monitor-ipv4-test', '') %}
{%- if monitor_ipv4_test %}
{%- set monitor_ipv4_section_title = 'check-%s-ipv4-packet-list-test' % slave_instance.get('slave_reference') %}
{%- do part_list.append(monitor_ipv4_section_title) %}
[{{ monitor_ipv4_section_title }}]
<= monitor-promise-base
promise = check_icmp_packet_lost
name = {{ monitor_ipv4_section_title }}.py
config-address = {{ dumps(monitor_ipv4_test) }}
config-ipv4 = true
# promise frequency in minutes (2 times/day)
config-frequency = 720
{%- endif %}
{#- ############################### #} {#- ############################### #}
{#- Prepare Slave Information #} {#- Prepare Slave Information #}
{%- do slave_instance_information_list.append(slave_publish_dict) %} {%- do slave_instance_information_list.append(slave_publish_dict) %}
......
@@ -1910,6 +1910,12 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
         'url': cls.backend_url,
         'disabled-cookie-list': 'Chocolate',
       },
+      'monitor-ipv4-test': {
+        'monitor-ipv4-test': 'monitor-ipv4-test',
+      },
+      'monitor-ipv6-test': {
+        'monitor-ipv6-test': 'monitor-ipv6-test',
+      },
       'ciphers': {
         'ciphers': 'RSA-3DES-EDE-CBC-SHA RSA-AES128-CBC-SHA',
       },
@@ -2094,9 +2100,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
       'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address,
       'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
       'domain': 'example.com',
-      'accepted-slave-amount': '54',
+      'accepted-slave-amount': '56',
       'rejected-slave-amount': '0',
-      'slave-amount': '54',
+      'slave-amount': '56',
       'rejected-slave-dict': {
       },
       'warning-slave-dict': {
...@@ -3647,6 +3653,81 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin): ...@@ -3647,6 +3653,81 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
result.status_code result.status_code
) )
def test_monitor_ipv6_test(self):
parameter_dict = self.assertSlaveBase('monitor-ipv6-test')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
self.assertEqual(
self.certificate_pem,
der2pem(result.peercert))
self.assertEqual(http.client.SERVICE_UNAVAILABLE, result.status_code)
result_http = fakeHTTPResult(
parameter_dict['domain'], 'test-path')
self.assertEqual(
http.client.FOUND,
result_http.status_code
)
self.assertEqual(
'https://monitoripv6test.example.com:%s/test-path' % (HTTP_PORT,),
result_http.headers['Location']
)
monitor_file = glob.glob(
os.path.join(
self.instance_path, '*', 'etc', 'plugin',
'check-_monitor-ipv6-test-ipv6-packet-list-test.py'))[0]
# get promise module and check that parameters are ok
self.assertEqual(
getPromisePluginParameterDict(monitor_file),
{
'frequency': '720',
'address': 'monitor-ipv6-test'
}
)
def test_monitor_ipv4_test(self):
parameter_dict = self.assertSlaveBase('monitor-ipv4-test')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
self.assertEqual(
self.certificate_pem,
der2pem(result.peercert))
self.assertEqual(http.client.SERVICE_UNAVAILABLE, result.status_code)
result_http = fakeHTTPResult(
parameter_dict['domain'], 'test-path')
self.assertEqual(
http.client.FOUND,
result_http.status_code
)
self.assertEqual(
'https://monitoripv4test.example.com:%s/test-path' % (HTTP_PORT,),
result_http.headers['Location']
)
monitor_file = glob.glob(
os.path.join(
self.instance_path, '*', 'etc', 'plugin',
'check-_monitor-ipv4-test-ipv4-packet-list-test.py'))[0]
# get promise module and check that parameters are ok
self.assertEqual(
getPromisePluginParameterDict(monitor_file),
{
'frequency': '720',
'ipv4': 'true',
'address': 'monitor-ipv4-test',
}
)
   def test_ciphers(self):
     parameter_dict = self.assertSlaveBase(
       'ciphers', expected_parameter_dict={
@@ -5966,6 +6047,12 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
         'url': cls.backend_url,
         'default-path': '${section:option}\nn"\newline\n}\n}proxy\n/slashed',
       },
+      'MONITOR-IPV4-TEST-UNSAFE': {
+        'monitor-ipv4-test': '${section:option}\nafternewline ipv4',
+      },
+      'MONITOR-IPV6-TEST-UNSAFE': {
+        'monitor-ipv6-test': '${section:option}\nafternewline ipv6',
+      },
       'BAD-CIPHERS': {
         'ciphers': 'bad ECDHE-ECDSA-AES256-GCM-SHA384 again',
       },
@@ -6068,9 +6155,9 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
       'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address,
       'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
       'domain': 'example.com',
-      'accepted-slave-amount': '3',
+      'accepted-slave-amount': '5',
       'rejected-slave-amount': '28',
-      'slave-amount': '31',
+      'slave-amount': '33',
       'rejected-slave-dict': {
         '_HTTPS-URL': ['slave https-url "https://[fd46::c2ae]:!py!u\'123123\'"'
                        ' invalid'],
...@@ -6324,6 +6411,66 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase): ...@@ -6324,6 +6411,66 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
result.headers['Location'] result.headers['Location']
) )
def test_monitor_ipv4_test_unsafe(self):
parameter_dict = self.assertSlaveBase('MONITOR-IPV4-TEST-UNSAFE')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
self.assertEqual(
self.certificate_pem,
der2pem(result.peercert))
self.assertEqual(http.client.SERVICE_UNAVAILABLE, result.status_code)
result_http = fakeHTTPResult(
parameter_dict['domain'], 'test-path')
self.assertEqual(http.client.FOUND, result_http.status_code)
monitor_file = glob.glob(
os.path.join(
self.instance_path, '*', 'etc', 'plugin',
'check-_MONITOR-IPV4-TEST-UNSAFE-ipv4-packet-list-test.py'))[0]
# get promise module and check that parameters are ok
self.assertEqual(
getPromisePluginParameterDict(monitor_file),
{
'frequency': '720',
'ipv4': 'true',
'address': '${section:option}\nafternewline ipv4',
}
)
def test_monitor_ipv6_test_unsafe(self):
parameter_dict = self.assertSlaveBase('MONITOR-IPV6-TEST-UNSAFE')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
self.assertEqual(
self.certificate_pem,
der2pem(result.peercert))
self.assertEqual(http.client.SERVICE_UNAVAILABLE, result.status_code)
result_http = fakeHTTPResult(
parameter_dict['domain'], 'test-path')
self.assertEqual(http.client.FOUND, result_http.status_code)
monitor_file = glob.glob(
os.path.join(
self.instance_path, '*', 'etc', 'plugin',
'check-_MONITOR-IPV6-TEST-UNSAFE-ipv6-packet-list-test.py'))[0]
# get promise module and check that parameters are ok
self.assertEqual(
getPromisePluginParameterDict(monitor_file),
{
'frequency': '720',
'address': '${section:option}\nafternewline ipv6'
}
)
def test_site_1(self): def test_site_1(self):
self.assertSlaveBase('SITE_1', hostname='duplicate') self.assertSlaveBase('SITE_1', hostname='duplicate')
......
@@ -53,6 +53,8 @@ T-2/var/log/httpd/_https-only_access_log
 T-2/var/log/httpd/_https-only_backend_log
 T-2/var/log/httpd/_https-url-netloc-list_access_log
 T-2/var/log/httpd/_https-url-netloc-list_backend_log
+T-2/var/log/httpd/_monitor-ipv4-test_access_log
+T-2/var/log/httpd/_monitor-ipv6-test_access_log
 T-2/var/log/httpd/_prefer-gzip-encoding-to-backend-https-only_access_log
 T-2/var/log/httpd/_prefer-gzip-encoding-to-backend-https-only_backend_log
 T-2/var/log/httpd/_prefer-gzip-encoding-to-backend_access_log
......
@@ -35,6 +35,8 @@ T-2/etc/plugin/backend_haproxy_http.py
 T-2/etc/plugin/backend_haproxy_https.py
 T-2/etc/plugin/buildout-T-2-status.py
 T-2/etc/plugin/caucase-updater.py
+T-2/etc/plugin/check-_monitor-ipv4-test-ipv4-packet-list-test.py
+T-2/etc/plugin/check-_monitor-ipv6-test-ipv6-packet-list-test.py
 T-2/etc/plugin/check-free-disk-space.py
 T-2/etc/plugin/expose-csr-ip-port-listening.py
 T-2/etc/plugin/frontend-frontend-haproxy-configuration-promise.py
......
@@ -35,6 +35,8 @@ T-2/etc/plugin/backend_haproxy_http.py
 T-2/etc/plugin/backend_haproxy_https.py
 T-2/etc/plugin/buildout-T-2-status.py
 T-2/etc/plugin/caucase-updater.py
+T-2/etc/plugin/check-_monitor-ipv4-test-ipv4-packet-list-test.py
+T-2/etc/plugin/check-_monitor-ipv6-test-ipv6-packet-list-test.py
 T-2/etc/plugin/check-free-disk-space.py
 T-2/etc/plugin/expose-csr-ip-port-listening.py
 T-2/etc/plugin/frontend-frontend-haproxy-configuration-promise.py
......
@@ -18,7 +18,7 @@ md5sum = 55c7fd4dd6a39b31878889fbfb00f995
 [instance-repman.cfg]
 _update_hash_filename_ = instance-repman.cfg.jinja2.in
-md5sum = c5a7a519270c7829bb4c987391fbfc39
+md5sum = 79e8caeb717277f2e09d54afdeac6398

 [config-toml.in]
 _update_hash_filename_ = templates/config.toml.in
......
@@ -248,7 +248,7 @@
     },
     "db-memory-shared-pct": {
       "title": "Percent memory list shared per buffer",
-      "description": "Percent memory shared per buffer. Default: [\"threads:16\", \"innodb:60\", \"myisam:10\", \"aria:10\", \"rocksdb:1\", \"s3:1\", \"archive:1\", \"querycache:0\"]",
+      "description": "Percent memory shared per buffer. Default: [\"threads:16\", \"innodb:60\", \"myisam:10\", \"aria:10\", \"rocksdb:1\", \"tokudb:1\", \"s3:1\", \"archive:1\", \"querycache:0\"]",
       "type": "array",
       "items": {
         "type": "string"
@@ -259,6 +259,7 @@
         "myisam:10",
         "aria:10",
         "rocksdb:1",
+        "tokudb:1",
         "s3:1",
         "archive:1",
         "querycache:0"
......
@@ -234,7 +234,7 @@ proxy-memory = {{ parameter_dict.get("proxy-memory", 1) }}
 db-cpu-cores = {{ parameter_dict.get("db-cpu-cores", 2) }}
 db-disk-iops = {{ parameter_dict.get("db-disk-iops", 300) }}
 db-memory = {{ parameter_dict.get("db-memory", 256) }}
-db-memory-shared-pct = {{ parameter_dict.get("db-memory-shared-pct", ["threads:16", "innodb:60", "myisam:10", "aria:10", "rocksdb:1", "s3:1", "archive:1", "querycache:0"]) | join(',') }}
+db-memory-shared-pct = {{ parameter_dict.get("db-memory-shared-pct", ["threads:16", "innodb:60", "myisam:10", "aria:10", "rocksdb:1", "tokudb:1", "s3:1", "archive:1", "querycache:0"]) | join(',') }}
 db-memory-threaded-pct = {{ parameter_dict.get("db-memory-threaded-pct", ["tmp:70", "join:20", "sort:10"]) | join(',') }}
 # failover
 failover-mode = {{ parameter_dict.get('failover-mode', 'manual') }}
......
@@ -86,7 +86,7 @@ md5sum = 0ac4b74436f554cd677f19275d18d880
 [template-zope]
 filename = instance-zope.cfg.in
-md5sum = 0451190711157fc204418662126d5cf8
+md5sum = 7d3b3769b60f0cc2883beeb05cdf82d7

 [template-balancer]
 filename = instance-balancer.cfg.in
......
...@@ -542,7 +542,7 @@ environment-extra += ...@@ -542,7 +542,7 @@ environment-extra +=
[promise-test-runner-apache-url-executable] [promise-test-runner-apache-url-executable]
# promise to wait for apache partition to have returned the parameter # promise to wait for apache partition to have returned the parameter
recipe = slapos.cookbook:check_parameter recipe = slapos.cookbook:check_parameter
value = {{ slapparameter_dict['test-runner-apache-url-list'] }} value = {{ slapparameter_dict['test-runner-apache-url-list'] }}
expected-not-value = not-ready expected-not-value = not-ready
path = ${directory:bin}/${:_buildout_section_name_} path = ${directory:bin}/${:_buildout_section_name_}
expected-value = expected-value =
......