Commit 38aabd7a authored by Jérome Perrin

Update Release Candidate

parents 1eb6911c daadef35
@@ -48,6 +48,7 @@ recipe = zc.recipe.egg:develop
setup = ${ZEO4-repository:location}
egg = ZEO
egg-versions =
+ZEO = 4.3.1

[ZEO4-repository]
recipe = slapos.recipe.build:gitclone
......
@@ -24,6 +24,7 @@ extends =
../icu/buildout.cfg
../openssl/buildout.cfg
../libnsl/buildout.cfg
+../libsodium/buildout.cfg
../sqlite3/buildout.cfg
../oniguruma/buildout.cfg
../xz-utils/buildout.cfg

@@ -78,9 +79,12 @@ configure-options =
--enable-mbstring
--enable-pcntl
--enable-session
+--enable-sysvsem
--with-apxs2=${apache:location}/bin/apxs
--with-bz2=${bzip2:location}
--with-curl
+--with-freetype
+--with-jpeg
--with-gettext=${gettext:location}
--with-imap-ssl
--with-imap=${cclient:location}

@@ -88,6 +92,7 @@ configure-options =
--with-mysqli=mysqlnd
--with-openssl=${openssl:location}
--with-pdo-mysql=mysqlnd
+--with-sodium=${libsodium:location}
--with-zip
--with-zlib

@@ -95,7 +100,7 @@ configure-options =
# It will create a pear/temp directory under the SR instead of a shared /tmp/pear/temp.
# XXX we could mkdir tmp there
environment =
-PKG_CONFIG_PATH=${libxml2:location}/lib/pkgconfig:${openssl:location}/lib/pkgconfig:${libzip:location}/lib/pkgconfig:${sqlite3:location}/lib/pkgconfig:${curl:location}/lib/pkgconfig:${icu:location}/lib/pkgconfig:${oniguruma:location}/lib/pkgconfig:${argon2:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig:${mariadb:location}/lib/pkgconfig:${libjpeg:location}/lib/pkgconfig:${libpng:location}/lib/pkgconfig:${freetype:location}/lib/pkgconfig:${libiconv:location}/lib/pkgconfig:${libzip:location}/lib/pkgconfig
+PKG_CONFIG_PATH=${libxml2:location}/lib/pkgconfig:${openssl:location}/lib/pkgconfig:${libzip:location}/lib/pkgconfig:${sqlite3:location}/lib/pkgconfig:${curl:location}/lib/pkgconfig:${icu:location}/lib/pkgconfig:${oniguruma:location}/lib/pkgconfig:${argon2:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig:${mariadb:location}/lib/pkgconfig:${libjpeg:location}/lib/pkgconfig:${libpng:location}/lib/pkgconfig:${freetype:location}/lib/pkgconfig:${libiconv:location}/lib/pkgconfig:${libzip:location}/lib/pkgconfig:${libsodium:location}/lib/pkgconfig
PATH=${pkgconfig:location}/bin:${bzip2:location}/bin:${libxml2:location}/bin:${xz-utils:location}/bin:%(PATH)s
CPPFLAGS=-I${libzip:location}/include
LDFLAGS=-L${bzip2:location}/lib -Wl,-rpath -Wl,${bzip2:location}/lib -Wl,-rpath -Wl,${curl:location}/lib -L${libtool:location}/lib -Wl,-rpath -Wl,${libtool:location}/lib -L${mariadb:location}/lib -Wl,-rpath -Wl,${mariadb:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -L${libzip:location}/lib -Wl,-rpath -Wl,${libzip:location}/lib -L${argon2:location}/lib/x86_64-linux-gnu -Wl,-rpath -Wl,${argon2:location}/lib/x86_64-linux-gnu -Wl,-rpath -Wl,${zstd:location}/lib -L${libnsl:location}/lib -Wl,-rpath -Wl,${libnsl:location}/lib -L${sqlite3:location}/lib -Wl,-rpath -Wl,${sqlite3:location}/lib
......
@@ -5,21 +5,24 @@
[buildout]
extends =
../chromium/buildout.cfg
-../nss/buildout.cfg
+../glib/buildout.cfg
../nspr/buildout.cfg
+../nss/buildout.cfg
+../pcre2/buildout.cfg
../xorg/buildout.cfg
parts =
chromedriver-wrapper

+[chromedriver-wrapper-120]
+<= chromedriver-wrapper
+part = chromedriver-120

[chromedriver-wrapper-91]
<= chromedriver-wrapper
part = chromedriver-91

-[chromedriver-wrapper-2.41]
-<= chromedriver-wrapper
-part = chromedriver-2.41

[chromedriver-wrapper]
# generate a wrapper named ${:wrapper-name} setting $LD_LIBRARY_PATH

@@ -42,30 +45,36 @@ install =
[chromedriver]
-<= chromedriver-91
+<= chromedriver-120

-[chromedriver-2.41]
+[chromedriver-120]
<= chromedriver-download
-version = 2.41
+version = 120.0.6099.109
-# Supports Chrome v67-69
+revision-x86_64 = 1217362
-md5sum-x86_64 = fbd8b9561575054e0e7e9cc53b680a70
+generation-x86_64 = 1698717838856458
+md5sum-x86_64 = 5cb8d386f01052cfc58c80ec63477db0

[chromedriver-91]
<= chromedriver-download
+url = https://chromedriver.storage.googleapis.com/${:version}/chromedriver_linux64.zip
version = 91.0.4472.101
-# Supports Chrome v91
md5sum-x86_64 = cc43ba0babbfff7f22b48165ec8e8c81

[chromedriver-download]
# Installs chromedriver ${version}.
# This chromedriver is not usable directly, it needs a wrapper.
recipe = slapos.recipe.build:download-unpacked
-url = https://chromedriver.storage.googleapis.com/${:version}/chromedriver_${:_url}.zip
library =
-${nss:location}/lib
-${nspr:location}/lib
+${glib:location}/lib
${libX11:location}/lib
+${libXau:location}/lib
+${libxcb:location}/lib
+${libXdmcp:location}/lib
+${nspr:location}/lib
+${nss:location}/lib
+${pcre2:location}/lib

[chromedriver-download:getattr(sys,'_multiarch',None)=='x86_64-linux-gnu']
-_url = linux64
+url = https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/o/Linux_x64%2F${:revision-x86_64}%2Fchromedriver_linux64.zip?generation=${:generation-x86_64}&alt=media
md5sum = ${:md5sum-x86_64}
@@ -65,36 +65,35 @@ install =
))
os.fchmod(f.fileno(), 0o755)

+[chromium-wrapper-120]
+<= chromium-wrapper
+part = chromium-120

[chromium-wrapper-91]
<= chromium-wrapper
part = chromium-91

-[chromium-wrapper-69]
-<= chromium-wrapper
-part = chromium-69
+[chromium]
+<= chromium-120

-[chromium]
-<= chromium-91
+[chromium-120]
+<= chromium-download
+version = 120.0.6099.109
+revision-x86_64 = 1217362
+md5sum-x86_64 = 86719e40f3d33f1b421d073bb4a71f41
+generation-x86_64 = 1698717835110888

[chromium-91]
<= chromium-download
version = 91.0.4472.114
-revision_x86-64 = 870763
+revision-x86_64 = 870763
md5sum-x86_64 = 74eab41580469c2b8117cf396db823cb
generation-x86_64 = 1617926496067901

-[chromium-69]
-<= chromium-download
-version = 69.0.3497.0
-revision_x86-64 = 576753
-md5sum-x86_64 = 08ac27fd40ace4ca8dfbd1db403deccb
-generation-x86_64 = 1532051976706023

[chromium-download]
# macro to download a binary build of chromium and generate a
# wrapper as chrome-slapos in the part directory

@@ -148,5 +147,5 @@ path =
${fontconfig:location}/bin

[chromium-download:getattr(sys,'_multiarch',None)=='x86_64-linux-gnu']
-url = https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/o/Linux_x64%2F${:revision_x86-64}%2Fchrome-linux.zip?generation=${:generation-x86_64}&alt=media
+url = https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/o/Linux_x64%2F${:revision-x86_64}%2Fchrome-linux.zip?generation=${:generation-x86_64}&alt=media
md5sum = ${:md5sum-x86_64}
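Editorial aside: both parts above now download prebuilt binaries from the chromium-browser-snapshots bucket rather than chromedriver.storage.googleapis.com. A rough Python sketch of how the URL template resolves once buildout substitutes ${:revision-x86_64} and ${:generation-x86_64} (values copied from the [chromium-120] section above; the helper name is only illustrative):

    # Illustrative only: shows how revision and generation end up in the snapshot URL.
    SNAPSHOT_URL = (
        "https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/"
        "o/Linux_x64%2F{revision}%2F{archive}?generation={generation}&alt=media"
    )

    def snapshot_url(revision, generation, archive):
        # archive is "chrome-linux.zip" for chromium, "chromedriver_linux64.zip" for chromedriver
        return SNAPSHOT_URL.format(revision=revision, generation=generation, archive=archive)

    print(snapshot_url(1217362, 1698717835110888, "chrome-linux.zip"))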
From a02c80e17f794dc5eaea1c4edd3a2a3277a13638 Mon Sep 17 00:00:00 2001
From: Kazuhiko SHIOZAKI <kazuhiko@nexedi.com>
Date: Tue, 18 Jul 2023 10:26:54 +0200
Subject: [PATCH 1/4] Cast int to float in compare methods.
---
src/DateTime/DateTime.py | 30 ++++++++++-------------------
src/DateTime/tests/test_datetime.py | 17 ++++++++++++++++
2 files changed, 27 insertions(+), 20 deletions(-)
diff --git a/src/DateTime/DateTime.py b/src/DateTime/DateTime.py
index 2d2d97f..c141306 100644
--- a/src/DateTime/DateTime.py
+++ b/src/DateTime/DateTime.py
@@ -1256,12 +1256,10 @@ class DateTime(object):
"""
if t is None:
t = 0
- if isinstance(t, float):
+ if isinstance(t, (float, int)):
return self._micros > long(t * 1000000)
- try:
+ else:
return self._micros > t._micros
- except AttributeError:
- return self._micros > t
__gt__ = greaterThan
@@ -1279,12 +1277,10 @@ class DateTime(object):
"""
if t is None:
t = 0
- if isinstance(t, float):
+ if isinstance(t, (float, int)):
return self._micros >= long(t * 1000000)
- try:
+ else:
return self._micros >= t._micros
- except AttributeError:
- return self._micros >= t
__ge__ = greaterThanEqualTo
@@ -1301,12 +1297,10 @@ class DateTime(object):
"""
if t is None:
t = 0
- if isinstance(t, float):
+ if isinstance(t, (float, int)):
return self._micros == long(t * 1000000)
- try:
+ else:
return self._micros == t._micros
- except AttributeError:
- return self._micros == t
def notEqualTo(self, t):
"""Compare this DateTime object to another DateTime object
@@ -1348,12 +1342,10 @@ class DateTime(object):
"""
if t is None:
t = 0
- if isinstance(t, float):
+ if isinstance(t, (float, int)):
return self._micros < long(t * 1000000)
- try:
+ else:
return self._micros < t._micros
- except AttributeError:
- return self._micros < t
__lt__ = lessThan
@@ -1370,12 +1362,10 @@ class DateTime(object):
"""
if t is None:
t = 0
- if isinstance(t, float):
+ if isinstance(t, (float, int)):
return self._micros <= long(t * 1000000)
- try:
+ else:
return self._micros <= t._micros
- except AttributeError:
- return self._micros <= t
__le__ = lessThanEqualTo
diff --git a/src/DateTime/tests/test_datetime.py b/src/DateTime/tests/test_datetime.py
index 249e79a..e6b3f93 100644
--- a/src/DateTime/tests/test_datetime.py
+++ b/src/DateTime/tests/test_datetime.py
@@ -228,6 +228,23 @@ class DateTimeTests(unittest.TestCase):
self.assertTrue(dt.lessThanEqualTo(dt1))
self.assertTrue(dt.notEqualTo(dt1))
self.assertFalse(dt.equalTo(dt1))
+ # Compare a date to float
+ dt = DateTime(1.0)
+ self.assertFalse(dt.greaterThan(1.0))
+ self.assertTrue(dt.greaterThanEqualTo(1.0))
+ self.assertFalse(dt.lessThan(1.0))
+ self.assertTrue(dt.lessThanEqualTo(1.0))
+ self.assertFalse(dt.notEqualTo(1.0))
+ self.assertTrue(dt.equalTo(1.0))
+ # Compare a date to int
+ dt = DateTime(1)
+ self.assertEqual(dt, DateTime(1.0))
+ self.assertFalse(dt.greaterThan(1))
+ self.assertTrue(dt.greaterThanEqualTo(1))
+ self.assertFalse(dt.lessThan(1))
+ self.assertTrue(dt.lessThanEqualTo(1))
+ self.assertFalse(dt.notEqualTo(1))
+ self.assertTrue(dt.equalTo(1))
def test_compare_methods_none(self):
# Compare a date to None
--
2.40.1
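A small interactive sketch of the behaviour this first patch establishes, assuming the patched DateTime egg is importable: integer arguments are now scaled to microseconds exactly like floats before comparing.

    from DateTime import DateTime

    dt = DateTime(1)            # one second after the epoch
    print(dt.equalTo(1))        # True: int scaled to 1000000 microseconds
    print(dt.equalTo(1.0))      # True: same handling as float
    print(dt.greaterThan(0))    # True
    print(dt == DateTime(1.0))  # True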
From 892e66132025ab8c213e4be57dd10f0f8eec7e60 Mon Sep 17 00:00:00 2001
From: Kazuhiko SHIOZAKI <kazuhiko@nexedi.com>
Date: Fri, 14 Jul 2023 21:10:06 +0200
Subject: [PATCH 2/4] Fix compare methods between DateTime(0) and None (fix
#52).
This is a fixup commit of 'further py3 work' that changed the behaviour of compare methods between DateTime(0) and None.
Now None is less than any DateTime instance including DateTime(0), just same as DateTime 2.
---
src/DateTime/DateTime.py | 10 +++++-----
src/DateTime/tests/test_datetime.py | 14 +++++++-------
2 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/src/DateTime/DateTime.py b/src/DateTime/DateTime.py
index c141306..caf67e1 100644
--- a/src/DateTime/DateTime.py
+++ b/src/DateTime/DateTime.py
@@ -1255,7 +1255,7 @@ class DateTime(object):
long integer microseconds.
"""
if t is None:
- t = 0
+ return True
if isinstance(t, (float, int)):
return self._micros > long(t * 1000000)
else:
@@ -1276,7 +1276,7 @@ class DateTime(object):
long integer microseconds.
"""
if t is None:
- t = 0
+ return True
if isinstance(t, (float, int)):
return self._micros >= long(t * 1000000)
else:
@@ -1296,7 +1296,7 @@ class DateTime(object):
long integer microseconds.
"""
if t is None:
- t = 0
+ return False
if isinstance(t, (float, int)):
return self._micros == long(t * 1000000)
else:
@@ -1341,7 +1341,7 @@ class DateTime(object):
long integer microseconds.
"""
if t is None:
- t = 0
+ return False
if isinstance(t, (float, int)):
return self._micros < long(t * 1000000)
else:
@@ -1361,7 +1361,7 @@ class DateTime(object):
long integer microseconds.
"""
if t is None:
- t = 0
+ return False
if isinstance(t, (float, int)):
return self._micros <= long(t * 1000000)
else:
diff --git a/src/DateTime/tests/test_datetime.py b/src/DateTime/tests/test_datetime.py
index e6b3f93..1dd6c32 100644
--- a/src/DateTime/tests/test_datetime.py
+++ b/src/DateTime/tests/test_datetime.py
@@ -248,13 +248,13 @@ class DateTimeTests(unittest.TestCase):
def test_compare_methods_none(self):
# Compare a date to None
- dt = DateTime('1997/1/1')
- self.assertTrue(dt.greaterThan(None))
- self.assertTrue(dt.greaterThanEqualTo(None))
- self.assertFalse(dt.lessThan(None))
- self.assertFalse(dt.lessThanEqualTo(None))
- self.assertTrue(dt.notEqualTo(None))
- self.assertFalse(dt.equalTo(None))
+ for dt in (DateTime('1997/1/1'), DateTime(0)):
+ self.assertTrue(dt.greaterThan(None))
+ self.assertTrue(dt.greaterThanEqualTo(None))
+ self.assertFalse(dt.lessThan(None))
+ self.assertFalse(dt.lessThanEqualTo(None))
+ self.assertTrue(dt.notEqualTo(None))
+ self.assertFalse(dt.equalTo(None))
def test_pickle(self):
dt = DateTime()
--
2.40.1
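With this second patch, None sorts below every DateTime value, including DateTime(0); a quick sketch, again assuming the patched egg:

    from DateTime import DateTime

    for dt in (DateTime(0), DateTime('1997/1/1')):
        print(dt.greaterThan(None))      # True for both
        print(dt.lessThanEqualTo(None))  # False for both
        print(dt.equalTo(None))          # False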
From 4a9798072c87d2fe53b2e1e15b004ff982f9686a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Thu, 30 Nov 2023 06:19:54 +0100
Subject: [PATCH 3/4] Make it possible to pickle datetimes returned by
asdatetime
Fixes #58
---
src/DateTime/pytz_support.py | 5 +++++
src/DateTime/tests/test_datetime.py | 7 +++++++
2 files changed, 12 insertions(+)
diff --git a/src/DateTime/pytz_support.py b/src/DateTime/pytz_support.py
index 9ebf3db..e0746ea 100644
--- a/src/DateTime/pytz_support.py
+++ b/src/DateTime/pytz_support.py
@@ -199,9 +199,14 @@ for hour in range(0, 13):
_old_zmap['+%s00' % fhour] = 'GMT+%i' % hour
+def _p(zone):
+ return _numeric_timezones[zone]
+
+
def _static_timezone_factory(data):
zone = data[0]
cls = type(zone, (StaticTzInfo,), dict(
+ __reduce__=lambda _: (_p, (zone, )),
zone=zone,
_utcoffset=memorized_timedelta(data[5][0][0]),
_tzname=data[6][:-1])) # strip the trailing null
diff --git a/src/DateTime/tests/test_datetime.py b/src/DateTime/tests/test_datetime.py
index 1dd6c32..b9eeea9 100644
--- a/src/DateTime/tests/test_datetime.py
+++ b/src/DateTime/tests/test_datetime.py
@@ -270,6 +270,13 @@ class DateTimeTests(unittest.TestCase):
for key in DateTime.__slots__:
self.assertEqual(getattr(dt, key), getattr(new, key))
+ def test_pickle_asdatetime_with_tz(self):
+ dt = DateTime('2002/5/2 8:00am GMT+8')
+ data = pickle.dumps(dt.asdatetime(), 1)
+ new = DateTime(pickle.loads(data))
+ for key in DateTime.__slots__:
+ self.assertEqual(getattr(dt, key), getattr(new, key))
+
def test_pickle_with_numerical_tz(self):
for dt_str in ('2007/01/02 12:34:56.789 +0300',
'2007/01/02 12:34:56.789 +0430',
--
2.40.1
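The __reduce__ added here lets the dynamically generated StaticTzInfo classes be recovered through the _numeric_timezones registry, so a datetime obtained from asdatetime() with a numeric timezone can be pickled. A minimal sketch assuming the patched egg:

    import pickle
    from DateTime import DateTime

    dt = DateTime('2002/5/2 8:00am GMT+8')
    data = pickle.dumps(dt.asdatetime(), 1)  # used to fail on the generated tzinfo class
    print(DateTime(pickle.loads(data)))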
From 6ac321746ab86374871623ddaf414b7948325d22 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Sun, 3 Dec 2023 15:57:01 +0100
Subject: [PATCH 4/4] Repair equality comparison between DateTime instances and
other types
Fixes #60
---
src/DateTime/DateTime.py | 20 +++++++++++++++-----
src/DateTime/tests/test_datetime.py | 17 +++++++++++++++++
2 files changed, 32 insertions(+), 5 deletions(-)
diff --git a/src/DateTime/DateTime.py b/src/DateTime/DateTime.py
index caf67e1..84570b9 100644
--- a/src/DateTime/DateTime.py
+++ b/src/DateTime/DateTime.py
@@ -1258,8 +1258,10 @@ class DateTime(object):
return True
if isinstance(t, (float, int)):
return self._micros > long(t * 1000000)
- else:
+ try:
return self._micros > t._micros
+ except AttributeError:
+ return self._micros > t
__gt__ = greaterThan
@@ -1279,8 +1281,10 @@ class DateTime(object):
return True
if isinstance(t, (float, int)):
return self._micros >= long(t * 1000000)
- else:
+ try:
return self._micros >= t._micros
+ except AttributeError:
+ return self._micros >= t
__ge__ = greaterThanEqualTo
@@ -1299,8 +1303,10 @@ class DateTime(object):
return False
if isinstance(t, (float, int)):
return self._micros == long(t * 1000000)
- else:
+ try:
return self._micros == t._micros
+ except AttributeError:
+ return self._micros == t
def notEqualTo(self, t):
"""Compare this DateTime object to another DateTime object
@@ -1344,8 +1350,10 @@ class DateTime(object):
return False
if isinstance(t, (float, int)):
return self._micros < long(t * 1000000)
- else:
+ try:
return self._micros < t._micros
+ except AttributeError:
+ return self._micros < t
__lt__ = lessThan
@@ -1364,8 +1372,10 @@ class DateTime(object):
return False
if isinstance(t, (float, int)):
return self._micros <= long(t * 1000000)
- else:
+ try:
return self._micros <= t._micros
+ except AttributeError:
+ return self._micros <= t
__le__ = lessThanEqualTo
diff --git a/src/DateTime/tests/test_datetime.py b/src/DateTime/tests/test_datetime.py
index b9eeea9..970a072 100644
--- a/src/DateTime/tests/test_datetime.py
+++ b/src/DateTime/tests/test_datetime.py
@@ -230,6 +230,8 @@ class DateTimeTests(unittest.TestCase):
self.assertFalse(dt.equalTo(dt1))
# Compare a date to float
dt = DateTime(1.0)
+ self.assertTrue(dt == DateTime(1.0)) # testing __eq__
+ self.assertFalse(dt != DateTime(1.0)) # testing __ne__
self.assertFalse(dt.greaterThan(1.0))
self.assertTrue(dt.greaterThanEqualTo(1.0))
self.assertFalse(dt.lessThan(1.0))
@@ -239,12 +241,27 @@ class DateTimeTests(unittest.TestCase):
# Compare a date to int
dt = DateTime(1)
self.assertEqual(dt, DateTime(1.0))
+ self.assertTrue(dt == DateTime(1)) # testing __eq__
+ self.assertFalse(dt != DateTime(1)) # testing __ne__
self.assertFalse(dt.greaterThan(1))
self.assertTrue(dt.greaterThanEqualTo(1))
self.assertFalse(dt.lessThan(1))
self.assertTrue(dt.lessThanEqualTo(1))
self.assertFalse(dt.notEqualTo(1))
self.assertTrue(dt.equalTo(1))
+ # Compare a date to string; there is no implicit type conversion
+ # but behavior if consistent as when comparing, for example, an int
+ # and a string.
+ dt = DateTime("2023")
+ self.assertFalse(dt == "2023") # testing __eq__
+ self.assertTrue(dt != "2023") # testing __ne__
+ if sys.version_info > (3, ):
+ self.assertRaises(TypeError, dt.greaterThan, "2023")
+ self.assertRaises(TypeError, dt.greaterThanEqualTo, "2023")
+ self.assertRaises(TypeError, dt.lessThan, "2023")
+ self.assertRaises(TypeError, dt.lessThanEqualTo, "2023")
+ self.assertTrue(dt.notEqualTo("2023"))
+ self.assertFalse(dt.equalTo("2023"))
def test_compare_methods_none(self):
# Compare a date to None
--
2.40.1
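Net effect of the series, sketched below under the assumption that all four patches are applied: equality against arbitrary objects degrades gracefully again, while ordering against non-dates still raises on Python 3.

    from DateTime import DateTime

    dt = DateTime("2023")
    print(dt == "2023")     # False, no implicit conversion
    print(dt != "2023")     # True
    dt.greaterThan("2023")  # raises TypeError on Python 3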
@@ -61,17 +61,9 @@ install =
<= firefox-wrapper
part = firefox-115

-[firefox-wrapper-78]
+[firefox-wrapper-102]
<= firefox-wrapper
-part = firefox-78
+part = firefox-102

-[firefox-wrapper-68]
-<= firefox-wrapper
-part = firefox-68

-[firefox-wrapper-60]
-<= firefox-wrapper
-part = firefox-60

[firefox-default-fonts-conf]
recipe = slapos.recipe.template:jinja2

@@ -103,23 +95,12 @@ version = 115.3.1esr
i686-md5sum = f0df1b5cce1edd65addc823da02f9488
x86_64-md5sum = 910c0786459cf1e4dc214e6402d0633e

-[firefox-78]
+[firefox-102]
<= firefox-download
-version = 78.1.0esr
+version = 102.15.1esr
-i686-md5sum = 09595a1b9a99d17a618a51bc1f971e5e
+i686-md5sum = 418b51b3553e98070998fcdbc344487d
-x86_64-md5sum = 06f4d488721ce7229d9a86cb4c6786f3
+x86_64-md5sum = ff477480d34e44fbd0040c32ed905aaf

-[firefox-68]
-<= firefox-download
-version = 68.0.2esr
-i686-md5sum = eaa9e0246eb2a31ccf55c100dc2edd5a
-x86_64-md5sum = d22dc17ce0949cdff78009afca6f2043

-[firefox-60]
-<= firefox-download
-version = 60.0.2esr
-i686-md5sum = ce7c80716036dfb5c2fb1ca2538556ff
-x86_64-md5sum = 6fe25d9a3fcc82670320242c9047d1da

[firefox-download]
recipe = slapos.recipe.build

@@ -199,18 +180,6 @@ version = 0.33.0
i686-md5sum = c4a9e6c92dc493f25c8d390f1c6fb11c
x86_64-md5sum = 563c82cfbb21478450e1c828e3730b10

-[geckodriver-0.24.0]
-<= geckodriver-base
-version = 0.24.0
-i686-md5sum = b88eee754f6c90b01f760f7a453dda95
-x86_64-md5sum = 7552b85e43973c84763e212af7cca566

-[geckodriver-0.22.0]
-<= geckodriver-base
-version = 0.22.0
-i686-md5sum = 6de7544753fda56fbaa8382dcac99aaa
-x86_64-md5sum = 81746200ce5841e00cabf3b8ea7db542

[geckodriver-base]
# Installs geckodriver ${version}
recipe = slapos.recipe.build
......
@@ -3,10 +3,11 @@ extends =
../freetype/buildout.cfg
../libxml2/buildout.cfg
../pkgconfig/buildout.cfg
+../python3/buildout.cfg
../bzip2/buildout.cfg
../zlib/buildout.cfg
-../bzip2/buildout.cfg
../gperf/buildout.cfg
+../xz-utils/buildout.cfg
buildout.hash.cfg

parts =

@@ -15,8 +16,8 @@ parts =
[fontconfig]
recipe = slapos.recipe.cmmi
shared = true
-url = http://fontconfig.org/release/fontconfig-2.12.6.tar.bz2
+url = https://www.freedesktop.org/software/fontconfig/release/fontconfig-2.14.2.tar.xz
-md5sum = 733f5e2371ca77b69707bd7b30cc2163
+md5sum = 95261910ea727b5dd116b06fbfd84b1f
pkg_config_depends = ${freetype:pkg_config_depends}:${freetype:location}/lib/pkgconfig:${libxml2:location}/lib/pkgconfig
configure-options =
--disable-static

@@ -24,7 +25,8 @@ configure-options =
--enable-libxml2
--with-add-fonts=no
environment =
-PATH=${pkgconfig:location}/bin:${gperf:location}/bin:%(PATH)s
+PATH=${python3:location}/bin:${pkgconfig:location}/bin:${gperf:location}/bin:${xz-utils:location}/bin:%(PATH)s
+PYTHON=${python3:location}/bin/python3
PKG_CONFIG_PATH=${:pkg_config_depends}
CPPFLAGS=-I${zlib:location}/include -I${bzip2:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib
......
@@ -13,8 +13,8 @@ parts = gdb
[gdb]
recipe = slapos.recipe.cmmi
shared = true
-url = http://ftp.gnu.org/gnu/gdb/gdb-9.2.tar.xz
+url = https://ftp.gnu.org/gnu/gdb/gdb-14.1.tar.xz
-md5sum = db95524e554870209ab7d9f8fd8dc557
+md5sum = 4a084d03915b271f67e9b8ea2ab24972
location = @@LOCATION@@
# gdb refuses to build in-tree -> build it inside build/
pre-configure =
......
@@ -30,24 +30,28 @@ environment =
PATH=${swig:location}/bin:${patch:location}/bin:%(PATH)s
GOROOT_FINAL=${:location}
${:environment-extra}
+patch-options = -p1

+[golang-common-pre-1.21]
+<= golang-common
# TestChown currently fails in a user-namespace
# https://github.com/golang/go/issues/42525
-# the patches apply to go >= 1.12
-patch-options = -p1
-patches =
+# the patches apply to 1.21 > go >= 1.12
+# (in go 1.21 we can't apply it, due to code changes,
+# in go > 1.21 it's hopefully fixed with
+# https://github.com/golang/go/commit/9f03e8367d85d75675b2f2e90873e3293799d8aa)
+patches +=
${:_profile_base_location_}/skip-chown-tests.patch#d4e3c8ef83788fb2a5d80dd75034786f

[golang-common-pre-1.19]
-<= golang-common
+<= golang-common-pre-1.21
# TestSCMCredentials fails in a user-namespace if golang version < 1.19
# https://github.com/golang/go/issues/42525
patches +=
${:_profile_base_location_}/fix-TestSCMCredentials.patch#1d8dbc97cd579e03fafd8627d48f1c59

[golang14]
<= golang-common-pre-1.19
# https://golang.org/doc/install/source#bootstrapFromSource

@@ -117,13 +121,28 @@ environment-extra =
GOROOT_BOOTSTRAP=${golang14:location}

[golang1.20]
-<= golang-common
+<= golang-common-pre-1.21
url = https://go.dev/dl/go1.20.6.src.tar.gz
md5sum = 1dc2d18790cfaede7df1e73a1eff8b7b
# go1.20 requires go1.17.13 to bootstrap (see https://go.dev/doc/go1.20#bootstrap)
environment-extra =
GOROOT_BOOTSTRAP=${golang1.17:location}

+[golang1.21]
+<= golang-common
+url = https://go.dev/dl/go1.21.5.src.tar.gz
+md5sum = 99385ded31906f1554c27015bbbee52d
+# go1.21 requires go1.17.13 to bootstrap (see https://go.dev/blog/rebuild)
+environment-extra =
+GOROOT_BOOTSTRAP=${golang1.17:location}
+patches +=
+# TestChown fix, old fix doesn't work due to code change,
+# for golang > 1.21 this patch is hopefully already included with
+# https://github.com/golang/go/commit/9f03e8367d85d75675b2f2e90873e3293799d8aa
+${:_profile_base_location_}/os-skip-Chown-tests-for-auxiliary-groups-that-fail-d.patch#81b7f75786d9024049c26d1663b79ba4
+${:_profile_base_location_}/skip-unshare-mount-test.patch#325446d5135452e8685e95ab99c13a51

# ---- infrastructure to build Go workspaces / projects ----

# gowork is the top-level section that defines Go workspace.

@@ -183,7 +202,7 @@ bin = ${gowork.dir:bin}
depends = ${gowork.goinstall:recipe}

# go version used for the workspace (possible to override in applications)
-golang = ${golang1.20:location}
+golang = ${golang1.21:location}

# no special build flags by default
buildflags =
......
From 9f03e8367d85d75675b2f2e90873e3293799d8aa Mon Sep 17 00:00:00 2001
From: "Bryan C. Mills" <bcmills@google.com>
Date: Tue, 15 Aug 2023 18:01:16 -0400
Subject: [PATCH] os: skip Chown tests for auxiliary groups that fail due to
permission errors
This addresses the failure mode described in
https://git.alpinelinux.org/aports/commit/community/go/tests-filter-overflow-gid.patch?id=9851dde0f5d2a5a50f7f3b5323d1b2ff22e1d028,
but without special-casing an implementation-specific group ID.
For #62053.
Change-Id: I70b1046837b8146889fff7085497213349cd2bf0
Reviewed-on: https://go-review.googlesource.com/c/go/+/520055
Reviewed-by: Ian Lance Taylor <iant@google.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
Run-TryBot: Bryan Mills <bcmills@google.com>
Auto-Submit: Bryan Mills <bcmills@google.com>
---
src/os/os_unix_test.go | 22 ++++++++++++++++++++++
1 file changed, 22 insertions(+)
diff --git a/src/os/os_unix_test.go b/src/os/os_unix_test.go
index 9041b25471..e4271ff905 100644
--- a/src/os/os_unix_test.go
+++ b/src/os/os_unix_test.go
@@ -75,6 +75,12 @@ func TestChown(t *testing.T) {
t.Log("groups: ", groups)
for _, g := range groups {
if err = Chown(f.Name(), -1, g); err != nil {
+ if testenv.SyscallIsNotSupported(err) {
+ t.Logf("chown %s -1 %d: %s (error ignored)", f.Name(), g, err)
+ // Since the Chown call failed, the file should be unmodified.
+ checkUidGid(t, f.Name(), int(sys.Uid), gid)
+ continue
+ }
t.Fatalf("chown %s -1 %d: %s", f.Name(), g, err)
}
checkUidGid(t, f.Name(), int(sys.Uid), g)
@@ -123,6 +129,12 @@ func TestFileChown(t *testing.T) {
t.Log("groups: ", groups)
for _, g := range groups {
if err = f.Chown(-1, g); err != nil {
+ if testenv.SyscallIsNotSupported(err) {
+ t.Logf("chown %s -1 %d: %s (error ignored)", f.Name(), g, err)
+ // Since the Chown call failed, the file should be unmodified.
+ checkUidGid(t, f.Name(), int(sys.Uid), gid)
+ continue
+ }
t.Fatalf("fchown %s -1 %d: %s", f.Name(), g, err)
}
checkUidGid(t, f.Name(), int(sys.Uid), g)
@@ -181,12 +193,22 @@ func TestLchown(t *testing.T) {
t.Log("groups: ", groups)
for _, g := range groups {
if err = Lchown(linkname, -1, g); err != nil {
+ if testenv.SyscallIsNotSupported(err) {
+ t.Logf("lchown %s -1 %d: %s (error ignored)", f.Name(), g, err)
+ // Since the Lchown call failed, the file should be unmodified.
+ checkUidGid(t, f.Name(), int(sys.Uid), gid)
+ continue
+ }
t.Fatalf("lchown %s -1 %d: %s", linkname, g, err)
}
checkUidGid(t, linkname, int(sys.Uid), g)
// Check that link target's gid is unchanged.
checkUidGid(t, f.Name(), int(sys.Uid), int(sys.Gid))
+
+ if err = Lchown(linkname, -1, gid); err != nil {
+ t.Fatalf("lchown %s -1 %d: %s", f.Name(), gid, err)
+ }
}
}
--
2.36.0
https://github.com/golang/go/commit/092671423cd95eaa6df93eb29442fef41504d097 breaks
TestUnshareMountNameSpace test on SlapOS.
---
diff --git a/src/syscall/exec_linux_test.go b/src/syscall/exec_linux_test.go
index f4ff7bf81b..bc8bdb0a35 100644
--- a/src/syscall/exec_linux_test.go
+++ b/src/syscall/exec_linux_test.go
@@ -206,6 +206,7 @@ func TestGroupCleanupUserNamespace(t *testing.T) {
// Test for https://go.dev/issue/19661: unshare fails because systemd
// has forced / to be shared
func TestUnshareMountNameSpace(t *testing.T) {
+ t.Skip("skipping: not supported in SlapOS")
testenv.MustHaveExec(t)
if os.Getenv("GO_WANT_HELPER_PROCESS") == "1" {
@@ -13,8 +13,8 @@ parts = haproxy
[haproxy]
recipe = slapos.recipe.cmmi
shared = true
-url = https://www.haproxy.org/download/2.6/src/haproxy-2.6.15.tar.gz
+url = https://www.haproxy.org/download/2.6/src/haproxy-2.6.16.tar.gz
-md5sum = ecac9724e3ca6368624fb2fab29dd366
+md5sum = b01e605cdaf2742fcedf214a61e187b4
configure-command = true
# for Linux kernel 2.6.28 and above, we use "linux-glibc" as the TARGET,
# otherwise use "generic".

@@ -47,6 +47,7 @@ make-options =
${:QUIC}
USE_PCRE=1
USE_ZLIB=1
+USE_PROMEX=1
ZLIB_INC=${zlib:location}/include
ZLIB_LIB=${zlib:location}/lib
ADDLIB="${:SSL_ADDLIB} -Wl,-rpath=${pcre:location}/lib -Wl,-rpath=${zlib:location}/lib"
......
@@ -5,7 +5,7 @@ parts =
[libsodium]
recipe = slapos.recipe.cmmi
shared = true
-url = https://download.libsodium.org/libsodium/releases/old/unsupported/libsodium-1.0.8.tar.gz
+url = https://download.libsodium.org/libsodium/releases/old/libsodium-1.0.17.tar.gz
-md5sum = 0a66b86fd3aab3fe4c858edcd2772760
+md5sum = 0f71e2680187a1558b5461e6879342c5
configure-options =
--disable-static
@@ -72,14 +72,6 @@ md5sum = 28bf6a4d98b238403fa58a0805f4a979
PATH = ${pkgconfig:location}/bin:${python2.7:location}/bin:%(PATH)s
configure-command = ./configure

-[nodejs-8.9.4]
-<= nodejs-base
-version = v8.9.4
-md5sum = 4ddc1daff327d7e6f63da57fdfc24f55
-openssl-location = ${openssl-1.0:location}
-PATH = ${pkgconfig:location}/bin:${python2.7:location}/bin:%(PATH)s
-configure-command = ./configure

[nodejs-8.12.0]
<= nodejs-base
version = v8.12.0
......
[buildout]
extends =
+../icu/buildout.cfg
../openssl/buildout.cfg
+../pkgconfig/buildout.cfg
../readline/buildout.cfg
../zlib/buildout.cfg
../ncurses/buildout.cfg

@@ -30,10 +32,13 @@ configure-options =
--without-libxslt
# build core PostgreSQL + pg_trgm contrib extension for GitLab
# unaccent contrib extension is for peertube
+# citext contrib extension is for metabase
-make-targets = install && make -C contrib/pg_trgm/ install && make -C contrib/unaccent/ install
+make-targets = install && make -C contrib/pg_trgm/ install && make -C contrib/unaccent/ install && make -C contrib/citext/ install
environment =
-CPPFLAGS=-I${zlib:location}/include -I${readline:location}/include -I${openssl:location}/include -I${ncurses:location}/lib
-LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${readline:location}/lib -Wl,-rpath=${readline:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${perl:location}/libs-c -Wl,-rpath=${perl:location}/libs-c
+PATH=${pkgconfig:location}/bin:%(PATH)s
+CPPFLAGS=-I${zlib:location}/include -I${readline:location}/include -I${openssl:location}/include -I${ncurses:location}/include
+LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${readline:location}/lib -Wl,-rpath=${readline:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${perl:location}/libs-c -Wl,-rpath=${perl:location}/libs-c -Wl,-rpath=${icu:location}/lib
+PKG_CONFIG_PATH=${icu:location}/lib/pkgconfig/

[postgresql10]
<= postgresql-common
......
@@ -18,8 +18,10 @@ parts =
# tune pygolang to install with all and for-tests extras.
+# list all_test-dependent eggs that must come through in-tree recipes.
[pygolang]
egg = pygolang[all_test]
+depends += ${numpy:egg}

# bin/python is preinstalled with sys.path to pygolang & friends.
[pygolang-python]
......
@@ -31,7 +31,7 @@ PKG_CONFIG_PATH=${libxml2:location}/lib/pkgconfig:${libxslt:location}/lib/pkgcon
[versions]
xmlsec = 1.3.13
-setuptools-scm = 7.0.5
+setuptools-scm = 7.0.5:whl
toml = 0.10.2

[versions:python2]
......
@@ -109,8 +109,8 @@ md5sum = b33775a9ab6eae784b6da9f31be48be3
[debian-amd64-bookworm-netinst.iso]
<= debian-amd64-netinst-base
-version = 12.1.0
+version = 12.4.0
-md5sum = 8d77d1b0bcfef29e4d56dc0fbe23de15
+md5sum = a03cf771ba9513d908093101a094ac88
alternate-url = https://cdimage.debian.org/cdimage/release/current/${:arch}/iso-cd/${:filename}

[debian-amd64-netinst.iso]
......
@@ -50,7 +50,7 @@ CGO_LDFLAGS += -Wl,-rpath=${zlib:location}/lib
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/wendelin.core.git
branch = master
-revision = wendelin.core-2.0.alpha3-7-g885b355
+revision = wendelin.core-2.0.alpha3-9-gda765ef
# dir is pretty name as top-level recipe
location = ${buildout:parts-directory}/wendelin.core
git-executable = ${git:location}/bin/git
@@ -3,8 +3,8 @@
[buildout]
extends =
-# test*.cfg first extend from neoppod/software<ZODB-flavour>.cfg to use
-# appropriate ZODB and versions of other components.
+../../stack/erp5/buildout.cfg
+../../software/neoppod/software.cfg
../pytest/buildout.cfg
../scipy/buildout.cfg
......
# SlapOS software release to test wendelin.core/ZODB4-wc2 on Nexedi testing infrastructure.
[buildout]
-extends = test.cfg
+extends = test-common.cfg

[ZODB]
major = 4-wc2

# SlapOS software release to test wendelin.core/ZODB5 on Nexedi testing infrastructure.
[buildout]
-extends =
-../../stack/erp5/buildout.cfg
-../../software/neoppod/software-zodb5.cfg
-test-common.cfg
+extends = test-common.cfg

+[ZODB]
+major = 5
+
+# ZEO[test] needs ZopeUndo
+[versions]
+ZopeUndo = 5.0

-# SlapOS software release to test wendelin.core on Nexedi testing infrastructure.
-[buildout]
-extends =
-../../stack/erp5/buildout.cfg
-../../software/neoppod/software.cfg
-test-common.cfg
@@ -10,8 +10,8 @@ extends =
[xmlsec]
recipe = slapos.recipe.cmmi
-url = https://www.aleksey.com/xmlsec/download/older-releases/xmlsec1-1.2.34.tar.gz
+url = https://github.com/lsh123/xmlsec/releases/download/xmlsec-1_2_37/xmlsec1-1.2.37.tar.gz
-md5sum = 87b0074e7ae535e061acf8ef64dada1b
+md5sum = 98dd3c884e2816c25c038a6e8af138fb
shared = true
configure-options =
--disable-crypto-dl
......
@@ -2,5 +2,8 @@
[buildout]
extends =
-test-zodb4.cfg
+test-common.cfg
test-py2.cfg

+[ZODB]
+major = 4

@@ -2,5 +2,8 @@
[buildout]
extends =
-test-zodb4-wc2.cfg
+test-common.cfg
test-py2.cfg

+[ZODB]
+major = 4-wc2

+# SlapOS software release to test zodbtools/ZODB4-wc2-py3 on Nexedi testing infrastructure.
+[buildout]
+extends = test-common.cfg
+
+[ZODB]
+major = 4-wc2

+# SlapOS software release to test zodbtools/ZODB4-py3 on Nexedi testing infrastructure.
+[buildout]
+extends = test-common.cfg
+
+[ZODB]
+major = 4
@@ -28,7 +28,7 @@ def main():
print(e, file=sys.stderr)
else:
with open(f, 'w') as outfile:
-json.dump(obj, outfile, sort_keys=False, indent=2, separators=(',', ': '))
+json.dump(obj, outfile, ensure_ascii=False, sort_keys=False, indent=2, separators=(',', ': '))
outfile.write('\n')
sys.exit(exit_code)
......
@@ -28,7 +28,7 @@ from setuptools import setup, find_packages
import glob
import os

-version = '1.0.329'
+version = '1.0.351'
name = 'slapos.cookbook'
long_description = open("README.rst").read()

@@ -72,6 +72,8 @@ setup(name=name,
'zc.buildout', # plays with buildout
'zc.recipe.egg', # for scripts generation
'pytz', # for timezone database
+'passlib',
+'bcrypt',
],
zip_safe=True,
entry_points={
......
@@ -45,10 +45,8 @@ class Recipe(GenericBaseRecipe):
elif self.options.get('expected-type') == "ipv4":
template = self.getTemplateFilename('check_ipv4.py.in')
else:
-config["expected-value"] = self.options.get('expected-value')
-config["expected-not-value"] = self.options.get('expected-not-value')
+config["expected-value"] = str(self.options.get('expected-value', ''))
+config["expected-not-value"] = str(self.options.get('expected-not-value', ''))
template = self.getTemplateFilename('check_parameter.py.in')

promise = self.createExecutable(
......
@@ -5,9 +5,9 @@ from __future__ import print_function
import socket
import sys

-value = "%(value)s"
+value = %(value)r
-expected = "%(expected-value)s"
+expected = %(expected-value)r
-not_expected = "%(expected-not-value)s"
+not_expected = %(expected-not-value)r

if expected != "" and value != expected:
print("FAIL: %%s != %%s" %% (value, expected))
......
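The move from quoting with "%(value)s" to %(value)r matters because the substituted value may contain quotes or newlines; %r emits a valid Python literal instead. A plain string-formatting illustration of the difference (not the recipe itself):

    value = 'foo\nbar "quoted"'
    print('value = "%(value)s"' % {'value': value})  # broken source: raw newline and quotes inside the string
    print('value = %(value)r' % {'value': value})    # valid source: value = 'foo\nbar "quoted"'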
@@ -24,9 +24,14 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
+import logging
import os
+import time

from inotify_simple import INotify, flags

+logger = logging.getLogger(__name__)

def subfiles(directory):
"""Return the list of subfiles of a directory, and wait for the newly created
ones.

@@ -35,10 +40,19 @@ def subfiles(directory):
ALWAYS ITERATE OVER IT !!!*"""
with INotify() as inotify:
+try:
inotify.add_watch(directory, flags.CLOSE_WRITE | flags.MOVED_TO)
+inotify_available = True
+except OSError:
+logger.warning("Unable to add inotify watch, falling back to polling")
+inotify_available = False
names = os.listdir(directory)
while True:
for name in names:
yield os.path.join(directory, name)
+if inotify_available:
names = (event.name for event in inotify.read())
+else:
+time.sleep(5)
+names = os.listdir(directory)
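The watch setup is now wrapped in try/except so that where inotify is unavailable the generator degrades to polling every few seconds instead of failing. A self-contained sketch of the same pattern, not the slapos module itself (5-second poll as in the diff):

    import logging
    import os
    import time

    from inotify_simple import INotify, flags

    def watch_names(directory, poll_interval=5):
        # Yield file names forever, preferring inotify, falling back to polling.
        with INotify() as inotify:
            try:
                inotify.add_watch(directory, flags.CLOSE_WRITE | flags.MOVED_TO)
                use_inotify = True
            except OSError:
                logging.warning("inotify unavailable, polling %s", directory)
                use_inotify = False
            names = os.listdir(directory)
            while True:
                for name in names:
                    yield name
                if use_inotify:
                    names = (event.name for event in inotify.read())
                else:
                    time.sleep(poll_interval)
                    names = os.listdir(directory)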
@@ -131,7 +131,9 @@ class Recipe(GenericSlapRecipe):
new = {}
for k, v in six.iteritems(init):
try:
-options[k] = publish_dict[k] = new[v] = init_section.pop(v)
+init_section_value = init_section[v]
+options[k] = publish_dict[k] = new[v] = init_section_value
+del init_section[v]
except KeyError:
pass
if new != override:
......
@@ -33,12 +33,16 @@ buildout Software Releases and Instances developments.
from __future__ import absolute_import
import errno
+import json
import os
import random
import string
+import sys

from .librecipe import GenericBaseRecipe
from .publish_early import volatileOptions
-from slapos.util import str2bytes
+import passlib.hash

class Integer(object):
"""

@@ -113,7 +117,7 @@ def generatePassword(length):
class Password(object):
-"""Generate a password that is only composed of lowercase letters
+"""Generate a password.

This recipe only makes sure that ${:passwd} does not end up in `.installed`
file, which is world-readable by default. So be careful not to spread it

@@ -128,6 +132,11 @@ class Password(object):
- create-once: boolean value which set if storage-path won't be modified
as soon the file is created with the password (not empty).
(default: True)
+- passwd: the generated password. Can also be set, to reuse the password
+hashing capabilities.
+- passwd-*: the hashed password, using schemes supported by passlib.
+for example, passwd-sha256-crypt will expose the password hashed
+with sha256 crypt algorithm.

If storage-path is empty, the recipe does not save the password, which is
fine it is saved by other means, e.g. using the publish-early recipe.

@@ -141,24 +150,53 @@
except KeyError:
self.storage_path = options['storage-path'] = os.path.join(
buildout['buildout']['parts-directory'], name)
-passwd = options.get('passwd')
-if not passwd:
+passwd_dict = {
+'': options.get('passwd')
+}
+if not passwd_dict['']:
if self.storage_path:
+self._needs_migration = False
try:
with open(self.storage_path) as f:
-passwd = f.read().strip('\n')
+content = f.read().strip('\n')
+# new format: the file contains password and hashes in json format
+try:
+passwd_dict = json.loads(content)
+if sys.version_info < (3, ):
+passwd_dict = {k: v.encode() for k, v in passwd_dict.items()}
+except ValueError:
+# old format: the file only contains the password in plain text
+passwd_dict[''] = content
+self._needs_migration = True
except IOError as e:
if e.errno != errno.ENOENT:
raise
-if not passwd:
-passwd = self.generatePassword(int(options.get('bytes', '16')))
+if not passwd_dict['']:
+passwd_dict[''] = self.generatePassword(int(options.get('bytes', '16')))
self.update = self.install
-options['passwd'] = passwd
+options['passwd'] = passwd_dict['']
+
+class HashedPasswordDict(dict):
+def __missing__(self, key):
+if not key.startswith('passwd-'):
+raise KeyError(key)
+if key in passwd_dict:
+return passwd_dict[key]
+handler = getattr(
+passlib.hash, key[len('passwd-'):].replace('-', '_'), None)
+if handler is None:
+raise KeyError(key)
+hashed = handler.hash(passwd_dict[''])
+passwd_dict[key] = hashed
+return hashed
+
+options._data = HashedPasswordDict(options._data)
# Password must not go into .installed file, for 2 reasons:
# security of course but also to prevent buildout to always reinstall.
# publish_early already does it, but this recipe may also be used alone.
volatileOptions(options, ('passwd',))
-self.passwd = passwd
+self.passwd_dict = passwd_dict

generatePassword = staticmethod(generatePassword)

@@ -167,19 +205,14 @@
try:
# The following 2 lines are just an optimization to avoid recreating
# the file with the same content.
-if self.create_once and os.stat(self.storage_path).st_size:
+if self.create_once and os.stat(self.storage_path).st_size and not self._needs_migration:
return
os.unlink(self.storage_path)
except OSError as e:
if e.errno != errno.ENOENT:
raise
-fd = os.open(self.storage_path,
-os.O_CREAT | os.O_EXCL | os.O_WRONLY | os.O_TRUNC, 0o600)
-try:
-os.write(fd, str2bytes(self.passwd))
-finally:
-os.close(fd)
+with open(self.storage_path, 'w') as f:
+json.dump(self.passwd_dict, f)

if not self.create_once:
return self.storage_path
......
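The interesting part of the recipe change is the lazily computed hashes: a dict subclass whose __missing__ derives passwd-<scheme> entries with passlib on first access and caches them. A condensed sketch of that pattern outside buildout (passlib required; the passwd-* naming follows the recipe's convention):

    import passlib.hash

    class HashedPasswords(dict):
        def __init__(self, password):
            super(HashedPasswords, self).__init__()
            self[''] = password

        def __missing__(self, key):
            # Map e.g. 'passwd-sha256-crypt' to passlib.hash.sha256_crypt.
            if not key.startswith('passwd-'):
                raise KeyError(key)
            handler = getattr(
                passlib.hash, key[len('passwd-'):].replace('-', '_'), None)
            if handler is None:
                raise KeyError(key)
            hashed = self[key] = handler.hash(self[''])
            return hashed

    options = HashedPasswords('insecure')
    print(options['passwd-sha256-crypt'])  # computed and cached on first access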
import os
import subprocess
import tempfile
import unittest
import zc.buildout.testing
from slapos.recipe import check_parameter
class TestCheckParameter(unittest.TestCase):
def setUp(self):
self.buildout = zc.buildout.testing.Buildout()
def _makeRecipe(self, options):
path = tempfile.NamedTemporaryFile(delete=False).name
self.addCleanup(os.unlink, path)
options.setdefault("path", path)
self.buildout["check-parameter"] = options
recipe = check_parameter.Recipe(
self.buildout, "check-parameter", self.buildout["check-parameter"]
)
return recipe
def test_expected_value_ok(self):
script = self._makeRecipe({"expected-value": "foo", "value": "foo"}).install()
subprocess.check_call(script)
def test_expected_value_not_ok(self):
script = self._makeRecipe({"expected-value": "foo", "value": "bar"}).install()
with self.assertRaises(subprocess.CalledProcessError) as e:
subprocess.check_output(script, universal_newlines=True)
self.assertEqual(e.exception.output, "FAIL: bar != foo\n")
def test_expected_value_multi_lines_ok(self):
script = self._makeRecipe(
{"expected-value": "foo\nbar", "value": "foo\nbar"}
).install()
subprocess.check_output(script)
def test_expected_value_multi_lines_not_ok(self):
script = self._makeRecipe({"expected-value": "foo\nbar", "value": "foo"}).install()
with self.assertRaises(subprocess.CalledProcessError) as e:
subprocess.check_output(script, universal_newlines=True)
self.assertEqual(e.exception.output, "FAIL: foo != foo\nbar\n")
def test_expected_not_value_ok(self):
script = self._makeRecipe({"expected-not-value": "foo", "value": "bar"}).install()
subprocess.check_call(script)
def test_expected_not_value_not_ok(self):
script = self._makeRecipe({"expected-not-value": "foo", "value": "foo"}).install()
with self.assertRaises(subprocess.CalledProcessError) as e:
subprocess.check_output(script, universal_newlines=True)
self.assertEqual(e.exception.output, "FAIL: foo == foo\n")
import json
import os
import shutil
import tempfile
import unittest
import zc.buildout.testing
import zc.buildout.buildout
import passlib.hash
from slapos.recipe import random
class TestPassword(unittest.TestCase):
def setUp(self):
self.buildout = zc.buildout.testing.Buildout()
parts_directory = tempfile.mkdtemp()
self.buildout['buildout']['parts-directory'] = parts_directory
self.addCleanup(shutil.rmtree, parts_directory)
def _makeRecipe(self, options, section_name="random"):
self.buildout[section_name] = options
recipe = random.Password(
self.buildout, section_name, self.buildout[section_name]
)
return recipe
def test_empty_options(self):
recipe = self._makeRecipe({})
passwd = self.buildout["random"]["passwd"]
self.assertEqual(len(passwd), 16)
recipe.install()
with open(self.buildout["random"]["storage-path"]) as f:
self.assertEqual(json.load(f), {'': passwd})
def test_storage_path(self):
tf = tempfile.NamedTemporaryFile(delete=False)
self.addCleanup(os.unlink, tf.name)
self._makeRecipe({'storage-path': tf.name}).install()
passwd = self.buildout["random"]["passwd"]
self.assertEqual(len(passwd), 16)
with open(tf.name) as f:
self.assertEqual(json.load(f), {'': passwd})
self._makeRecipe({'storage-path': tf.name}, "another").install()
self.assertEqual(self.buildout["another"]["passwd"], passwd)
def test_storage_path_legacy_format(self):
with tempfile.NamedTemporaryFile(delete=False) as tf:
tf.write(b'secret\n')
tf.flush()
self._makeRecipe({'storage-path': tf.name}).install()
passwd = self.buildout["random"]["passwd"]
self.assertEqual(passwd, 'secret')
tf.flush()
with open(tf.name) as f:
self.assertEqual(json.load(f), {'': 'secret'})
self._makeRecipe({'storage-path': tf.name}, "another").install()
self.assertEqual(self.buildout["another"]["passwd"], passwd)
def test_bytes(self):
self._makeRecipe({'bytes': '32'}).install()
passwd = self.buildout["random"]["passwd"]
self.assertEqual(len(passwd), 32)
with open(self.buildout["random"]["storage-path"]) as f:
self.assertEqual(json.load(f), {'': passwd})
def test_volatile(self):
self._makeRecipe({})
options = self.buildout['random']
self.assertIn('passwd', options)
options_items = [(k, v) for k, v in options.items() if k != 'passwd']
copied_options = options.copy()
self.assertEqual(list(copied_options.items()), options_items)
def test_passlib(self):
recipe = self._makeRecipe({})
hashed = self.buildout['random']['passwd-sha256-crypt']
self.assertTrue(
passlib.hash.sha256_crypt.verify(
self.buildout['random']['passwd'], hashed))
hashed = self.buildout['random']['passwd-md5-crypt']
self.assertTrue(
passlib.hash.md5_crypt.verify(
self.buildout['random']['passwd'], hashed))
hashed = self.buildout['random']['passwd-bcrypt']
self.assertTrue(
passlib.hash.bcrypt.verify(
self.buildout['random']['passwd'], hashed))
hashed = self.buildout['random']['passwd-ldap-salted-sha1']
self.assertTrue(
passlib.hash.ldap_salted_sha1.verify(
self.buildout['random']['passwd'], hashed))
with self.assertRaises(zc.buildout.buildout.MissingOption):
self.buildout['random']['passwd-unknown']
with self.assertRaises(zc.buildout.buildout.MissingOption):
self.buildout['random']['unknown']
copied_options = self.buildout['random'].copy()
self.assertEqual(list(copied_options.keys()), ['storage-path'])
recipe.install()
# when buildout runs again, the values are read from the storage
# and even the hashed values are the same
self._makeRecipe({'storage-path': self.buildout['random']['storage-path']}, 'reread')
self.assertEqual(
self.buildout['reread']['passwd'],
self.buildout['random']['passwd'])
self.assertEqual(
self.buildout['reread']['passwd-sha256-crypt'],
self.buildout['random']['passwd-sha256-crypt'])
self.assertEqual(
self.buildout['reread']['passwd-bcrypt'],
self.buildout['random']['passwd-bcrypt'])
self.assertEqual(
self.buildout['reread']['passwd-ldap-salted-sha1'],
self.buildout['random']['passwd-ldap-salted-sha1'])
# values are strings which is important for python2
self.assertIsInstance(self.buildout['reread']['passwd'], str)
self.assertIsInstance(self.buildout['reread']['passwd-ldap-salted-sha1'], str)
def test_passlib_input_passwd(self):
self._makeRecipe({'passwd': 'insecure'})
self.assertEqual(self.buildout['random']['passwd'], 'insecure')
hashed = self.buildout['random']['passwd-sha256-crypt']
self.assertTrue(passlib.hash.sha256_crypt.verify('insecure', hashed))
...@@ -85,6 +85,7 @@ def createFormatTest(path): ...@@ -85,6 +85,7 @@ def createFormatTest(path):
self.assertEqual( self.assertEqual(
(json.dumps( (json.dumps(
json.loads(content, object_pairs_hook=collections.OrderedDict), json.loads(content, object_pairs_hook=collections.OrderedDict),
ensure_ascii=False,
sort_keys=False, sort_keys=False,
indent=2, indent=2,
separators=(',', ': ')) + "\n").splitlines(), separators=(',', ': ')) + "\n").splitlines(),
......
...@@ -18,6 +18,7 @@ ...@@ -18,6 +18,7 @@
}, },
"mimetype-entry-addition": { "mimetype-entry-addition": {
"description": "The list of entry to add to the cloudooo mimetype registry. Each entry should on one line which format is: \"<source_mimetype> <destination_mimetype> <handler>\"", "description": "The list of entry to add to the cloudooo mimetype registry. Each entry should on one line which format is: \"<source_mimetype> <destination_mimetype> <handler>\"",
"textarea": true,
"type": "string" "type": "string"
} }
} }
......
...@@ -37,9 +37,12 @@ import base64 ...@@ -37,9 +37,12 @@ import base64
import io import io
import requests import requests
import PIL.Image
import PyPDF2 import PyPDF2
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
from slapos.testing.utils import ImageComparisonTestCase
setUpModule, _CloudOooTestCase = makeModuleSetUpAndTestCaseClass( setUpModule, _CloudOooTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath( os.path.abspath(
...@@ -148,7 +151,6 @@ class TestWkhtmlToPDF(HTMLtoPDFConversionFontTestMixin, CloudOooTestCase): ...@@ -148,7 +151,6 @@ class TestWkhtmlToPDF(HTMLtoPDFConversionFontTestMixin, CloudOooTestCase):
'Courier New': 'LiberationSans', 'Courier New': 'LiberationSans',
'DejaVu Sans': 'DejaVuSans', 'DejaVu Sans': 'DejaVuSans',
'DejaVu Sans Condensed': 'LiberationSans', 'DejaVu Sans Condensed': 'LiberationSans',
'DejaVu Sans ExtraLight': 'LiberationSans',
'DejaVu Sans Mono': 'DejaVuSansMono', 'DejaVu Sans Mono': 'DejaVuSansMono',
'DejaVu Serif': 'DejaVuSerif', 'DejaVu Serif': 'DejaVuSerif',
'DejaVu Serif Condensed': 'LiberationSans', 'DejaVu Serif Condensed': 'LiberationSans',
...@@ -165,11 +167,11 @@ class TestWkhtmlToPDF(HTMLtoPDFConversionFontTestMixin, CloudOooTestCase): ...@@ -165,11 +167,11 @@ class TestWkhtmlToPDF(HTMLtoPDFConversionFontTestMixin, CloudOooTestCase):
'Liberation Sans Narrow': 'LiberationSansNarrow', 'Liberation Sans Narrow': 'LiberationSansNarrow',
'Liberation Serif': 'LiberationSerif', 'Liberation Serif': 'LiberationSerif',
'Linux LibertineG': 'LiberationSans', 'Linux LibertineG': 'LiberationSans',
'OpenSymbol': {'DejaVuSans', 'OpenSymbol'}, 'OpenSymbol': {'NotoSans-Regular', 'OpenSymbol'},
'Palatino': 'LiberationSans', 'Palatino': 'LiberationSans',
'Roboto Black': 'LiberationSans', 'Roboto Black': 'LiberationSans',
'Roboto Condensed Light': 'LiberationSans', 'Roboto Condensed Light': 'LiberationSans',
'Roboto Condensed Regular': 'LiberationSans', 'Roboto Condensed': 'RobotoCondensed-Regular',
'Roboto Light': 'LiberationSans', 'Roboto Light': 'LiberationSans',
'Roboto Medium': 'LiberationSans', 'Roboto Medium': 'LiberationSans',
'Roboto Thin': 'LiberationSans', 'Roboto Thin': 'LiberationSans',
...@@ -198,43 +200,42 @@ class TestLibreoffice(HTMLtoPDFConversionFontTestMixin, CloudOooTestCase): ...@@ -198,43 +200,42 @@ class TestLibreoffice(HTMLtoPDFConversionFontTestMixin, CloudOooTestCase):
pdf_producer = 'LibreOffice 7.5' pdf_producer = 'LibreOffice 7.5'
expected_font_mapping = { expected_font_mapping = {
'Arial': 'LiberationSans', 'Arial': 'LiberationSans',
'Arial Black': 'DejaVuSans', 'Arial Black': 'NotoSans-Regular',
'Avant Garde': 'DejaVuSans', 'Avant Garde': 'NotoSans-Regular',
'Bookman': 'DejaVuSans', 'Bookman': 'NotoSans-Regular',
'Carlito': 'Carlito', 'Carlito': 'Carlito',
'Comic Sans MS': 'DejaVuSans', 'Comic Sans MS': 'NotoSans-Regular',
'Courier New': 'LiberationMono', 'Courier New': 'LiberationMono',
'DejaVu Sans': 'DejaVuSans', 'DejaVu Sans': 'DejaVuSans',
'DejaVu Sans Condensed': 'DejaVuSansCondensed', 'DejaVu Sans Condensed': 'DejaVuSansCondensed',
'DejaVu Sans ExtraLight': 'DejaVuSans',
'DejaVu Sans Mono': 'DejaVuSansMono', 'DejaVu Sans Mono': 'DejaVuSansMono',
'DejaVu Serif': 'DejaVuSerif', 'DejaVu Serif': 'DejaVuSerif',
'DejaVu Serif Condensed': 'DejaVuSerifCondensed', 'DejaVu Serif Condensed': 'DejaVuSerifCondensed',
'Garamond': 'DejaVuSerif', 'Garamond': 'NotoSerif-Regular',
'Gentium Basic': 'GentiumBasic', 'Gentium Basic': 'GentiumBasic',
'Gentium Book Basic': 'GentiumBookBasic', 'Gentium Book Basic': 'GentiumBookBasic',
'Georgia': 'DejaVuSerif', 'Georgia': 'NotoSerif-Regular',
'Helvetica': 'LiberationSans', 'Helvetica': 'LiberationSans',
'IPAex Gothic': 'IPAexGothic', 'IPAex Gothic': 'IPAexGothic',
'IPAex Mincho': 'IPAexMincho', 'IPAex Mincho': 'IPAexMincho',
'Impact': 'DejaVuSans', 'Impact': 'NotoSans-Regular',
'Liberation Mono': 'LiberationMono', 'Liberation Mono': 'LiberationMono',
'Liberation Sans': 'LiberationSans', 'Liberation Sans': 'LiberationSans',
'Liberation Sans Narrow': 'LiberationSansNarrow', 'Liberation Sans Narrow': 'LiberationSansNarrow',
'Liberation Serif': 'LiberationSerif', 'Liberation Serif': 'LiberationSerif',
'Linux LibertineG': 'LinuxLibertineG', 'Linux LibertineG': 'LinuxLibertineG',
'OpenSymbol': {'OpenSymbol', 'IPAMincho'}, 'OpenSymbol': {'OpenSymbol', 'IPAMincho'},
'Palatino': 'DejaVuSerif', 'Palatino': 'NotoSerif-Regular',
'Roboto Black': 'Roboto-Black', 'Roboto Black': 'Roboto-Black',
'Roboto Condensed Light': 'RobotoCondensed-Light', 'Roboto Condensed Light': 'RobotoCondensed-Light',
'Roboto Condensed Regular': 'DejaVuSans', 'Roboto Condensed': 'RobotoCondensed-Regular',
'Roboto Light': 'Roboto-Light', 'Roboto Light': 'Roboto-Light',
'Roboto Medium': 'Roboto-Medium', 'Roboto Medium': 'Roboto-Medium',
'Roboto Thin': 'Roboto-Thin', 'Roboto Thin': 'Roboto-Thin',
'Times New Roman': 'LiberationSerif', 'Times New Roman': 'LiberationSerif',
'Trebuchet MS': 'DejaVuSans', 'Trebuchet MS': 'NotoSans-Regular',
'Verdana': 'DejaVuSans', 'Verdana': 'NotoSans-Regular',
'ZZZdefault fonts when no match': 'DejaVuSans' 'ZZZdefault fonts when no match': 'NotoSans-Regular'
} }
def _convert_html_to_pdf(self, src_html): def _convert_html_to_pdf(self, src_html):
...@@ -246,6 +247,27 @@ class TestLibreoffice(HTMLtoPDFConversionFontTestMixin, CloudOooTestCase): ...@@ -246,6 +247,27 @@ class TestLibreoffice(HTMLtoPDFConversionFontTestMixin, CloudOooTestCase):
).encode()) ).encode())
class TestLibreofficeDrawToPNGConversion(CloudOooTestCase, ImageComparisonTestCase):
__partition_reference__ = 'l'
def test(self):
reference_png = PIL.Image.open(os.path.join('data', f'{self.id()}.png'))
with open(os.path.join('data', f'{self.id()}.odg'), 'rb') as f:
actual_png_data = base64.decodebytes(
self.server.convertFile(
base64.encodebytes(f.read()).decode(),
'odg',
'png',
).encode())
actual_png = PIL.Image.open(io.BytesIO(actual_png_data))
# save a snapshot
with open(os.path.join(self.computer_partition_root_path, self.id() + '.png'), 'wb') as f:
f.write(actual_png_data)
self.assertImagesSame(actual_png, reference_png)
class TestLibreOfficeTextConversion(CloudOooTestCase): class TestLibreOfficeTextConversion(CloudOooTestCase):
__partition_reference__ = 'txt' __partition_reference__ = 'txt'
......
[instance] [instance]
filename = instance.cfg filename = instance.cfg
md5sum = a4e19280bc672cc98e0fef241c8439ba md5sum = e4092f606716171a5c9c5980c70573b3
[buildout] [buildout]
parts = parts =
dream_simulation dream-simulation
dream_platform dream-platform
dream_test_suite dream-test-suite
dream_interpreter dream-interpreter
grunt_watch grunt-watch
publish-connection-parameter publish-connection-parameter
dream-platform-url-available dream-platform-url-available
...@@ -22,44 +22,44 @@ url = $${slap-connection:server-url} ...@@ -22,44 +22,44 @@ url = $${slap-connection:server-url}
key = $${slap-connection:key-file} key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file} cert = $${slap-connection:cert-file}
[dream_platform_parameter] [dream-platform-parameter]
port = 18080 port = 18080
host = $${instance-parameter:ipv6-random} host = $${instance-parameter:ipv6-random}
url = http://[$${:host}]:$${:port} url = http://[$${:host}]:$${:port}
# interpreter # interpreter
[dream_interpreter] [dream-interpreter]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
command-line = ${buildout:bin-directory}/dream_interpreter command-line = ${buildout:bin-directory}/dream_interpreter
wrapper-path = $${buildout:bin-directory}/dream_interpreter wrapper-path = $${buildout:bin-directory}/dream_interpreter
# service # service
[dream_platform] [dream-platform]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
command-line = ${buildout:bin-directory}/dream_platform --debug --host $${dream_platform_parameter:host} --port $${dream_platform_parameter:port} --log $${directory:log}/dream_platform.log command-line = ${buildout:bin-directory}/dream_platform --debug --host $${dream-platform-parameter:host} --port $${dream-platform-parameter:port} --log $${directory:log}/dream_platform.log
wrapper-path = $${directory:service}/dream_platform wrapper-path = $${directory:service}/dream_platform
[dream-platform-url-available] [dream-platform-url-available]
<= monitor-promise-base <= monitor-promise-base
promise = check_url_available promise = check_url_available
name = $${:_buildout_section_name_}.py name = $${:_buildout_section_name_}.py
config-url= $${dream_platform_parameter:url} config-url= $${dream-platform-parameter:url}
[grunt_watch] [grunt-watch]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
command-line = bash -c 'cd ${dream-repository.git:location}; PATH=${nodejs:location}/bin/:$PATH ${dream-repository.git:location}/node_modules/grunt-cli/bin/grunt watch -f > $${directory:log}/grunt.log' command-line = bash -c 'cd ${dream-repository.git:location}; PATH=${nodejs:location}/bin/:$PATH ${dream-repository.git:location}/node_modules/grunt-cli/bin/grunt watch -f > $${directory:log}/grunt.log'
wrapper-path = $${directory:service}/dream_grunt_watch wrapper-path = $${directory:service}/dream_grunt_watch
# CLI # CLI
[dream_simulation] [dream-simulation]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
command-line = ${buildout:bin-directory}/dream_simulation command-line = ${buildout:bin-directory}/dream_simulation
wrapper-path = $${directory:script}/dream_simulation wrapper-path = $${directory:script}/dream_simulation
[dream_test_suite] [dream-test-suite]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
command-line = ${dream_testrunner:script} command-line = ${dream-testrunner:script}
wrapper-path = $${directory:script}/dream_test_suite wrapper-path = $${directory:script}/dream_test_suite
[directory] [directory]
...@@ -74,4 +74,4 @@ log = $${:var}/log ...@@ -74,4 +74,4 @@ log = $${:var}/log
[publish-connection-parameter] [publish-connection-parameter]
recipe = slapos.cookbook:publishurl recipe = slapos.cookbook:publishurl
url = $${dream_platform_parameter:url} url = $${dream-platform-parameter:url}
...@@ -9,8 +9,8 @@ extends = ...@@ -9,8 +9,8 @@ extends =
parts = parts =
slapos-cookbook slapos-cookbook
manpy manpy
dream_testrunner dream-testrunner
npm_install npm-install
instance instance
[gcc] [gcc]
...@@ -20,22 +20,22 @@ max_version = 0 ...@@ -20,22 +20,22 @@ max_version = 0
[python] [python]
part = python2.7 part = python2.7
[dream-repository.git]
revision = f3bcf115741886835df8c0ca0fdbf510d77d8db8
[instance] [instance]
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename} url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/instance.cfg output = ${buildout:directory}/instance.cfg
[dream_testrunner] [dream-testrunner]
recipe = zc.recipe.testrunner recipe = zc.recipe.testrunner
eggs = dream eggs = dream
script = dream_testrunner script = dream_testrunner
initialization = initialization =
${manpy:initialization} ${manpy:initialization}
[nodejs] [npm-install]
<= nodejs-8.9.4
[npm_install]
recipe = plone.recipe.command recipe = plone.recipe.command
stop-on-error = true stop-on-error = true
command = command =
...@@ -52,8 +52,4 @@ simpy = 3.0.5 ...@@ -52,8 +52,4 @@ simpy = 3.0.5
zope.dottedname = 4.1.0 zope.dottedname = 4.1.0
tablib = 0.10.0 tablib = 0.10.0
mysqlclient = 1.3.12 mysqlclient = 1.3.12
# indirect dependancies
cp.recipe.cmd = 0.5
plone.recipe.command = 1.1
zc.recipe.testrunner = 2.0.0 zc.recipe.testrunner = 2.0.0
# End To End Testing
This software release is used to run end-to-end tests of SlapOS software releases on an actual SlapOS cloud such as Rapid.Space. Since it can supply software releases and request instances on an actual cloud, it needs a SlapOS client certificate and the URLs of your tests.
## Input parameters
```
{
"client.crt": <content of client.crt>,
"client.key": <content of client.key>,
"master-url": <url of SlapOS master>,
"tests": [
{
"url": "<url of test1 script>",
"md5sum": "MD5sum of test1 script"
},
{
"url": "<url of test2 python script>",
"md5sum": "MD5sum of test2 python script"
},
...
]
}
```
Example (`e2e-parameters.json`):
```
{
"client.crt": "Certificate:...-----END CERTIFICATE-----\n",
"master-url": "https://slap.vifib.com",
"tests": [
{
"url": "https://lab.nexedi.com/lu.xu/slapos/raw/feat/end-to-end-testing/software/end-to-end-testing/test_test.py",
"md5sum": "c074373dbb4154aa924ef5781dade7a0"
}
]
}
```
## Generate client certificate
Follow [How To Set Up SlapOS Client](https://handbook.rapid.space/user/rapidspace-HowTo.Setup.SlapOS.Client) to prepare `slapos-client.cfg` if you don't have one.
A convenience script `generate_parameters.py` is provided to compute these parameters in JSON format from an existing SlapOS client configuration:
```
python3 generate_parameters.py --cfg <absolute path to slapos-client.cfg> -o <output path>
```
## Adding tests
There are 3 example tests available in the end-to-end testing SR:

- `test_test.py`: a simple passing test and a failing test
- `test_kvm.py`: requests a KVM instance of a published SR and verifies one of its connection parameters
- `test_health.py`: requests a Monitor instance of a published SR and logs the promise results
All tests must be written in Python, use a `.py` extension and have a name starting with `test_`.
Once your test is ready, pass its URL and corresponding md5sum as instance parameters; the end-to-end testing instance will then detect, download and run it automatically. A minimal test module is sketched below.
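For illustration, here is a minimal sketch modelled on the bundled `test_kvm.py`; the `test_example.py` file name, the instance name and the requested product (`self.product.kvm`) are only placeholders:

```
# test_example.py - minimal end-to-end test module (illustrative sketch)
import slapos.testing.e2e as e2e


class ExampleTest(e2e.EndToEndTestCase):

  def test(self):
    # timestamped instance name, so each run requests a fresh instance
    instance_name = e2e.time.strftime('e2e-test-example-%Y-%B-%d-%H:%M:%S')
    # request a published software release and wait for the instance to be green
    self.request(self.product.kvm, instance_name)
    self.waitUntilGreen(instance_name)
    # requesting again returns the published connection parameters
    connection_dict = self.request(self.product.kvm, instance_name)
    self.assertIn('url', connection_dict)
```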
## Running tests
### In Nexedi ERP5 automated test environment (testnodes)
When testing a software release (SR) with the ERP5 test suite, you can use "SlapOS.SoftwareReleases.IntegrationTest". Additionally, make sure to fill in the "Slapos Parameters" field with the content of the input parameters described above.
### In Theia locally
#### Using virtualenv
Using slapos.core for quick testing:
```
python3 -m venv testenv
source testenv/bin/activate
pip install -e path/to/my/slapos.core
export SLAPOS_E2E_TEST_CLIENT_CFG=my_test_client_cfg
export SLAPOS_E2E_TEST_LOG_FILE=my_test_log_file
python -m unittest my_test_file_in_development
# edit e2e.py in path/to/my/slapos.core or the instantiated one if improvements are needed
```
#### Using an instance of software/end-to-end-testing
1. Set up and instantiate the runner
```
slapos supply ~/srv/project/slapos/software/end-to-end-testing/software.cfg slaprunner
slapos request <e2e_instance_name> ~/srv/project/slapos/software/end-to-end-testing/software.cfg --parameters-file <e2e_parameter_json_file>
```
Your tests should be listed in `<e2e_parameter_json_file>` with their URL and MD5sum.
2. Go to the instance directory and run the tests
```
cd ~/srv/runner/instance/slappartX
./bin/runTestSuite
```
Downloaded tests and the reusable `e2e.py` script can be found in the `~/srv/runner/instance/slappartX/var/tests/` directory.
For quick iteration you can modify the test scripts directly in that directory, then relaunch the tests by running `./bin/runTestSuite`.
## FAQ
Q1. What is the difference between `slapos-sr-testing` and `end-to-end-testing`?
- slapos-sr-testing requests SRs on a slapproxy in a SlapOSStandalone inside the slapos-sr-testing instance (the same kind of setup as Theia or webrunner).
- end-to-end-testing requests SRs on the actual master, on real compute nodes (COMP-XXX). To do this it needs a SlapOS client certificate, which is passed as an instance parameter to the end-to-end-testing instance in the test suite.
So unlike slapos-sr-testing, end-to-end-testing does not contain the SRs it tests: it merely runs the Python test scripts (such as the `test_kvm.py` mentioned above) and integrates with the ERP5 test suite. This also means we cannot access the files in the partitions of the tested SRs, as those live on other computers; all we have access to is what a normal user requesting from the panel would have access to.
[instance.cfg]
filename = instance.cfg.in
md5sum = 962830010e0a257d52c22141db3d34cf
import argparse
import configparser
import json
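# Build the JSON parameters expected by the end-to-end testing software release
# (client.crt, client.key and master-url) from an existing slapos-client.cfg.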
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--cfg', required=True)
parser.add_argument('-o', '--output', required=True)
args = parser.parse_args()
configp = configparser.ConfigParser()
configp.read(args.cfg)
with open(configp.get('slapconsole', 'cert_file')) as f:
crt = f.read()
with open(configp.get('slapconsole', 'key_file')) as f:
key = f.read()
url = configp.get('slapos', 'master_url')
with open(args.output, 'w') as f:
json.dump(
{
'client.crt': crt,
'client.key': key,
'master-url': url
}, f, indent=2)
if __name__ == '__main__':
main()
{
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"properties": {
"client.crt": {
"type": "string",
"default": "Certificate:\ndefault-client-crt\n-----END CERTIFICATE-----\n"
},
"client.key": {
"type": "string",
"default": "-----BEGIN PRIVATE KEY-----\ndefault-client-key\n-----END PRIVATE KEY-----\n"
},
"master-url": {
"type": "string",
"format": "uri",
"default": "https://slap.vifib.com"
},
"tests": {
"type": "array",
"items": {
"type": "object",
"properties": {
"url": {
"type": "string",
"format": "uri"
},
"md5sum": {
"type": "string"
}
},
"additionalProperties": false
}
}
},
"additionalProperties": false
}
{
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Values returned by End to End Testing instanciation",
"additionalProperties": false,
"properties": {},
"type": "object"
}
[buildout]
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
extends =
${nxdtest-instance.cfg:output}
parts =
.nxdtest
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = $${slap-connection:computer-id}
partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
[directory]
recipe = slapos.cookbook:mkdirectory
home = $${buildout:directory}
bin = $${buildout:directory}/bin
etc = $${buildout:directory}/etc
var = $${buildout:directory}/var
cfg = $${buildout:directory}/.slapos
nxdtestdir = $${:var}/nxdtest
log = $${:var}/log
[client.crt]
recipe = slapos.recipe.template
output = $${directory:cfg}/client.crt
inline = $${slap-configuration:configuration.client.crt}
[client.key]
recipe = slapos.recipe.template
output = $${directory:cfg}/client.key
inline = $${slap-configuration:configuration.client.key}
[slapos-client.cfg]
recipe = slapos.recipe.template
output = $${directory:cfg}/slapos-client.cfg
inline =
[slapos]
master_url = $${slap-configuration:configuration.master-url}
[slapconsole]
cert_file = $${client.crt:output}
key_file = $${client.key:output}
[env.sh]
recipe = slapos.recipe.template:jinja2
output = $${directory:cfg}/env.sh
inline =
export HOME=$${directory:home}
[runTestSuite]
# extended from stack/nxdtest
env.sh = $${env.sh:output}
workdir = $${directory:nxdtestdir}
[.nxdtest]
recipe = slapos.recipe.template:jinja2
output = $${runTestSuite:workdir}/.nxdtest
python_for_test = ${python_for_test:executable}
testdir = $${tests:location}
log-output = $${directory:log}/e2e-testing.log
context =
key python_for_test :python_for_test
key testdir :testdir
key slapos_cfg slapos-client.cfg:output
key log_file :log-output
inline =
import os
directory = "{{ testdir }}"
slapos_cfg = {{ repr(slapos_cfg) }}
log_file = {{ repr(log_file) }}
dir_list = os.listdir(directory)
for filename in dir_list:
name, ext = os.path.splitext(filename)
if name.startswith('test') and ext == '.py':
TestCase(
name,
[{{ repr(python_for_test) }} , '-m', 'unittest', '-v', name],
cwd=directory,
env={'SLAPOS_E2E_TEST_CLIENT_CFG': slapos_cfg,
'SLAPOS_E2E_TEST_LOG_FILE': log_file},
summaryf=UnitTest.summary,
)
[tests]
recipe = slapos.recipe.build
tests = $${slap-configuration:configuration.tests}
location = $${directory:var}/tests
install =
import os
os.mkdir(location)
buildout_offline = self.buildout['buildout']['offline']
try:
# Allow to do self.download() which can only be used in "online" mode
self.buildout['buildout']['offline'] = 'false'
for i, test in enumerate(options['tests']):
tmp = self.download(test['url'], test['md5sum'])
path = os.path.join(location, 'test%s.py' % i)
os.rename(tmp, path)
finally:
# reset the parameter
self.buildout['buildout']['offline'] = buildout_offline
[buildout]
extends =
../../component/pygolang/buildout.cfg
../../stack/slapos.cfg
../../stack/nxdtest.cfg
buildout.hash.cfg
parts =
instance.cfg
slapos-cookbook
[instance.cfg]
recipe = slapos.recipe.template
output = ${buildout:directory}/instance.cfg
url = ${:_profile_base_location_}/${:filename}
[e2e.py]
recipe = slapos.recipe.build:download
output = ${buildout:directory}/${:filename}
url = ${:_profile_base_location_}/${:filename}
[python_for_test]
<= python-interpreter
interpreter = python_for_test
executable = ${buildout:bin-directory}/${:interpreter}
depends = ${lxml-python:egg}
eggs =
${pygolang:egg}
slapos.core
websocket-client
requests
opcua
[versions]
websocket-client = 1.4.2
opcua = 0.98.13
{
"name": "End-To-End-Testing",
"description": "End-To-End Testing on SlapOS Cloud",
"serialisation": "json-in-xml",
"software-type": {
"default": {
"title": "default",
"software-type": "default",
"description": "default",
"request": "instance-input-schema.json",
"response": "instance-output-schema.json"
}
}
}
import slapos.testing.e2e as e2e
import time
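# Example test: request a monitor instance on the SlapOS master and collect the
# results of its promises through the published monitor URL.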
class HealthTest(e2e.EndToEndTestCase):
def test_health_promise_feed(self):
instance_name = e2e.time.strftime('e2e-test-health-%Y-%B-%d-%H:%M:%S')
product = self.product.slapmonitor
parameter_dict = {}
self.request(
self.product.slapmonitor,
instance_name,
software_type='default',
filter_kw={"computer_guid": "COMP-4057"})
self.waitUntilGreen(instance_name)
self.connection_dict = self.getInstanceInfos(instance_name).connection_dict
resp, url = self.waitUntilMonitorURLReady(instance_name=instance_name)
self.getMonitorPromises(resp.content)
import slapos.testing.e2e as e2e
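# Example test: request a KVM instance on the SlapOS master, wait until it is
# green and check that its connection parameters contain a 'url' entry.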
class KvmTest(e2e.EndToEndTestCase):
def test(self):
instance_name = e2e.time.strftime('e2e-test-kvm-%Y-%B-%d-%H:%M:%S')
# instance_name = 'e2e-kvm-test' # avoid timestamp to reuse instance
self.request(self.product.kvm, instance_name)
self.waitUntilGreen(instance_name)
connection_dict = self.request(self.product.kvm, instance_name)
self.assertIn('url', connection_dict)
import unittest
class Test(unittest.TestCase):
def test_fail(self):
self.assertEqual(0, 1)
def test_succeed(self):
self.assertEqual(0, 0)
...@@ -103,6 +103,10 @@ ...@@ -103,6 +103,10 @@
"null" "null"
] ]
}, },
"with-max-rlimit-nofile": {
"description": "Set open file descriptors soft limit to hard limit",
"type": "boolean"
},
"family-override": { "family-override": {
"description": "Family-wide options, possibly overriding global options", "description": "Family-wide options, possibly overriding global options",
"default": {}, "default": {},
......
...@@ -25,12 +25,26 @@ ...@@ -25,12 +25,26 @@
# #
############################################################################## ##############################################################################
import hashlib
import itertools import itertools
import json import json
import os import os
import shutil
import subprocess
import sys import sys
import tempfile
import time
import urllib
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass import requests
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID
from slapos.testing.testcase import ManagedResource, makeModuleSetUpAndTestCaseClass
from slapos.testing.utils import findFreeTCPPort
_setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass( _setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
...@@ -200,3 +214,175 @@ class ERP5InstanceTestCase(SlapOSInstanceTestCase, metaclass=ERP5InstanceTestMet ...@@ -200,3 +214,175 @@ class ERP5InstanceTestCase(SlapOSInstanceTestCase, metaclass=ERP5InstanceTestMet
def getComputerPartitionPath(cls, partition_reference): def getComputerPartitionPath(cls, partition_reference):
partition_id = cls.getComputerPartition(partition_reference).getId() partition_id = cls.getComputerPartition(partition_reference).getId()
return os.path.join(cls.slap._instance_root, partition_id) return os.path.join(cls.slap._instance_root, partition_id)
class CaucaseService(ManagedResource):
"""A caucase service.
"""
url: str = None
directory: str = None
_caucased_process: subprocess.Popen = None
def open(self) -> None:
# start a caucased and server certificate.
software_release_root_path = os.path.join(
self._cls.slap._software_root,
hashlib.md5(self._cls.getSoftwareURL().encode()).hexdigest(),
)
caucased_path = os.path.join(software_release_root_path, 'bin', 'caucased')
self.directory = tempfile.mkdtemp()
caucased_dir = os.path.join(self.directory, 'caucased')
os.mkdir(caucased_dir)
os.mkdir(os.path.join(caucased_dir, 'user'))
os.mkdir(os.path.join(caucased_dir, 'service'))
backend_caucased_netloc = f'{self._cls._ipv4_address}:{findFreeTCPPort(self._cls._ipv4_address)}'
self.url = 'http://' + backend_caucased_netloc
self._caucased_process = subprocess.Popen(
[
caucased_path,
'--db', os.path.join(caucased_dir, 'caucase.sqlite'),
'--server-key', os.path.join(caucased_dir, 'server.key.pem'),
'--netloc', backend_caucased_netloc,
'--service-auto-approve-count', '1',
],
# capture subprocess output not to pollute test's own stdout
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
for _ in range(30):
try:
if requests.get(self.url).status_code == 200:
break
except Exception:
pass
time.sleep(1)
else:
raise RuntimeError('caucased failed to start.')
def close(self) -> None:
self._caucased_process.terminate()
self._caucased_process.wait()
self._caucased_process.stdout.close()
shutil.rmtree(self.directory)
@property
def ca_crt_path(self) -> str:
"""Path of the CA certificate from this caucase.
"""
ca_crt_path = os.path.join(self.directory, 'ca.crt.pem')
if not os.path.exists(ca_crt_path):
with open(ca_crt_path, 'w') as f:
f.write(
requests.get(urllib.parse.urljoin(
self.url,
'/cas/crt/ca.crt.pem',
)).text)
return ca_crt_path
class CaucaseCertificate(ManagedResource):
"""A certificate signed by a caucase service.
"""
ca_crt_file: str = None
crl_file: str = None
csr_file: str = None
cert_file: str = None
key_file: str = None
def open(self) -> None:
self.tmpdir = tempfile.mkdtemp()
self.ca_crt_file = os.path.join(self.tmpdir, 'ca-crt.pem')
self.crl_file = os.path.join(self.tmpdir, 'ca-crl.pem')
self.csr_file = os.path.join(self.tmpdir, 'csr.pem')
self.cert_file = os.path.join(self.tmpdir, 'crt.pem')
self.key_file = os.path.join(self.tmpdir, 'key.pem')
def close(self) -> None:
shutil.rmtree(self.tmpdir)
@property
def _caucase_path(self) -> str:
"""path of caucase executable.
"""
software_release_root_path = os.path.join(
self._cls.slap._software_root,
hashlib.md5(self._cls.getSoftwareURL().encode()).hexdigest(),
)
return os.path.join(software_release_root_path, 'bin', 'caucase')
def request(self, common_name: str, caucase: CaucaseService) -> None:
"""Generate certificate and request signature to the caucase service.
This overwrites any previously requested certificate for this instance.
"""
cas_args = [
self._caucase_path,
'--ca-url', caucase.url,
'--ca-crt', self.ca_crt_file,
'--crl', self.crl_file,
]
key = rsa.generate_private_key(
public_exponent=65537,
key_size=2048,
backend=default_backend()
)
with open(self.key_file, 'wb') as f:
f.write(
key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption(),
))
csr = x509.CertificateSigningRequestBuilder().subject_name(
x509.Name([
x509.NameAttribute(
NameOID.COMMON_NAME,
common_name,
),
])).sign(
key,
hashes.SHA256(),
default_backend(),
)
with open(self.csr_file, 'wb') as f:
f.write(csr.public_bytes(serialization.Encoding.PEM))
csr_id = subprocess.check_output(
cas_args + [
'--send-csr', self.csr_file,
],
).split()[0].decode()
assert csr_id
for _ in range(30):
if not subprocess.call(
cas_args + [
'--get-crt', csr_id, self.cert_file,
],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
) == 0:
break
else:
time.sleep(1)
else:
raise RuntimeError('getting service certificate failed.')
with open(self.cert_file) as cert_file:
assert 'BEGIN CERTIFICATE' in cert_file.read()
def revoke(self, caucase: CaucaseService) -> None:
"""Revoke the client certificate on this caucase instance.
"""
subprocess.check_call([
self._caucase_path,
'--ca-url', caucase.url,
'--ca-crt', self.ca_crt_file,
'--crl', self.crl_file,
'--revoke-crt', self.cert_file, self.key_file,
])
import glob import glob
import hashlib
import json import json
import logging import logging
import os import os
import re import re
import shutil
import subprocess import subprocess
import tempfile
import time import time
import urllib.parse import urllib.parse
from http.server import BaseHTTPRequestHandler from http.server import BaseHTTPRequestHandler
...@@ -15,16 +12,10 @@ from unittest import mock ...@@ -15,16 +12,10 @@ from unittest import mock
import pexpect import pexpect
import psutil import psutil
import requests import requests
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID
from slapos.testing.testcase import ManagedResource from slapos.testing.utils import CrontabMixin, ManagedHTTPServer
from slapos.testing.utils import CrontabMixin, ManagedHTTPServer, findFreeTCPPort
from . import ERP5InstanceTestCase, default, matrix, setUpModule from . import CaucaseCertificate, CaucaseService, ERP5InstanceTestCase, default, matrix, setUpModule
setUpModule # pyflakes setUpModule # pyflakes
...@@ -34,8 +25,7 @@ class EchoHTTPServer(ManagedHTTPServer): ...@@ -34,8 +25,7 @@ class EchoHTTPServer(ManagedHTTPServer):
encoded in json. encoded in json.
""" """
class RequestHandler(BaseHTTPRequestHandler): class RequestHandler(BaseHTTPRequestHandler):
def do_GET(self): def do_GET(self) -> None:
# type: () -> None
self.send_response(200) self.send_response(200)
self.send_header("Content-Type", "application/json") self.send_header("Content-Type", "application/json")
response = json.dumps( response = json.dumps(
...@@ -57,8 +47,7 @@ class EchoHTTP11Server(ManagedHTTPServer): ...@@ -57,8 +47,7 @@ class EchoHTTP11Server(ManagedHTTPServer):
""" """
class RequestHandler(BaseHTTPRequestHandler): class RequestHandler(BaseHTTPRequestHandler):
protocol_version = 'HTTP/1.1' protocol_version = 'HTTP/1.1'
def do_GET(self): def do_GET(self) -> None:
# type: () -> None
self.send_response(200) self.send_response(200)
self.send_header("Content-Type", "application/json") self.send_header("Content-Type", "application/json")
response = json.dumps( response = json.dumps(
...@@ -75,61 +64,6 @@ class EchoHTTP11Server(ManagedHTTPServer): ...@@ -75,61 +64,6 @@ class EchoHTTP11Server(ManagedHTTPServer):
log_message = logging.getLogger(__name__ + '.EchoHTTP11Server').info log_message = logging.getLogger(__name__ + '.EchoHTTP11Server').info
class CaucaseService(ManagedResource):
"""A caucase service.
"""
url = None # type: str
directory = None # type: str
_caucased_process = None # type: subprocess.Popen
def open(self):
# type: () -> None
# start a caucased and server certificate.
software_release_root_path = os.path.join(
self._cls.slap._software_root,
hashlib.md5(self._cls.getSoftwareURL().encode()).hexdigest(),
)
caucased_path = os.path.join(software_release_root_path, 'bin', 'caucased')
self.directory = tempfile.mkdtemp()
caucased_dir = os.path.join(self.directory, 'caucased')
os.mkdir(caucased_dir)
os.mkdir(os.path.join(caucased_dir, 'user'))
os.mkdir(os.path.join(caucased_dir, 'service'))
backend_caucased_netloc = f'{self._cls._ipv4_address}:{findFreeTCPPort(self._cls._ipv4_address)}'
self.url = 'http://' + backend_caucased_netloc
self._caucased_process = subprocess.Popen(
[
caucased_path,
'--db', os.path.join(caucased_dir, 'caucase.sqlite'),
'--server-key', os.path.join(caucased_dir, 'server.key.pem'),
'--netloc', backend_caucased_netloc,
'--service-auto-approve-count', '1',
],
# capture subprocess output not to pollute test's own stdout
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
for _ in range(30):
try:
if requests.get(self.url).status_code == 200:
break
except Exception:
pass
time.sleep(1)
else:
raise RuntimeError('caucased failed to start.')
def close(self):
# type: () -> None
self._caucased_process.terminate()
self._caucased_process.wait()
self._caucased_process.stdout.close()
shutil.rmtree(self.directory)
class BalancerTestCase(ERP5InstanceTestCase): class BalancerTestCase(ERP5InstanceTestCase):
# We explicitly specify 'balancer' as our software type here, # We explicitly specify 'balancer' as our software type here,
# therefore we don't request ZODB. We therefore don't # therefore we don't request ZODB. We therefore don't
...@@ -143,8 +77,7 @@ class BalancerTestCase(ERP5InstanceTestCase): ...@@ -143,8 +77,7 @@ class BalancerTestCase(ERP5InstanceTestCase):
return 'balancer' return 'balancer'
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls) -> dict:
# type: () -> dict
return { return {
'tcpv4-port': 8000, 'tcpv4-port': 8000,
'computer-memory-percent-threshold': 100, 'computer-memory-percent-threshold': 100,
...@@ -179,12 +112,10 @@ class BalancerTestCase(ERP5InstanceTestCase): ...@@ -179,12 +112,10 @@ class BalancerTestCase(ERP5InstanceTestCase):
} }
@classmethod @classmethod
def getInstanceParameterDict(cls): def getInstanceParameterDict(cls) -> dict:
# type: () -> dict
return {'_': json.dumps(cls._getInstanceParameterDict())} return {'_': json.dumps(cls._getInstanceParameterDict())}
def setUp(self): def setUp(self) -> None:
# type: () -> None
self.default_balancer_url = json.loads( self.default_balancer_url = json.loads(
self.computer_partition.getConnectionParameterDict()['_'])['default'] self.computer_partition.getConnectionParameterDict()['_'])['default']
...@@ -195,8 +126,7 @@ class SlowHTTPServer(ManagedHTTPServer): ...@@ -195,8 +126,7 @@ class SlowHTTPServer(ManagedHTTPServer):
Timeout is 2 seconds by default, and can be specified in the path of the URL Timeout is 2 seconds by default, and can be specified in the path of the URL
""" """
class RequestHandler(BaseHTTPRequestHandler): class RequestHandler(BaseHTTPRequestHandler):
def do_GET(self): def do_GET(self) -> None:
# type: () -> None
self.send_response(200) self.send_response(200)
self.send_header("Content-Type", "text/plain") self.send_header("Content-Type", "text/plain")
timeout = 2 timeout = 2
...@@ -214,8 +144,7 @@ class SlowHTTPServer(ManagedHTTPServer): ...@@ -214,8 +144,7 @@ class SlowHTTPServer(ManagedHTTPServer):
class TestTimeout(BalancerTestCase, CrontabMixin): class TestTimeout(BalancerTestCase, CrontabMixin):
__partition_reference__ = 't' __partition_reference__ = 't'
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls) -> dict:
# type: () -> dict
parameter_dict = super()._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
# use a slow server instead # use a slow server instead
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("slow_web_server", SlowHTTPServer).netloc, 1, False]] parameter_dict['dummy_http_server'] = [[cls.getManagedResource("slow_web_server", SlowHTTPServer).netloc, 1, False]]
...@@ -223,8 +152,7 @@ class TestTimeout(BalancerTestCase, CrontabMixin): ...@@ -223,8 +152,7 @@ class TestTimeout(BalancerTestCase, CrontabMixin):
parameter_dict['timeout-dict'] = {'default': 1} parameter_dict['timeout-dict'] = {'default': 1}
return parameter_dict return parameter_dict
def test_timeout(self): def test_timeout(self) -> None:
# type: () -> None
self.assertEqual( self.assertEqual(
requests.get( requests.get(
urllib.parse.urljoin(self.default_balancer_url, '/1'), urllib.parse.urljoin(self.default_balancer_url, '/1'),
...@@ -242,15 +170,13 @@ class TestLog(BalancerTestCase, CrontabMixin): ...@@ -242,15 +170,13 @@ class TestLog(BalancerTestCase, CrontabMixin):
""" """
__partition_reference__ = 'l' __partition_reference__ = 'l'
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls) -> dict:
# type: () -> dict
parameter_dict = super()._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
# use a slow server instead # use a slow server instead
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("slow_web_server", SlowHTTPServer).netloc, 1, False]] parameter_dict['dummy_http_server'] = [[cls.getManagedResource("slow_web_server", SlowHTTPServer).netloc, 1, False]]
return parameter_dict return parameter_dict
def test_access_log_format(self): def test_access_log_format(self) -> None:
# type: () -> None
requests.get( requests.get(
urllib.parse.urljoin(self.default_balancer_url, '/url_path'), urllib.parse.urljoin(self.default_balancer_url, '/url_path'),
verify=False, verify=False,
...@@ -274,8 +200,7 @@ class TestLog(BalancerTestCase, CrontabMixin): ...@@ -274,8 +200,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
self.assertGreater(request_time, 2 * 1000) self.assertGreater(request_time, 2 * 1000)
self.assertLess(request_time, 20 * 1000) self.assertLess(request_time, 20 * 1000)
def test_access_log_apachedex_report(self): def test_access_log_apachedex_report(self) -> None:
# type: () -> None
# make a request so that we have something in the logs # make a request so that we have something in the logs
requests.get(self.default_balancer_url, verify=False) requests.get(self.default_balancer_url, verify=False)
...@@ -297,8 +222,7 @@ class TestLog(BalancerTestCase, CrontabMixin): ...@@ -297,8 +222,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
# having this table means that apachedex could parse some lines. # having this table means that apachedex could parse some lines.
self.assertIn('<h2>Hits per status code</h2>', report_text) self.assertIn('<h2>Hits per status code</h2>', report_text)
def test_access_log_rotation(self): def test_access_log_rotation(self) -> None:
# type: () -> None
# run logrotate a first time so that it create state files # run logrotate a first time so that it create state files
self._executeCrontabAtDate('logrotate', '2000-01-01') self._executeCrontabAtDate('logrotate', '2000-01-01')
...@@ -324,8 +248,7 @@ class TestLog(BalancerTestCase, CrontabMixin): ...@@ -324,8 +248,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
self.assertTrue(os.path.exists(rotated_log_file + '.xz')) self.assertTrue(os.path.exists(rotated_log_file + '.xz'))
self.assertFalse(os.path.exists(rotated_log_file)) self.assertFalse(os.path.exists(rotated_log_file))
def test_error_log(self): def test_error_log(self) -> None:
# type: () -> None
# stop backend server # stop backend server
backend_server = self.getManagedResource("slow_web_server", SlowHTTPServer) backend_server = self.getManagedResource("slow_web_server", SlowHTTPServer)
self.addCleanup(backend_server.open) self.addCleanup(backend_server.open)
...@@ -356,8 +279,7 @@ class BalancerCookieHTTPServer(ManagedHTTPServer): ...@@ -356,8 +279,7 @@ class BalancerCookieHTTPServer(ManagedHTTPServer):
def RequestHandler(self): def RequestHandler(self):
server = self server = self
class RequestHandler(BaseHTTPRequestHandler): class RequestHandler(BaseHTTPRequestHandler):
def do_GET(self): def do_GET(self) -> None:
# type: () -> None
self.send_response(200) self.send_response(200)
self.send_header("Content-Type", "text/plain") self.send_header("Content-Type", "text/plain")
if self.path == '/set_cookie': if self.path == '/set_cookie':
...@@ -378,8 +300,7 @@ class TestBalancer(BalancerTestCase): ...@@ -378,8 +300,7 @@ class TestBalancer(BalancerTestCase):
""" """
__partition_reference__ = 'b' __partition_reference__ = 'b'
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls) -> dict:
# type: () -> dict
parameter_dict = super()._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
# use two backend servers # use two backend servers
...@@ -389,16 +310,14 @@ class TestBalancer(BalancerTestCase): ...@@ -389,16 +310,14 @@ class TestBalancer(BalancerTestCase):
] ]
return parameter_dict return parameter_dict
def test_balancer_round_robin(self): def test_balancer_round_robin(self) -> None:
# type: () -> None
# requests are by default balanced to both servers # requests are by default balanced to both servers
self.assertEqual( self.assertEqual(
{requests.get(self.default_balancer_url, verify=False).text for _ in range(10)}, {requests.get(self.default_balancer_url, verify=False).text for _ in range(10)},
{'backend_web_server1', 'backend_web_server2'} {'backend_web_server1', 'backend_web_server2'}
) )
def test_balancer_server_down(self): def test_balancer_server_down(self) -> None:
# type: () -> None
# if one backend is down, it is excluded from balancer # if one backend is down, it is excluded from balancer
self.getManagedResource("backend_web_server2", BalancerCookieHTTPServer).close() self.getManagedResource("backend_web_server2", BalancerCookieHTTPServer).close()
self.addCleanup(self.getManagedResource("backend_web_server2", BalancerCookieHTTPServer).open) self.addCleanup(self.getManagedResource("backend_web_server2", BalancerCookieHTTPServer).open)
...@@ -407,8 +326,7 @@ class TestBalancer(BalancerTestCase): ...@@ -407,8 +326,7 @@ class TestBalancer(BalancerTestCase):
{'backend_web_server1',} {'backend_web_server1',}
) )
def test_balancer_set_cookie(self): def test_balancer_set_cookie(self) -> None:
# type: () -> None
# if backend provides a "SERVERID" cookie, balancer will overwrite it with the # if backend provides a "SERVERID" cookie, balancer will overwrite it with the
# backend selected by balancing algorithm # backend selected by balancing algorithm
self.assertIn( self.assertIn(
...@@ -416,8 +334,7 @@ class TestBalancer(BalancerTestCase): ...@@ -416,8 +334,7 @@ class TestBalancer(BalancerTestCase):
('default-0', 'default-1'), ('default-0', 'default-1'),
) )
def test_balancer_respects_sticky_cookie(self): def test_balancer_respects_sticky_cookie(self) -> None:
# type: () -> None
# if request is made with the sticky cookie, the client stick on one balancer # if request is made with the sticky cookie, the client stick on one balancer
cookies = dict(SERVERID='default-1') cookies = dict(SERVERID='default-1')
self.assertEqual( self.assertEqual(
...@@ -432,8 +349,7 @@ class TestBalancer(BalancerTestCase): ...@@ -432,8 +349,7 @@ class TestBalancer(BalancerTestCase):
requests.get(self.default_balancer_url, verify=False, cookies=cookies).text, requests.get(self.default_balancer_url, verify=False, cookies=cookies).text,
'backend_web_server1') 'backend_web_server1')
def test_balancer_stats_socket(self): def test_balancer_stats_socket(self) -> None:
# type: () -> None
# real time statistics can be obtained by using the stats socket and there # real time statistics can be obtained by using the stats socket and there
# is a wrapper which makes this a bit easier. # is a wrapper which makes this a bit easier.
socat_process = subprocess.Popen( socat_process = subprocess.Popen(
...@@ -458,8 +374,7 @@ class TestTestRunnerEntryPoints(BalancerTestCase): ...@@ -458,8 +374,7 @@ class TestTestRunnerEntryPoints(BalancerTestCase):
""" """
__partition_reference__ = 't' __partition_reference__ = 't'
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls) -> dict:
# type: () -> dict
parameter_dict = super()._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
parameter_dict['dummy_http_server-test-runner-address-list'] = [ parameter_dict['dummy_http_server-test-runner-address-list'] = [
...@@ -478,8 +393,7 @@ class TestTestRunnerEntryPoints(BalancerTestCase): ...@@ -478,8 +393,7 @@ class TestTestRunnerEntryPoints(BalancerTestCase):
] ]
return parameter_dict return parameter_dict
def test_use_proper_backend(self): def test_use_proper_backend(self) -> None:
# type: () -> None
# requests are directed to proper backend based on URL path # requests are directed to proper backend based on URL path
test_runner_url_list = self.getRootPartitionConnectionParameterDict( test_runner_url_list = self.getRootPartitionConnectionParameterDict(
)['default-test-runner-url-list'] )['default-test-runner-url-list']
...@@ -532,8 +446,7 @@ class TestHTTP(BalancerTestCase): ...@@ -532,8 +446,7 @@ class TestHTTP(BalancerTestCase):
"""Check HTTP protocol with a HTTP/1.1 backend """Check HTTP protocol with a HTTP/1.1 backend
""" """
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls) -> dict:
# type: () -> dict
parameter_dict = super()._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
# use a HTTP/1.1 server instead # use a HTTP/1.1 server instead
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("HTTP/1.1 Server", EchoHTTP11Server).netloc, 1, False]] parameter_dict['dummy_http_server'] = [[cls.getManagedResource("HTTP/1.1 Server", EchoHTTP11Server).netloc, 1, False]]
...@@ -541,8 +454,7 @@ class TestHTTP(BalancerTestCase): ...@@ -541,8 +454,7 @@ class TestHTTP(BalancerTestCase):
__partition_reference__ = 'h' __partition_reference__ = 'h'
def test_http_version(self): def test_http_version(self) -> None:
# type: () -> None
self.assertEqual( self.assertEqual(
subprocess.check_output([ subprocess.check_output([
'curl', 'curl',
...@@ -558,8 +470,7 @@ class TestHTTP(BalancerTestCase): ...@@ -558,8 +470,7 @@ class TestHTTP(BalancerTestCase):
b'2', b'2',
) )
def test_keep_alive(self): def test_keep_alive(self) -> None:
# type: () -> None
# when doing two requests, connection is established only once # when doing two requests, connection is established only once
with requests.Session() as session: with requests.Session() as session:
session.verify = False session.verify = False
...@@ -594,8 +505,7 @@ class ContentTypeHTTPServer(ManagedHTTPServer): ...@@ -594,8 +505,7 @@ class ContentTypeHTTPServer(ManagedHTTPServer):
""" """
class RequestHandler(BaseHTTPRequestHandler): class RequestHandler(BaseHTTPRequestHandler):
protocol_version = 'HTTP/1.1' protocol_version = 'HTTP/1.1'
def do_GET(self): def do_GET(self) -> None:
# type: () -> None
self.send_response(200) self.send_response(200)
if self.path == '/': if self.path == '/':
self.send_header("Content-Length", '0') self.send_header("Content-Length", '0')
...@@ -615,16 +525,14 @@ class TestContentEncoding(BalancerTestCase): ...@@ -615,16 +525,14 @@ class TestContentEncoding(BalancerTestCase):
""" """
__partition_reference__ = 'ce' __partition_reference__ = 'ce'
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls) -> dict:
# type: () -> dict
parameter_dict = super()._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
parameter_dict['dummy_http_server'] = [ parameter_dict['dummy_http_server'] = [
[cls.getManagedResource("content_type_server", ContentTypeHTTPServer).netloc, 1, False], [cls.getManagedResource("content_type_server", ContentTypeHTTPServer).netloc, 1, False],
] ]
return parameter_dict return parameter_dict
def test_gzip_encoding(self): def test_gzip_encoding(self) -> None:
# type: () -> None
for content_type in ( for content_type in (
'text/cache-manifest', 'text/cache-manifest',
'text/html', 'text/html',
...@@ -652,129 +560,18 @@ class TestContentEncoding(BalancerTestCase): ...@@ -652,129 +560,18 @@ class TestContentEncoding(BalancerTestCase):
'{} uses wrong encoding: {}'.format(content_type, resp.headers.get('Content-Encoding'))) '{} uses wrong encoding: {}'.format(content_type, resp.headers.get('Content-Encoding')))
self.assertEqual(resp.text, 'OK') self.assertEqual(resp.text, 'OK')
def test_no_gzip_encoding(self): def test_no_gzip_encoding(self) -> None:
# type: () -> None
resp = requests.get(urllib.parse.urljoin(self.default_balancer_url, '/image/png'), verify=False) resp = requests.get(urllib.parse.urljoin(self.default_balancer_url, '/image/png'), verify=False)
self.assertNotIn('Content-Encoding', resp.headers) self.assertNotIn('Content-Encoding', resp.headers)
self.assertEqual(resp.text, 'OK') self.assertEqual(resp.text, 'OK')
class CaucaseCertificate(ManagedResource):
"""A certificate signed by a caucase service.
"""
ca_crt_file = None # type: str
crl_file = None # type: str
csr_file = None # type: str
cert_file = None # type: str
key_file = None # type: str
def open(self):
# type: () -> None
self.tmpdir = tempfile.mkdtemp()
self.ca_crt_file = os.path.join(self.tmpdir, 'ca-crt.pem')
self.crl_file = os.path.join(self.tmpdir, 'ca-crl.pem')
self.csr_file = os.path.join(self.tmpdir, 'csr.pem')
self.cert_file = os.path.join(self.tmpdir, 'crt.pem')
self.key_file = os.path.join(self.tmpdir, 'key.pem')
def close(self):
# type: () -> None
shutil.rmtree(self.tmpdir)
@property
def _caucase_path(self):
# type: () -> str
"""path of caucase executable.
"""
software_release_root_path = os.path.join(
self._cls.slap._software_root,
hashlib.md5(self._cls.getSoftwareURL().encode()).hexdigest(),
)
return os.path.join(software_release_root_path, 'bin', 'caucase')
def request(self, common_name, caucase):
# type: (str, CaucaseService) -> None
"""Generate certificate and request signature to the caucase service.
This overwrite any previously requested certificate for this instance.
"""
cas_args = [
self._caucase_path,
'--ca-url', caucase.url,
'--ca-crt', self.ca_crt_file,
'--crl', self.crl_file,
]
key = rsa.generate_private_key(
public_exponent=65537,
key_size=2048,
backend=default_backend()
)
with open(self.key_file, 'wb') as f:
f.write(
key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption(),
))
csr = x509.CertificateSigningRequestBuilder().subject_name(
x509.Name([
x509.NameAttribute(
NameOID.COMMON_NAME,
common_name,
),
])).sign(
key,
hashes.SHA256(),
default_backend(),
)
with open(self.csr_file, 'wb') as f:
f.write(csr.public_bytes(serialization.Encoding.PEM))
csr_id = subprocess.check_output(
cas_args + [
'--send-csr', self.csr_file,
],
).split()[0].decode()
assert csr_id
for _ in range(30):
if not subprocess.call(
cas_args + [
'--get-crt', csr_id, self.cert_file,
],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
) == 0:
break
else:
time.sleep(1)
else:
raise RuntimeError('getting service certificate failed.')
with open(self.cert_file) as cert_file:
assert 'BEGIN CERTIFICATE' in cert_file.read()
def revoke(self, caucase):
# type: (CaucaseService) -> None
"""Revoke the client certificate on this caucase instance.
"""
subprocess.check_call([
self._caucase_path,
'--ca-url', caucase.url,
'--ca-crt', self.ca_crt_file,
'--crl', self.crl_file,
'--revoke-crt', self.cert_file, self.key_file,
])
class TestFrontendXForwardedFor(BalancerTestCase): class TestFrontendXForwardedFor(BalancerTestCase):
__partition_reference__ = 'xff' __partition_reference__ = 'xff'
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls) -> dict:
# type: () -> dict
frontend_caucase = cls.getManagedResource('frontend_caucase', CaucaseService) frontend_caucase = cls.getManagedResource('frontend_caucase', CaucaseService)
certificate = cls.getManagedResource('client_certificate', CaucaseCertificate) certificate = cls.getManagedResource('client_certificate', CaucaseCertificate)
certificate.request('shared frontend', frontend_caucase) certificate.request('shared frontend', frontend_caucase)
...@@ -791,8 +588,7 @@ class TestFrontendXForwardedFor(BalancerTestCase): ...@@ -791,8 +588,7 @@ class TestFrontendXForwardedFor(BalancerTestCase):
parameter_dict['ssl']['frontend-caucase-url-list'] = [frontend_caucase.url] parameter_dict['ssl']['frontend-caucase-url-list'] = [frontend_caucase.url]
return parameter_dict return parameter_dict
def test_x_forwarded_for_added_when_verified_connection(self): def test_x_forwarded_for_added_when_verified_connection(self) -> None:
# type: () -> None
client_certificate = self.getManagedResource('client_certificate', CaucaseCertificate) client_certificate = self.getManagedResource('client_certificate', CaucaseCertificate)
for backend in ('default', 'default-auth'): for backend in ('default', 'default-auth'):
...@@ -805,8 +601,7 @@ class TestFrontendXForwardedFor(BalancerTestCase): ...@@ -805,8 +601,7 @@ class TestFrontendXForwardedFor(BalancerTestCase):
).json() ).json()
self.assertEqual(result['Incoming Headers'].get('x-forwarded-for', '').split(', ')[0], '1.2.3.4') self.assertEqual(result['Incoming Headers'].get('x-forwarded-for', '').split(', ')[0], '1.2.3.4')
def test_x_forwarded_for_stripped_when_no_certificate(self): def test_x_forwarded_for_stripped_when_no_certificate(self) -> None:
# type: () -> None
balancer_url = json.loads(self.computer_partition.getConnectionParameterDict()['_'])['default'] balancer_url = json.loads(self.computer_partition.getConnectionParameterDict()['_'])['default']
result = requests.get( result = requests.get(
balancer_url, balancer_url,
...@@ -822,8 +617,7 @@ class TestFrontendXForwardedFor(BalancerTestCase): ...@@ -822,8 +617,7 @@ class TestFrontendXForwardedFor(BalancerTestCase):
verify=False, verify=False,
) )
def test_x_forwarded_for_stripped_when_not_verified_certificate(self): def test_x_forwarded_for_stripped_when_not_verified_certificate(self) -> None:
# type: () -> None
balancer_url = json.loads(self.computer_partition.getConnectionParameterDict()['_'])['default'] balancer_url = json.loads(self.computer_partition.getConnectionParameterDict()['_'])['default']
# certificate from an unknown CA # certificate from an unknown CA
...@@ -855,8 +649,7 @@ class TestServerTLSProvidedCertificate(BalancerTestCase): ...@@ -855,8 +649,7 @@ class TestServerTLSProvidedCertificate(BalancerTestCase):
__partition_reference__ = 's' __partition_reference__ = 's'
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls) -> dict:
# type: () -> dict
server_caucase = cls.getManagedResource('server_caucase', CaucaseService) server_caucase = cls.getManagedResource('server_caucase', CaucaseService)
server_certificate = cls.getManagedResource('server_certificate', CaucaseCertificate) server_certificate = cls.getManagedResource('server_certificate', CaucaseCertificate)
server_certificate.request(cls._ipv4_address, server_caucase) server_certificate.request(cls._ipv4_address, server_caucase)
...@@ -867,8 +660,7 @@ class TestServerTLSProvidedCertificate(BalancerTestCase): ...@@ -867,8 +660,7 @@ class TestServerTLSProvidedCertificate(BalancerTestCase):
parameter_dict['ssl']['key'] = f.read() parameter_dict['ssl']['key'] = f.read()
return parameter_dict return parameter_dict
def test_certificate_validates_with_provided_ca(self): def test_certificate_validates_with_provided_ca(self) -> None:
# type: () -> None
server_certificate = self.getManagedResource("server_certificate", CaucaseCertificate) server_certificate = self.getManagedResource("server_certificate", CaucaseCertificate)
requests.get(self.default_balancer_url, verify=server_certificate.ca_crt_file) requests.get(self.default_balancer_url, verify=server_certificate.ca_crt_file)
...@@ -877,8 +669,7 @@ class TestClientTLS(BalancerTestCase): ...@@ -877,8 +669,7 @@ class TestClientTLS(BalancerTestCase):
__partition_reference__ = 'c' __partition_reference__ = 'c'
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls) -> dict:
# type: () -> dict
frontend_caucase1 = cls.getManagedResource('frontend_caucase1', CaucaseService) frontend_caucase1 = cls.getManagedResource('frontend_caucase1', CaucaseService)
certificate1 = cls.getManagedResource('client_certificate1', CaucaseCertificate) certificate1 = cls.getManagedResource('client_certificate1', CaucaseCertificate)
certificate1.request('client_certificate1', frontend_caucase1) certificate1.request('client_certificate1', frontend_caucase1)
...@@ -897,8 +688,7 @@ class TestClientTLS(BalancerTestCase): ...@@ -897,8 +688,7 @@ class TestClientTLS(BalancerTestCase):
] ]
return parameter_dict return parameter_dict
def test_refresh_crl(self): def test_refresh_crl(self) -> None:
# type: () -> None
logger = self.logger logger = self.logger
class DebugLogFile: class DebugLogFile:
...@@ -916,8 +706,7 @@ class TestClientTLS(BalancerTestCase): ...@@ -916,8 +706,7 @@ class TestClientTLS(BalancerTestCase):
# when the client certificate can be authenticated, the backend receives the CN of # when the client certificate can be authenticated, the backend receives the CN of
# the client certificate in "remote-user" header # the client certificate in "remote-user" header
def _make_request(): def _make_request() -> dict:
# type: () -> dict
return requests.get( return requests.get(
self.default_balancer_url, self.default_balancer_url,
cert=(client_certificate.cert_file, client_certificate.key_file), cert=(client_certificate.cert_file, client_certificate.key_file),
...@@ -976,8 +765,7 @@ class TestPathBasedRouting(BalancerTestCase): ...@@ -976,8 +765,7 @@ class TestPathBasedRouting(BalancerTestCase):
__partition_reference__ = 'pbr' __partition_reference__ = 'pbr'
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls) -> dict:
# type: () -> dict
parameter_dict = super()._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
parameter_dict['zope-family-dict'][ parameter_dict['zope-family-dict'][
'second' 'second'
...@@ -1003,8 +791,7 @@ class TestPathBasedRouting(BalancerTestCase): ...@@ -1003,8 +791,7 @@ class TestPathBasedRouting(BalancerTestCase):
] ]
return parameter_dict return parameter_dict
def test_routing(self): def test_routing(self) -> None:
# type: () -> None
published_dict = json.loads(self.computer_partition.getConnectionParameterDict()['_']) published_dict = json.loads(self.computer_partition.getConnectionParameterDict()['_'])
scheme = 'scheme' scheme = 'scheme'
netloc = 'example.com:8080' netloc = 'example.com:8080'
...@@ -1015,8 +802,7 @@ class TestPathBasedRouting(BalancerTestCase): ...@@ -1015,8 +802,7 @@ class TestPathBasedRouting(BalancerTestCase):
# For easier reading of test data, visually separating the virtual host # For easier reading of test data, visually separating the virtual host
# base from the virtual host root # base from the virtual host root
vhr = '/VirtualHostRoot' vhr = '/VirtualHostRoot'
def assertRoutingEqual(family, path, expected_path): def assertRoutingEqual(family: str, path: str, expected_path: str) -> None:
# type: (str, str, str) -> None
# sanity check: unlike the rules, this test is sensitive to outermost # sanity check: unlike the rules, this test is sensitive to outermost
# slashes, and paths must be absolute-ish for code simplicity. # slashes, and paths must be absolute-ish for code simplicity.
assert path.startswith('/') assert path.startswith('/')
......
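For orientation, a minimal sketch of the behaviour exercised above, assuming a balancer URL and a caucase-issued client key pair (all values hypothetical): a spoofed X-Forwarded-For only survives when the request carries a certificate the balancer can verify; otherwise the header is stripped before reaching the backend.

```python
import requests

# Hypothetical values: balancer URL published by the instance and a client
# certificate issued by a caucase CA that the balancer trusts.
balancer_url = 'https://[2001:db8::1]:2152/'
client_cert = ('client.crt', 'client.key')

# With a verified client certificate, the injected X-Forwarded-For is kept
# (the real client address is appended after it along the proxy chain).
verified = requests.get(
    balancer_url,
    headers={'X-Forwarded-For': '1.2.3.4'},
    cert=client_cert,
    verify=False,
)

# Without a certificate (or with one signed by an unknown CA), the header
# does not reach the backend.
anonymous = requests.get(
    balancer_url,
    headers={'X-Forwarded-For': '1.2.3.4'},
    verify=False,
)
```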
...@@ -32,6 +32,7 @@ import glob ...@@ -32,6 +32,7 @@ import glob
import http.client import http.client
import json import json
import os import os
import resource
import shutil import shutil
import socket import socket
import sqlite3 import sqlite3
...@@ -633,8 +634,7 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin): ...@@ -633,8 +634,7 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin):
)): )):
os.unlink(logfile) os.unlink(logfile)
def _getCrontabCommand(self, crontab_name): def _getCrontabCommand(self, crontab_name: str) -> str:
# type: (str) -> str
"""Read a crontab and return the command that is executed. """Read a crontab and return the command that is executed.
overloaded to use crontab from zope partition overloaded to use crontab from zope partition
...@@ -1045,8 +1045,7 @@ class TestNEO(ZopeSkinsMixin, CrontabMixin, ERP5InstanceTestCase): ...@@ -1045,8 +1045,7 @@ class TestNEO(ZopeSkinsMixin, CrontabMixin, ERP5InstanceTestCase):
__partition_reference__ = 'n' __partition_reference__ = 'n'
__test_matrix__ = matrix((neo,)) __test_matrix__ = matrix((neo,))
def _getCrontabCommand(self, crontab_name): def _getCrontabCommand(self, crontab_name: str) -> str:
# type: (str) -> str
"""Read a crontab and return the command that is executed. """Read a crontab and return the command that is executed.
overloaded to use crontab from neo partition overloaded to use crontab from neo partition
...@@ -1100,3 +1099,36 @@ class TestNEO(ZopeSkinsMixin, CrontabMixin, ERP5InstanceTestCase): ...@@ -1100,3 +1099,36 @@ class TestNEO(ZopeSkinsMixin, CrontabMixin, ERP5InstanceTestCase):
'var', 'var',
'log', 'log',
f)) f))
class TestWithMaxRlimitNofileParameter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
"""Test setting the with-max-rlimit-nofile parameter sets the open fd soft limit to the hard limit.
"""
__partition_reference__ = 'nf'
@classmethod
def getInstanceParameterDict(cls):
return {'_': json.dumps({'with-max-rlimit-nofile': True})}
def test_with_max_rlimit_nofile(self):
with self.slap.instance_supervisor_rpc as supervisor:
all_process_info = supervisor.getAllProcessInfo()
_, current_hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
process_info, = (p for p in all_process_info if p['name'].startswith('zope-'))
self.assertEqual(
resource.prlimit(process_info['pid'], resource.RLIMIT_NOFILE),
(current_hard_limit, current_hard_limit))
class TestUnsetWithMaxRlimitNofileParameter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
"""Test not setting the with-max-rlimit-nofile parameter doesn't change the soft limit of erp5
"""
__partition_reference__ = 'nnf'
def test_unset_with_max_rlimit_nofile(self) -> None:
with self.slap.instance_supervisor_rpc as supervisor:
all_process_info = supervisor.getAllProcessInfo()
limit = resource.getrlimit(resource.RLIMIT_NOFILE)
process_info, = (p for p in all_process_info if p['name'].startswith('zope-'))
self.assertEqual(
resource.prlimit(process_info['pid'], resource.RLIMIT_NOFILE), limit)
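A minimal sketch of what the `with-max-rlimit-nofile` option boils down to, using the same `resource.prlimit` call the tests rely on (Linux-only; the helper name is illustrative, not the code shipped by the software release):

```python
import resource

def raise_nofile_soft_limit_to_hard(pid: int = 0) -> tuple:
    """Set the RLIMIT_NOFILE soft limit of `pid` to its hard limit.

    pid=0 targets the calling process.  Returns the (soft, hard) limits
    that were in effect before the change.
    """
    soft, hard = resource.prlimit(pid, resource.RLIMIT_NOFILE)
    # Passing a limits tuple sets the new limits and returns the old ones.
    return resource.prlimit(pid, resource.RLIMIT_NOFILE, (hard, hard))

if __name__ == '__main__':
    print(raise_nofile_soft_limit_to_hard())
```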
...@@ -36,6 +36,7 @@ import subprocess ...@@ -36,6 +36,7 @@ import subprocess
import urllib.parse import urllib.parse
import MySQLdb import MySQLdb
import MySQLdb.connections
from slapos.testing.utils import CrontabMixin, getPromisePluginParameterDict from slapos.testing.utils import CrontabMixin, getPromisePluginParameterDict
...@@ -60,8 +61,7 @@ class MariaDBTestCase(ERP5InstanceTestCase): ...@@ -60,8 +61,7 @@ class MariaDBTestCase(ERP5InstanceTestCase):
return "mariadb" return "mariadb"
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls) -> dict:
# type: () -> dict
return { return {
'tcpv4-port': 3306, 'tcpv4-port': 3306,
'max-connection-count': 5, 'max-connection-count': 5,
...@@ -76,12 +76,10 @@ class MariaDBTestCase(ERP5InstanceTestCase): ...@@ -76,12 +76,10 @@ class MariaDBTestCase(ERP5InstanceTestCase):
} }
@classmethod @classmethod
def getInstanceParameterDict(cls): def getInstanceParameterDict(cls) -> dict:
# type: () -> dict
return {'_': json.dumps(cls._getInstanceParameterDict())} return {'_': json.dumps(cls._getInstanceParameterDict())}
def getDatabaseConnection(self): def getDatabaseConnection(self) -> MySQLdb.connections.Connection:
# type: () -> MySQLdb.connections.Connection
connection_parameter_dict = json.loads( connection_parameter_dict = json.loads(
self.computer_partition.getConnectionParameterDict()['_']) self.computer_partition.getConnectionParameterDict()['_'])
db_url = urllib.parse.urlparse(connection_parameter_dict['database-list'][0]) db_url = urllib.parse.urlparse(connection_parameter_dict['database-list'][0])
...@@ -106,8 +104,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin): ...@@ -106,8 +104,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin):
'*/srv/backup/*', '*/srv/backup/*',
) )
def test_full_backup(self): def test_full_backup(self) -> None:
# type: () -> None
self._executeCrontabAtDate('mariadb-backup', '2050-01-01') self._executeCrontabAtDate('mariadb-backup', '2050-01-01')
full_backup_file, = glob.glob( full_backup_file, = glob.glob(
os.path.join( os.path.join(
...@@ -121,8 +118,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin): ...@@ -121,8 +118,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin):
with gzip.open(full_backup_file, 'rt') as dump: with gzip.open(full_backup_file, 'rt') as dump:
self.assertIn('CREATE TABLE', dump.read()) self.assertIn('CREATE TABLE', dump.read())
def test_logrotate_and_slow_query_digest(self): def test_logrotate_and_slow_query_digest(self) -> None:
# type: () -> None
# slow query digest needs to run after logrotate, since it operates on the rotated # slow query digest needs to run after logrotate, since it operates on the rotated
# file, so this tests both logrotate and slow query digest. # file, so this tests both logrotate and slow query digest.
...@@ -193,8 +189,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin): ...@@ -193,8 +189,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin):
class TestMariaDB(MariaDBTestCase): class TestMariaDB(MariaDBTestCase):
def test_utf8_collation(self): def test_utf8_collation(self) -> None:
# type: () -> None
cnx = self.getDatabaseConnection() cnx = self.getDatabaseConnection()
with contextlib.closing(cnx): with contextlib.closing(cnx):
cnx.query( cnx.query(
...@@ -219,8 +214,7 @@ class TestMariaDB(MariaDBTestCase): ...@@ -219,8 +214,7 @@ class TestMariaDB(MariaDBTestCase):
class TestMroonga(MariaDBTestCase): class TestMroonga(MariaDBTestCase):
def test_mroonga_plugin_loaded(self): def test_mroonga_plugin_loaded(self) -> None:
# type: () -> None
cnx = self.getDatabaseConnection() cnx = self.getDatabaseConnection()
with contextlib.closing(cnx): with contextlib.closing(cnx):
cnx.query("show plugins") cnx.query("show plugins")
...@@ -229,8 +223,7 @@ class TestMroonga(MariaDBTestCase): ...@@ -229,8 +223,7 @@ class TestMroonga(MariaDBTestCase):
('Mroonga', 'ACTIVE', 'STORAGE ENGINE', 'ha_mroonga.so', 'GPL'), ('Mroonga', 'ACTIVE', 'STORAGE ENGINE', 'ha_mroonga.so', 'GPL'),
plugins) plugins)
def test_mroonga_normalize_udf(self): def test_mroonga_normalize_udf(self) -> None:
# type: () -> None
# example from https://mroonga.org/docs/reference/udf/mroonga_normalize.html#usage # example from https://mroonga.org/docs/reference/udf/mroonga_normalize.html#usage
cnx = self.getDatabaseConnection() cnx = self.getDatabaseConnection()
with contextlib.closing(cnx): with contextlib.closing(cnx):
...@@ -255,8 +248,7 @@ class TestMroonga(MariaDBTestCase): ...@@ -255,8 +248,7 @@ class TestMroonga(MariaDBTestCase):
self.assertEqual((('ABCDあぃうぇ㍑'.encode(),),), self.assertEqual((('ABCDあぃうぇ㍑'.encode(),),),
cnx.store_result().fetch_row(maxrows=2)) cnx.store_result().fetch_row(maxrows=2))
def test_mroonga_full_text_normalizer(self): def test_mroonga_full_text_normalizer(self) -> None:
# type: () -> None
# example from https://mroonga.org//docs/tutorial/storage.html#how-to-specify-the-normalizer # example from https://mroonga.org//docs/tutorial/storage.html#how-to-specify-the-normalizer
cnx = self.getDatabaseConnection() cnx = self.getDatabaseConnection()
with contextlib.closing(cnx): with contextlib.closing(cnx):
...@@ -293,8 +285,7 @@ class TestMroonga(MariaDBTestCase): ...@@ -293,8 +285,7 @@ class TestMroonga(MariaDBTestCase):
cnx.store_result().fetch_row(maxrows=2), cnx.store_result().fetch_row(maxrows=2),
) )
def test_mroonga_full_text_normalizer_TokenBigramSplitSymbolAlphaDigit(self): def test_mroonga_full_text_normalizer_TokenBigramSplitSymbolAlphaDigit(self) -> None:
# type: () -> None
# Similar to ERP5's testI18NSearch with erp5_full_text_mroonga_catalog # Similar to ERP5's testI18NSearch with erp5_full_text_mroonga_catalog
cnx = self.getDatabaseConnection() cnx = self.getDatabaseConnection()
with contextlib.closing(cnx): with contextlib.closing(cnx):
...@@ -337,8 +328,7 @@ class TestMroonga(MariaDBTestCase): ...@@ -337,8 +328,7 @@ class TestMroonga(MariaDBTestCase):
""") """)
self.assertEqual(((1,),), cnx.store_result().fetch_row(maxrows=2)) self.assertEqual(((1,),), cnx.store_result().fetch_row(maxrows=2))
def test_mroonga_full_text_stem(self): def test_mroonga_full_text_stem(self) -> None:
# type: () -> None
# example from https://mroonga.org//docs/tutorial/storage.html#how-to-specify-the-token-filters # example from https://mroonga.org//docs/tutorial/storage.html#how-to-specify-the-token-filters
cnx = self.getDatabaseConnection() cnx = self.getDatabaseConnection()
with contextlib.closing(cnx): with contextlib.closing(cnx):
......
...@@ -46,7 +46,7 @@ from slapos.testing.testcase import ( ...@@ -46,7 +46,7 @@ from slapos.testing.testcase import (
makeModuleSetUpAndTestCaseClass, makeModuleSetUpAndTestCaseClass,
) )
old_software_release_url = 'https://lab.nexedi.com/nexedi/slapos/raw/1.0.167.9/software/erp5/software.cfg' old_software_release_url = 'https://lab.nexedi.com/nexedi/slapos/raw/1.0.167.10/software/erp5/software.cfg'
new_software_release_url = os.path.abspath( new_software_release_url = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'software.cfg')) os.path.join(os.path.dirname(__file__), '..', 'software.cfg'))
......
...@@ -92,8 +92,8 @@ class WendelinTutorialTestCase(FluentdTestCase): ...@@ -92,8 +92,8 @@ class WendelinTutorialTestCase(FluentdTestCase):
@classmethod @classmethod
def measureDict(cls): def measureDict(cls):
return {k: v.encode() for k, v in return {k: v for k, v in
zip((b'pressure', b'humidity', b'temperature'), cls._measurementList)} zip(('pressure', 'humidity', 'temperature'), cls._measurementList)}
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
......
...@@ -27,4 +27,4 @@ md5sum = 98faa5ad8cfb23a11d97a459078a1d05 ...@@ -27,4 +27,4 @@ md5sum = 98faa5ad8cfb23a11d97a459078a1d05
[template-runTestSuite] [template-runTestSuite]
filename = runTestSuite.in filename = runTestSuite.in
md5sum = 2bb3d71a0e04bc8bc828bb3f726ef3ff md5sum = 5db53d622bd68fb07e078ddc4403a240
...@@ -93,7 +93,7 @@ def main(): ...@@ -93,7 +93,7 @@ def main():
firefox_capabilities['marionette'] = True firefox_capabilities['marionette'] = True
browser = webdriver.Firefox( browser = webdriver.Firefox(
capabilities=firefox_capabilities, capabilities=firefox_capabilities,
firefox_binary='${firefox-wrapper-68:location}', firefox_binary='${firefox-wrapper:location}',
executable_path='${geckodriver:location}') executable_path='${geckodriver:location}')
else: else:
assert target == 'selenium-server', f"Unsupported target {test_runner['target']}" assert target == 'selenium-server', f"Unsupported target {test_runner['target']}"
......
...@@ -22,7 +22,7 @@ parts = ...@@ -22,7 +22,7 @@ parts =
git git
eggs eggs
xserver xserver
firefox-68 firefox
xwd xwd
renderjs-install renderjs-install
jio-install jio-install
......
[instance-profile] [instance-profile]
filename = instance.cfg.in filename = instance.cfg.in
md5sum = 0d50ed911a41b76b952b63d37853c3a4 md5sum = f753802ad631a57c559d868e525cf81b
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
"type": "object", "type": "object",
"additionalProperties": false, "additionalProperties": false,
"properties": { "properties": {
"mb_password_complexity": { "mb-password-complexity": {
"title": "Password complexity", "title": "Password complexity",
"description": "Check Metabase documentation for more details.", "description": "Check Metabase documentation for more details.",
"type": "string", "type": "string",
...@@ -14,11 +14,25 @@ ...@@ -14,11 +14,25 @@
"strong" "strong"
] ]
}, },
"mb_password_length": { "mb-password-length": {
"title": "Password length", "title": "Password length",
"description": "Password length", "description": "Password length",
"type": "integer", "type": "integer",
"default": 6 "default": 6
},
"mb-aggregated-query-row-limit": {
"title": "Aggregated row limit",
"description": "Maximum number of rows to return for aggregated queries via the API.",
"type": "integer",
"default": 10000,
"maximum": 1048575
},
"mb-unaggregated-query-row-limit": {
"title": "Unaggregated row limit",
"description": "Maximum number of rows to return specifically on `:rows`-type queries via the API. Must be less than the number configured in `mb-aggregated-query-row-limit`",
"type": "integer",
"default": 2000,
"maximum": 1048575
} }
} }
} }
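A hedged example of instance parameters using the new hyphenated names, together with the constraints stated in the schema above (all values purely illustrative):

```python
# Hypothetical Metabase instance parameters; both row limits must respect
# the schema maximum of 1048575, and the unaggregated limit should stay
# below the aggregated one.
metabase_parameters = {
    'mb-password-complexity': 'strong',
    'mb-password-length': 12,
    'mb-aggregated-query-row-limit': 100000,
    'mb-unaggregated-query-row-limit': 5000,
}

assert metabase_parameters['mb-aggregated-query-row-limit'] <= 1048575
assert (metabase_parameters['mb-unaggregated-query-row-limit']
        < metabase_parameters['mb-aggregated-query-row-limit'])
```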
...@@ -31,8 +31,16 @@ slapparameter-dict = $${slap-configuration:configuration} ...@@ -31,8 +31,16 @@ slapparameter-dict = $${slap-configuration:configuration}
home = $${buildout:directory} home = $${buildout:directory}
init = init =
default_parameters = options.get('slapparameter-dict') default_parameters = options.get('slapparameter-dict')
options['mb_password_complexity'] = default_parameters.get('mb_password_complexity', 'normal') options['mb-password-complexity'] = default_parameters.get(
options['mb_password_length'] = default_parameters.get('mb_password_length', '6') 'mb-password-complexity',
default_parameters.get('mb_password_complexity', 'normal'))
options['mb-password-length'] = default_parameters.get(
'mb-password-length',
default_parameters.get('mb_password_length', '6'))
options['mb-aggregated-query-row-limit'] = default_parameters.get(
'mb-aggregated-query-row-limit', '10000')
options['mb-unaggregated-query-row-limit'] = default_parameters.get(
'mb-unaggregated-query-row-limit', '2000')
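Read as plain Python, the lookup order implemented by the init block above is roughly the following sketch (helper name illustrative, not the recipe's actual code): the new hyphenated key wins, the legacy underscore spelling is kept as a fallback, and the default applies last.

```python
def resolve(parameters: dict, new_key: str, legacy_key: str, default):
    """Return the value for new_key, falling back to the legacy underscore
    spelling, then to the default, mirroring the buildout init above."""
    return parameters.get(new_key, parameters.get(legacy_key, default))

# An instance still requested with the old name keeps working.
params = {'mb_password_complexity': 'weak'}
assert resolve(params, 'mb-password-complexity', 'mb_password_complexity', 'normal') == 'weak'
assert resolve({}, 'mb-password-length', 'mb_password_length', '6') == '6'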
[metabase-instance] [metabase-instance]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
...@@ -51,8 +59,10 @@ environment = ...@@ -51,8 +59,10 @@ environment =
MB_DB_USER=$${postgresql:superuser} MB_DB_USER=$${postgresql:superuser}
MB_DB_PASS=$${postgresql:password} MB_DB_PASS=$${postgresql:password}
MB_DB_HOST=$${postgresql:ipv4} MB_DB_HOST=$${postgresql:ipv4}
MB_PASSWORD_COMPLEXITY=$${slap-parameter:mb_password_complexity} MB_PASSWORD_COMPLEXITY=$${slap-parameter:mb-password-complexity}
MB_PASSWORD_LENGTH=$${slap-parameter:mb_password_length} MB_PASSWORD_LENGTH=$${slap-parameter:mb-password-length}
MB_AGGREGATED_QUERY_ROW_LIMIT=$${slap-parameter:mb-aggregated-query-row-limit}
MB_UNAGGREGATED_QUERY_ROW_LIMIT=$${slap-parameter:mb-unaggregated-query-row-limit}
FONTCONFIG_FILE=$${fontconfig-conf:output} FONTCONFIG_FILE=$${fontconfig-conf:output}
JAVA_ARGS=-Dorg.quartz.scheduler.instanceId=$${slap-connection:computer-id}.$${slap-connection:partition-id} -Djava.io.tmpdir="$${directory:tmp}" JAVA_ARGS=-Dorg.quartz.scheduler.instanceId=$${slap-connection:computer-id}.$${slap-connection:partition-id} -Djava.io.tmpdir="$${directory:tmp}"
hash-existing-files = hash-existing-files =
......
...@@ -20,8 +20,8 @@ parts = ...@@ -20,8 +20,8 @@ parts =
[metabase.jar] [metabase.jar]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = https://downloads.metabase.com/v0.47.0/metabase.jar url = https://downloads.metabase.com/v0.48.2/metabase.jar
md5sum = b81c71668a2177d89690730fabd85d9e md5sum = d708a85436da3d5751f0e48ebd10c142
[instance-profile] [instance-profile]
recipe = slapos.recipe.template recipe = slapos.recipe.template
......
...@@ -14,4 +14,4 @@ ...@@ -14,4 +14,4 @@
# not need these here). # not need these here).
[template-instance] [template-instance]
filename = instance.cfg filename = instance.cfg
md5sum = 7a558c2b9461ec588c9d77bdeef64e4d md5sum = 5a765463118f8b2a09df6260f56c2175
...@@ -25,7 +25,7 @@ develop-eggs-directory = {{ develop_eggs_directory }} ...@@ -25,7 +25,7 @@ develop-eggs-directory = {{ develop_eggs_directory }}
# needed for the "repo" command (to download many git repositories) # needed for the "repo" command (to download many git repositories)
recipe = slapos.recipe.build:gitclone recipe = slapos.recipe.build:gitclone
repository = https://chromium.googlesource.com/chromium/tools/depot_tools.git repository = https://chromium.googlesource.com/chromium/tools/depot_tools.git
branch = master branch = main
git-executable = {{ git_path }}/bin/git git-executable = {{ git_path }}/bin/git
[customize-path] [customize-path]
......
...@@ -14,12 +14,12 @@ ...@@ -14,12 +14,12 @@
# not need these here). # not need these here).
[template-nextcloud-install.sh] [template-nextcloud-install.sh]
filename = nextcloud-install.sh.in filename = nextcloud-install.sh.in
md5sum = 965cc84d4c8e39f06850fac361575647 md5sum = f31dfd6fce79fcf1c13cbd96dd366492
[template-nextcloud-config.json] [template-nextcloud-config.json]
filename = nextcloud-config.json.in filename = nextcloud-config.json.in
md5sum = 6f42f0a8c5e5c0c657541a65c4d9ee57 md5sum = 133ad47aec7e16f716eb710ef38823e8
[template-nextcloud-instance] [template-nextcloud-instance]
filename = nextcloud-instance.cfg.in filename = nextcloud-instance.cfg.in
md5sum = a59b081bd39f61c7361fdb6c54fc2039 md5sum = 65d2fef4aa41fa70e5194d73a8cb2c4a
...@@ -35,6 +35,19 @@ ...@@ -35,6 +35,19 @@
"timeout": 0 "timeout": 0
}, },
"logfile": "{{ parameter_dict['data-dir'] }}/nextcloud.log", "logfile": "{{ parameter_dict['data-dir'] }}/nextcloud.log",
"datadirectory": "{{ parameter_dict['data-dir'] }}" "loglevel" => 2,
"datadirectory": "{{ parameter_dict['data-dir'] }}",
"preview_ffmpeg_path": "{{ parameter_dict['ffmpeg-path'] }}",
"tempdirectory": "{{ parameter_dict['tmp-dir'] }}",
"apps_paths": [
{
"path": "{{ parameter_dict['nextcloud'] }}/apps",
"url": "/apps",
"writable": true
}
],
"default_phone_region": "FR",
"default_locale": "fr_FR",
"default_timezone": "Europe/Paris"
} }
} }
...@@ -115,19 +115,9 @@ ...@@ -115,19 +115,9 @@
"type": "string", "type": "string",
"format": "uri" "format": "uri"
}, },
"instance.trusted-domain-1": { "instance.trusted-domain-list": {
"title": "Authorized domain on nextcloud", "title": "Authorized domain(s) on nextcloud",
"description": "Trusted domain used to connect to Nextcloud instance.", "description": "Trusted domain(s) used to connect to Nextcloud instance. Space separated.",
"type": "string"
},
"instance.trusted-domain-2": {
"title": "Second authorized domain on nextcloud",
"description": "Trusted domain used to connect to Nextcloud instance.",
"type": "string"
},
"instance.trusted-domain-3": {
"title": "Third authorized domain on nextcloud",
"description": "Trusted domain used to connect to Nextcloud instance.",
"type": "string" "type": "string"
}, },
"instance.trusted-proxy-list": { "instance.trusted-proxy-list": {
......
...@@ -140,4 +140,4 @@ if [ -f "{{ parameter_dict['nextcloud'] }}/config/CAN_INSTALL" ]; then ...@@ -140,4 +140,4 @@ if [ -f "{{ parameter_dict['nextcloud'] }}/config/CAN_INSTALL" ]; then
rm {{ parameter_dict['nextcloud'] }}/config/CAN_INSTALL rm {{ parameter_dict['nextcloud'] }}/config/CAN_INSTALL
fi fi
date > {{ parameter_dict['nextcloud'] }}/.slapos-install-done date > {{ parameter_dict['installed-file'] }}
...@@ -8,6 +8,7 @@ redis = ${directory:srv}/redis ...@@ -8,6 +8,7 @@ redis = ${directory:srv}/redis
redis-log = ${directory:log}/redis redis-log = ${directory:log}/redis
data = ${directory:srv}/data data = ${directory:srv}/data
backup = ${directory:backup}/nextcloud backup = ${directory:backup}/nextcloud
tmp = ${buildout:directory}/tmp
[service-redis] [service-redis]
recipe = slapos.cookbook:redis.server recipe = slapos.cookbook:redis.server
...@@ -42,13 +43,15 @@ rotate-num = 30 ...@@ -42,13 +43,15 @@ rotate-num = 30
[instance-parameter] [instance-parameter]
nextcloud = ${:document-root} nextcloud = ${:document-root}
installed-file = ${directory:etc}/.nextcloud-install-done
admin-user = admin admin-user = admin
admin-password = admin admin-password = admin
ffmpeg-path = {{ ffmpeg_location }}/bin/ffmpeg
tmp-dir = ${nc-directory:tmp}
trusted-domain-list = trusted-domain-list =
[${apache-php-configuration:ip}]:${apache-php-configuration:port} [${apache-php-configuration:ip}]:${apache-php-configuration:port}
${slap-parameter:instance.trusted-domain-1} ${request-frontend:connection-domain}
${slap-parameter:instance.trusted-domain-2} ${slap-parameter:instance.trusted-domain-list}
${slap-parameter:instance.trusted-domain-3}
trusted-proxy-list = ${slap-parameter:instance.trusted-proxy-list} trusted-proxy-list = ${slap-parameter:instance.trusted-proxy-list}
cli-url = ${slap-parameter:instance.cli-url} cli-url = ${slap-parameter:instance.cli-url}
...@@ -69,6 +72,9 @@ collabora-url = ${slap-parameter:instance.collabora-url} ...@@ -69,6 +72,9 @@ collabora-url = ${slap-parameter:instance.collabora-url}
stun-server = ${slap-parameter:instance.stun-server} stun-server = ${slap-parameter:instance.stun-server}
turn-server = ${slap-parameter:instance.turn-server} turn-server = ${slap-parameter:instance.turn-server}
turn-secret = ${slap-parameter:instance.turn-secret} turn-secret = ${slap-parameter:instance.turn-secret}
# php.ini
php.opcache.revalidate-freq = 60
php.opcache.interned-strings-buffer = 24
[nextcloud-install.sh] [nextcloud-install.sh]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
...@@ -105,7 +111,7 @@ input = inline:#!/bin/bash ...@@ -105,7 +111,7 @@ input = inline:#!/bin/bash
echo "Nextcloud is not installed."; echo "Nextcloud is not installed.";
exit 1; exit 1;
fi fi
if [ ! -f "${instance-parameter:nextcloud}/.slapos-install-done" ]; then if [ ! -f "${instance-parameter:installed-file}" ]; then
echo "Nextcloud is not configured."; echo "Nextcloud is not configured.";
exit 1; exit 1;
fi fi
...@@ -203,7 +209,5 @@ instance.turn-server = ...@@ -203,7 +209,5 @@ instance.turn-server =
instance.turn-secret = instance.turn-secret =
instance.cli-url = ${apache-php-configuration:url} instance.cli-url = ${apache-php-configuration:url}
instance.trusted-domain-1 =
instance.trusted-domain-2 =
instance.trusted-domain-3 =
instance.trusted-proxy-list = instance.trusted-proxy-list =
instance.trusted-domain-list =
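Since the three numbered trusted-domain parameters are folded into one space-separated `instance.trusted-domain-list`, here is a small sketch of how such a value expands next to the instance's built-in addresses (names and addresses illustrative, not the template's actual code):

```python
def expand_trusted_domains(builtin_domains: list, trusted_domain_list: str) -> list:
    """Combine the instance's built-in domains (balancer/frontend addresses)
    with the space-separated instance.trusted-domain-list parameter."""
    return builtin_domains + trusted_domain_list.split()

print(expand_trusted_domains(
    ['[2001:db8::1]:9988'],
    'nextcloud.example.com nextcloud.proxy.com'))
# ['[2001:db8::1]:9988', 'nextcloud.example.com', 'nextcloud.proxy.com']
```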
...@@ -2,6 +2,7 @@ ...@@ -2,6 +2,7 @@
extends = extends =
buildout.hash.cfg buildout.hash.cfg
../../component/redis/buildout.cfg ../../component/redis/buildout.cfg
../../component/ffmpeg/buildout.cfg
../../stack/lamp/buildout.cfg ../../stack/lamp/buildout.cfg
[nc-download] [nc-download]
...@@ -28,6 +29,7 @@ context = ...@@ -28,6 +29,7 @@ context =
key python3_location python3:location key python3_location python3:location
key news_updater_location news-updater:location key news_updater_location news-updater:location
key php_location apache-php:location key php_location apache-php:location
key ffmpeg_location ffmpeg:location
raw redis_bin ${redis:location}/bin/redis-server raw redis_bin ${redis:location}/bin/redis-server
raw redis_cli ${redis:location}/bin/redis-cli raw redis_cli ${redis:location}/bin/redis-cli
key nextcloud_install_sh template-nextcloud-install.sh:target key nextcloud_install_sh template-nextcloud-install.sh:target
...@@ -42,7 +44,6 @@ db-user = nextcloud ...@@ -42,7 +44,6 @@ db-user = nextcloud
[nc-download-unpacked] [nc-download-unpacked]
recipe = slapos.recipe.build:download-unpacked recipe = slapos.recipe.build:download-unpacked
shared = true
[news-updater] [news-updater]
<= nc-download-unpacked <= nc-download-unpacked
...@@ -66,8 +67,8 @@ md5sum = 88adcbc34ef7e461f515ba96b82365d9 ...@@ -66,8 +67,8 @@ md5sum = 88adcbc34ef7e461f515ba96b82365d9
[nextcloud-app-snappymail] [nextcloud-app-snappymail]
<= nc-download-unpacked <= nc-download-unpacked
url = https://snappymail.eu/repository/nextcloud/snappymail-2.29.1-nextcloud.tar.gz url = https://snappymail.eu/repository/nextcloud/snappymail-2.29.4-nextcloud.tar.gz
md5sum = b7500ea4e089d8a9e3fa381d6df3a3b0 md5sum = 676bf0fa3b9f0fb9f0208304cf302a26
[nextcloud-app-news] [nextcloud-app-news]
<= nc-download-unpacked <= nc-download-unpacked
......
...@@ -92,7 +92,7 @@ class NextCloudTestCase(InstanceTestCase): ...@@ -92,7 +92,7 @@ class NextCloudTestCase(InstanceTestCase):
mail_smtpname="", mail_smtpname="",
cli_url="https://[%s]:9988/" % self.nextcloud_ipv6, cli_url="https://[%s]:9988/" % self.nextcloud_ipv6,
partition_dir=self.partition_dir, partition_dir=self.partition_dir,
trusted_domain_list=json.dumps(["[%s]:9988" % self.nextcloud_ipv6]), trusted_domain_list=json.dumps(["[%s]:9988" % self.nextcloud_ipv6] * 2),
trusted_proxy_list=[], trusted_proxy_list=[],
) )
data_dict.update(config_dict) data_dict.update(config_dict)
...@@ -309,8 +309,7 @@ class TestNextCloudParameters(NextCloudTestCase): ...@@ -309,8 +309,7 @@ class TestNextCloudParameters(NextCloudTestCase):
'instance.turn-server': 'turn.example.net:5439', 'instance.turn-server': 'turn.example.net:5439',
'instance.turn-secret': 'c4f0ead40a49bbbac3c58f7b9b43990f78ebd96900757ae67e10190a3a6b6053', 'instance.turn-secret': 'c4f0ead40a49bbbac3c58f7b9b43990f78ebd96900757ae67e10190a3a6b6053',
'instance.cli-url': 'nextcloud.example.com', 'instance.cli-url': 'nextcloud.example.com',
'instance.trusted-domain-1': 'nextcloud.example.com', 'instance.trusted-domain-list': 'nextcloud.example.com nextcloud.proxy.com',
'instance.trusted-domain-2': 'nextcloud.proxy.com',
'instance.trusted-proxy-list': '2001:67c:1254:e:89::5df3 127.0.0.1 10.23.1.3', 'instance.trusted-proxy-list': '2001:67c:1254:e:89::5df3 127.0.0.1 10.23.1.3',
} }
...@@ -345,6 +344,7 @@ class TestNextCloudParameters(NextCloudTestCase): ...@@ -345,6 +344,7 @@ class TestNextCloudParameters(NextCloudTestCase):
cli_url="nextcloud.example.com", cli_url="nextcloud.example.com",
partition_dir=self.partition_dir, partition_dir=self.partition_dir,
trusted_domain_list=json.dumps([ trusted_domain_list=json.dumps([
"[%s]:9988" % self.nextcloud_ipv6,
"[%s]:9988" % self.nextcloud_ipv6, "[%s]:9988" % self.nextcloud_ipv6,
"nextcloud.example.com", "nextcloud.example.com",
"nextcloud.proxy.com" "nextcloud.proxy.com"
......
...@@ -16,35 +16,35 @@ ...@@ -16,35 +16,35 @@
[template] [template]
filename = instance.cfg filename = instance.cfg
md5sum = a9e416eaa3ad7d2ea29cb90ce2c41a60 md5sum = 12591aca22ddc61f9fb1ec7e596873e3
[slaplte.jinja2] [slaplte.jinja2]
_update_hash_filename_ = slaplte.jinja2 _update_hash_filename_ = slaplte.jinja2
md5sum = c31dffa87765d93327f18ffd89ce36ca md5sum = c31dffa87765d93327f18ffd89ce36ca
[amarisoft-stats.jinja2.py] [ru_amarisoft-stats.jinja2.py]
_update_hash_filename_ = amarisoft-stats.jinja2.py _update_hash_filename_ = ru/amarisoft-stats.jinja2.py
md5sum = c4d5e9fcf460d88bc2b4bcfbdfe554f7 md5sum = c4d5e9fcf460d88bc2b4bcfbdfe554f7
[amarisoft-rf-info.jinja2.py] [ru_amarisoft-rf-info.jinja2.py]
_update_hash_filename_ = amarisoft-rf-info.jinja2.py _update_hash_filename_ = ru/amarisoft-rf-info.jinja2.py
md5sum = ab666fdfadbfc7d8a16ace38d295c883 md5sum = ab666fdfadbfc7d8a16ace38d295c883
[ru_libinstance.jinja2.cfg] [ru_libinstance.jinja2.cfg]
_update_hash_filename_ = ru/libinstance.jinja2.cfg _update_hash_filename_ = ru/libinstance.jinja2.cfg
md5sum = 6febf4dc601ba5feb30aa402f37265cf md5sum = d3ae8839b1a7b7a225bc69c3910c0b35
[ru_sdr_libinstance.jinja2.cfg] [ru_sdr_libinstance.jinja2.cfg]
_update_hash_filename_ = ru/sdr/libinstance.jinja2.cfg _update_hash_filename_ = ru/sdr/libinstance.jinja2.cfg
md5sum = c20b620111a4dc4bc2bcae57c2007cbe md5sum = de71c63b8df940207409de7e948f7c8c
[ru_lopcomm_libinstance.jinja2.cfg] [ru_lopcomm_libinstance.jinja2.cfg]
_update_hash_filename_ = ru/lopcomm/libinstance.jinja2.cfg _update_hash_filename_ = ru/lopcomm/libinstance.jinja2.cfg
md5sum = abce2deca15b8d7a8c5378e0789f8ce7 md5sum = b2af1e70141216a4db07cca655aa63a7
[ru_sunwave_libinstance.jinja2.cfg] [ru_sunwave_libinstance.jinja2.cfg]
_update_hash_filename_ = ru/sunwave/libinstance.jinja2.cfg _update_hash_filename_ = ru/sunwave/libinstance.jinja2.cfg
md5sum = 0450e9fa50844e4d6e51d608625c57f6 md5sum = c855ee7a6132899eb53b8d80ec27701a
[ru_lopcomm_ncclient_common.py] [ru_lopcomm_ncclient_common.py]
_update_hash_filename_ = ru/lopcomm/ncclient_common.py _update_hash_filename_ = ru/lopcomm/ncclient_common.py
...@@ -80,11 +80,11 @@ md5sum = 52da9fe3a569199e35ad89ae1a44c30e ...@@ -80,11 +80,11 @@ md5sum = 52da9fe3a569199e35ad89ae1a44c30e
[template-enb] [template-enb]
_update_hash_filename_ = instance-enb.jinja2.cfg _update_hash_filename_ = instance-enb.jinja2.cfg
md5sum = 3b380ac8a44aafc30cc6d87b35860fd6 md5sum = ae49a3a9a97407f9aea30981403ee1a2
[template-gnb] [template-gnb]
_update_hash_filename_ = instance-gnb.jinja2.cfg _update_hash_filename_ = instance-gnb.jinja2.cfg
md5sum = e8e87a50b861d733894eb69e1aefa683 md5sum = 54a0c7c3a2a1c905a15c58c650ee1095
[template-core-network] [template-core-network]
_update_hash_filename_ = instance-core-network.jinja2.cfg _update_hash_filename_ = instance-core-network.jinja2.cfg
...@@ -108,7 +108,7 @@ md5sum = dcaac06553a3222b14c0013a13f4a149 ...@@ -108,7 +108,7 @@ md5sum = dcaac06553a3222b14c0013a13f4a149
[enb.jinja2.cfg] [enb.jinja2.cfg]
filename = config/enb.jinja2.cfg filename = config/enb.jinja2.cfg
md5sum = a961cc1469bd2534645470f914f12905 md5sum = 914d781af63f4214e6cc3be4ffe93215
[drb_lte.jinja2.cfg] [drb_lte.jinja2.cfg]
filename = config/drb_lte.jinja2.cfg filename = config/drb_lte.jinja2.cfg
...@@ -122,10 +122,6 @@ md5sum = 84d3cef8fc7f1c2aed7c348d500f5636 ...@@ -122,10 +122,6 @@ md5sum = 84d3cef8fc7f1c2aed7c348d500f5636
filename = config/sib23.jinja2.asn filename = config/sib23.jinja2.asn
md5sum = a1973ba6e43d40e510d61d461c2d13ac md5sum = a1973ba6e43d40e510d61d461c2d13ac
[gnb.jinja2.cfg]
filename = config/gnb.jinja2.cfg
md5sum = a4f91c1c9cfd91d000f4845a88cdb38a
[mme.jinja2.cfg] [mme.jinja2.cfg]
filename = config/mme.jinja2.cfg filename = config/mme.jinja2.cfg
md5sum = 3d7833ddba3242cedcd74c7db52390c6 md5sum = 3d7833ddba3242cedcd74c7db52390c6
...@@ -136,7 +132,7 @@ md5sum = f167b4be5e327b276b42267e0678f577 ...@@ -136,7 +132,7 @@ md5sum = f167b4be5e327b276b42267e0678f577
[ru_dnsmasq.jinja2.cfg] [ru_dnsmasq.jinja2.cfg]
_update_hash_filename_ = ru/dnsmasq.jinja2.cfg _update_hash_filename_ = ru/dnsmasq.jinja2.cfg
md5sum = 345e4967d468b00c13d77821bce8a248 md5sum = 9bd5b08f23640f71ad109d186d060f2d
[ims.jinja2.cfg] [ims.jinja2.cfg]
filename = config/ims.jinja2.cfg filename = config/ims.jinja2.cfg
......
Changelog Changelog
========= =========
Version 1.0.344 (2023-11-03)
-------------
* Set dpc_snr_target to 25 for PUSCH also
Version 1.0.341 (2023-10-20)
-------------
* Publish amarisoft version and license expiration information
* Add network name parameter
Version 1.0.340 (2023-10-20)
-------------
* Update RRH firmware and reset
Version 1.0.339 (2023-10-16) Version 1.0.339 (2023-10-16)
------------- -------------
* Lopcomm firmware update * Lopcomm firmware update
* RRH reset (reboot) function added * RRH reset (reboot) function added
* Fix cpri_tx_dbm parameter * Fix cpri_tx_dbm parameter
......
...@@ -7,18 +7,37 @@ ...@@ -7,18 +7,37 @@
{%- do slaplte.load_cell(cell_list) %} {%- do slaplte.load_cell(cell_list) %}
{%- endif %} {%- endif %}
{%- set cell_count = cell_list|length %}
{#- do_lte/do_nr indicate whether we have LTE or NR cell #}
{%- do assert(do_lte or do_nr) %}
{%- do assert(not (do_lte and do_nr)) %}
{%- if do_lte %}
{%- if slapparameter_dict.get('tdd_ul_dl_config', '[Configuration 2] 5ms 2UL 6DL (default)') == '[Configuration 2] 5ms 2UL 6DL (default)' %} {%- if slapparameter_dict.get('tdd_ul_dl_config', '[Configuration 2] 5ms 2UL 6DL (default)') == '[Configuration 2] 5ms 2UL 6DL (default)' %}
{%- set tdd_config = 2 %} {%- set tdd_config = 2 %}
{%- elif slapparameter_dict.get('tdd_ul_dl_config', '[Configuration 2] 5ms 2UL 6DL (default)') == '[Configuration 6] 5ms 5UL 3DL (maximum uplink)' %} {%- elif slapparameter_dict.get('tdd_ul_dl_config', '[Configuration 2] 5ms 2UL 6DL (default)') == '[Configuration 6] 5ms 5UL 3DL (maximum uplink)' %}
{%- set tdd_config = 6 %} {%- set tdd_config = 6 %}
{%- endif %} {%- endif %}
{%- endif %}
{%- if do_nr %}
{%- if slapparameter_dict.get('tdd_ul_dl_config', '5ms 2UL 7DL 4/6 (default)') == '5ms 2UL 7DL 4/6 (default)' %}
{%- set tdd_config = 1 %}
{%- elif slapparameter_dict.get('tdd_ul_dl_config', '5ms 2UL 7DL 4/6 (default)') == '2.5ms 1UL 3DL 2/10' %}
{%- set tdd_config = 2 %}
{%- elif slapparameter_dict.get('tdd_ul_dl_config', '5ms 2UL 7DL 4/6 (default)') == '5ms 8UL 1DL 2/10 (maximum uplink)' %}
{%- set tdd_config = 3 %}
{%- endif %}
{%- endif %}
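Rewritten as plain Python for clarity, the LTE/NR `tdd_ul_dl_config` selection above amounts to two lookup tables (a sketch of the Jinja logic, not code shipped with the profile):

```python
LTE_TDD_CONFIG = {
    '[Configuration 2] 5ms 2UL 6DL (default)': 2,
    '[Configuration 6] 5ms 5UL 3DL (maximum uplink)': 6,
}
NR_TDD_CONFIG = {
    '5ms 2UL 7DL 4/6 (default)': 1,
    '2.5ms 1UL 3DL 2/10': 2,
    '5ms 8UL 1DL 2/10 (maximum uplink)': 3,
}

def tdd_config(slapparameter_dict: dict, do_lte: bool):
    table = LTE_TDD_CONFIG if do_lte else NR_TDD_CONFIG
    default = next(iter(table))          # the "(default)" entry comes first
    value = slapparameter_dict.get('tdd_ul_dl_config', default)
    return table.get(value)              # None when the value is not recognised
```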
{% if rf_mode == 'tdd' %} {% if rf_mode == 'tdd' %}
#define TDD 1 #define TDD 1
{% else %} {% else %}
#define TDD 0 #define TDD 0
{% endif %} {% endif %}
{%- if do_lte %}
{%- if slapparameter_dict.get('bandwidth', slap_configuration['configuration.default_lte_bandwidth']) == '1.4 MHz' %} {%- if slapparameter_dict.get('bandwidth', slap_configuration['configuration.default_lte_bandwidth']) == '1.4 MHz' %}
#define N_RB_DL 6 #define N_RB_DL 6
{%- elif slapparameter_dict.get('bandwidth', slap_configuration['configuration.default_lte_bandwidth']) == '3 MHz' %} {%- elif slapparameter_dict.get('bandwidth', slap_configuration['configuration.default_lte_bandwidth']) == '3 MHz' %}
...@@ -32,16 +51,33 @@ ...@@ -32,16 +51,33 @@
{%- elif slapparameter_dict.get('bandwidth', slap_configuration['configuration.default_lte_bandwidth']) == '20 MHz' %} {%- elif slapparameter_dict.get('bandwidth', slap_configuration['configuration.default_lte_bandwidth']) == '20 MHz' %}
#define N_RB_DL 100 #define N_RB_DL 100
{%- endif %} {%- endif %}
{%- endif %}
#define N_ANTENNA_DL {{ slapparameter_dict.get('n_antenna_dl', slap_configuration['configuration.default_n_antenna_dl']) }} #define N_ANTENNA_DL {{ slapparameter_dict.get('n_antenna_dl', slap_configuration['configuration.default_n_antenna_dl']) }}
{% if ru == "m2ru" %}
#define N_ANTENNA_UL {{ slapparameter_dict.get('n_antenna_ul', 1) }}
{% else %}
#define N_ANTENNA_UL {{ slapparameter_dict.get('n_antenna_ul', slap_configuration['configuration.default_n_antenna_ul']) }} #define N_ANTENNA_UL {{ slapparameter_dict.get('n_antenna_ul', slap_configuration['configuration.default_n_antenna_ul']) }}
{% endif %}
{ {
{%- if do_lte %}
{% if slapparameter_dict.get('log_phy_debug', False) %} {% if slapparameter_dict.get('log_phy_debug', False) %}
log_options: "all.level=error,all.max_size=0,nas.level=debug,nas.max_size=1,s1ap.level=debug,s1ap.max_size=1,x2ap.level=debug,x2ap.max_size=1,rrc.level=debug,rrc.max_size=1,phy.level=debug,file.rotate=1G,file.path=/dev/null", log_options: "all.level=error,all.max_size=0,nas.level=debug,nas.max_size=1,s1ap.level=debug,s1ap.max_size=1,x2ap.level=debug,x2ap.max_size=1,rrc.level=debug,rrc.max_size=1,phy.level=debug,file.rotate=1G,file.path=/dev/null",
{% else %} {% else %}
log_options: "all.level=error,all.max_size=0,nas.level=debug,nas.max_size=1,s1ap.level=debug,s1ap.max_size=1,x2ap.level=debug,x2ap.max_size=1,rrc.level=debug,rrc.max_size=1,phy.level=info,file.rotate=1G,file.path=/dev/null", log_options: "all.level=error,all.max_size=0,nas.level=debug,nas.max_size=1,s1ap.level=debug,s1ap.max_size=1,x2ap.level=debug,x2ap.max_size=1,rrc.level=debug,rrc.max_size=1,phy.level=info,file.rotate=1G,file.path=/dev/null",
{% endif %} {% endif %}
log_filename: "{{ directory['log'] }}/enb.log", log_filename: "{{ directory['log'] }}/enb.log",
{%- endif %}
{%- if do_nr %}
{% if slapparameter_dict.get('log_phy_debug', False) %}
log_options: "all.level=error,all.max_size=0,nas.level=debug,nas.max_size=1,ngap.level=debug,ngap.max_size=1,xnap.level=debug,xnap.max_size=1,rrc.level=debug,rrc.max_size=1,phy.level=debug,file.rotate=1G,file.path=/dev/null",
{% else %}
log_options: "all.level=error,all.max_size=0,nas.level=debug,nas.max_size=1,ngap.level=debug,ngap.max_size=1,xnap.level=debug,xnap.max_size=1,rrc.level=debug,rrc.max_size=1,phy.level=info,file.rotate=1G,file.path=/dev/null",
{% endif %}
log_filename: "{{ directory['log'] }}/gnb.log",
{%- endif %}
{% if ru == "lopcomm" %} {% if ru == "lopcomm" %}
rf_driver: { rf_driver: {
...@@ -97,6 +133,23 @@ ...@@ -97,6 +133,23 @@
}, },
tx_gain: 0, tx_gain: 0,
rx_gain: 0, rx_gain: 0,
{% elif ru == "m2ru" %}
rf_driver: {
{%- if slapparameter_dict.get('disable_sdr', False) %}
name: "dummy",
{%- else %}
name: "sdr",
{%- endif %}
args: "dev0=/dev/sdr0@0",
cpri_mapping: "bf1",
cpri_mult: 16,
cpri_rx_delay: 11.0,
cpri_tx_delay: 0,
ifname: "cpri0",
cpri_tx_dbm: 42.0,
},
tx_gain: 0,
rx_gain: 0,
{% elif bbu == "ors" %} {% elif bbu == "ors" %}
rf_driver: { rf_driver: {
{%- if slapparameter_dict.get('disable_sdr', False) %} {%- if slapparameter_dict.get('disable_sdr', False) %}
...@@ -123,6 +176,7 @@ ...@@ -123,6 +176,7 @@
com_addr: "{{ slap_configuration['configuration.com_addr'] }}:{{ slap_configuration['configuration.com_ws_port'] }}", com_addr: "{{ slap_configuration['configuration.com_addr'] }}:{{ slap_configuration['configuration.com_ws_port'] }}",
{%- endif %} {%- endif %}
{%- if do_lte %}
mme_list: [ mme_list: [
{% if slapparameter_dict.get('mme_list', '') %} {% if slapparameter_dict.get('mme_list', '') %}
{%- for i, k in enumerate(slapparameter_dict['mme_list']) %} {%- for i, k in enumerate(slapparameter_dict['mme_list']) %}
...@@ -140,7 +194,29 @@ ...@@ -140,7 +194,29 @@
}, },
{% endif %} {% endif %}
], ],
{% if slapparameter_dict.get('mme_list', '') %} {%- endif %}
{%- if do_nr %}
amf_list: [
{% if slapparameter_dict.get('amf_list', '') %}
{%- for i, k in enumerate(slapparameter_dict['amf_list']) %}
{%- if i == 0 %}
{
{%- else -%}
, {
{%- endif %}
amf_addr: "{{ slapparameter_dict['amf_list'][k]['amf_addr'] }}",
}
{%- endfor -%}
{% else %}
{
amf_addr: "{{ slap_configuration['configuration.amf_addr'] }}",
},
{% endif %}
],
{%- endif %}
{% if slapparameter_dict.get('mme_list', '') or slapparameter_dict.get('amf_list', '') %}
{% if slapparameter_dict.get('gtp_addr') %} {% if slapparameter_dict.get('gtp_addr') %}
gtp_addr: "{{ slapparameter_dict.get('gtp_addr') }}", gtp_addr: "{{ slapparameter_dict.get('gtp_addr') }}",
{% else %} {% else %}
...@@ -154,9 +230,27 @@ ...@@ -154,9 +230,27 @@
gtp_addr: "{{ slap_configuration['configuration.gtp_addr'] }}", gtp_addr: "{{ slap_configuration['configuration.gtp_addr'] }}",
{% endif %} {% endif %}
{%- if do_nr %}
{% if slapparameter_dict.get('xn_peers', '') %}
xn_peers: [
{%- for k in slapparameter_dict['xn_peers'] -%}
"{{ slapparameter_dict['xn_peers'][k]['xn_addr'] }}",
{%- endfor -%}
],
{% endif %}
{%- endif %}
{%- if do_lte %}
enb_id: {{ slapparameter_dict.get('enb_id', '0x1A2D0') }}, enb_id: {{ slapparameter_dict.get('enb_id', '0x1A2D0') }},
{%- endif %}
{%- if do_nr %}
gnb_id_bits: {{ slapparameter_dict.get('gnb_id_bits', 28) }},
gnb_id: {{ slapparameter_dict.get('gnb_id', '0x12345') }},
en_dc_support: true,
{%- endif %}
cell_list: [ cell_list: [
{%- if do_lte %}
{%- for i, k in enumerate(cell_list) %} {%- for i, k in enumerate(cell_list) %}
{%- if i == 0 -%} {%- if i == 0 -%}
{ {
...@@ -205,8 +299,10 @@ ...@@ -205,8 +299,10 @@
], ],
} }
{%- endfor %} {%- endfor %}
{%- endif %}
], ],
{%- if do_lte %}
cell_default: { cell_default: {
plmn_list: [ plmn_list: [
{%- if slapparameter_dict.get('plmn_list', '') %} {%- if slapparameter_dict.get('plmn_list', '') %}
...@@ -396,4 +492,514 @@ ...@@ -396,4 +492,514 @@
meas_gap_config: "gp0", meas_gap_config: "gp0",
ho_from_meas: true, ho_from_meas: true,
}, },
{%- endif %}
{% if do_nr %}
nr_cell_list: [
{
rf_port: 0,
cell_id: {{ slapparameter_dict.get('cell_id', '0x01') }},
band: {{ nr_band }},
dl_nr_arfcn: {{ nr_arfcn }},
subcarrier_spacing: 30,
ssb_pos_bitmap: "{{ slapparameter_dict.get('ssb_pos_bitmap', slap_configuration['configuration.default_nr_ssb_pos_bitmap']) }}",
{%- if slapparameter_dict.get('ncell_list', '') %}
ncell_list: [
{%- for i, k in enumerate(slapparameter_dict['ncell_list']) %}
{%- if i == 0 -%}
{
{%- else -%}
, {
{%- endif %}
rat: "nr",
dl_nr_arfcn: {{ slapparameter_dict['ncell_list'][k].get('dl_nr_arfcn', '') }},
ssb_nr_arfcn: {{ slapparameter_dict['ncell_list'][k].get('ssb_nr_arfcn', '') }},
ul_nr_arfcn: {{ slapparameter_dict['ncell_list'][k].get('dl_nr_arfcn', '') }},
n_id_cell: {{ slapparameter_dict['ncell_list'][k].get('pci', '') }},
gnb_id_bits: {{ slapparameter_dict['ncell_list'][k].get('gnb_id_bits', '') }},
nr_cell_id: {{ slapparameter_dict['ncell_list'][k].get('nr_cell_id', '') }},
tac: {{ slapparameter_dict['ncell_list'][k].get('tac', 1) }},
band: {{ slapparameter_dict['ncell_list'][k].get('nr_band', '') }},
ssb_subcarrier_spacing: 30,
ssb_period: 20,
ssb_offset: 0,
ssb_duration: 1,
}
{%- endfor -%}
],
{% endif %}
},
],
nr_cell_default: {
bandwidth: {{ slapparameter_dict.get('nr_bandwidth', slap_configuration['configuration.default_nr_bandwidth']) }},
n_antenna_dl: N_ANTENNA_DL,
n_antenna_ul: N_ANTENNA_UL,
{%- if bbu == "ors" %}
manual_ref_signal_power: true,
{%- if one_watt == "True" %}
ss_pbch_block_power: {{ (tx_gain | int) - 54 }},
{%- else %}
ss_pbch_block_power: {{ (tx_gain | int) - 35 }},
{%- endif -%}
{%- endif %}
{% if tdd_config == 1 %}
tdd_ul_dl_config: {
pattern1: {
period: 5,
dl_slots: 7,
dl_symbols: 6,
ul_slots: 2,
ul_symbols: 4,
},
},
{% elif tdd_config == 2 %}
tdd_ul_dl_config: {
pattern1: {
period: 2.5,
dl_slots: 3,
dl_symbols: 10,
ul_slots: 1,
ul_symbols: 2,
},
},
{% elif tdd_config == 3 %}
tdd_ul_dl_config: {
pattern1: {
period: 5, /* in ms */
dl_slots: 1,
dl_symbols: 10,
ul_slots: 8,
ul_symbols: 2,
},
},
{% endif %}
ssb_period: 20,
n_id_cell: {{ slapparameter_dict.get('pci', 500) }},
plmn_list: [
{%- if slapparameter_dict.get('plmn_list', '') %}
{%- for i, k in enumerate(slapparameter_dict['plmn_list']) %}
{%- if i == 0 -%}
{
{%- else -%}
, {
{%- endif %}
plmn: "{{ slapparameter_dict['plmn_list'][k]['plmn'] }}",
tac: {{ slapparameter_dict['plmn_list'][k].get('tac', 100) }},
{%- if slapparameter_dict['plmn_list'][k].get('ranac', '') %}
ranac: {{ slapparameter_dict['plmn_list'][k]['ranac'] }},
{%- endif %}
reserved: {{ str(slapparameter_dict['plmn_list'][k].get('reserved', false)).lower() }},
nssai: [
{%- if slapparameter_dict.get('nssai', '') %}
{%- for j, k in enumerate(slapparameter_dict['nssai']) %}
{%- if j == 0 %}
{
{%- else -%}
, {
{%- endif %}
sst: {{ slapparameter_dict['nssai'][k]['sst'] }},
{%- if slapparameter_dict['nssai'][k].get('sd', '') %}
sd: {{ slapparameter_dict['nssai'][k]['sd'] }},
{%- endif %}
}
{%- endfor -%}
{% else %}
{
sst: 1,
},
{% endif %}
],
}
{%- endfor -%}
{% else %}
{
plmn: "00101",
tac: 100,
reserved: false,
nssai: [
{%- if slapparameter_dict.get('nssai', '') %}
{%- for j, k in enumerate(slapparameter_dict['nssai']) %}
{%- if j == 0 %}
{
{%- else -%}
, {
{%- endif %}
sst: {{ slapparameter_dict['nssai'][k]['sst'] }},
{%- if slapparameter_dict['nssai'][k].get('sd', '') %}
sd: {{ slapparameter_dict['nssai'][k]['sd'] }},
{%- endif %}
}
{%- endfor -%}
{% else %}
{
sst: 1,
},
{% endif %}
],
},
{%- endif %}
],
si_window_length: 40,
cell_barred: false,
intra_freq_reselection: true,
q_rx_lev_min: -70,
q_qual_min: -20,
root_sequence_index: 1,
sr_period: 40,
dmrs_type_a_pos: 2,
prach: {
prach_config_index: 160,
msg1_subcarrier_spacing: 30,
msg1_fdm: 1,
{% if ru == "m2ru" %}
msg1_frequency_start: 0,
{% else %}
msg1_frequency_start: -1,
{% endif %}
zero_correlation_zone_config: 15,
preamble_received_target_power: -110,
preamble_trans_max: 7,
power_ramping_step: 4,
ra_response_window: 20,
restricted_set_config: "unrestricted_set",
ra_contention_resolution_timer: 64,
ssb_per_prach_occasion: 1,
cb_preambles_per_ssb: 8,
},
pdcch: {
{% if ru == "m2ru" %}
n_rb_coreset0: 48,
n_symb_coreset0: 1,
{% endif %}
search_space0_index: 0,
dedicated_coreset: {
rb_start: -1,
l_crb: -1,
{% if ru == "m2ru" %}
duration: 1,
{% else %}
duration: 0,
{% endif %}
precoder_granularity: "sameAsREG_bundle",
},
css: {
n_candidates: [ 0, 0, 4, 0, 0 ],
},
rar_al_index: 2,
si_al_index: 2,
{% if tdd_config == 3 %}
uss: {
n_candidates: [ 0, 8, 1, 0, 0 ],
dci_0_1_and_1_1: true,
},
{% else %}
uss: {
n_candidates: [ 0, 2, 1, 0, 0 ],
dci_0_1_and_1_1: true,
},
{% endif %}
al_index: 1,
},
pdsch: {
mapping_type: "typeA",
dmrs_add_pos: 1,
dmrs_type: 1,
dmrs_max_len: 1,
{% if ru == "m2ru" %}
k0: 0,
k1: [ 8, 7, 7, 6, 5, 4, 12, 11 ],
{% elif tdd_config == 3 %}
k1: [4, 11],
{% endif %}
mcs_table: "qam256",
rar_mcs: 2,
si_mcs: 6,
},
csi_rs: {
nzp_csi_rs_resource: [
{
csi_rs_id: 0,
#if N_ANTENNA_DL == 1
n_ports: 1,
frequency_domain_allocation: "row2",
bitmap: "100000000000",
cdm_type: "no_cdm",
#elif N_ANTENNA_DL == 2
n_ports: 2,
frequency_domain_allocation: "other",
bitmap: "100000",
cdm_type: "fd_cdm2",
#elif N_ANTENNA_DL == 4
n_ports: 4,
frequency_domain_allocation: "row4",
bitmap: "100",
cdm_type: "fd_cdm2",
#elif N_ANTENNA_DL == 8
n_ports: 8,
frequency_domain_allocation: "other",
bitmap: "110011",
cdm_type: "fd_cdm2",
#else
#error unsupported number of DL antennas
#endif
density: 1,
first_symb: 4,
rb_start: 0,
l_crb: -1,
power_control_offset: 0,
power_control_offset_ss: 0,
period: 80,
offset: 1,
qcl_info_periodic_csi_rs: 0,
},
{% if tdd_config != 3 %}
{
csi_rs_id: 1,
n_ports: 1,
frequency_domain_allocation: "row1",
bitmap: "0001",
cdm_type: "no_cdm",
density: 3,
first_symb: 4,
rb_start: 0,
l_crb: -1,
power_control_offset: 0,
power_control_offset_ss: 0,
period: 40,
offset: 11,
qcl_info_periodic_csi_rs: 0,
},
{
csi_rs_id: 2,
n_ports: 1,
frequency_domain_allocation: "row1",
bitmap: "0001",
cdm_type: "no_cdm",
density: 3,
first_symb: 8,
rb_start: 0,
l_crb: -1,
power_control_offset: 0,
power_control_offset_ss: 0,
period: 40,
offset: 11,
qcl_info_periodic_csi_rs: 0,
},
{
csi_rs_id: 3,
n_ports: 1,
frequency_domain_allocation: "row1",
bitmap: "0001",
cdm_type: "no_cdm",
density: 3,
first_symb: 4,
rb_start: 0,
l_crb: -1,
power_control_offset: 0,
power_control_offset_ss: 0,
period: 40,
offset: 12,
qcl_info_periodic_csi_rs: 0,
},
{
csi_rs_id: 4,
n_ports: 1,
frequency_domain_allocation: "row1",
bitmap: "0001",
cdm_type: "no_cdm",
density: 3,
first_symb: 8,
rb_start: 0,
l_crb: -1,
power_control_offset: 0,
power_control_offset_ss: 0,
period: 40,
offset: 12,
qcl_info_periodic_csi_rs: 0,
},
{% endif %}
],
nzp_csi_rs_resource_set: [
{
csi_rs_set_id: 0,
nzp_csi_rs_resources: [ 0 ],
repetition: false,
},
{% if tdd_config != 3 %}
{
csi_rs_set_id: 1,
nzp_csi_rs_resources: [ 1, 2, 3, 4 ],
repetition: false,
trs_info: true,
},
{% endif %}
],
csi_im_resource: [
{
csi_im_id: 0,
pattern: 1,
subcarrier_location: 8,
symbol_location: 8,
rb_start: 0,
l_crb: -1,
period: 80,
offset: 1,
},
],
csi_im_resource_set: [
{
csi_im_set_id: 0,
csi_im_resources: [ 0 ],
}
],
zp_csi_rs_resource: [
{
csi_rs_id: 0,
frequency_domain_allocation: "row4",
bitmap: "100",
n_ports: 4,
cdm_type: "fd_cdm2",
first_symb: 8,
density: 1,
rb_start: 0,
l_crb: -1,
period: 80,
offset: 1,
},
],
p_zp_csi_rs_resource_set: [
{
zp_csi_rs_resources: [ 0 ],
},
],
csi_resource_config: [
{
csi_rsc_config_id: 0,
nzp_csi_rs_resource_set_list: [ 0 ],
resource_type: "periodic",
},
{
csi_rsc_config_id: 1,
csi_im_resource_set_list: [ 0 ],
resource_type: "periodic",
},
{% if tdd_config != 3 %}
{
csi_rsc_config_id: 2,
nzp_csi_rs_resource_set_list: [ 1 ],
resource_type: "periodic",
},
{% endif %}
],
csi_report_config: [
{
resources_for_channel_measurement: 0,
csi_im_resources_for_interference: 1,
report_config_type: "periodic",
period: 80,
report_quantity: "CRI_RI_PMI_CQI",
#if N_ANTENNA_DL > 1
codebook_config: {
codebook_type: "type1",
sub_type: "typeI_SinglePanel",
#if N_ANTENNA_DL == 2
#elif N_ANTENNA_DL == 4
n1: 2,
n2: 1,
codebook_mode: 1,
#elif N_ANTENNA_DL == 8
n1: 4,
n2: 1,
codebook_mode: 1,
#endif
},
#endif
cqi_table: 2,
subband_size: "value1",
},
],
},
pucch: {
dpc_snr_target: 25,
pucch_group_hopping: "neither",
hopping_id: -1,
p0_nominal: -90,
pucch1: {
n_cs: 3,
n_occ: 3,
freq_hopping: true,
},
pucch2: {
n_symb: 2,
n_prb: 1,
freq_hopping: true,
simultaneous_harq_ack_csi: false,
max_code_rate: 0.25,
},
},
pusch: {
dpc_snr_target: 25,
mapping_type: "typeA",
n_symb: 14,
dmrs_add_pos: 1,
dmrs_type: 1,
dmrs_max_len: 1,
tf_precoding: false,
mcs_table: "qam256",
mcs_table_tp: "qam256",
ldpc_max_its: 5,
{% if ru == "m2ru" %}
k2: 4,
msg3_k2: 7,
{% elif tdd_config == 3 %}
k2: [11, 12, 4, 5, 6, 7, 7, 8],
msg3_k2: 7,
{% endif %}
p0_nominal_with_grant: -84,
msg3_mcs: 4,
msg3_delta_power: 0,
beta_offset_ack_index: 9,
},
mac_config: {
msg3_max_harq_tx: 5,
ul_max_harq_tx: 5,
dl_max_harq_tx: 5,
ul_max_consecutive_retx: 30,
dl_max_consecutive_retx: 30,
periodic_bsr_timer: 20,
retx_bsr_timer: 320,
periodic_phr_timer: 500,
prohibit_phr_timer: 200,
phr_tx_power_factor_change: "dB3",
sr_prohibit_timer: 0,
sr_trans_max: 64,
},
cipher_algo_pref: [],
integ_algo_pref: [2, 1],
inactivity_timer: {{ slapparameter_dict.get('inactivity_timer', slap_configuration['configuration.default_nr_inactivity_timer']) }},
drb_config: "{{ drb_file }}",
meas_config_desc: {
a1_report_type: "rsrp",
a1_rsrp: -60,
a1_hysteresis: 10,
a1_time_to_trigger: 100,
a2_report_type: "rsrp",
a2_rsrp: -70,
a2_hysteresis: 0,
a2_time_to_trigger: 100,
a3_report_type: "rsrp",
a3_offset: {{ slapparameter_dict.get('nr_handover_a3_offset', 6) }},
a3_hysteresis: 0,
a3_time_to_trigger: {{ slapparameter_dict.get('nr_handover_time_to_trigger', 100) }},
ssb_rsrq_filter_coeff: 3,
ssb_sinr_filter_coeff: 5
},
meas_gap_config: {
pattern_id: 0
},
},
{%- endif %}
} }
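The `ss_pbch_block_power` values used in `nr_cell_default` are plain offsets from the configured TX gain; as arithmetic (a sketch of the template expression, offsets taken from the template above, function name illustrative):

```python
def ss_pbch_block_power(tx_gain: int, one_watt: bool) -> int:
    """Mirror of the Jinja expression `(tx_gain | int) - 54` / `- 35`."""
    return tx_gain - (54 if one_watt else 35)

assert ss_pbch_block_power(80, one_watt=True) == 26
assert ss_pbch_block_power(60, one_watt=False) == 25
```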
{%- if slapparameter_dict.get('tdd_ul_dl_config', '5ms 2UL 7DL 4/6 (default)') == '5ms 2UL 7DL 4/6 (default)' %}
{%- set tdd_config = 1 %}
{%- elif slapparameter_dict.get('tdd_ul_dl_config', '5ms 2UL 7DL 4/6 (default)') == '2.5ms 1UL 3DL 2/10' %}
{%- set tdd_config = 2 %}
{%- elif slapparameter_dict.get('tdd_ul_dl_config', '5ms 2UL 7DL 4/6 (default)') == '5ms 8UL 1DL 2/10 (maximum uplink)' %}
{%- set tdd_config = 3 %}
{%- endif %}
#define N_ANTENNA_DL {{ slapparameter_dict.get('n_antenna_dl', slap_configuration['configuration.default_n_antenna_dl']) }}
{% if ru == "m2ru" %}
#define N_ANTENNA_UL {{ slapparameter_dict.get('n_antenna_ul', 1) }}
{% else %}
#define N_ANTENNA_UL {{ slapparameter_dict.get('n_antenna_ul', slap_configuration['configuration.default_n_antenna_dl']) }}
{% endif %}
{
{% if slapparameter_dict.get('log_phy_debug', False) %}
log_options: "all.level=error,all.max_size=0,nas.level=debug,nas.max_size=1,ngap.level=debug,ngap.max_size=1,xnap.level=debug,xnap.max_size=1,rrc.level=debug,rrc.max_size=1,phy.level=debug,file.rotate=1G,file.path=/dev/null",
{% else %}
log_options: "all.level=error,all.max_size=0,nas.level=debug,nas.max_size=1,ngap.level=debug,ngap.max_size=1,xnap.level=debug,xnap.max_size=1,rrc.level=debug,rrc.max_size=1,phy.level=info,file.rotate=1G,file.path=/dev/null",
{% endif %}
log_filename: "{{ directory['log'] }}/gnb.log",
{% if ru == "m2ru" %}
rf_driver: {
{%- if slapparameter_dict.get('disable_sdr', False) %}
name: "dummy",
{%- else %}
name: "sdr",
{%- endif %}
args: "dev0=/dev/sdr0@0",
cpri_mapping: "bf1",
cpri_mult: 16,
cpri_rx_delay: 11.0,
cpri_tx_delay: 0,
ifname: "cpri0",
cpri_tx_dbm: 42.0,
},
tx_gain: 0,
rx_gain: 0,
{% else %}
rf_driver: {
{%- if slapparameter_dict.get('disable_sdr', False) %}
name: "dummy",
{%- else %}
name: "sdr",
{%- endif %}
args: "dev0=/dev/sdr0",
{% if slapparameter_dict.get('gps_sync', False) %}
sync: "gps",
{% endif %}
rx_antenna: "tx_rx",
tdd_tx_mod: 1,
},
tx_gain: {{ tx_gain }},
rx_gain: {{ rx_gain }},
{% endif %}
{%- if slapparameter_dict.get('websocket_password', '') %}
com_addr: "[{{ gtp_addr_v6 }}]:{{ slap_configuration['configuration.com_ws_port'] }}",
com_auth: {
password: "{{ slapparameter_dict['websocket_password'] }}",
},
{%- else %}
com_addr: "{{ slap_configuration['configuration.com_addr'] }}:{{ slap_configuration['configuration.com_ws_port'] }}",
{%- endif %}
amf_list: [
{% if slapparameter_dict.get('amf_list', '') %}
{%- for i, k in enumerate(slapparameter_dict['amf_list']) %}
{%- if i == 0 %}
{
{%- else -%}
, {
{%- endif %}
amf_addr: "{{ slapparameter_dict['amf_list'][k]['amf_addr'] }}",
}
{%- endfor -%}
{% else %}
{
amf_addr: "{{ slap_configuration['configuration.amf_addr'] }}",
},
{% endif %}
],
{% if slapparameter_dict.get('xn_peers', '') %}
xn_peers: [
{%- for k in slapparameter_dict['xn_peers'] -%}
"{{ slapparameter_dict['xn_peers'][k]['xn_addr'] }}",
{%- endfor -%}
],
{% endif %}
{% if slapparameter_dict.get('amf_list', '') %}
{% if slapparameter_dict.get('use_ipv4', False) %}
gtp_addr: "{{ gtp_addr_v4 }}",
{% else %}
gtp_addr: "{{ gtp_addr_v6 }}",
{% endif %}
{% else %}
gtp_addr: "{{ slap_configuration['configuration.gtp_addr'] }}",
{% endif %}
gnb_id_bits: {{ slapparameter_dict.get('gnb_id_bits', 28) }},
gnb_id: {{ slapparameter_dict.get('gnb_id', '0x12345') }},
en_dc_support: true,
cell_list: [],
nr_cell_list: [
{
rf_port: 0,
cell_id: {{ slapparameter_dict.get('cell_id', '0x01') }},
band: {{ nr_band }},
dl_nr_arfcn: {{ nr_arfcn }},
subcarrier_spacing: 30,
ssb_pos_bitmap: "{{ slapparameter_dict.get('ssb_pos_bitmap', slap_configuration['configuration.default_nr_ssb_pos_bitmap']) }}",
{%- if slapparameter_dict.get('ncell_list', '') %}
ncell_list: [
{%- for i, k in enumerate(slapparameter_dict['ncell_list']) %}
{%- if i == 0 -%}
{
{%- else -%}
, {
{%- endif %}
rat: "nr",
dl_nr_arfcn: {{ slapparameter_dict['ncell_list'][k].get('dl_nr_arfcn', '') }},
ssb_nr_arfcn: {{ slapparameter_dict['ncell_list'][k].get('ssb_nr_arfcn', '') }},
ul_nr_arfcn: {{ slapparameter_dict['ncell_list'][k].get('dl_nr_arfcn', '') }},
n_id_cell: {{ slapparameter_dict['ncell_list'][k].get('pci', '') }},
gnb_id_bits: {{ slapparameter_dict['ncell_list'][k].get('gnb_id_bits', '') }},
nr_cell_id: {{ slapparameter_dict['ncell_list'][k].get('nr_cell_id', '') }},
tac: {{ slapparameter_dict['ncell_list'][k].get('tac', 1) }},
band: {{ slapparameter_dict['ncell_list'][k].get('nr_band', '') }},
ssb_subcarrier_spacing: 30,
ssb_period: 20,
ssb_offset: 0,
ssb_duration: 1,
}
{%- endfor -%}
],
{% endif %}
},
],
nr_cell_default: {
bandwidth: {{ slapparameter_dict.get('nr_bandwidth', slap_configuration['configuration.default_nr_bandwidth']) }},
n_antenna_dl: N_ANTENNA_DL,
n_antenna_ul: N_ANTENNA_UL,
{%- if bbu == "ors" %}
manual_ref_signal_power: true,
{%- if one_watt == "True" %}
ss_pbch_block_power: {{ (tx_gain | int) - 54 }},
{%- else %}
ss_pbch_block_power: {{ (tx_gain | int) - 35 }},
{%- endif -%}
{%- endif %}
{% if tdd_config == 1 %}
tdd_ul_dl_config: {
pattern1: {
period: 5,
dl_slots: 7,
dl_symbols: 6,
ul_slots: 2,
ul_symbols: 4,
},
},
{% elif tdd_config == 2 %}
tdd_ul_dl_config: {
pattern1: {
period: 2.5,
dl_slots: 3,
dl_symbols: 10,
ul_slots: 1,
ul_symbols: 2,
},
},
{% elif tdd_config == 3 %}
tdd_ul_dl_config: {
pattern1: {
period: 5, /* in ms */
dl_slots: 1,
dl_symbols: 10,
ul_slots: 8,
ul_symbols: 2,
},
},
{% endif %}
ssb_period: 20,
n_id_cell: {{ slapparameter_dict.get('pci', 500) }},
plmn_list: [
{%- if slapparameter_dict.get('plmn_list', '') %}
{%- for i, k in enumerate(slapparameter_dict['plmn_list']) %}
{%- if i == 0 -%}
{
{%- else -%}
, {
{%- endif %}
plmn: "{{ slapparameter_dict['plmn_list'][k]['plmn'] }}",
tac: {{ slapparameter_dict['plmn_list'][k].get('tac', 100) }},
{%- if slapparameter_dict['plmn_list'][k].get('ranac', '') %}
ranac: {{ slapparameter_dict['plmn_list'][k]['ranac'] }},
{%- endif %}
reserved: {{ str(slapparameter_dict['plmn_list'][k].get('reserved', false)).lower() }},
nssai: [
{%- if slapparameter_dict.get('nssai', '') %}
{%- for j, k in enumerate(slapparameter_dict['nssai']) %}
{%- if j == 0 %}
{
{%- else -%}
, {
{%- endif %}
sst: {{ slapparameter_dict['nssai'][k]['sst'] }},
{%- if slapparameter_dict['nssai'][k].get('sd', '') %}
sd: {{ slapparameter_dict['nssai'][k]['sd'] }},
{%- endif %}
}
{%- endfor -%}
{% else %}
{
sst: 1,
},
{% endif %}
],
}
{%- endfor -%}
{% else %}
{
plmn: "00101",
tac: 100,
reserved: false,
nssai: [
{%- if slapparameter_dict.get('nssai', '') %}
{%- for j, k in enumerate(slapparameter_dict['nssai']) %}
{%- if j == 0 %}
{
{%- else -%}
, {
{%- endif %}
sst: {{ slapparameter_dict['nssai'][k]['sst'] }},
{%- if slapparameter_dict['nssai'][k].get('sd', '') %}
sd: {{ slapparameter_dict['nssai'][k]['sd'] }},
{%- endif %}
}
{%- endfor -%}
{% else %}
{
sst: 1,
},
{% endif %}
],
},
{%- endif %}
],
si_window_length: 40,
cell_barred: false,
intra_freq_reselection: true,
q_rx_lev_min: -70,
q_qual_min: -20,
root_sequence_index: 1,
sr_period: 40,
dmrs_type_a_pos: 2,
prach: {
prach_config_index: 160,
msg1_subcarrier_spacing: 30,
msg1_fdm: 1,
{% if ru == "m2ru" %}
msg1_frequency_start: 0,
{% else %}
msg1_frequency_start: -1,
{% endif %}
zero_correlation_zone_config: 15,
preamble_received_target_power: -110,
preamble_trans_max: 7,
power_ramping_step: 4,
ra_response_window: 20,
restricted_set_config: "unrestricted_set",
ra_contention_resolution_timer: 64,
ssb_per_prach_occasion: 1,
cb_preambles_per_ssb: 8,
},
pdcch: {
{% if ru == "m2ru" %}
n_rb_coreset0: 48,
n_symb_coreset0: 1,
{% endif %}
search_space0_index: 0,
dedicated_coreset: {
rb_start: -1,
l_crb: -1,
{% if ru == "m2ru" %}
duration: 1,
{% else %}
duration: 0,
{% endif %}
precoder_granularity: "sameAsREG_bundle",
},
css: {
n_candidates: [ 0, 0, 4, 0, 0 ],
},
rar_al_index: 2,
si_al_index: 2,
{% if tdd_config == 3 %}
uss: {
n_candidates: [ 0, 8, 1, 0, 0 ],
dci_0_1_and_1_1: true,
},
{% else %}
uss: {
n_candidates: [ 0, 2, 1, 0, 0 ],
dci_0_1_and_1_1: true,
},
{% endif %}
al_index: 1,
},
pdsch: {
mapping_type: "typeA",
dmrs_add_pos: 1,
dmrs_type: 1,
dmrs_max_len: 1,
{% if ru == "m2ru" %}
k0: 0,
k1: [ 8, 7, 7, 6, 5, 4, 12, 11 ],
{% elif tdd_config == 3 %}
k1: [4, 11],
{% endif %}
mcs_table: "qam256",
rar_mcs: 2,
si_mcs: 6,
},
csi_rs: {
nzp_csi_rs_resource: [
{
csi_rs_id: 0,
#if N_ANTENNA_DL == 1
n_ports: 1,
frequency_domain_allocation: "row2",
bitmap: "100000000000",
cdm_type: "no_cdm",
#elif N_ANTENNA_DL == 2
n_ports: 2,
frequency_domain_allocation: "other",
bitmap: "100000",
cdm_type: "fd_cdm2",
#elif N_ANTENNA_DL == 4
n_ports: 4,
frequency_domain_allocation: "row4",
bitmap: "100",
cdm_type: "fd_cdm2",
#elif N_ANTENNA_DL == 8
n_ports: 8,
frequency_domain_allocation: "other",
bitmap: "110011",
cdm_type: "fd_cdm2",
#else
#error unsupported number of DL antennas
#endif
density: 1,
first_symb: 4,
rb_start: 0,
l_crb: -1,
power_control_offset: 0,
power_control_offset_ss: 0,
period: 80,
offset: 1,
qcl_info_periodic_csi_rs: 0,
},
{% if tdd_config != 3 %}
{
csi_rs_id: 1,
n_ports: 1,
frequency_domain_allocation: "row1",
bitmap: "0001",
cdm_type: "no_cdm",
density: 3,
first_symb: 4,
rb_start: 0,
l_crb: -1,
power_control_offset: 0,
power_control_offset_ss: 0,
period: 40,
offset: 11,
qcl_info_periodic_csi_rs: 0,
},
{
csi_rs_id: 2,
n_ports: 1,
frequency_domain_allocation: "row1",
bitmap: "0001",
cdm_type: "no_cdm",
density: 3,
first_symb: 8,
rb_start: 0,
l_crb: -1,
power_control_offset: 0,
power_control_offset_ss: 0,
period: 40,
offset: 11,
qcl_info_periodic_csi_rs: 0,
},
{
csi_rs_id: 3,
n_ports: 1,
frequency_domain_allocation: "row1",
bitmap: "0001",
cdm_type: "no_cdm",
density: 3,
first_symb: 4,
rb_start: 0,
l_crb: -1,
power_control_offset: 0,
power_control_offset_ss: 0,
period: 40,
offset: 12,
qcl_info_periodic_csi_rs: 0,
},
{
csi_rs_id: 4,
n_ports: 1,
frequency_domain_allocation: "row1",
bitmap: "0001",
cdm_type: "no_cdm",
density: 3,
first_symb: 8,
rb_start: 0,
l_crb: -1,
power_control_offset: 0,
power_control_offset_ss: 0,
period: 40,
offset: 12,
qcl_info_periodic_csi_rs: 0,
},
{% endif %}
],
nzp_csi_rs_resource_set: [
{
csi_rs_set_id: 0,
nzp_csi_rs_resources: [ 0 ],
repetition: false,
},
{% if tdd_config != 3 %}
{
csi_rs_set_id: 1,
nzp_csi_rs_resources: [ 1, 2, 3, 4 ],
repetition: false,
trs_info: true,
},
{% endif %}
],
csi_im_resource: [
{
csi_im_id: 0,
pattern: 1,
subcarrier_location: 8,
symbol_location: 8,
rb_start: 0,
l_crb: -1,
period: 80,
offset: 1,
},
],
csi_im_resource_set: [
{
csi_im_set_id: 0,
csi_im_resources: [ 0 ],
}
],
zp_csi_rs_resource: [
{
csi_rs_id: 0,
frequency_domain_allocation: "row4",
bitmap: "100",
n_ports: 4,
cdm_type: "fd_cdm2",
first_symb: 8,
density: 1,
rb_start: 0,
l_crb: -1,
period: 80,
offset: 1,
},
],
p_zp_csi_rs_resource_set: [
{
zp_csi_rs_resources: [ 0 ],
},
],
csi_resource_config: [
{
csi_rsc_config_id: 0,
nzp_csi_rs_resource_set_list: [ 0 ],
resource_type: "periodic",
},
{
csi_rsc_config_id: 1,
csi_im_resource_set_list: [ 0 ],
resource_type: "periodic",
},
{% if tdd_config != 3 %}
{
csi_rsc_config_id: 2,
nzp_csi_rs_resource_set_list: [ 1 ],
resource_type: "periodic",
},
{% endif %}
],
csi_report_config: [
{
resources_for_channel_measurement: 0,
csi_im_resources_for_interference: 1,
report_config_type: "periodic",
period: 80,
report_quantity: "CRI_RI_PMI_CQI",
#if N_ANTENNA_DL > 1
codebook_config: {
codebook_type: "type1",
sub_type: "typeI_SinglePanel",
#if N_ANTENNA_DL == 2
#elif N_ANTENNA_DL == 4
n1: 2,
n2: 1,
codebook_mode: 1,
#elif N_ANTENNA_DL == 8
n1: 4,
n2: 1,
codebook_mode: 1,
#endif
},
#endif
cqi_table: 2,
subband_size: "value1",
},
],
},
pucch: {
dpc_snr_target: 25,
pucch_group_hopping: "neither",
hopping_id: -1,
p0_nominal: -90,
pucch1: {
n_cs: 3,
n_occ: 3,
freq_hopping: true,
},
pucch2: {
n_symb: 2,
n_prb: 1,
freq_hopping: true,
simultaneous_harq_ack_csi: false,
max_code_rate: 0.25,
},
},
pusch: {
dpc_snr_target: 25,
mapping_type: "typeA",
n_symb: 14,
dmrs_add_pos: 1,
dmrs_type: 1,
dmrs_max_len: 1,
tf_precoding: false,
mcs_table: "qam256",
mcs_table_tp: "qam256",
ldpc_max_its: 5,
{% if ru == "m2ru" %}
k2: 4,
msg3_k2: 7,
{% elif tdd_config == 3 %}
k2: [11, 12, 4, 5, 6, 7, 7, 8],
msg3_k2: 7,
{% endif %}
p0_nominal_with_grant: -84,
msg3_mcs: 4,
msg3_delta_power: 0,
beta_offset_ack_index: 9,
},
mac_config: {
msg3_max_harq_tx: 5,
ul_max_harq_tx: 5,
dl_max_harq_tx: 5,
ul_max_consecutive_retx: 30,
dl_max_consecutive_retx: 30,
periodic_bsr_timer: 20,
retx_bsr_timer: 320,
periodic_phr_timer: 500,
prohibit_phr_timer: 200,
phr_tx_power_factor_change: "dB3",
sr_prohibit_timer: 0,
sr_trans_max: 64,
},
cipher_algo_pref: [],
integ_algo_pref: [2, 1],
inactivity_timer: {{ slapparameter_dict.get('inactivity_timer', slap_configuration['configuration.default_nr_inactivity_timer']) }},
drb_config: "{{ drb_file }}",
meas_config_desc: {
a1_report_type: "rsrp",
a1_rsrp: -60,
a1_hysteresis: 10,
a1_time_to_trigger: 100,
a2_report_type: "rsrp",
a2_rsrp: -70,
a2_hysteresis: 0,
a2_time_to_trigger: 100,
a3_report_type: "rsrp",
a3_offset: {{ slapparameter_dict.get('nr_handover_a3_offset', 6) }},
a3_hysteresis: 0,
a3_time_to_trigger: {{ slapparameter_dict.get('nr_handover_time_to_trigger', 100) }},
ssb_rsrq_filter_coeff: 3,
ssb_sinr_filter_coeff: 5
},
meas_gap_config: {
pattern_id: 0
},
},
}
...@@ -6,16 +6,8 @@ parts = ...@@ -6,16 +6,8 @@ parts =
xamari-xlog-service xamari-xlog-service
{% if slapparameter_dict.get('xlog_fluentbit_forward_host') %} {% if slapparameter_dict.get('xlog_fluentbit_forward_host') %}
xlog-fluentbit-service xlog-fluentbit-service
{% endif %}
amarisoft-stats-service
amarisoft-rf-info-service
{% if ru == "lopcomm" %}
sshd-service
sshd-add-authorized-key
sshd-promise
{% endif %} {% endif %}
check-baseband-latency.py check-baseband-latency.py
check-amarisoft-stats-log.py
monitor-base monitor-base
publish-connection-information publish-connection-information
...@@ -179,111 +171,6 @@ wrapper-path = ${directory:service}/${:_buildout_section_name_} ...@@ -179,111 +171,6 @@ wrapper-path = ${directory:service}/${:_buildout_section_name_}
hash-files = ${:fluentbit-config} hash-files = ${:fluentbit-config}
{% endif %} {% endif %}
[amarisoft-stats-template]
recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do
log-output = ${directory:var}/log/amarisoft-stats.json.log
context =
section directory directory
key slapparameter_dict slap-configuration:configuration
key log_file :log-output
raw stats_period {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
raw testing {{ slapparameter_dict.get("testing", False) }}
raw python_path {{ buildout_directory}}/bin/pythonwitheggs
mode = 0775
url = {{ amarisoft_stats_template }}
output = ${directory:bin}/amarisoft-stats.py
[amarisoft-rf-info-template]
recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do
log-output = ${directory:var}/log/amarisoft-rf-info.json.log
context =
section directory directory
key slapparameter_dict slap-configuration:configuration
key log_file :log-output
raw stats_period {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
raw testing {{ slapparameter_dict.get("testing", False) }}
raw python_path {{ buildout_directory}}/bin/pythonwitheggs
mode = 0775
url = {{ amarisoft_rf_info_template }}
output = ${directory:bin}/amarisoft-rf-info.py
[amarisoft-stats-service]
recipe = slapos.cookbook:wrapper
command-line = ${amarisoft-stats-template:output}
wrapper-path = ${directory:service}/amarisoft-stats
mode = 0775
hash-files =
${amarisoft-stats-template:output}
[amarisoft-rf-info-service]
recipe = slapos.cookbook:wrapper
command-line = ${amarisoft-rf-info-template:output}
wrapper-path = ${directory:service}/amarisoft-rf-info
mode = 0775
hash-files =
${amarisoft-rf-info-template:output}
[user-info]
recipe = slapos.cookbook:userinfo
# Deploy openssh-server
[sshd-port]
recipe = slapos.cookbook:free_port
minimum = 22222
maximum = 22231
ip = ${slap-configuration:ipv6-random}
[sshd-config]
recipe = slapos.recipe.template:jinja2
output = ${directory:etc}/sshd.conf
path_pid = ${directory:run}/sshd.pid
inline =
PidFile ${:path_pid}
Port ${sshd-port:port}
ListenAddress ${slap-configuration:ipv6-random}
Protocol 2
HostKey ${sshd-ssh-host-rsa-key:output}
HostKey ${sshd-ssh-host-ecdsa-key:output}
PasswordAuthentication no
PubkeyAuthentication yes
HostKeyAlgorithms ssh-rsa,ssh-dss,rsa-sha2-512,rsa-sha2-256,ecdsa-sha2-nistp521
AuthorizedKeysFile ${buildout:directory}/.ssh/authorized_keys
Subsystem sftp {{ openssh_location }}/libexec/sftp-server
[sshd-service]
recipe = slapos.cookbook:wrapper
command-line = {{ openssh_location }}/sbin/sshd -D -e -f ${sshd-config:output}
wrapper-path = ${directory:service}/sshd
hash-files = ${sshd-config:output}
environment =
HOME=${directory:home}
[sshd-add-authorized-key]
recipe = slapos.cookbook:dropbear.add_authorized_key
home = ${buildout:directory}
key = {{ slapparameter_dict.get("user-authorized-key", '') }}
[sshd-ssh-keygen-base]
recipe = plone.recipe.command
output = ${directory:etc}/${:_buildout_section_name_}
command = {{ openssh_output_keygen }} -f ${:output} -N '' ${:extra-args}
[sshd-ssh-host-rsa-key]
<=sshd-ssh-keygen-base
extra-args=-t rsa
[sshd-ssh-host-ecdsa-key]
<=sshd-ssh-keygen-base
extra-args=-t ecdsa -b 521
[sshd-promise]
<= monitor-promise-base
promise = check_socket_listening
name = sshd.py
config-host = ${slap-configuration:ipv6-random}
config-port = ${sshd-port:port}
[config-base] [config-base]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do extensions = jinja2.ext.do
...@@ -303,6 +190,8 @@ context = ...@@ -303,6 +190,8 @@ context =
raw trx {{ trx }} raw trx {{ trx }}
raw bbu {{ bbu }} raw bbu {{ bbu }}
raw ru {{ ru }} raw ru {{ ru }}
json do_lte true
json do_nr false
import netaddr netaddr import netaddr netaddr
${:extra-context} ${:extra-context}
...@@ -351,12 +240,6 @@ current-earfcn = {{ ors_version['current-earfcn'] }} ...@@ -351,12 +240,6 @@ current-earfcn = {{ ors_version['current-earfcn'] }}
amarisoft-version = {{ lte_version }} amarisoft-version = {{ lte_version }}
license-expiration = {{ lte_expiration }} license-expiration = {{ lte_expiration }}
monitor-gadget-url = ${:monitor-base-url}/gadget/software.cfg.html monitor-gadget-url = ${:monitor-base-url}/gadget/software.cfg.html
{% if ru == "lopcomm" %}
ssh-command = ssh ${user-info:pw-name}@${slap-configuration:ipv6-random} -p ${sshd-port:port}
ssh-url = ssh://${user-info:pw-name}@[${slap-configuration:ipv6-random}]:${sshd-port:port}
ru-firmware = {{ru_lopcomm_firmware_filename}}
ru-ipv6 = ${slap-configuration:tap-ipv6-gateway}
{% endif %}
[monitor-instance-parameter] [monitor-instance-parameter]
{% if slapparameter_dict.get("name", None) %} {% if slapparameter_dict.get("name", None) %}
...@@ -374,14 +257,6 @@ name = ${:_buildout_section_name_} ...@@ -374,14 +257,6 @@ name = ${:_buildout_section_name_}
<= macro.promise <= macro.promise
promise = check_baseband_latency promise = check_baseband_latency
config-testing = {{ slapparameter_dict.get("testing", False) }} config-testing = {{ slapparameter_dict.get("testing", False) }}
config-amarisoft-stats-log = ${amarisoft-stats-template:log-output} config-amarisoft-stats-log = ${ru_amarisoft-stats-template:log-output}
config-stats-period = {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }} config-stats-period = {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
config-min-rxtx-delay = {{ slapparameter_dict.get("min_rxtx_delay", 0) }} config-min-rxtx-delay = {{ slapparameter_dict.get("min_rxtx_delay", 0) }}
[check-amarisoft-stats-log.py]
<= macro.promise
promise = check_amarisoft_stats_log
output = ${directory:plugins}/check-amarisoft-stats-log.py
config-testing = {{ slapparameter_dict.get("testing", False) }}
config-amarisoft-stats-log = ${amarisoft-stats-template:log-output}
config-stats-period = {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
...@@ -188,7 +188,7 @@ context = ...@@ -188,7 +188,7 @@ context =
raw testing {{ slapparameter_dict.get("testing", False) }} raw testing {{ slapparameter_dict.get("testing", False) }}
raw python_path {{ buildout_directory}}/bin/pythonwitheggs raw python_path {{ buildout_directory}}/bin/pythonwitheggs
mode = 0775 mode = 0775
url = {{ amarisoft_stats_template }} url = {{ ru_amarisoft_stats_template }}
output = ${directory:bin}/amarisoft-stats.py output = ${directory:bin}/amarisoft-stats.py
[amarisoft-rf-info-template] [amarisoft-rf-info-template]
...@@ -203,7 +203,7 @@ context = ...@@ -203,7 +203,7 @@ context =
raw testing {{ slapparameter_dict.get("testing", False) }} raw testing {{ slapparameter_dict.get("testing", False) }}
raw python_path {{ buildout_directory}}/bin/pythonwitheggs raw python_path {{ buildout_directory}}/bin/pythonwitheggs
mode = 0775 mode = 0775
url = {{ amarisoft_rf_info_template }} url = {{ ru_amarisoft_rf_info_template }}
output = ${directory:bin}/amarisoft-rf-info.py output = ${directory:bin}/amarisoft-rf-info.py
[amarisoft-stats-service] [amarisoft-stats-service]
...@@ -242,6 +242,8 @@ context = ...@@ -242,6 +242,8 @@ context =
raw trx {{ trx }} raw trx {{ trx }}
raw bbu {{ bbu }} raw bbu {{ bbu }}
raw ru {{ ru }} raw ru {{ ru }}
json do_lte false
json do_nr true
import netaddr netaddr import netaddr netaddr
${:extra-context} ${:extra-context}
...@@ -255,11 +257,13 @@ output = ${directory:etc}/drb.cfg ...@@ -255,11 +257,13 @@ output = ${directory:etc}/drb.cfg
{% if slapparameter_dict.get("gnb_config_link", None) %} {% if slapparameter_dict.get("gnb_config_link", None) %}
url = ${gnb-config-dl:target} url = ${gnb-config-dl:target}
{% else %} {% else %}
url = {{ gnb_template }} url = {{ enb_template }}
{% endif %} {% endif %}
output = ${directory:etc}/gnb.cfg output = ${directory:etc}/gnb.cfg
extra-context = extra-context =
key drb_file drb-config:output key drb_file drb-config:output
import-list =
rawfile slaplte.jinja2 {{ slaplte_template }}
[publish-connection-information] [publish-connection-information]
<= monitor-publish <= monitor-publish
......
...@@ -279,8 +279,8 @@ extra-context = ...@@ -279,8 +279,8 @@ extra-context =
raw slaplte_template ${slaplte.jinja2:target} raw slaplte_template ${slaplte.jinja2:target}
raw drb_lte_template ${drb_lte.jinja2.cfg:target} raw drb_lte_template ${drb_lte.jinja2.cfg:target}
raw sib23_template ${sib23.jinja2.asn:target} raw sib23_template ${sib23.jinja2.asn:target}
raw amarisoft_stats_template ${amarisoft-stats.jinja2.py:target} raw ru_amarisoft_stats_template ${ru_amarisoft-stats.jinja2.py:target}
raw amarisoft_rf_info_template ${amarisoft-rf-info.jinja2.py:target} raw ru_amarisoft_rf_info_template ${ru_amarisoft-rf-info.jinja2.py:target}
raw ru_lopcomm_stats_template ${ru_lopcomm_stats.jinja2.py:target} raw ru_lopcomm_stats_template ${ru_lopcomm_stats.jinja2.py:target}
raw ru_lopcomm_config_template ${ru_lopcomm_config.jinja2.py:target} raw ru_lopcomm_config_template ${ru_lopcomm_config.jinja2.py:target}
raw ru_lopcomm_software_template ${ru_lopcomm_software.jinja2.py:target} raw ru_lopcomm_software_template ${ru_lopcomm_software.jinja2.py:target}
...@@ -316,10 +316,11 @@ extra-context = ...@@ -316,10 +316,11 @@ extra-context =
key lte_expiration amarisoft:lte-expiration key lte_expiration amarisoft:lte-expiration
key enb amarisoft:enb key enb amarisoft:enb
key sdr amarisoft:sdr key sdr amarisoft:sdr
raw gnb_template ${gnb.jinja2.cfg:target} raw enb_template ${enb.jinja2.cfg:target}
raw slaplte_template ${slaplte.jinja2:target}
raw drb_nr_template ${drb_nr.jinja2.cfg:target} raw drb_nr_template ${drb_nr.jinja2.cfg:target}
raw amarisoft_stats_template ${amarisoft-stats.jinja2.py:target} raw ru_amarisoft_stats_template ${ru_amarisoft-stats.jinja2.py:target}
raw amarisoft_rf_info_template ${amarisoft-rf-info.jinja2.py:target} raw ru_amarisoft_rf_info_template ${ru_amarisoft-rf-info.jinja2.py:target}
raw openssl_location ${openssl:location} raw openssl_location ${openssl:location}
raw default_nr_bandwidth ${default-params:default-nr-bandwidth} raw default_nr_bandwidth ${default-params:default-nr-bandwidth}
raw default_nr_ssb_pos_bitmap ${default-params:default-nr-ssb-pos-bitmap} raw default_nr_ssb_pos_bitmap ${default-params:default-nr-ssb-pos-bitmap}
......
...@@ -33,3 +33,9 @@ stop-on-error = true ...@@ -33,3 +33,9 @@ stop-on-error = true
[setcap-netcapdo] [setcap-netcapdo]
<= setcap <= setcap
exe = ${netcapdo:exe} exe = ${netcapdo:exe}
[ru_amarisoft-stats.jinja2.py]
<= download-base
[ru_amarisoft-rf-info.jinja2.py]
<= download-base
...@@ -7,7 +7,8 @@ port=5354 ...@@ -7,7 +7,8 @@ port=5354
{%- set plen = netaddr.IPNetwork(vtap.network).prefixlen %} {%- set plen = netaddr.IPNetwork(vtap.network).prefixlen %}
# {{ cell_ref }} @ {{ ru_tap }} # {{ cell_ref }} @ {{ ru_tap }}
dhcp-range=tag:{{ ru_tap }},{{ vtap.gateway }},{{ vtap.gateway }},static,{{ plen }},5m {#- TODO consider using /128 as we give only 1 address to RU #}
dhcp-range=tag:{{ ru_tap }},{{ vtap.gateway }},{{ vtap.gateway }},static,{{ max(plen,64) }},5m
dhcp-host={{ cell.ru_mac_addr }},tag:{{ ru_tap }},[{{ vtap.gateway }}] dhcp-host={{ cell.ru_mac_addr }},tag:{{ ru_tap }},[{{ vtap.gateway }}]
# option 17 used for RU callhome # option 17 used for RU callhome
# dhcp-option=option6:17,[{{ vtap.addr }}] # dhcp-option=option6:17,[{{ vtap.addr }}]
......
...@@ -7,7 +7,7 @@ ...@@ -7,7 +7,7 @@
{%- import 'slaplte.jinja2' as slaplte with context %} {%- import 'slaplte.jinja2' as slaplte with context %}
NOTE: driver-specific logic is implemented in rudrv.buildout_ru(). NOTE: driver-specific logic is implemented in rudrv.buildout_ru() and rudrv.buildout().
#} #}
{#- cell_list keeps cell registry #} {#- cell_list keeps cell registry #}
...@@ -39,6 +39,7 @@ config-stats-period = {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }} ...@@ -39,6 +39,7 @@ config-stats-period = {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
{%- set rudrv_dict = namespace(sdr=rudrv_sdr, {%- set rudrv_dict = namespace(sdr=rudrv_sdr,
lopcomm=rudrv_lopcomm, lopcomm=rudrv_lopcomm,
sunwave=rudrv_sunwave) %} sunwave=rudrv_sunwave) %}
{%- set rudrv_init = {} %}
{#- split slapos tap interface for each RU {#- split slapos tap interface for each RU
fallback to non-split approach for ntap <= 1 to avoid hard-dependency on setcap/tapsplit fallback to non-split approach for ntap <= 1 to avoid hard-dependency on setcap/tapsplit
...@@ -172,7 +173,7 @@ config-dma_chan = {{ cell.cpri_port_number }} ...@@ -172,7 +173,7 @@ config-dma_chan = {{ cell.cpri_port_number }}
promise = check_cpri_lock promise = check_cpri_lock
config-sdr_dev = {{ slapparameter_dict.get('sdr_number', 0) }} config-sdr_dev = {{ slapparameter_dict.get('sdr_number', 0) }}
config-sfp_port = {{ cell.cpri_port_number }} config-sfp_port = {{ cell.cpri_port_number }}
config-amarisoft-rf-info-log = ${amarisoft-rf-info-template:log-output} config-amarisoft-rf-info-log = ${ru_amarisoft-rf-info-template:log-output}
{%- else %} {%- else %}
{%- do bug('unreachable') %} {%- do bug('unreachable') %}
...@@ -181,13 +182,70 @@ config-amarisoft-rf-info-log = ${amarisoft-rf-info-template:log-output} ...@@ -181,13 +182,70 @@ config-amarisoft-rf-info-log = ${amarisoft-rf-info-template:log-output}
{{ promise('%s-rx-saturated' % ru_ref) }} {{ promise('%s-rx-saturated' % ru_ref) }}
promise = check_rx_saturated promise = check_rx_saturated
config-rf-rx-chan-list = {{ list(range(i*n_antenna_ul, (i+1)*n_antenna_ul)) }} config-rf-rx-chan-list = {{ list(range(i*n_antenna_ul, (i+1)*n_antenna_ul)) }}
config-amarisoft-stats-log = ${amarisoft-stats-template:log-output} config-amarisoft-stats-log = ${ru_amarisoft-stats-template:log-output}
config-max-rx-sample-db = {{ slapparameter_dict.get("max_rx_sample_db", 0) }} config-max-rx-sample-db = {{ slapparameter_dict.get("max_rx_sample_db", 0) }}
{#- driver-specific part #} {#- driver-specific part #}
{%- if not rudrv_init.get(ru_type) %}
{{ rudrv.buildout() }}
{%- do rudrv_init.update({ru_type: 1}) %}
{%- endif %}
{{ rudrv.buildout_ru(ru_ref, cell) }} {{ rudrv.buildout_ru(ru_ref, cell) }}
{%- endfor %} {%- endfor %}
{#- retrieve rf and stats[rf,samples] data from amarisoft service for promises
such as check_cpri_lock and check_rx_saturated.
#}
[ru_amarisoft-rf-info-template]
recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do
log-output = ${directory:var}/log/amarisoft-rf-info.json.log
context =
section directory directory
key slapparameter_dict slap-configuration:configuration
key log_file :log-output
raw stats_period {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
raw testing {{ slapparameter_dict.get("testing", False) }}
raw python_path {{ buildout_directory}}/bin/pythonwitheggs
mode = 0775
url = {{ ru_amarisoft_rf_info_template }}
output = ${directory:bin}/amarisoft-rf-info.py
{{ part('amarisoft-rf-info-service') }}
recipe = slapos.cookbook:wrapper
command-line = ${ru_amarisoft-rf-info-template:output}
wrapper-path = ${directory:service}/amarisoft-rf-info
mode = 0775
hash-files =
${ru_amarisoft-rf-info-template:output}
[ru_amarisoft-stats-template]
recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do
log-output = ${directory:var}/log/amarisoft-stats.json.log
context =
section directory directory
key slapparameter_dict slap-configuration:configuration
key log_file :log-output
raw stats_period {{ slapparameter_dict.get("enb_stats_fetch_period", 60) }}
raw testing {{ slapparameter_dict.get("testing", False) }}
raw python_path {{ buildout_directory}}/bin/pythonwitheggs
mode = 0775
url = {{ ru_amarisoft_stats_template }}
output = ${directory:bin}/amarisoft-stats.py
{{ part('amarisoft-stats-service') }}
recipe = slapos.cookbook:wrapper
command-line = ${ru_amarisoft-stats-template:output}
wrapper-path = ${directory:service}/amarisoft-stats
mode = 0775
hash-files =
${ru_amarisoft-stats-template:output}
{{ promise('amarisoft-stats-log') }}
promise = check_amarisoft_stats_log
config-amarisoft-stats-log = ${ru_amarisoft-stats-template:log-output}
[buildout] [buildout]
parts += parts +=
......
...@@ -35,5 +35,5 @@ destination = ${buildout:directory}/ncclient_common.py ...@@ -35,5 +35,5 @@ destination = ${buildout:directory}/ncclient_common.py
[ru_lopcomm_firmware-dl] [ru_lopcomm_firmware-dl]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = https://lab.nexedi.com/nexedi/ors-utils/raw/master/lopcomm-firmware/${:filename} url = https://lab.nexedi.com/nexedi/ors-utils/raw/master/lopcomm-firmware/${:filename}
filename = PR.PRM61C70V1004.006.tar.gz filename = PR.PRM61C70V1005.004.tar.gz
md5sum = 5139019aae77c2f3178a99915f1c57dc md5sum = f16413604a8c7631fc6e3782fa9a2695
...@@ -18,7 +18,7 @@ extensions = jinja2.ext.do ...@@ -18,7 +18,7 @@ extensions = jinja2.ext.do
_logbase = ${directory:var}/log/{{ru_ref}}-software _logbase = ${directory:var}/log/{{ru_ref}}-software
log-output = ${:_logbase}.log log-output = ${:_logbase}.log
software-reply-json-log-output = ${:_logbase}-reply.json.log software-reply-json-log-output = ${:_logbase}-reply.json.log
remote-file-path = sftp://${user-info:pw-name}@[${slap-configuration:ipv6-random}]:${sshd-port:port}{{ru_lopcomm_firmware_path}} remote-file-path = sftp://${user-info:pw-name}@[${sshd-service:ipv6}]:${sshd-service:port}{{ru_lopcomm_firmware_path}}
is_firmware_updated = ${directory:etc}/{{ru_ref}}.is_firmware_updated is_firmware_updated = ${directory:etc}/{{ru_ref}}.is_firmware_updated
context = context =
section directory directory section directory directory
...@@ -214,4 +214,77 @@ hash-files = ...@@ -214,4 +214,77 @@ hash-files =
{%- endif %} {%- endif %}
{#- amend published information with Lopcomm-specific bits
TODO make it per-RU #}
[publish-connection-information]
ssh-command = ssh ${user-info:pw-name}@${sshd-service:ipv6} -p ${sshd-service:port}
ssh-url = ssh://${user-info:pw-name}@[${sshd-service:ipv6}]:${sshd-service:port}
ru-firmware = {{ru_lopcomm_firmware_filename}}
ru-ipv6 = ${slap-configuration:tap-ipv6-gateway}
{%- endmacro %}
{%- macro buildout() %}
# deploy openssh-server for software upgrade
[user-info]
recipe = slapos.cookbook:userinfo
[sshd-port]
recipe = slapos.cookbook:free_port
minimum = 22222
maximum = 22231
ip = ${slap-configuration:ipv6-random}
[sshd-config]
recipe = slapos.recipe.template:jinja2
output = ${directory:etc}/sshd.conf
path_pid = ${directory:run}/sshd.pid
inline =
PidFile ${:path_pid}
Port ${sshd-port:port}
ListenAddress ${sshd-port:ip}
Protocol 2
HostKey ${sshd-ssh-host-rsa-key:output}
HostKey ${sshd-ssh-host-ecdsa-key:output}
PasswordAuthentication no
PubkeyAuthentication yes
HostKeyAlgorithms ssh-rsa,ssh-dss,rsa-sha2-512,rsa-sha2-256,ecdsa-sha2-nistp521
AuthorizedKeysFile ${buildout:directory}/.ssh/authorized_keys
Subsystem sftp {{ openssh_location }}/libexec/sftp-server
{{ part('sshd-service') }}
recipe = slapos.cookbook:wrapper
command-line = {{ openssh_location }}/sbin/sshd -D -e -f ${sshd-config:output}
wrapper-path = ${directory:service}/sshd
hash-files = ${sshd-config:output}
environment =
HOME=${directory:home}
ipv6 = ${sshd-port:ip}
port = ${sshd-port:port}
{{ part('sshd-add-authorized-key') }}
recipe = slapos.cookbook:dropbear.add_authorized_key
home = ${buildout:directory}
key = {{ slapparameter_dict.get("user-authorized-key", '') }}
[sshd-ssh-keygen-base]
recipe = plone.recipe.command
output = ${directory:etc}/${:_buildout_section_name_}
command = {{ openssh_output_keygen }} -f ${:output} -N '' ${:extra-args}
[sshd-ssh-host-rsa-key]
<=sshd-ssh-keygen-base
extra-args=-t rsa
[sshd-ssh-host-ecdsa-key]
<=sshd-ssh-keygen-base
extra-args=-t ecdsa -b 521
{{ promise('sshd') }}
promise = check_socket_listening
config-host = ${sshd-service:ipv6}
config-port = ${sshd-service:port}
{%- endmacro %} {%- endmacro %}
...@@ -3,3 +3,7 @@ ...@@ -3,3 +3,7 @@
{%- macro buildout_ru(ru_ref, cell) %} {%- macro buildout_ru(ru_ref, cell) %}
{#- nothing SDR-specific #} {#- nothing SDR-specific #}
{%- endmacro %} {%- endmacro %}
{%- macro buildout() %}
{#- nothing SDR-specific #}
{%- endmacro %}
...@@ -3,3 +3,7 @@ ...@@ -3,3 +3,7 @@
{%- macro buildout_ru(ru_ref, cell) %} {%- macro buildout_ru(ru_ref, cell) %}
{#- nothing SunWave-specific #} {#- nothing SunWave-specific #}
{%- endmacro %} {%- endmacro %}
{%- macro buildout() %}
{#- nothing SunWave-specific #}
{%- endmacro %}
...@@ -9,8 +9,8 @@ from slapos.recipe.template import jinja2_template ...@@ -9,8 +9,8 @@ from slapos.recipe.template import jinja2_template
import json import json
# j2render renders config/<cfg>.jinja2.cfg into config/<cfg>.cfg with provided json parameters. # j2render renders config/<src> into config/<out> with provided json parameters.
def j2render(cfg, jcfg): def j2render(src, out, jcfg):
ctx = json.loads(jcfg) ctx = json.loads(jcfg)
assert '_standalone' not in ctx assert '_standalone' not in ctx
ctx['_standalone'] = True ctx['_standalone'] = True
...@@ -20,8 +20,8 @@ def j2render(cfg, jcfg): ...@@ -20,8 +20,8 @@ def j2render(cfg, jcfg):
buildout = None # stub buildout = None # stub
r = jinja2_template.Recipe(buildout, "recipe", { r = jinja2_template.Recipe(buildout, "recipe", {
'extensions': 'jinja2.ext.do', 'extensions': 'jinja2.ext.do',
'url': 'config/{}.jinja2.cfg'.format(cfg), 'url': 'config/{}'.format(src),
'output': 'config/{}.cfg'.format(cfg), 'output': 'config/{}'.format(out),
'context': textctx, 'context': textctx,
'import-list': ''' 'import-list': '''
rawfile slaplte.jinja2 slaplte.jinja2''', rawfile slaplte.jinja2 slaplte.jinja2''',
...@@ -34,11 +34,14 @@ def j2render(cfg, jcfg): ...@@ -34,11 +34,14 @@ def j2render(cfg, jcfg):
return f.read() return f.read()
r._read = _read r._read = _read
with open('config/{}.cfg'.format(cfg), 'w+') as f: with open('config/{}'.format(out), 'w+') as f:
f.write(r._render().decode()) f.write(r._render().decode())
def do(cfg, slapparameter_dict): def do(src, out, rat, slapparameter_dict):
assert rat in ('lte', 'nr')
jdo_lte = json.dumps(rat == 'lte')
jdo_nr = json.dumps(rat == 'nr')
jslapparameter_dict = json.dumps(slapparameter_dict) jslapparameter_dict = json.dumps(slapparameter_dict)
json_params_empty = """{ json_params_empty = """{
"rf_mode": 'fdd', "rf_mode": 'fdd',
...@@ -50,6 +53,8 @@ def do(cfg, slapparameter_dict): ...@@ -50,6 +53,8 @@ def do(cfg, slapparameter_dict):
}""" }"""
json_params = """{ json_params = """{
"rf_mode": "tdd", "rf_mode": "tdd",
"do_lte": %(jdo_lte)s,
"do_nr": %(jdo_nr)s,
"trx": "sdr", "trx": "sdr",
"bbu": "ors", "bbu": "ors",
"ru": "ors", "ru": "ors",
...@@ -88,7 +93,7 @@ def do(cfg, slapparameter_dict): ...@@ -88,7 +93,7 @@ def do(cfg, slapparameter_dict):
"slapparameter_dict": %(jslapparameter_dict)s "slapparameter_dict": %(jslapparameter_dict)s
}""" }"""
j2render(cfg, json_params % locals()) j2render(src, out, json_params % locals())
do('enb', {"tdd_ul_dl_config": "[Configuration 6] 5ms 5UL 3DL (maximum uplink)"}) do('enb.jinja2.cfg', 'enb.cfg', 'lte', {"tdd_ul_dl_config": "[Configuration 6] 5ms 5UL 3DL (maximum uplink)"})
do('gnb', {"tdd_ul_dl_config": "5ms 8UL 1DL 2/10 (maximum uplink)"}) do('enb.jinja2.cfg', 'gnb.cfg', 'nr', {"tdd_ul_dl_config": "5ms 8UL 1DL 2/10 (maximum uplink)"})
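# Illustrative sketch: with the refactored helper, the single enb.jinja2.cfg
# template is rendered once per RAT, and the `rat` argument drives the
# do_lte/do_nr flags injected into the JSON context. The parameter values
# below are hypothetical examples, not defaults of the template.
do('enb.jinja2.cfg', 'enb.cfg', 'lte', {"n_antenna_dl": 2})
do('enb.jinja2.cfg', 'gnb.cfg', 'nr', {"nr_bandwidth": 40})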
...@@ -26,7 +26,6 @@ parts += ...@@ -26,7 +26,6 @@ parts +=
dnsmasq-core-network.jinja2.cfg dnsmasq-core-network.jinja2.cfg
ims.jinja2.cfg ims.jinja2.cfg
enb.jinja2.cfg enb.jinja2.cfg
gnb.jinja2.cfg
ue_db.jinja2.cfg ue_db.jinja2.cfg
ue-lte.jinja2.cfg ue-lte.jinja2.cfg
ue-nr.jinja2.cfg ue-nr.jinja2.cfg
...@@ -58,12 +57,6 @@ output = ${buildout:directory}/template.cfg ...@@ -58,12 +57,6 @@ output = ${buildout:directory}/template.cfg
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_update_hash_filename_} url = ${:_profile_base_location_}/${:_update_hash_filename_}
[amarisoft-stats.jinja2.py]
<= download-base
[amarisoft-rf-info.jinja2.py]
<= download-base
[template-enb] [template-enb]
<= download-base <= download-base
...@@ -124,9 +117,6 @@ filename = enb.jinja2.cfg ...@@ -124,9 +117,6 @@ filename = enb.jinja2.cfg
[sib23.jinja2.asn] [sib23.jinja2.asn]
<= copy-config-to-instance <= copy-config-to-instance
filename = sib23.jinja2.asn filename = sib23.jinja2.asn
[gnb.jinja2.cfg]
<= copy-config-to-instance
filename = gnb.jinja2.cfg
[ue_db.jinja2.cfg] [ue_db.jinja2.cfg]
<= copy-config-to-instance <= copy-config-to-instance
filename = ue_db.jinja2.cfg filename = ue_db.jinja2.cfg
......
{ {
"$schema": "http://json-schema.org/draft-04/schema", "$schema": "http://json-schema.org/draft-07/schema",
"properties": { "properties": {
"-frontend-quantity": { "-frontend-quantity": {
"description": "Quantity of Frontends Replicate.", "description": "Quantity of Frontends Replicate.",
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
}, },
"domain": { "domain": {
"description": "Domain used to generate automatic hostnames for slaves. For example 'example.com' will result with slave hostname 'slaveref.example.com'.", "description": "Domain used to generate automatic hostnames for slaves. For example 'example.com' will result with slave hostname 'slaveref.example.com'.",
"pattern": "^([a-zA-Z0-9]([a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9])?\\.)+[a-zA-Z]{2,6}$", "format": "idn-hostname",
"title": "Domain", "title": "Domain",
"type": "string" "type": "string"
}, },
......
{ {
"$schema": "http://json-schema.org/draft-04/schema", "$schema": "http://json-schema.org/draft-07/schema",
"properties": { "properties": {
"custom_domain": { "custom_domain": {
"description": "Custom Domain to use for the website. Shall contain only letters, numbers and -, and can look like example.com, first2.example.com special-site.example.com.", "description": "Custom Domain to use for the website. Shall contain only letters, numbers and -, and can look like example.com, first2.example.com special-site.example.com.",
"pattern": "^([a-zA-Z0-9]([a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9])?\\.)+[a-zA-Z]{2,6}$", "format": "idn-hostname",
"title": "Custom Domain", "title": "Custom Domain",
"type": "string" "type": "string"
}, },
......
...@@ -19,4 +19,4 @@ md5sum = 10e19df182c692b71ea552da183a0bcf ...@@ -19,4 +19,4 @@ md5sum = 10e19df182c692b71ea552da183a0bcf
[template-selenium] [template-selenium]
filename = instance-selenium.cfg.in filename = instance-selenium.cfg.in
md5sum = 8c48c7bef34dd54ab3bd94c91f2bf041 md5sum = 5a7abfff9f9d7898620f8c7fc1e6f488
\ No newline at end of file
...@@ -25,9 +25,8 @@ fonts = ...@@ -25,9 +25,8 @@ fonts =
${liberation-fonts:location} ${liberation-fonts:location}
${ocrb-fonts:location} ${ocrb-fonts:location}
$${directory:fonts} $${directory:fonts}
# XXX we don't include conf.d for now, to keep compatibility with current font selection problems.
includes = includes =
# ${fontconfig:location}/etc/fonts/conf.d ${fontconfig:location}/etc/fonts/conf.d
[xvfb-instance] [xvfb-instance]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
...@@ -93,26 +92,15 @@ environment = ...@@ -93,26 +92,15 @@ environment =
PATH=${buildout:bin-directory} PATH=${buildout:bin-directory}
XORG_LOCK_DIR=$${directory:tmp} XORG_LOCK_DIR=$${directory:tmp}
DISPLAY=$${xvfb-instance:display} DISPLAY=$${xvfb-instance:display}
LC_ALL=C.UTF8
FONTCONFIG_FILE=$${fontconfig-conf:output} FONTCONFIG_FILE=$${fontconfig-conf:output}
hostname = $${slap-configuration:ipv4-random} hostname = $${slap-configuration:ipv4-random}
[selenium-server-node-instance-firefox-60] [selenium-server-node-instance-firefox-102]
<= selenium-server-node-instance <= selenium-server-node-instance
capabilities = browserName=firefox,maxInstances=3,marionette=true,platform=LINUX,version=${firefox-60:version},firefox_binary=${firefox-wrapper-60:location} capabilities = browserName=firefox,maxInstances=3,marionette=true,platform=LINUX,version=${firefox-102:version},firefox_binary=${firefox-wrapper-102:location}
java-args = -Dwebdriver.gecko.driver=${geckodriver-0.22.0:location} java-args = -Dwebdriver.gecko.driver=${geckodriver-0.33.0:location}
port = 7777
[selenium-server-node-instance-firefox-68]
<= selenium-server-node-instance
capabilities = browserName=firefox,maxInstances=3,marionette=true,platform=LINUX,version=${firefox-68:version},firefox_binary=${firefox-wrapper-68:location}
java-args = -Dwebdriver.gecko.driver=${geckodriver-0.24.0:location}
port = 7778
[selenium-server-node-instance-firefox-78]
<= selenium-server-node-instance
capabilities = browserName=firefox,maxInstances=3,marionette=true,platform=LINUX,version=${firefox-78:version},firefox_binary=${firefox-wrapper-78:location}
java-args = -Dwebdriver.gecko.driver=${geckodriver-0.24.0:location}
port = 7779 port = 7779
[selenium-server-node-instance-firefox-115] [selenium-server-node-instance-firefox-115]
...@@ -121,18 +109,17 @@ capabilities = browserName=firefox,maxInstances=3,marionette=true,platform=LINUX ...@@ -121,18 +109,17 @@ capabilities = browserName=firefox,maxInstances=3,marionette=true,platform=LINUX
java-args = -Dwebdriver.gecko.driver=${geckodriver-0.33.0:location} java-args = -Dwebdriver.gecko.driver=${geckodriver-0.33.0:location}
port = 7780 port = 7780
[selenium-server-node-instance-chromium-69]
<= selenium-server-node-instance
capabilities = browserName=chrome,maxInstances=3,platform=LINUX,version=${chromium-69:version},chrome_binary=${chromium-wrapper-69:location}
java-args = -Dwebdriver.chrome.driver=${chromedriver-wrapper-2.41:location}
port = 7781
[selenium-server-node-instance-chromium-91] [selenium-server-node-instance-chromium-91]
<= selenium-server-node-instance <= selenium-server-node-instance
capabilities = browserName=chrome,maxInstances=3,platform=LINUX,version=${chromium-91:version},chrome_binary=${chromium-wrapper-91:location} capabilities = browserName=chrome,maxInstances=3,platform=LINUX,version=${chromium-91:version},chrome_binary=${chromium-wrapper-91:location}
java-args = -Dwebdriver.chrome.driver=${chromedriver-wrapper-91:location} java-args = -Dwebdriver.chrome.driver=${chromedriver-wrapper-91:location}
port = 7782 port = 7781
[selenium-server-node-instance-chromium-120]
<= selenium-server-node-instance
capabilities = browserName=chrome,maxInstances=3,platform=LINUX,version=${chromium-120:version},chrome_binary=${chromium-wrapper-120:location}
java-args = -Dwebdriver.chrome.driver=${chromedriver-wrapper-120:location}
port = 7782
[selenium-server-admin-password] [selenium-server-admin-password]
recipe = slapos.cookbook:generate.password recipe = slapos.cookbook:generate.password
...@@ -301,12 +288,10 @@ instance-promises = ...@@ -301,12 +288,10 @@ instance-promises =
$${selenium-server-frontend-listen-promise:name} $${selenium-server-frontend-listen-promise:name}
$${selenium-server-hub-listen-promise:name} $${selenium-server-hub-listen-promise:name}
$${selenium-server-hub-nodes-registered-promise:name} $${selenium-server-hub-nodes-registered-promise:name}
$${selenium-server-node-firefox-60-listen-promise:name} $${selenium-server-node-firefox-102-listen-promise:name}
$${selenium-server-node-firefox-68-listen-promise:name}
$${selenium-server-node-firefox-78-listen-promise:name}
$${selenium-server-node-firefox-115-listen-promise:name} $${selenium-server-node-firefox-115-listen-promise:name}
$${selenium-server-node-instance-chromium-69-listen-promise:name}
$${selenium-server-node-instance-chromium-91-listen-promise:name} $${selenium-server-node-instance-chromium-91-listen-promise:name}
$${selenium-server-node-instance-chromium-120-listen-promise:name}
[check-port-listening-promise] [check-port-listening-promise]
...@@ -336,8 +321,8 @@ promise = check_command_execute ...@@ -336,8 +321,8 @@ promise = check_command_execute
name = $${:_buildout_section_name_}.py name = $${:_buildout_section_name_}.py
config-command = config-command =
$${selenium-server-check-nodes-registered:output} $${selenium-server-hub-instance:api-url} $${:expected-node-count} $${selenium-server-check-nodes-registered:output} $${selenium-server-hub-instance:api-url} $${:expected-node-count}
# We have 6 nodes with 3 slots each # We have 4 nodes with 3 slots each
expected-node-count = 18 expected-node-count = 12
[selenium-server-check-nodes-registered] [selenium-server-check-nodes-registered]
recipe = slapos.recipe.template recipe = slapos.recipe.template
...@@ -351,36 +336,25 @@ inline = ...@@ -351,36 +336,25 @@ inline =
sys.exit(0 if expected_node_count == actual_node_count else 1) sys.exit(0 if expected_node_count == actual_node_count else 1)
[selenium-server-node-firefox-60-listen-promise] [selenium-server-node-firefox-102-listen-promise]
<= check-port-listening-promise <= check-port-listening-promise
config-host = $${selenium-server-node-instance-firefox-60:hostname} config-host = $${selenium-server-node-instance-firefox-102:hostname}
config-port = $${selenium-server-node-instance-firefox-60:port} config-port = $${selenium-server-node-instance-firefox-102:port}
[selenium-server-node-firefox-68-listen-promise]
<= check-port-listening-promise
config-host = $${selenium-server-node-instance-firefox-68:hostname}
config-port = $${selenium-server-node-instance-firefox-68:port}
[selenium-server-node-firefox-78-listen-promise]
<= check-port-listening-promise
config-host = $${selenium-server-node-instance-firefox-78:hostname}
config-port = $${selenium-server-node-instance-firefox-78:port}
[selenium-server-node-firefox-115-listen-promise] [selenium-server-node-firefox-115-listen-promise]
<= check-port-listening-promise <= check-port-listening-promise
config-host = $${selenium-server-node-instance-firefox-115:hostname} config-host = $${selenium-server-node-instance-firefox-115:hostname}
config-port = $${selenium-server-node-instance-firefox-115:port} config-port = $${selenium-server-node-instance-firefox-115:port}
[selenium-server-node-instance-chromium-69-listen-promise]
<= check-port-listening-promise
config-host = $${selenium-server-node-instance-chromium-69:hostname}
config-port = $${selenium-server-node-instance-chromium-69:port}
[selenium-server-node-instance-chromium-91-listen-promise] [selenium-server-node-instance-chromium-91-listen-promise]
<= check-port-listening-promise <= check-port-listening-promise
config-host = $${selenium-server-node-instance-chromium-91:hostname} config-host = $${selenium-server-node-instance-chromium-91:hostname}
config-port = $${selenium-server-node-instance-chromium-91:port} config-port = $${selenium-server-node-instance-chromium-91:port}
[selenium-server-node-instance-chromium-120-listen-promise]
<= check-port-listening-promise
config-host = $${selenium-server-node-instance-chromium-120:hostname}
config-port = $${selenium-server-node-instance-chromium-120:port}
[publish-connection-parameter] [publish-connection-parameter]
recipe = slapos.cookbook:publish recipe = slapos.cookbook:publish
......
...@@ -64,15 +64,7 @@ install = ...@@ -64,15 +64,7 @@ install =
dst = os.path.join(location, 'fonts', os.path.basename(extra_font_dir)) dst = os.path.join(location, 'fonts', os.path.basename(extra_font_dir))
os.symlink(extra_font_dir, dst) os.symlink(extra_font_dir, dst)
[firefox-60] [firefox-102]
post-install =
${symlink-extra-fonts-to-firefox-fonts-dir:install}
[firefox-68]
post-install =
${symlink-extra-fonts-to-firefox-fonts-dir:install}
[firefox-78]
post-install = post-install =
${symlink-extra-fonts-to-firefox-fonts-dir:install} ${symlink-extra-fonts-to-firefox-fonts-dir:install}
......
...@@ -282,12 +282,12 @@ class TestBrowserSelection(WebServerMixin, SeleniumServerTestCase): ...@@ -282,12 +282,12 @@ class TestBrowserSelection(WebServerMixin, SeleniumServerTestCase):
webdriver_url = parameter_dict['backend-url'] webdriver_url = parameter_dict['backend-url']
desired_capabilities = DesiredCapabilities.FIREFOX.copy() desired_capabilities = DesiredCapabilities.FIREFOX.copy()
desired_capabilities['version'] = '60.0.2esr' desired_capabilities['version'] = '102.15.1esr'
driver = webdriver.Remote( driver = webdriver.Remote(
command_executor=webdriver_url, command_executor=webdriver_url,
desired_capabilities=desired_capabilities) desired_capabilities=desired_capabilities)
self.assertIn( self.assertIn(
'Gecko/20100101 Firefox/60.0', 'Gecko/20100101 Firefox/102.0',
driver.execute_script('return navigator.userAgent')) driver.execute_script('return navigator.userAgent'))
driver.quit() driver.quit()
desired_capabilities['version'] = '115.3.1esr' desired_capabilities['version'] = '115.3.1esr'
...@@ -409,31 +409,18 @@ class TestSSHServer(SeleniumServerTestCase): ...@@ -409,31 +409,18 @@ class TestSSHServer(SeleniumServerTestCase):
self.assertIn(b"Welcome to SlapOS Selenium Server.", received) self.assertIn(b"Welcome to SlapOS Selenium Server.", received)
class TestFirefox60( class TestFirefox102(
BrowserCompatibilityMixin, BrowserCompatibilityMixin,
SeleniumServerTestCase, SeleniumServerTestCase,
ImageComparisonTestCase, ImageComparisonTestCase,
): ):
desired_capabilities = dict(DesiredCapabilities.FIREFOX, version='60.0.2esr') desired_capabilities = dict(DesiredCapabilities.FIREFOX, version='102.15.1esr')
user_agent = 'Gecko/20100101 Firefox/60.0' user_agent = 'Gecko/20100101 Firefox/102.0'
# resizing window does not work, but we don't really depend on it
class TestFirefox68( @unittest.expectedFailure
BrowserCompatibilityMixin, def test_resize_window(self):
SeleniumServerTestCase, super().test_resize_window()
ImageComparisonTestCase,
):
desired_capabilities = dict(DesiredCapabilities.FIREFOX, version='68.0.2esr')
user_agent = 'Gecko/20100101 Firefox/68.0'
class TestFirefox78(
BrowserCompatibilityMixin,
SeleniumServerTestCase,
ImageComparisonTestCase,
):
desired_capabilities = dict(DesiredCapabilities.FIREFOX, version='78.1.0esr')
user_agent = 'Gecko/20100101 Firefox/78.0'
class TestFirefox115( class TestFirefox115(
...@@ -450,19 +437,19 @@ class TestFirefox115( ...@@ -450,19 +437,19 @@ class TestFirefox115(
super().test_resize_window() super().test_resize_window()
class TestChrome69( class TestChrome91(
BrowserCompatibilityMixin, BrowserCompatibilityMixin,
SeleniumServerTestCase, SeleniumServerTestCase,
ImageComparisonTestCase, ImageComparisonTestCase,
): ):
desired_capabilities = dict(DesiredCapabilities.CHROME, version='69.0.3497.0') desired_capabilities = dict(DesiredCapabilities.CHROME, version='91.0.4472.114')
user_agent = 'Chrome/69.0.3497.0' user_agent = 'Chrome/91.0.4472.0'
class TestChrome91( class TestChrome120(
BrowserCompatibilityMixin, BrowserCompatibilityMixin,
SeleniumServerTestCase, SeleniumServerTestCase,
ImageComparisonTestCase, ImageComparisonTestCase,
): ):
desired_capabilities = dict(DesiredCapabilities.CHROME, version='91.0.4472.114') desired_capabilities = dict(DesiredCapabilities.CHROME, version='120.0.6099.109')
user_agent = 'Chrome/91.0.4472.0' user_agent = 'Chrome/120.0.0.0'
...@@ -14,7 +14,7 @@ ...@@ -14,7 +14,7 @@
# not need these here). # not need these here).
[template-erp5] [template-erp5]
filename = instance-erp5.cfg.in filename = instance-erp5.cfg.in
md5sum = 2491969d49f8bf9b172e89b1c0e9d98e md5sum = 1fbfca2d64a9824054f7a3281e71efdc
[template-balancer] [template-balancer]
filename = instance-balancer.cfg.in filename = instance-balancer.cfg.in
......
...@@ -303,12 +303,13 @@ config-longrequest-logger-interval = {{ dumps(zope_parameter_dict.get('longreque ...@@ -303,12 +303,13 @@ config-longrequest-logger-interval = {{ dumps(zope_parameter_dict.get('longreque
config-longrequest-logger-timeout = {{ dumps(zope_parameter_dict.get('longrequest-logger-timeout', 1)) }} config-longrequest-logger-timeout = {{ dumps(zope_parameter_dict.get('longrequest-logger-timeout', 1)) }}
config-large-file-threshold = {{ dumps(zope_parameter_dict.get('large-file-threshold', "10MB")) }} config-large-file-threshold = {{ dumps(zope_parameter_dict.get('large-file-threshold', "10MB")) }}
config-port-base = {{ dumps(zope_parameter_dict.get('port-base', 2200)) }} config-port-base = {{ dumps(zope_parameter_dict.get('port-base', 2200)) }}
config-with-max-rlimit-nofile = {{ dumps(slapparameter_dict.get('with-max-rlimit-nofile', false)) }}
{# BBB: zope_parameter_dict used to contain 'webdav', so fallback to it -#} {# BBB: zope_parameter_dict used to contain 'webdav', so fallback to it -#}
config-webdav = {{ dumps(current_zope_family_override_dict.get('webdav', zope_parameter_dict.get('webdav', False))) }} config-webdav = {{ dumps(current_zope_family_override_dict.get('webdav', zope_parameter_dict.get('webdav', False))) }}
config-publisher-timeout = {{ dumps(current_zope_family_override_dict.get('publisher-timeout', global_publisher_timeout)) }} config-publisher-timeout = {{ dumps(current_zope_family_override_dict.get('publisher-timeout', global_publisher_timeout)) }}
config-activity-timeout = {{ dumps(current_zope_family_override_dict.get('activity-timeout', global_activity_timeout)) }} config-activity-timeout = {{ dumps(current_zope_family_override_dict.get('activity-timeout', global_activity_timeout)) }}
{% if test_runner_enabled -%} {% if test_runner_enabled -%}
config-test-runner-apache-url-list = ${publish-early:{{ zope_family }}-test-runner-url-list} config-test-runner-balancer-url-list = ${publish-early:{{ zope_family }}-test-runner-url-list}
[{{ promise_software_url_section_name }}] [{{ promise_software_url_section_name }}]
# Promise to wait for zope partition to use the expected software URL, # Promise to wait for zope partition to use the expected software URL,
......
...@@ -337,7 +337,6 @@ eggs += ...@@ -337,7 +337,6 @@ eggs +=
${python-pynacl:egg} ${python-pynacl:egg}
${python-cryptography:egg} ${python-cryptography:egg}
${python-mysqlclient:egg} ${python-mysqlclient:egg}
${backports.lzma:egg}
${bcrypt:egg} ${bcrypt:egg}
${psycopg2:egg} ${psycopg2:egg}
${selenium:egg} ${selenium:egg}
......
...@@ -141,11 +141,8 @@ eggs += ...@@ -141,11 +141,8 @@ eggs +=
${lxml-python:egg} ${lxml-python:egg}
${python-PyYAML:egg} ${python-PyYAML:egg}
${python-cryptography:egg} ${python-cryptography:egg}
${backports.lzma:egg}
${pycurl:egg} ${pycurl:egg}
${bcrypt:egg} ${bcrypt:egg}
dnspython
Jinja2
${kedifa-setup:egg} ${kedifa-setup:egg}
${caucase-setup:egg} ${caucase-setup:egg}
${erp5.util-setup:egg} ${erp5.util-setup:egg}
...@@ -163,12 +160,13 @@ eggs += ...@@ -163,12 +160,13 @@ eggs +=
${statsmodels:egg} ${statsmodels:egg}
${scipy:egg} ${scipy:egg}
zope.testing zope.testing
supervisor
${extra-eggs:eggs} ${extra-eggs:eggs}
[eggs/scripts] [eggs/scripts]
recipe = zc.recipe.egg recipe = zc.recipe.egg
eggs = ${eggs:eggs} eggs =
${eggs:eggs}
supervisor
scripts = scripts =
slapos slapos
supervisord supervisord
......
[buildout] [buildout]
find-links += find-links +=
http://www.nexedi.org/static/packages/source/ http://www.nexedi.org/static/packages/source/slapos.buildout/zc.buildout-2.7.1%2Bslapos010.tar.gz
http://www.nexedi.org/static/packages/source/slapos.buildout/ http://www.nexedi.org/static/packages/source/zc.recipe.egg-2.0.3%2Bslapos003.tar.gz
parts = parts =
instance-template instance-template
......
...@@ -40,9 +40,8 @@ import netaddr ...@@ -40,9 +40,8 @@ import netaddr
import pexpect import pexpect
import psutil import psutil
import requests import requests
import six
from six.moves.urllib.parse import urlparse, urljoin from urllib.parse import urlparse, urljoin, parse_qsl
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass, SlapOSNodeCommandError from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass, SlapOSNodeCommandError
from slapos.grid.svcbackend import getSupervisorRPC, _getSupervisordSocketPath from slapos.grid.svcbackend import getSupervisorRPC, _getSupervisordSocketPath
...@@ -621,7 +620,34 @@ class ResilientTheiaMixin(object): ...@@ -621,7 +620,34 @@ class ResilientTheiaMixin(object):
class TestTheiaResilientInterface(ResilientTheiaMixin, TestTheia): class TestTheiaResilientInterface(ResilientTheiaMixin, TestTheia):
pass
def test_all_monitor_url_use_same_password(self):
monitor_setup_params = dict(
parse_qsl(
urlparse(
self.computer_partition.getConnectionParameterDict()
['monitor-setup-url']).fragment))
monitor_url_list = [
u for u in [
p.getConnectionParameterDict().get('monitor-base-url')
for p in self.slap.computer.getComputerPartitionList()
] if u is not None
]
self.assertEqual(len(monitor_url_list), 4)
for url in monitor_url_list:
self.assertEqual(
requests.get(url, verify=False).status_code,
requests.codes.unauthorized)
requests.get(
url,
verify=False,
auth=(
monitor_setup_params['username'],
monitor_setup_params['password'],
)).raise_for_status()
class TestTheiaResilientWithEmbeddedInstance(ResilientTheiaMixin, TestTheiaWithEmbeddedInstance): class TestTheiaResilientWithEmbeddedInstance(ResilientTheiaMixin, TestTheiaWithEmbeddedInstance):
......
...@@ -63,8 +63,11 @@ repository_id_list += wendelin ...@@ -63,8 +63,11 @@ repository_id_list += wendelin
list += ${wendelin:location}/bt5 list += ${wendelin:location}/bt5
# Jupyter is by default enabled in Wendelin # Jupyter is by default enabled in Wendelin
# and also the soft limit of open file descriptors is set
# to the hard limit.
[erp5-defaults] [erp5-defaults]
jupyter-enable-default = true jupyter-enable-default = true
with-max-rlimit-nofile-enable-default = true
[wendelin] [wendelin]
recipe = slapos.recipe.build:gitclone recipe = slapos.recipe.build:gitclone
......
...@@ -185,6 +185,7 @@ context = ...@@ -185,6 +185,7 @@ context =
key instance_common_cfg instance-common:output key instance_common_cfg instance-common:output
key jupyter_enable_default erp5-defaults:jupyter-enable-default key jupyter_enable_default erp5-defaults:jupyter-enable-default
key wcfs_enable_default erp5-defaults:wcfs-enable-default key wcfs_enable_default erp5-defaults:wcfs-enable-default
key with_max_rlimit_nofile_enable_default erp5-defaults:with-max-rlimit-nofile-enable-default
key kumo_location kumo:location key kumo_location kumo:location
key local_bt5_repository local-bt5-repository:list key local_bt5_repository local-bt5-repository:list
key logrotate_location logrotate:location key logrotate_location logrotate:location
...@@ -338,6 +339,8 @@ cloudooo-connection-url-list = ...@@ -338,6 +339,8 @@ cloudooo-connection-url-list =
jupyter-enable-default = false jupyter-enable-default = false
# WCFS is by default disabled in ERP5 # WCFS is by default disabled in ERP5
wcfs-enable-default = false wcfs-enable-default = false
# with-max-rlimit-nofile option of zopewsgi is disabled in ERP5
with-max-rlimit-nofile-enable-default = false
[erp5] [erp5]
recipe = slapos.recipe.build:gitclone recipe = slapos.recipe.build:gitclone
...@@ -684,6 +687,12 @@ extra-paths = ...@@ -684,6 +687,12 @@ extra-paths =
patch-binary = ${patch:location}/bin/patch patch-binary = ${patch:location}/bin/patch
Acquisition-patches = ${:_profile_base_location_}/../../component/egg-patch/Acquisition/aq_dynamic-4.7.patch#85b0090e216cead0fc86c5c274450d96 Acquisition-patches = ${:_profile_base_location_}/../../component/egg-patch/Acquisition/aq_dynamic-4.7.patch#85b0090e216cead0fc86c5c274450d96
Acquisition-patch-options = -p1 Acquisition-patch-options = -p1
DateTime-patches =
${:_profile_base_location_}/../../component/egg-patch/DateTime/0001-Cast-int-to-float-in-compare-methods.patch#9898a58ce90dd31c884a7183aeec4361
${:_profile_base_location_}/../../component/egg-patch/DateTime/0002-Fix-compare-methods-between-DateTime-0-and-None-fix-.patch#733903a564c8b14df65c45c4f2eec262
${:_profile_base_location_}/../../component/egg-patch/DateTime/0003-Make-it-possible-to-pickle-datetimes-returned-by-asd.patch#e94a71ef40de130720e621e296537000
${:_profile_base_location_}/../../component/egg-patch/DateTime/0004-Repair-equality-comparison-between-DateTime-instance.patch#ea146c00dfbc31c7d96af8abc6f0b301
DateTime-patch-options = -p1
Products.BTreeFolder2-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.BTreeFolder2/0001-Add-a-confirmation-prompt-on-Delete-All-Objects-butt.patch#44de3abf382e287b8766c2f29ec1cf74 Products.BTreeFolder2-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.BTreeFolder2/0001-Add-a-confirmation-prompt-on-Delete-All-Objects-butt.patch#44de3abf382e287b8766c2f29ec1cf74
Products.BTreeFolder2-patch-options = -p1 Products.BTreeFolder2-patch-options = -p1
Products.CMFCore-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.CMFCore/portal_skins_ZMI_find.patch#19ec05c0477c50927ee1df6eb75d1e7f Products.CMFCore-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.CMFCore/portal_skins_ZMI_find.patch#19ec05c0477c50927ee1df6eb75d1e7f
...@@ -740,6 +749,7 @@ depends = ...@@ -740,6 +749,7 @@ depends =
# patched eggs # patched eggs
Acquisition = 4.7+SlapOSPatched001 Acquisition = 4.7+SlapOSPatched001
DateTime = 4.9+SlapOSPatched004
Products.DCWorkflow = 2.4.1+SlapOSPatched001 Products.DCWorkflow = 2.4.1+SlapOSPatched001
ocropy = 1.0+SlapOSPatched001 ocropy = 1.0+SlapOSPatched001
PyPDF2 = 1.26.0+SlapOSPatched002 PyPDF2 = 1.26.0+SlapOSPatched002
......
...@@ -70,11 +70,11 @@ md5sum = b95084ae9eed95a68eada45e28ef0c04 ...@@ -70,11 +70,11 @@ md5sum = b95084ae9eed95a68eada45e28ef0c04
[template] [template]
filename = instance.cfg.in filename = instance.cfg.in
md5sum = 3f7b28085ceff321a3cb785db60f7c3e md5sum = 55232eae0bcdb68a7cb2598d2ba9d60c
[template-erp5] [template-erp5]
filename = instance-erp5.cfg.in filename = instance-erp5.cfg.in
md5sum = 098e1d02159aeca9b36f2a0726b7b230 md5sum = 359bab24aec7772adb5d822c1389b1bd
[template-zeo] [template-zeo]
filename = instance-zeo.cfg.in filename = instance-zeo.cfg.in
...@@ -86,7 +86,7 @@ md5sum = 0ac4b74436f554cd677f19275d18d880 ...@@ -86,7 +86,7 @@ md5sum = 0ac4b74436f554cd677f19275d18d880
[template-zope] [template-zope]
filename = instance-zope.cfg.in filename = instance-zope.cfg.in
md5sum = 9e6440425167a506bd473a3697eaa9e6 md5sum = 2439b90d6f707f47050fc9074fa4d810
[template-balancer] [template-balancer]
filename = instance-balancer.cfg.in filename = instance-balancer.cfg.in
......
...@@ -11,6 +11,10 @@ ...@@ -11,6 +11,10 @@
{% set jupyter_zope_family = jupyter_dict.get('zope-family', '') -%} {% set jupyter_zope_family = jupyter_dict.get('zope-family', '') -%}
{% set wcfs_dict = slapparameter_dict.get('wcfs', {}) -%} {% set wcfs_dict = slapparameter_dict.get('wcfs', {}) -%}
{% set wcfs_enable = wcfs_dict.get('enable', wcfs_enable_default.lower() in ('true', 'yes')) -%} {% set wcfs_enable = wcfs_dict.get('enable', wcfs_enable_default.lower() in ('true', 'yes')) -%}
{% set with_max_rlimit_nofile_enable = slapparameter_dict.get(
'with-max-rlimit-nofile',
with_max_rlimit_nofile_enable_default.lower() in ('true', 'yes'))
%}
{% set test_runner_enabled = slapparameter_dict.get('test-runner', {}).get('enabled', True) -%} {% set test_runner_enabled = slapparameter_dict.get('test-runner', {}).get('enabled', True) -%}
{% set test_runner_node_count = slapparameter_dict.get('test-runner', {}).get('node-count', 3) -%} {% set test_runner_node_count = slapparameter_dict.get('test-runner', {}).get('node-count', 3) -%}
{% set test_runner_extra_database_count = slapparameter_dict.get('test-runner', {}).get('extra-database-count', 3) -%} {% set test_runner_extra_database_count = slapparameter_dict.get('test-runner', {}).get('extra-database-count', 3) -%}
...@@ -311,17 +315,18 @@ config-longrequest-logger-interval = {{ dumps(zope_parameter_dict.get('longreque ...@@ -311,17 +315,18 @@ config-longrequest-logger-interval = {{ dumps(zope_parameter_dict.get('longreque
config-longrequest-logger-timeout = {{ dumps(zope_parameter_dict.get('longrequest-logger-timeout', 1)) }} config-longrequest-logger-timeout = {{ dumps(zope_parameter_dict.get('longrequest-logger-timeout', 1)) }}
config-large-file-threshold = {{ dumps(zope_parameter_dict.get('large-file-threshold', "10MB")) }} config-large-file-threshold = {{ dumps(zope_parameter_dict.get('large-file-threshold', "10MB")) }}
config-port-base = {{ dumps(zope_parameter_dict.get('port-base', 2200)) }} config-port-base = {{ dumps(zope_parameter_dict.get('port-base', 2200)) }}
config-with-max-rlimit-nofile = {{ dumps(with_max_rlimit_nofile_enable) }}
{# BBB: zope_parameter_dict used to contain 'webdav', so fallback to it -#} {# BBB: zope_parameter_dict used to contain 'webdav', so fallback to it -#}
config-webdav = {{ dumps(current_zope_family_override_dict.get('webdav', zope_parameter_dict.get('webdav', False))) }} config-webdav = {{ dumps(current_zope_family_override_dict.get('webdav', zope_parameter_dict.get('webdav', False))) }}
config-publisher-timeout = {{ dumps(current_zope_family_override_dict.get('publisher-timeout', global_publisher_timeout)) }} config-publisher-timeout = {{ dumps(current_zope_family_override_dict.get('publisher-timeout', global_publisher_timeout)) }}
config-activity-timeout = {{ dumps(current_zope_family_override_dict.get('activity-timeout', global_activity_timeout)) }} config-activity-timeout = {{ dumps(current_zope_family_override_dict.get('activity-timeout', global_activity_timeout)) }}
{% if test_runner_enabled -%} {% if test_runner_enabled -%}
config-test-runner-apache-url-list = ${publish-early:{{ zope_family }}-test-runner-url-list} config-test-runner-balancer-url-list = ${publish-early:{{ zope_family }}-test-runner-url-list}
[{{ check_test_runner_url_section_name }}] [{{ check_test_runner_url_section_name }}]
# Promise to wait for zope partition to receive the expected test-runner URL # Promise to wait for zope partition to receive the expected test-runner URL
recipe = slapos.cookbook:check_parameter recipe = slapos.cookbook:check_parameter
value = {{ '${' ~ section_name ~ ':config-test-runner-apache-url-list}' }} value = {{ '${' ~ section_name ~ ':config-test-runner-balancer-url-list}' }}
expected-not-value = not-ready expected-not-value = not-ready
expected-value = expected-value =
path = ${directory:bin}/${:_buildout_section_name_} path = ${directory:bin}/${:_buildout_section_name_}
......
{% from "instance_zodb_base" import zodb_dict with context %} {% from "instance_zodb_base" import zodb_dict with context %}
{% set webdav = slapparameter_dict['webdav'] -%} {% set webdav = slapparameter_dict['webdav'] -%}
{% set with_max_rlimit_nofile = slapparameter_dict['with-max-rlimit-nofile'] -%}
{% set thread_amount = slapparameter_dict['thread-amount'] %} {% set thread_amount = slapparameter_dict['thread-amount'] %}
{% set use_ipv6 = slapparameter_dict.get('use-ipv6', False) -%} {% set use_ipv6 = slapparameter_dict.get('use-ipv6', False) -%}
{% set ports = itertools.count(slapparameter_dict['port-base']) -%} {% set ports = itertools.count(slapparameter_dict['port-base']) -%}
...@@ -346,6 +347,7 @@ wrapped-command-line = ...@@ -346,6 +347,7 @@ wrapped-command-line =
--access-log-file={{ '${' ~ conf_parameter_name ~ ':z2-log}' }} --access-log-file={{ '${' ~ conf_parameter_name ~ ':z2-log}' }}
{% if longrequest_logger_interval > 0 %} --long-request-log-file={{ '${' ~ conf_parameter_name ~ ':longrequest-logger-file}' }} {% endif %} {% if longrequest_logger_interval > 0 %} --long-request-log-file={{ '${' ~ conf_parameter_name ~ ':longrequest-logger-file}' }} {% endif %}
{% if webdav %}-w{% endif %} {% if webdav %}-w{% endif %}
{% if with_max_rlimit_nofile %}--with-max-rlimit-nofile{% endif %}
{{ ipv4 }}:${:port} {{ ipv4 }}:${:port}
{% if timerserver_interval %}--timerserver-interval={{ timerserver_interval }}{% endif %} {% if timerserver_interval %}--timerserver-interval={{ timerserver_interval }}{% endif %}
'${:configuration-file}' '${:configuration-file}'
...@@ -521,7 +523,7 @@ command-line-extra = ...@@ -521,7 +523,7 @@ command-line-extra =
--erp5_sql_connection_string '{{ connection_string_list[0] }}' --erp5_sql_connection_string '{{ connection_string_list[0] }}'
--extra_sql_connection_string_list '{{ ','.join(connection_string_list[1:]) }}' --extra_sql_connection_string_list '{{ ','.join(connection_string_list[1:]) }}'
--zserver {{ test_runner_address_list[0][0] ~ ':' ~ test_runner_address_list[0][1] }} --zserver {{ test_runner_address_list[0][0] ~ ':' ~ test_runner_address_list[0][1] }}
--zserver_frontend_url {{ slapparameter_dict['test-runner-apache-url-list'][0] }} --zserver_frontend_url {{ slapparameter_dict['test-runner-balancer-url-list'][0] }}
{% if test_runner_random_activity_priority is not none %} {% if test_runner_random_activity_priority is not none %}
--random_activity_priority={{ test_runner_random_activity_priority }} --random_activity_priority={{ test_runner_random_activity_priority }}
{%- endif %} {%- endif %}
...@@ -538,21 +540,21 @@ environment-extra += ...@@ -538,21 +540,21 @@ environment-extra +=
{% do zserver_address_list.append(ip ~ ':' ~ port) %} {% do zserver_address_list.append(ip ~ ':' ~ port) %}
{% endfor -%} {% endfor -%}
zserver_address_list={{ ','.join(zserver_address_list) }} zserver_address_list={{ ','.join(zserver_address_list) }}
zserver_frontend_url_list={{ ','.join(slapparameter_dict['test-runner-apache-url-list']) }} zserver_frontend_url_list={{ ','.join(slapparameter_dict['test-runner-balancer-url-list']) }}
[promise-test-runner-apache-url-executable] [promise-test-runner-balancer-url-executable]
# promise to wait for apache partition to have returned the parameter # promise to wait for balancer partition to have returned the parameter
recipe = slapos.cookbook:check_parameter recipe = slapos.cookbook:check_parameter
value = {{ slapparameter_dict['test-runner-apache-url-list'] }} value = {{ slapparameter_dict['test-runner-balancer-url-list'] }}
expected-not-value = not-ready expected-not-value = not-ready
path = ${directory:bin}/${:_buildout_section_name_} path = ${directory:bin}/${:_buildout_section_name_}
expected-value = expected-value =
[{{ section("promise-test-runner-apache-url") }}] [{{ section("promise-test-runner-balancer-url") }}]
<= monitor-promise-base <= monitor-promise-base
promise = check_command_execute promise = check_command_execute
name = ${:_buildout_section_name_}.py name = ${:_buildout_section_name_}.py
config-command = ${promise-test-runner-apache-url-executable:path} config-command = ${promise-test-runner-balancer-url-executable:path}
{%- endif %} {%- endif %}
{%- endif %} {%- endif %}
......
...@@ -43,6 +43,7 @@ init = ...@@ -43,6 +43,7 @@ init =
default-cloudooo-url-list = ${default-cloudooo-url-list:url-list} default-cloudooo-url-list = ${default-cloudooo-url-list:url-list}
jupyter-enable-default = {{ jupyter_enable_default }} jupyter-enable-default = {{ jupyter_enable_default }}
wcfs-enable-default = {{ wcfs_enable_default }} wcfs-enable-default = {{ wcfs_enable_default }}
with-max-rlimit-nofile-enable-default = {{ with_max_rlimit_nofile_enable_default }}
local-bt5-repository = {{ ' '.join(local_bt5_repository.split()) }} local-bt5-repository = {{ ' '.join(local_bt5_repository.split()) }}
[context] [context]
...@@ -58,6 +59,7 @@ extra-context = ...@@ -58,6 +59,7 @@ extra-context =
key default_cloudooo_url_list dynamic-template-erp5-parameters:default-cloudooo-url-list key default_cloudooo_url_list dynamic-template-erp5-parameters:default-cloudooo-url-list
key jupyter_enable_default dynamic-template-erp5-parameters:jupyter-enable-default key jupyter_enable_default dynamic-template-erp5-parameters:jupyter-enable-default
key wcfs_enable_default dynamic-template-erp5-parameters:wcfs-enable-default key wcfs_enable_default dynamic-template-erp5-parameters:wcfs-enable-default
key with_max_rlimit_nofile_enable_default dynamic-template-erp5-parameters:with-max-rlimit-nofile-enable-default
key local_bt5_repository dynamic-template-erp5-parameters:local-bt5-repository key local_bt5_repository dynamic-template-erp5-parameters:local-bt5-repository
key openssl_location :openssl-location key openssl_location :openssl-location
import re re import re re
......
...@@ -63,6 +63,8 @@ part-list = ...@@ -63,6 +63,8 @@ part-list =
# database information # database information
db-name = lamp db-name = lamp
db-user = lamp db-user = lamp
# Publish default lamp slave frontend url
default-frontend = True
#---------------- #----------------
#-- Instance-level buildout profiles. #-- Instance-level buildout profiles.
...@@ -113,6 +115,7 @@ context = ...@@ -113,6 +115,7 @@ context =
key custom_application_template custom-application-deployment:path key custom_application_template custom-application-deployment:path
key db_name custom-application-deployment:db-name key db_name custom-application-deployment:db-name
key db_user custom-application-deployment:db-user key db_user custom-application-deployment:db-user
key default_frontend custom-application-deployment:default-frontend
key lamp_apache_httpd template-apache-httpd:target key lamp_apache_httpd template-apache-httpd:target
[instance-apache-php] [instance-apache-php]
......
...@@ -14,15 +14,15 @@ ...@@ -14,15 +14,15 @@
# not need these here). # not need these here).
[lamp-instance] [lamp-instance]
filename = instance.cfg.in filename = instance.cfg.in
md5sum = 7854dd0edd48f2d91c16412c4a875ca4 md5sum = 6395a5d69e2fee494a7d00fd2ac563cb
[instance-apache-php] [instance-apache-php]
filename = instance-apache-php.cfg.in filename = instance-apache-php.cfg.in
md5sum = 1e4762a6a7631d517fb45c84f9c989fe md5sum = 41602a61d5f9609281dbfa3f27da0626
[instance-lamp] [instance-lamp]
filename = instance-lamp.cfg.jinja2.in filename = instance-lamp.cfg.jinja2.in
md5sum = 347ddf1516bf2ddb5f6fb23539382847 md5sum = de1f450a80547d12334b712016138078
[template-apache.conf] [template-apache.conf]
filename = apache.conf.in filename = apache.conf.in
...@@ -30,7 +30,7 @@ md5sum = e49410f0a4bf28993a56bb28aff0a6f0 ...@@ -30,7 +30,7 @@ md5sum = e49410f0a4bf28993a56bb28aff0a6f0
[template-php.ini] [template-php.ini]
filename = php.ini.in filename = php.ini.in
md5sum = bf21c6d68ef85ee7de090375424d0c5c md5sum = 677e1185a99d337cd1be778c548a6d30
[template-apache-httpd] [template-apache-httpd]
filename = apache-httpd.conf.in filename = apache-httpd.conf.in
......
...@@ -152,7 +152,7 @@ context = ...@@ -152,7 +152,7 @@ context =
[apache-php-conf] [apache-php-conf]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
url = {{ parameter_dict['template-apache-conf'] }} url = {{ parameter_dict['template-apache-conf'] }}
output = ${directory:etc}/apache.confgraceful output = ${directory:etc}/apache.conf
context = context =
section parameter_dict apache-php-configuration section parameter_dict apache-php-configuration
extensions = jinja2.ext.do extensions = jinja2.ext.do
...@@ -230,6 +230,27 @@ command-line = ${instance-parameter:php-bin} -c ${php.ini-conf:output} ...@@ -230,6 +230,27 @@ command-line = ${instance-parameter:php-bin} -c ${php.ini-conf:output}
<= monitor-publish <= monitor-publish
recipe = slapos.cookbook:publish.serialised recipe = slapos.cookbook:publish.serialised
backend-url = ${apache-php-configuration:url} backend-url = ${apache-php-configuration:url}
{% if parameter_dict['publish-frontend'] -%}
url = ${lamp-frontend-promise:url}
{% endif -%}
[request-frontend]
<= slap-connection
recipe = slapos.cookbook:requestoptional
name = Instance Frontend
# XXX We have hardcoded SR URL here.
software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
shared = true
config-url = ${apache-php-configuration:url}
config-https-only = true
return = domain secure_access
[lamp-frontend-promise]
<= monitor-promise-base
promise = check_url_available
name = lamp-http-frontend.py
url = ${request-frontend:connection-secure_access}
config-url = ${:url}
#---------------- #----------------
......
...@@ -30,11 +30,13 @@ config-{{ key }} = {{ dumps(value) }} ...@@ -30,11 +30,13 @@ config-{{ key }} = {{ dumps(value) }}
config-monitor-passwd = ${monitor-instance-parameter:password} config-monitor-passwd = ${monitor-instance-parameter:password}
config-database-list = ${request-mariadb:connection-database-list} config-database-list = ${request-mariadb:connection-database-list}
return = return =
url
backend-url backend-url
monitor-base-url monitor-base-url
{% do part_list.append('request-apache') -%} {% do part_list.append('request-apache') -%}
{% do publish_dict.__setitem__('backend-url', '${request-apache:connection-backend-url}') -%} {% do publish_dict.__setitem__('backend-url', '${request-apache:connection-backend-url}') -%}
{% do publish_dict.__setitem__('url', '${request-apache:connection-url}') -%}
{% do monitor_base_url_dict.__setitem__('apache', '${request-apache:connection-monitor-base-url}') -%} {% do monitor_base_url_dict.__setitem__('apache', '${request-apache:connection-monitor-base-url}') -%}
[request-mariadb] [request-mariadb]
...@@ -59,27 +61,6 @@ return = ...@@ -59,27 +61,6 @@ return =
{% do publish_dict.__setitem__('mariadb-url-list', '${request-mariadb:connection-database-list}') -%} {% do publish_dict.__setitem__('mariadb-url-list', '${request-mariadb:connection-database-list}') -%}
{% do monitor_base_url_dict.__setitem__('mariadb', '${request-mariadb:connection-monitor-base-url}') -%} {% do monitor_base_url_dict.__setitem__('mariadb', '${request-mariadb:connection-monitor-base-url}') -%}
[request-frontend]
<= slap-connection
recipe = slapos.cookbook:requestoptional
name = Instance Frontend
# XXX We have hardcoded SR URL here.
software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
shared = true
config-url = ${request-apache:connection-backend-url}
config-https-only = true
return = domain secure_access
[lamp-frontend-promise]
<= monitor-promise-base
promise = check_url_available
name = lamp-http-frontend.py
url = ${request-frontend:connection-secure_access}
config-url = ${:url}
config-custom-domain = {{ slapparameter_dict.get('custom-domain', '') }}
{% do publish_dict.__setitem__('url', '${lamp-frontend-promise:url}') -%}
[publish-early] [publish-early]
recipe = slapos.cookbook:publish-early recipe = slapos.cookbook:publish-early
-init = -init =
......
...@@ -67,6 +67,7 @@ template-apache-conf = {{ template_apache_conf }} ...@@ -67,6 +67,7 @@ template-apache-conf = {{ template_apache_conf }}
apache-location = {{ apache_location }} apache-location = {{ apache_location }}
apache-php-location = {{ apache_php_location }} apache-php-location = {{ apache_php_location }}
template-php-ini = {{ template_php_ini }} template-php-ini = {{ template_php_ini }}
publish-frontend = !py!{{ default_frontend }}
# XXX no failure if `custom_application_template` is empty # XXX no failure if `custom_application_template` is empty
[application-parameters] [application-parameters]
......
...@@ -65,7 +65,7 @@ apc.lazy_functions=0 ...@@ -65,7 +65,7 @@ apc.lazy_functions=0
opcache.enable=1 opcache.enable=1
opcache.enable_cli=1 opcache.enable_cli=1
opcache.memory_consumption=128 opcache.memory_consumption=128
opcache.interned_strings_buffer=8 opcache.interned_strings_buffer={{ instance_dict.get('php.opcache.interned-strings-buffer', 8) }}
opcache.max_accelerated_files=10000 opcache.max_accelerated_files=10000
opcache.revalidate_freq=1 opcache.revalidate_freq={{ instance_dict.get('php.opcache.revalidate-freq', 1) }}
opcache.save_comments=1 opcache.save_comments=1
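The two opcache settings above are now read from instance parameters, with the previous hard-coded values kept as defaults. An illustration of the corresponding parameters (the key names come from the template lookups above; the values are arbitrary examples, not recommendations):

    # Illustrative only: instance parameters overriding the php.ini defaults above.
    instance_parameters = {
        'php.opcache.interned-strings-buffer': 16,  # template default: 8
        'php.opcache.revalidate-freq': 0,           # template default: 1
    }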
...@@ -118,7 +118,10 @@ eggs = ...@@ -118,7 +118,10 @@ eggs =
${lxml-python:egg} ${lxml-python:egg}
${pycurl:egg} ${pycurl:egg}
${python-cryptography:egg} ${python-cryptography:egg}
${backports.lzma:egg} ${:extra-eggs}
extra-eggs =
[slapos-toolbox-dependencies:python2]
extra-eggs = ${backports.lzma:egg}
# Install a slapos command with networkcache enabled in ${buildout:bin-directory} # Install a slapos command with networkcache enabled in ${buildout:bin-directory}
[slapos-command] [slapos-command]
...@@ -300,7 +303,7 @@ simplegeneric = 0.8.1 ...@@ -300,7 +303,7 @@ simplegeneric = 0.8.1
singledispatch = 3.4.0.3 singledispatch = 3.4.0.3
six = 1.16.0 six = 1.16.0
slapos.cookbook = 1.0.329 slapos.cookbook = 1.0.329
slapos.core = 1.10.7 slapos.core = 1.11.0
slapos.extension.shared = 1.0 slapos.extension.shared = 1.0
slapos.libnetworkcache = 0.25 slapos.libnetworkcache = 0.25
slapos.rebootstrap = 4.5 slapos.rebootstrap = 4.5
......