Commit 1cb88cb0 authored by Stephane VAROQUI

Merge branch 'master' of https://lab.nexedi.com/nexedi/slapos into replication-manager-2.1

parents cc6b9ed5 9f148593
......@@ -38,9 +38,9 @@ md5sum = 2202b18f269ad606d70e1864857ed93c
# inspired on http://old.aclark.net/team/aclark/blog/a-lamp-buildout-for-wordpress-and-other-php-apps/
recipe = slapos.recipe.cmmi
shared = false
version = 2.4.39
version = 2.4.41
url = https://archive.apache.org/dist/httpd/httpd-${:version}.tar.bz2
md5sum = 930e217ba2d71e708a3f1521ecae7ec0
md5sum = dfc674f8f454e3bc2d4ccd73ad3b5f1e
pre-configure =
cp -ar ${apr:location}/apr-${apr:version} srclib/apr/ &&
cp -ar ${apr-util:location}/apr-util-${apr-util:version} srclib/apr-util
......
......@@ -6,7 +6,7 @@ recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/babeld.git
branch = master
git-executable = ${git:location}/bin/git
revision = hmac-nxd1
revision = hmac-nxd2
[babeld]
recipe = slapos.recipe.cmmi
......
......@@ -11,6 +11,9 @@ recipe = slapos.recipe.cmmi
shared = true
url = https://ftp.gnu.org/gnu/coreutils/coreutils-8.31.tar.xz
md5sum = 0009a224d8e288e8ec406ef0161f9293
configure-options =
--disable-libcap
--prefix=@@LOCATION@@
environment =
PATH=${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
LDFLAGS=-Wl,--as-needed -L${gmp:location}/lib -Wl,-rpath=${gmp:location}/lib
......
......@@ -15,10 +15,11 @@ parts =
[curl]
recipe = slapos.recipe.cmmi
shared = true
url = http://curl.haxx.se/download/curl-7.60.0.tar.xz
md5sum = a889cd11b4ae8794b7030472cb4df0a0
url = http://curl.haxx.se/download/curl-7.67.0.tar.xz
md5sum = d55351b88dec558dd3a24dabb2c2d899
configure-options =
--disable-static
--disable-esni
--disable-ldap
--disable-ldaps
--disable-rtsp
......@@ -29,22 +30,28 @@ configure-options =
--disable-imap
--disable-smtp
--disable-gopher
--disable-manual
--enable-ipv6
--disable-sspi
--disable-alt-svc
--with-zlib=${zlib:location}
--with-ssl=${openssl:location}
--without-gnutls
--without-polarssl
--without-mbedtls
--without-cyassl
--without-mesalink
--without-nss
--without-axtls
--without-libpsl
--without-libmetalink
--without-libssh2
--without-librtmp
--without-libidn
--without-libidn2
--with-nghttp2=${nghttp2:location}
--without-ngtcp2
--without-nghttp3
--without-quiche
--without-zsh-functions-dir
--without-fish-functions-dir
environment =
PATH=${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
......
......@@ -8,6 +8,7 @@ parts = dcron-output
[dcron]
recipe = slapos.recipe.cmmi
shared = false
url = http://www.jimpryor.net/linux/releases/dcron-4.4.tar.gz
md5sum = 02d848ba043a9df5bf2102a9f4bc04bd
configure-command = true
......
From a975c66c81e45433a668b7daeb4c903a78cb9d33 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Fri, 27 Sep 2019 01:56:24 +0200
Subject: [PATCH] fix libmagic correctly detect msooxml files
Coming from slapos patch on component:
commit 7a24c4e4ff96a7d00072db891761cea8db7b9122
Author: Boris Kocherov <bk@raskon.ru>
Date: Sun Feb 4 10:52:17 2018 +0300
component/file: fix libmagic correctly detect msooxml files
that was reformatted as a patch so that component/file supports shared
slapos.recipe.cmmi. Maybe file now includes a proper fix.
---
magic/Magdir/msooxml | 27 ++++++++++++++++++---------
1 file changed, 18 insertions(+), 9 deletions(-)
diff --git a/magic/Magdir/msooxml b/magic/Magdir/msooxml
index 059e729c..f8431dc8 100644
--- a/magic/Magdir/msooxml
+++ b/magic/Magdir/msooxml
@@ -1,4 +1,3 @@
-
#------------------------------------------------------------------------------
# $File: msooxml,v 1.5 2014/08/05 07:38:45 christos Exp $
# msooxml: file(1) magic for Microsoft Office XML
@@ -13,24 +12,34 @@
# which can distinguish between the three types
# start by checking for ZIP local file header signature
-0 string PK\003\004
+0 string PK\003\004
!:strength +10
# make sure the first file is correct
->0x1E regex \\[Content_Types\\]\\.xml|_rels/\\.rels
+>0x1E regex \\[Content_Types\\]\\.xml|_rels/\\.rels
# skip to the second local file header
# since some documents include a 520-byte extra field following the file
# header, we need to scan for the next header
->>(18.l+49) search/2000 PK\003\004
+>>(18.l+49) search/2000 PK\003\004
# now skip to the *third* local file header; again, we need to scan due to a
# 520-byte extra field following the file header
->>>&26 search/1000 PK\003\004
+>>>&26 search/1000 PK\003\004
+# and check the subdirectory name to determine which type of OOXML
+# file we have. Correct the mimetype with the registered ones:
+# http://technet.microsoft.com/en-us/library/cc179224.aspx
+>>>>&26 string word/ Microsoft Word 2007+
+!:mime application/vnd.openxmlformats-officedocument.wordprocessingml.document
+>>>>&26 string ppt/ Microsoft PowerPoint 2007+
+!:mime application/vnd.openxmlformats-officedocument.presentationml.presentation
+>>>>&26 string xl/ Microsoft Excel 2007+
+!:mime application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
+
+>>1104 search/300 PK\003\004
# and check the subdirectory name to determine which type of OOXML
# file we have. Correct the mimetype with the registered ones:
# http://technet.microsoft.com/en-us/library/cc179224.aspx
->>>>&26 string word/ Microsoft Word 2007+
+>>>&26 string word/ Microsoft Word 2007+
!:mime application/vnd.openxmlformats-officedocument.wordprocessingml.document
->>>>&26 string ppt/ Microsoft PowerPoint 2007+
+>>>&26 string ppt/ Microsoft PowerPoint 2007+
!:mime application/vnd.openxmlformats-officedocument.presentationml.presentation
->>>>&26 string xl/ Microsoft Excel 2007+
+>>>&26 string xl/ Microsoft Excel 2007+
!:mime application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
->>>>&26 default x Microsoft OOXML
--
2.11.0
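
The magic rules above classify an OOXML container by the first subdirectory (word/, ppt/, xl/) found among its ZIP entries. A hedged Python illustration of the same idea using the standard zipfile module; libmagic itself scans raw ZIP local file headers instead, so this is only a sketch of the intent, not of its implementation:

import zipfile

# MIME types registered for OOXML, as listed in the patch above.
OOXML_MIMETYPES = {
    "word": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
    "ppt": "application/vnd.openxmlformats-officedocument.presentationml.presentation",
    "xl": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
}

def ooxml_mimetype(path):
    # Look at the top-level directory of each ZIP member to tell Word,
    # PowerPoint and Excel apart, falling back to a generic type.
    with zipfile.ZipFile(path) as z:
        for name in z.namelist():
            top = name.split("/", 1)[0]
            if top in OOXML_MIMETYPES:
                return OOXML_MIMETYPES[top]
    return "application/octet-stream"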
......@@ -10,8 +10,8 @@ extends =
[file]
recipe = slapos.recipe.cmmi
shared = true
url = http://ftp.icm.edu.pl/packages/file/file-5.32.tar.gz
md5sum = 4f2503752ff041895090ed6435610435
url = http://ftp.icm.edu.pl/packages/file/file-5.37.tar.gz
md5sum = 80c29aca745466c6c24d11f059329075
configure-options =
--disable-static
environment =
......@@ -21,4 +21,4 @@ environment =
patch-binary = ${patch:location}/bin/patch
patch-options = -p1
patches =
${:_profile_base_location_}/0001-fix-libmagic-correctly-detect-msooxml-files.patch#5e66a340d8ec7212d485e17d9af95f24
https://sources.debian.org/data/main/f/file/1:5.37-6/debian/patches/cherry-pick.FILE5_37-67-g46a8443f.limit-the-number-of-elements-in-a-vector-found-by-oss-fuzz.patch#fb6f7d32ce89573bf4b4b302c812e394
......@@ -17,5 +17,8 @@ rpath = ${:library-dirs}
[geolite2-country]
recipe = slapos.recipe.build:download-unpacked
url = http://geolite.maxmind.com/download/geoip/database/GeoLite2-Country.tar.gz#${:md5sum}
md5sum = ff7bcb609291b9f5ab14397ccc506254
md5sum = dc6224c648350d90f344a0c5c3ca5474
strip-top-level-dir = true
[versions]
geoip2 = 2.9.0
......@@ -32,5 +32,5 @@ environment =
[ghostscript-9]
<= ghostscript-common
url = https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs927/ghostscript-9.27.tar.xz
md5sum = dd531503dbbc524f73528359e2ea145c
url = https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs950/ghostscript-9.50.tar.xz
md5sum = 6cea6bae4a7cdfac6ccb09f07f0caf8c
......@@ -10,13 +10,21 @@ extends =
parts =
glib
# --with-python=${buildout:executable} is simpler but we may end up with
# scripts whose shebang exceeds the kernel limit.
# And ${buildout:executable}/.. is not a valid $PATH part.
[glib-python]
recipe = collective.recipe.shelloutput
commands =
bin-directory = dirname ${buildout:executable}
[glib]
recipe = slapos.recipe.cmmi
shared = true
url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/glib-2.50.2.tar.xz
md5sum = 5eeb2bfaf78a07be59585e8b6e80b1d6
url = https://ftp.gnome.org/pub/gnome/sources/glib/2.56/glib-2.56.4.tar.xz
md5sum = 17c3dca43d99a4882384f1a7b530b80b
configure-options =
--with-python=${buildout:executable}
--with-python=python
--disable-libmount
--disable-static
--disable-selinux
......@@ -24,7 +32,7 @@ configure-options =
--disable-xattr
--disable-man
environment =
PATH=${gettext:location}/bin:${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
PATH=${glib-python:bin-directory}:${gettext:location}/bin:${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
CPPFLAGS=-I${gettext:location}/include -I${zlib:location}/include
LDFLAGS=-L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
LIBFFI_CFLAGS=-I${libffi:location}/include
......
......@@ -15,8 +15,8 @@ extends =
[groonga]
recipe = slapos.recipe.cmmi
shared = false
url = https://packages.groonga.org/source/groonga/groonga-9.0.9.tar.gz
md5sum = 877bc3e2840e2935f606fb3d2d359a2c
url = https://packages.groonga.org/source/groonga/groonga-9.1.0.tar.gz
md5sum = 81722e0bc8cc0cfdddcafde2087b71df
# temporary patch to respect more tokens in natural language mode.
patches =
${:_profile_base_location_}/groonga.patch#9ed02fbe8400402d3eab47eee149978b
......
......@@ -13,8 +13,8 @@ parts = haproxy
[haproxy]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.haproxy.org/download/1.8/src/haproxy-1.8.4.tar.gz
md5sum = 540cd21169e8828d5d11894b2fa74ab4
url = http://www.haproxy.org/download/1.8/src/haproxy-1.8.23.tar.gz
md5sum = 6c35b83a9969449c4b79783a2119551e
configure-command = true
# If the system is running on Linux 2.6, we use "linux26" as the TARGET,
# otherwise use "generic".
......
[buildout]
extends =
../lunzip/buildout.cfg
parts =
libexpat
[libexpat]
recipe = slapos.recipe.cmmi
shared = true
url = http://downloads.sourceforge.net/project/expat/expat/2.2.1/expat-2.2.1.tar.bz2
md5sum = d9c3baeab58774cefc2f04faf29f2cf8
url = https://github.com/libexpat/libexpat/releases/download/R_2_2_9/expat-2.2.9.tar.lz
md5sum = c356e4f2092df4f0b0ffef904f001842
configure-options =
--disable-static
--without-xmlwf
--without-examples
--without-tests
--without-docbook
environment =
PATH=${lunzip:location}/bin:%(PATH)s
......@@ -19,15 +19,10 @@ environment =
[libpng12]
<= libpng-common
url = http://download.sourceforge.net/libpng/libpng-1.2.58.tar.xz
md5sum = 1fe68fa3cdab99dbcfd2a6b4de95645f
[libpng15]
<= libpng-common
url = http://download.sourceforge.net/libpng/libpng-1.5.29.tar.xz
md5sum = b9e5452ee9681c313638efedb16c12a6
url = http://download.sourceforge.net/libpng/libpng-1.2.59.tar.xz
md5sum = e120f48f4e27e72255bc366c73aae1db
[libpng]
<= libpng-common
url = http://download.sourceforge.net/libpng/libpng-1.6.32.tar.xz
md5sum = e01be057a9369183c959b793a685ad15
url = http://download.sourceforge.net/libpng/libpng-1.6.37.tar.xz
md5sum = 015e8e15db1eecde5f2eb9eb5b6e59e9
......@@ -26,4 +26,4 @@ environment =
PATH=${cmake:location}/bin:%(PATH)s
CMAKE_INCLUDE_PATH=${zlib:location}/include:${bzip2:location}/include
CMAKE_LIBRARY_PATH=${zlib:location}/lib:${bzip2:location}/lib
LDFLAGS=-L${:location}/lib -Wl,-rpath=${:location}/lib
LDFLAGS=-L${:location}/lib -Wl,-rpath=${:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
......@@ -29,8 +29,8 @@ parts =
[mariadb]
recipe = slapos.recipe.cmmi
url = https://downloads.mariadb.org/f/mariadb-${:version}/source/mariadb-${:version}.tar.gz/from/http%3A//fr.mirror.babylon.network/mariadb/?serve
version = 10.3.20
md5sum = a87a9532568c0ed50d25ce7e7c60d67d
version = 10.3.21
md5sum = c5b9ed98f2f57f7b809d4ec9c1515107
location = ${buildout:parts-directory}/${:_buildout_section_name_}
pre-configure =
set -e '\bSET(PLUGIN_AUTH_PAM YES)' cmake/build_configurations/mysql_release.cmake
......@@ -84,8 +84,8 @@ post-install =
# mroonga - a storage engine for MySQL. It provides fast fulltext search feature to all MySQL users.
# http://mroonga.github.com/
recipe = slapos.recipe.cmmi
url = https://packages.groonga.org/source/mroonga/mroonga-9.09.tar.gz
md5sum = 1b4ec3a8c6b4b459bd7a94f2bb8ad585
url = https://packages.groonga.org/source/mroonga/mroonga-9.10.tar.gz
md5sum = 30a3010fd5ef11a8bd440053f1624bdb
pre-configure = set -e
rm -rf fake_mariadb_source
mkdir -p fake_mariadb_source
......
......@@ -9,6 +9,7 @@ extends =
[mosh]
recipe = slapos.recipe.cmmi
shared = true
url = https://mosh.org/mosh-1.3.0.tar.gz
md5sum = d961276995936953bf2d5a794068b076
configure-options =
......
......@@ -8,8 +8,10 @@ extends =
[nano]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.nano-editor.org/dist/v2.8/nano-2.8.4.tar.xz
md5sum = 02ff28870194178595b287fc16fa611b
location = @@LOCATION@@
# The dummy PKG_CONFIG handles the case where both pkg-config and ncursesw
# are installed on the system.
environment=
......@@ -17,7 +19,6 @@ environment=
NCURSESW_CONFIG=${ncurses:location}/bin/ncursesw6-config
CPPFLAGS=-I${file:location}/include
LDFLAGS=-L${file:location}/lib/ -Wl,-rpath=${file:location}/lib/
location = ${buildout:parts-directory}/${:_buildout_section_name_}
post-install =
cd ${:location} && mkdir etc &&
echo include "${:location}/share/nano/*.nanorc" > etc/nanorc
......@@ -12,11 +12,13 @@ parts = nginx-output
[nginx-common]
recipe = slapos.recipe.cmmi
url = https://nginx.org/download/nginx-1.17.1.tar.gz
md5sum = 51021f3e8204a5fc809f5e695a4508db
shared = false
url = https://nginx.org/download/nginx-1.17.6.tar.gz
md5sum = 55022aa5715386c994f6773478822853
[nginx]
<= nginx-common
shared = true
configure-options=
--with-http_ssl_module
--with-http_v2_module
......
......@@ -15,9 +15,14 @@ parts =
[nodejs]
<= nodejs-8.9.4
[nodejs-10.6.0]
<= nodejs-base
openssl_location = ${openssl:location}
version = v10.6.0
md5sum = 9df233b86244ebda1ded1f91694fbe86
[nodejs-8.9.4]
<= nodejs-base
recipe = slapos.recipe.cmmi
version = v8.9.4
md5sum = 4ddc1daff327d7e6f63da57fdfc24f55
......@@ -26,24 +31,27 @@ md5sum = 4ddc1daff327d7e6f63da57fdfc24f55
version = v8.6.0
md5sum = 0c95e08220667d8a18b97ecec8218ac6
[nodejs-base]
# Server-side JavaScript.
version =
md5sum =
openssl_location = ${openssl-1.0:location}
recipe = slapos.recipe.cmmi
shared = true
url = https://nodejs.org/dist/${:version}/node-${:version}.tar.gz
configure-options =
--shared-openssl
--shared-openssl-includes=${openssl-1.0:location}/include
--shared-openssl-libpath=${openssl-1.0:location}/lib
--shared-openssl-includes=${:openssl_location}/include
--shared-openssl-libpath=${:openssl_location}/lib
environment =
HOME=${buildout:parts-directory}/${:_buildout_section_name_}
HOME=@@LOCATION@@
PATH=${gcc:location}/bin:${pkgconfig:location}/bin:${python2.7:location}/bin/:%(PATH)s
PKG_CONFIG_PATH=${openssl-1.0:location}/lib/pkgconfig/
PKG_CONFIG_PATH=${:openssl_location}/lib/pkgconfig/
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-Wl,-rpath=${gcc:location}/lib -Wl,-rpath=${gcc:location}/lib64 -Wl,-rpath=${openssl-1.0:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
LD_LIBRARY_PATH=${openssl-1.0:location}/lib
LDFLAGS=-Wl,-rpath=${gcc:location}/lib -Wl,-rpath=${gcc:location}/lib64 -Wl,-rpath=${:openssl_location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
LD_LIBRARY_PATH=${:openssl_location}/lib
[nodejs-8.6.0-output]
# Shared binary location to ease migration
......
......@@ -15,7 +15,9 @@ parts =
[openssh]
recipe = slapos.recipe.cmmi
shared = true
md5sum = 68ba883aff6958297432e5877e9a0fe2
location = @@LOCATION@@
url = https://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/openssh-7.7p1.tar.gz
patch-binary = ${patch:location}/bin/patch
patch-options = -p1
......@@ -25,9 +27,9 @@ environment =
CPPFLAGS=-I${zlib:location}/include -I${openssl-1.0:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${openssl-1.0:location}/lib -Wl,-rpath=${openssl-1.0:location}/lib
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--exec-prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--with-privsep-path=${buildout:parts-directory}/${:_buildout_section_name_}/var/empty
--prefix=${:location}
--exec-prefix=${:location}
--with-privsep-path=${:location}/var/empty
[openssh-output]
# Shared binary location to ease migration
......
......@@ -17,8 +17,8 @@ parts =
[openssl]
recipe = slapos.recipe.cmmi
shared = true
url = https://www.openssl.org/source/openssl-1.1.0j.tar.gz
md5sum = b4ca5b78ae6ae79da80790b30dbedbdc
url = https://www.openssl.org/source/openssl-1.1.1d.tar.gz
md5sum = 3be209000dbc7e1b95bcdf47980a3baa
location = @@LOCATION@@
# 'prefix' option to override --openssldir/--prefix (which is useful
# when combined with DESTDIR). Used by slapos.package.git/obs
......@@ -57,8 +57,8 @@ openssl = ${openssl:location}/bin/openssl
[openssl-1.0]
recipe = slapos.recipe.cmmi
shared = true
url = https://www.openssl.org/source/openssl-1.0.2p.tar.gz
md5sum = ac5eb30bf5798aa14b1ae6d0e7da58df
url = https://www.openssl.org/source/openssl-1.0.2t.tar.gz
md5sum = ef66581b80f06eae42f5268bc0b50c6d
location = @@LOCATION@@
# 'prefix' option to override --openssldir/--prefix (which is useful
# when combined with INSTALL_PREFIX). Used by slapos.package.git/obs
......
......@@ -4,30 +4,12 @@
[buildout]
parts =
pkgconfig
extends =
../gettext/buildout.cfg
../glib/buildout.cfg
[pkg-config]
<= pkgconfig
[pkgconfig]
recipe = slapos.recipe.cmmi
shared = true
url = https://pkgconfig.freedesktop.org/releases/pkg-config-0.29.tar.gz
md5sum = 77f27dce7ef88d0634d0d6f90e03a77f
location = @@LOCATION@@
# build pkg-config twice so that second configure can use pkg-config
# to compute GLIB_CFLAGS and GLIB_LIBS.
pre-configure =
./configure --prefix=${:location} --with-installed-glib && make
url = https://pkgconfig.freedesktop.org/releases/pkg-config-0.29.2.tar.gz
md5sum = f6e931e319531b736fadc017f470e68a
configure-options =
--with-installed-glib
--with-internal-glib
--disable-host-tool
environment =
PATH=.:%(PATH)s
PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
CPPFLAGS=-I${glib:location}/include
LDFLAGS=-L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -L${glib:location}/lib -Wl,-rpath=${glib:location}/lib
GLIB_CFLAGS=-I${glib:location}/include/glib-2.0 -I${glib:location}/lib/glib-2.0/include
GLIB_LIBS=-L${glib:location}/lib -lglib-2.0 -lintl
......@@ -22,25 +22,21 @@ recipe = collective.recipe.grp
[proftpd]
recipe = slapos.recipe.cmmi
md5sum = 13270911c42aac842435f18205546a1b
url = ftp://ftp.proftpd.org/distrib/source/proftpd-1.3.6.tar.gz
# 1.3.6 is not compatible with openssl 1.1.x
# https://github.com/proftpd/proftpd/issues/674
# (fix is in master, we are waiting for next release)
md5sum = 4040f6a6b86173e2a03f4ccdb9b9af6e
url = ftp://ftp.proftpd.org/distrib/source/proftpd-1.3.6b.tar.gz
configure-options =
--enable-openssl
--enable-nls
--enable-ctrls
--enable-dso
--disable-cap
--with-modules=mod_sftp:mod_ban
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
environment =
CPPFLAGS=${:cppflags}
LDFLAGS=${:ldflags}
CPPFLAGS=-I${zlib:location}/include -I${openssl:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib
install_user=${proftpd-environment:USER}
install_group=${proftpd-grp:GROUP}
cppflags=-I${zlib:location}/include -I${openssl-1.0:location}/include
ldflags=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${openssl-1.0:location}/lib -Wl,-rpath=${openssl-1.0:location}/lib
patch-binary = ${patch:location}/bin/patch
patch-options = -p1
patches =
......@@ -59,14 +55,14 @@ git-executable = ${git:location}/bin/git
[proftpd-mod_auth_web]
recipe = plone.recipe.command
stop-on-error = true
# prxs does not support setting rpath, but we can "inject" -R that will be passed to libtool
command =
LIBTOOL=${libtool:location}/bin/libtool \
LDFLAGS="-Wl,-rpath=${curl:location}/lib ${proftpd:ldflags}" \
${proftpd-output:prxs} -c -i \
-I ${curl:location}/include ${proftpd:cppflags} \
-L ${curl:location}/lib/ \
${proftpd-output:prxs} -c -i -d \
-I ${curl:location}/include \
-L '${curl:location}/lib/ -R ${openssl:location}/lib -R ${zlib:location}/lib' \
-l curl \
-d ${proftpd-mod_auth_web-repository:location}/mod_auth_web.c
${proftpd-mod_auth_web-repository:location}/mod_auth_web.c
location=${proftpd:location}/libexec/mod_auth_web.so
......
......@@ -3,10 +3,12 @@ parts = protobuf
[protobuf]
recipe = slapos.recipe.cmmi
shared = true
url = https://github.com/google/protobuf/releases/download/v3.4.0/protobuf-python-3.4.0.tar.gz
md5sum = 0820cc2e56d71aef8e99794fcbd184cd
[protobuf-cpp]
recipe = slapos.recipe.cmmi
shared = true
url = https://github.com/google/protobuf/releases/download/v3.4.0/protobuf-cpp-3.4.0.tar.gz
md5sum = 6d59dad503bea5ad420fd09ddad84481
[buildout]
parts =
pylint
extends =
../patch/buildout.cfg
[pylint]
recipe = zc.recipe.egg:custom
egg = pylint
patches =
${:_profile_base_location_}/pylint-super_on_old_class.patch#cb0c3f8c091bf4980be395c917edc435
patch-options = -p1
patch-binary = ${patch:location}/bin/patch
commit 6cf07b3483ec0d921296aefa8a28a8491f13786e
Author: Claudiu Popa <pcmanticore@gmail.com>
Date: Sun Dec 6 14:54:52 2015 +0200
Don't emit super-on-old-class on classes with unknown bases.
The change also removes the confidence handling for super-on-old-class,
which isn't used enough to merit its existence.
Closes issue #721.
--- a/pylint/checkers/newstyle.py 2019-12-25 13:51:01.611586402 +0900
+++ b/pylint/checkers/newstyle.py 2019-12-25 13:50:31.861542768 +0900
@@ -127,12 +127,9 @@
if isinstance(call, astroid.CallFunc) and \
isinstance(call.func, astroid.Name) and \
call.func.name == 'super':
- confidence = (INFERENCE if has_known_bases(klass)
- else INFERENCE_FAILURE)
- if not klass.newstyle:
+ if not klass.newstyle and has_known_bases(klass):
# super should not be used on an old style class
- self.add_message('super-on-old-class', node=node,
- confidence=confidence)
+ self.add_message('super-on-old-class', node=node)
else:
# super first arg should be the class
if not call.args and sys.version_info[0] == 3:
@@ -146,8 +143,7 @@
continue
if supcls is None:
- self.add_message('missing-super-argument', node=call,
- confidence=confidence)
+ self.add_message('missing-super-argument', node=call)
continue
if klass is not supcls:
@@ -163,8 +159,7 @@
if name is not None:
self.add_message('bad-super-call',
node=call,
- args=(name, ),
- confidence=confidence)
+ args=(name, ))
def register(linter):
......@@ -29,9 +29,9 @@ python = python2.7
[python2.7]
recipe = slapos.recipe.cmmi
shared = true
package_version = 2.7.15
package_version = 2.7.17
package_version_suffix =
md5sum = a80ae3cc478460b922242f43a1b4094d
md5sum = b3b6d2c92f42a60667814358ab9f0cfd
# This is actually the default setting for prefix, but we can't use it in
# other settings in this part if we don't set it explicitly here.
......@@ -42,7 +42,7 @@ executable = ${:prefix}/bin/python${:version}
patch-options = -p1
patches =
${:_profile_base_location_}/fix_compiler_module_issue_20613.patch#94443a77f903e9de880a029967fa6aa7
${:_profile_base_location_}/pytracemalloc_pep445.patch#3dfad79654af9671325f988c36fb6be2
${:_profile_base_location_}/pytracemalloc_pep445.patch#9f3145817afa2b7fad801fde8447e396
${:_profile_base_location_}/disabled_module_list.patch#71ad30d32bcdbc50c19cf48675b1246e
${:_profile_base_location_}/asyncore_poll_insteadof_select.patch#ab6991c0ee6e25aeb8951e71f280a2f1
url =
......@@ -56,6 +56,16 @@ configure-options =
# Profiled build:
make-binary =
make-targets = make profile-opt && make install
# Mangle shebang to use installed Python. This is not only a convenience:
# when building system packages (e.g. re6st-node using OBS), some OSes like
# recent RedHat would otherwise mangle them, either failing ('python' is
# ambiguous and 'python2' is not supported anymore) or replacing with
# something that's really wrong (/usr/bin/python...).
post-install = cd '${:prefix}' &&
find -executable -type f -print0 |
xargs -0 grep -I -m 1 '' |
sed -n 's,:#! */usr/bin/env \+python2\?$,,p' |
xargs -d '\n' sed -i '1s,.*,#!${:executable},'
extra-ldflags =
# the entry "-Wl,-rpath=${file:location}/lib" below is needed by python-magic,
......
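
The post-install step above rewrites "#!/usr/bin/env python" (or python2) shebangs so that installed scripts point at the freshly built interpreter. A rough Python sketch of what that find/grep/sed pipeline does, for illustration only; the prefix and executable values are assumptions standing in for ${:prefix} and ${:executable}:

import os
import re

PREFIX = "/srv/slapos/parts/python2.7"      # assumed stand-in for ${:prefix}
EXECUTABLE = PREFIX + "/bin/python2.7"      # assumed stand-in for ${:executable}
SHEBANG = re.compile(br"^#! */usr/bin/env +python2?\s*$")

for dirpath, _dirs, files in os.walk(PREFIX):
    for name in files:
        path = os.path.join(dirpath, name)
        # Only executable regular files are candidates, like "find -executable -type f".
        if not (os.path.isfile(path) and os.access(path, os.X_OK)):
            continue
        with open(path, "rb") as f:
            first, rest = f.readline(), f.read()
        if SHEBANG.match(first):
            # Replace the env-based shebang with the installed interpreter.
            with open(path, "wb") as f:
                f.write(b"#!" + EXECUTABLE.encode() + b"\n" + rest)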
......@@ -492,7 +492,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
#ifdef WITH_PYMALLOC
#ifdef HAVE_MMAP
@@ -214,7 +489,7 @@
@@ -220,7 +495,7 @@
* Arenas are allocated with mmap() on systems supporting anonymous memory
* mappings to reduce heap fragmentation.
*/
......@@ -501,7 +501,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
#ifdef WITH_MEMORY_LIMITS
#define MAX_ARENAS (SMALL_MEMORY_LIMIT / ARENA_SIZE)
@@ -581,7 +856,7 @@
@@ -587,7 +862,7 @@
return NULL; /* overflow */
#endif
nbytes = numarenas * sizeof(*arenas);
......@@ -510,7 +510,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
if (arenaobj == NULL)
return NULL;
arenas = arenaobj;
@@ -785,9 +1060,8 @@
@@ -791,9 +1066,8 @@
* Unless the optimizer reorders everything, being too smart...
*/
......@@ -522,7 +522,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
{
block *bp;
poolp pool;
@@ -802,15 +1076,6 @@
@@ -808,15 +1082,6 @@
#endif
/*
......@@ -538,7 +538,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
* This implicitly redirects malloc(0).
*/
if ((nbytes - 1) < SMALL_REQUEST_THRESHOLD) {
@@ -981,17 +1246,13 @@
@@ -987,17 +1252,13 @@
* last chance to serve the request) or when the max memory limit
* has been reached.
*/
......@@ -559,7 +559,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
{
poolp pool;
block *lastfree;
@@ -1211,7 +1472,7 @@
@@ -1217,7 +1478,7 @@
redirect:
#endif
/* We didn't allocate this address. */
......@@ -568,7 +568,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
}
/* realloc. If p is NULL, this acts like malloc(nbytes). Else if nbytes==0,
@@ -1219,10 +1480,8 @@
@@ -1225,10 +1486,8 @@
* return a non-NULL result.
*/
......@@ -581,7 +581,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
{
void *bp;
poolp pool;
@@ -1232,16 +1491,7 @@
@@ -1238,16 +1497,7 @@
#endif
if (p == NULL)
......@@ -599,7 +599,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
#ifdef WITH_VALGRIND
/* Treat running_on_valgrind == -1 the same as 0 */
@@ -1269,10 +1519,10 @@
@@ -1275,10 +1525,10 @@
}
size = nbytes;
}
......@@ -612,7 +612,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
}
return bp;
}
@@ -1290,40 +1540,17 @@
@@ -1296,40 +1546,17 @@
* at p. Instead we punt: let C continue to manage this block.
*/
if (nbytes)
......@@ -655,7 +655,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
#endif /* WITH_PYMALLOC */
#ifdef PYMALLOC_DEBUG
@@ -1343,10 +1570,6 @@
@@ -1349,10 +1576,6 @@
#define DEADBYTE 0xDB /* dead (newly freed) memory */
#define FORBIDDENBYTE 0xFB /* untouchable bytes at each end of a block */
......@@ -666,7 +666,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
static size_t serialno = 0; /* incremented on each debug {m,re}alloc */
/* serialno is always incremented via calling this routine. The point is
@@ -1429,58 +1652,18 @@
@@ -1467,58 +1690,18 @@
p[2*S+n: 2*S+n+S]
Copies of FORBIDDENBYTE. Used to catch over- writes and reads.
p[2*S+n+S: 2*S+n+2*S]
......@@ -722,7 +722,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
-
-/* generic debug memory api, with an "id" to identify the API in use */
-void *
-_PyObject_DebugMallocApi(char id, size_t nbytes)
-_PyObject_DebugMallocApi(char api, size_t nbytes)
+static void *
+_PyMem_DebugMallocCtx(void *ctx, size_t nbytes)
{
......@@ -730,25 +730,27 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
uchar *p; /* base address of malloc'ed block */
uchar *tail; /* p + 2*SST + nbytes == pointer to tail pad bytes */
size_t total; /* nbytes + 4*SST */
@@ -1491,14 +1674,14 @@
/* overflow: can't represent total as a size_t */
@@ -1530,7 +1713,7 @@
return NULL;
- p = (uchar *)PyObject_Malloc(total);
+ p = (uchar *)api->alloc.malloc(api->alloc.ctx, total);
if (p == NULL)
return NULL;
if (api == _PYMALLOC_OBJ_ID) {
- p = (uchar *)PyObject_Malloc(total);
+ p = (uchar *)api->alloc.malloc(api->alloc.ctx, total);
}
else {
p = (uchar *)_PyMem_Malloc(total);
@@ -1540,8 +1723,8 @@
/* at p, write size (SST bytes), id (1 byte), pad (SST-1 bytes) */
/* at p, write size (SST bytes), api (1 byte), pad (SST-1 bytes) */
write_size_t(p, nbytes);
- p[SST] = (uchar)id;
- p[SST] = (uchar)api;
- memset(p + SST + 1 , FORBIDDENBYTE, SST-1);
+ p[SST] = (uchar)api->api_id;
+ memset(p + SST + 1, FORBIDDENBYTE, SST-1);
if (nbytes > 0)
memset(p + 2*SST, CLEANBYTE, nbytes);
@@ -1516,35 +1699,37 @@
@@ -1559,40 +1742,42 @@
Then fills the original bytes with DEADBYTE.
Then calls the underlying free.
*/
......@@ -769,8 +771,13 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
nbytes += 4*SST;
if (nbytes > 0)
memset(q, DEADBYTE, nbytes);
- PyObject_Free(q);
+ api->alloc.free(api->alloc.ctx, q);
if (api == _PYMALLOC_OBJ_ID) {
- PyObject_Free(q);
+ api->alloc.free(api->alloc.ctx, q);
}
else {
_PyMem_Free(q);
}
}
-void *
......@@ -795,7 +802,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
bumpserialno();
original_nbytes = read_size_t(q - 2*SST);
total = nbytes + 4*SST;
@@ -1552,16 +1737,12 @@
@@ -1600,17 +1785,13 @@
/* overflow: can't represent total as a size_t */
return NULL;
......@@ -808,13 +815,14 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
* case we didn't get the chance to mark the old memory with DEADBYTE,
* but we live with that.
*/
- q = (uchar *)PyObject_Realloc(q - 2*SST, total);
+ oldq = q;
+ q = (uchar *)api->alloc.realloc(api->alloc.ctx, q - 2*SST, total);
if (q == NULL) {
if (nbytes <= original_nbytes) {
/* bpo-31626: the memset() above expects that realloc never fails
@@ -1571,11 +1752,17 @@
if (api == _PYMALLOC_OBJ_ID) {
- q = (uchar *)PyObject_Realloc(q - 2*SST, total);
+ oldq = q;
+ q = (uchar *)api->alloc.realloc(api->alloc.ctx, q - 2*SST, total);
}
else {
q = (uchar *)_PyMem_Realloc(q - 2*SST, total);
@@ -1624,11 +1805,17 @@
return NULL;
}
......@@ -833,7 +841,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
tail = q + nbytes;
memset(tail, FORBIDDENBYTE, SST);
write_size_t(tail + SST, serialno);
@@ -1594,8 +1781,8 @@
@@ -1647,8 +1834,8 @@
* and call Py_FatalError to kill the program.
* The API id, is also checked.
*/
......@@ -844,7 +852,7 @@ diff -Naru a/Objects/obmalloc.c b/Objects/obmalloc.c
{
const uchar *q = (const uchar *)p;
char msgbuf[64];
@@ -1941,3 +2128,44 @@
@@ -1994,3 +2181,44 @@
arenas[arenaindex_temp].address != 0;
}
#endif
......
......@@ -10,6 +10,8 @@ extends =
../pkgconfig/buildout.cfg
../xorg/buildout.cfg
../zlib/buildout.cfg
../pcre/buildout.cfg
../glib/buildout.cfg
# XXX Change all reference to kvm section to qemu section, then
# use qemu as main name section.
......
......@@ -7,9 +7,10 @@ parts =
[rsync]
recipe = slapos.recipe.cmmi
shared = true
url = https://download.samba.org/pub/rsync/src/rsync-3.1.3.tar.gz
md5sum = 1581a588fde9d89f6bc6201e8129afaf
make-options =
PREFIX=${buildout:parts-directory}/${:_buildout_section_name_}
PREFIX=@@LOCATION@@
environment =
PATH=${perl:location}/bin:%(PATH)s
......@@ -7,6 +7,7 @@ extends =
[screen]
recipe = slapos.recipe.cmmi
shared = true
url = http://ftp.gnu.org/gnu/screen/screen-${:version}.tar.gz
version = 4.5.1
md5sum = a8c5da2f42f8a18fa4dada2419d1549b
......
......@@ -6,8 +6,8 @@ parts =
[snappy]
recipe = slapos.recipe.cmmi
url = https://github.com/google/snappy/archive/1.1.7.tar.gz
md5sum = ee9086291c9ae8deb4dac5e0b85bf54a
url = https://github.com/google/snappy/archive/1.1.8.tar.gz
md5sum = 70e48cba7fecf289153d009791c9977f
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-command = ${cmake:location}/bin/cmake
configure-options =
......
......@@ -6,11 +6,13 @@ parts =
[sqlite3]
recipe = slapos.recipe.cmmi
shared = true
url = https://sqlite.org/2018/sqlite-autoconf-3220000.tar.gz
md5sum = 96b5648d542e8afa6ab7ffb8db8ddc3d
url = https://sqlite.org/2019/sqlite-autoconf-3300100.tar.gz
md5sum = 51252dc6bc9094ba11ab151ba650ff3c
configure-options =
--disable-static
--enable-readline
# Increase MAX_VARIABLE_NUMBER as many OS distributions do. For example:
# https://git.archlinux.org/svntogit/packages.git/tree/trunk/PKGBUILD?h=packages/sqlite
environment =
CPPFLAGS=-I${readline:location}/include -I${ncurses:location}/include
CPPFLAGS=-I${readline:location}/include -I${ncurses:location}/include -DSQLITE_MAX_VARIABLE_NUMBER=250000
LDFLAGS=-L@@LOCATION@@ -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${readline:location}/lib -L${ncurses:location}/lib
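
The -DSQLITE_MAX_VARIABLE_NUMBER=250000 define above raises SQLite's per-statement parameter limit. A hedged Python illustration of the failure mode a low limit causes; whether the exception actually triggers depends on the SQLite build in use:

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE t (i INTEGER)")
ids = list(range(5000))
placeholders = ",".join("?" * len(ids))
try:
    # With the historical default limit of 999 variables this raises
    # "too many SQL variables"; with the raised limit it simply runs.
    con.execute("SELECT count(*) FROM t WHERE i IN (%s)" % placeholders, ids)
except sqlite3.OperationalError as exc:
    print(exc)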
[buildout]
extends =
buildout.hash.cfg
../python3/buildout.cfg
parts =
surykatka
[surykatka-requirements]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_update_hash_filename_}
[surykatka]
recipe = plone.recipe.command
command =
bash -c "${python3.7:executable} -m venv ${:location} && \
. ${:location}/bin/activate && \
pip install -r ${surykatka-requirements:target}"
location = ${buildout:parts-directory}/${:_buildout_section_name_}
stop-on-error = true
executable = ${:location}/bin/surykatka
# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginnings, copied verbatim
# - lines containing an "=" sign which must fit in the following categories:
# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
# Copied verbatim.
# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
# by the re-generation script.
# Re-generated.
# - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[surykatka-requirements]
_update_hash_filename_ = requirements.txt
md5sum = 2dfe4f8b8c5b6f5d3478b70e53c49201
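
The header above constrains what buildout.hash.cfg may contain. A hedged sketch of a line classifier following those regexes, for illustration only; the real re-generation script is not part of this diff, and the accepted hash types other than md5sum are assumptions:

import re

def classify(line):
    # Comments and section beginnings are copied verbatim.
    if re.match(r"^#", line) or re.match(r"^\[", line):
        return "verbatim"
    # "filename = path" entries (here the key is _update_hash_filename_) stay as-is.
    if re.match(r"^\s*\S*filename\S*\s*=\s*\S+\s*$", line):
        return "verbatim"
    # Hash entries are re-generated; md5sum is used here, the others are assumed.
    if re.match(r"^\s*(md5sum|sha256sum|sha512sum)\s*=", line):
        return "regenerated"
    # Any other line is copied verbatim.
    return "verbatim"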
certifi==2019.11.28
chardet==3.0.4
Click==7.0
dnspython==1.16.0
forcediphttpsadapter==1.0.1
idna==2.8
miniupnpc==2.0.2
peewee==3.13.1
requests==2.22.0
surykatka==0.2.0
urllib3==1.25.7
......@@ -9,6 +9,7 @@ extends =
[texinfo4]
recipe = slapos.recipe.cmmi
shared = true
url = http://ftp.gnu.org/gnu/texinfo/texinfo-4.13.tar.gz
md5sum = 71ba711519209b5fb583fed2b3d86fcb
configure-options =
......
......@@ -7,6 +7,7 @@ extends =
[tig]
recipe = slapos.recipe.cmmi
shared = true
url = http://jonas.nitro.dk/tig/releases/tig-2.1.tar.gz
md5sum = d6c237aba2c03d85897da79789fd6104
environment =
......
......@@ -20,8 +20,8 @@ md5sum = 3dde098fd0b3a08d3f2867e4a95591ba
recipe = hexagonit.recipe.download
ignore-existing = true
strip-top-level-dir = true
url = http://www-us.apache.org/dist/tomcat/tomcat-7/v7.0.96/bin/apache-tomcat-7.0.96.tar.gz
md5sum = 0669aa2996b67c61662a5a4f993767b8
url = http://www-us.apache.org/dist/tomcat/tomcat-7/v7.0.99/bin/apache-tomcat-7.0.99.tar.gz
md5sum = ab39c15461f2a99493528b4a5819bc56
[tomcat9]
recipe = hexagonit.recipe.download
......
......@@ -8,6 +8,7 @@ extends =
[vim]
recipe = slapos.recipe.cmmi
shared = true
url = ftp://ftp.vim.org/pub/vim/unix/vim-8.0.586.tar.bz2
md5sum = b35e794140c196ff59b492b56c1e73db
environment=
......
......@@ -563,9 +563,9 @@ url = https://www.x.org/releases/individual/app/xwd-1.0.7.tar.gz
md5sum = 3ebd74f7a1980305e5e19ec8ff7aa794
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${xorg-util-macros:location}/share/pkgconfig:${xproto:location}/lib/pkgconfig:${libX11:location}/lib/pkgconfig:${libxkbfile:location}/lib/pkgconfig:${kbproto:location}/lib/pkgconfig:${libxcb:location}/lib/pkgconfig:${xorg-libpthread-stubs:location}/lib/pkgconfig:${libXau:location}/lib/pkgconfig:${xextproto:location}/lib/pkgconfig
PKG_CONFIG_PATH=${xorg-util-macros:location}/share/pkgconfig:${libxkbfile:location}/lib/pkgconfig:${xproto:location}/lib/pkgconfig:${libX11:location}/lib/pkgconfig:${libxkbfile:location}/lib/pkgconfig:${kbproto:location}/lib/pkgconfig:${libxcb:location}/lib/pkgconfig:${xorg-libpthread-stubs:location}/lib/pkgconfig:${libXau:location}/lib/pkgconfig:${xextproto:location}/lib/pkgconfig
CPPFLAGS=-I${libXt:location}/include
LDFLAGS=-L${libX11:location}/lib -Wl,-rpath=${libX11:location}/lib
LDFLAGS=-L${libX11:location}/lib -Wl,-rpath=${libX11:location}/lib -L${libxkbfile:location}/lib -Wl,-rpath=${libxkbfile:location}/lib
[xserver]
# Adds Xvfb functionality
......
......@@ -3,8 +3,8 @@ parts = zip
[zip]
recipe = slapos.recipe.cmmi
shared = true
url = http://downloads.sourceforge.net/project/infozip/Zip%203.x%20%28latest%29/3.0/zip30.tar.gz
md5sum = 7b74551e63f8ee6aab6fbc86676c0d37
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-command = true
make-options = prefix=${:location} NO_BZIP2_SUPPORT=1 -f unix/Makefile generic
make-options = prefix=@@LOCATION@@ NO_BZIP2_SUPPORT=1 -f unix/Makefile generic
[buildout]
extends = ../zlib/buildout.cfg
parts =
zstd
......@@ -10,4 +11,7 @@ md5sum = 487f7ee1562dee7c1c8adf85e2a63df9
shared = true
location = @@LOCATION@@
configure-command = :
make-options = PREFIX=${:location}
environment =
PREFIX=${:location}
LD_FLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
CFLAGS=-I${zlib:location}/include
Tests for backupserver software release
##############################################################################
#
# Copyright (c) 2019 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from setuptools import setup, find_packages
version = '0.0.1.dev0'
name = 'slapos.test.backupserver'
long_description = open("README.md").read()
setup(name=name,
version=version,
description="Test for SlapOS' backupserver",
long_description=long_description,
long_description_content_type='text/markdown',
maintainer="Nexedi",
maintainer_email="info@nexedi.com",
url="https://lab.nexedi.com/nexedi/slapos",
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.libnetworkcache',
'erp5.util',
'requests',
],
zip_safe=True,
test_suite='test',
)
##############################################################################
#
# Copyright (c) 2019 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import httplib
import json
import os
import requests
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
setUpModule, InstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'software.cfg')))
class TestBackupServer(InstanceTestCase):
def test(self):
parameter_dict = self.computer_partition.getConnectionParameterDict()
# Check that there is an RSS feed
self.assertTrue('rss' in parameter_dict)
self.assertTrue(parameter_dict['rss'].startswith(
'https://[%s]:9443/' % (self._ipv6_address, )
))
result = requests.get(
parameter_dict['rss'], verify=False, allow_redirects=False)
# XXX crontab not triggered yet
self.assertEqual(
[httplib.NOT_FOUND, False],
[result.status_code, result.is_redirect]
)
# Check monitor
self.assertTrue('monitor-base-url' in parameter_dict)
self.assertTrue('monitor-setup-url' in parameter_dict)
result = requests.get(
parameter_dict['monitor-base-url'], verify=False, allow_redirects=False)
self.assertEqual(
[httplib.UNAUTHORIZED, False],
[result.status_code, result.is_redirect]
)
......@@ -22,7 +22,7 @@ md5sum = c801b7f9f11f0965677c22e6bbe9281b
[template-apache-frontend]
filename = instance-apache-frontend.cfg.in
md5sum = c6d78b2856d9d0ec63728e668e3395d8
md5sum = 378f6da53a02b2bfe7777a493fc95585
[template-caddy-replicate]
filename = instance-apache-replicate.cfg.in
......
......@@ -320,32 +320,36 @@ extra-context =
# BBB: SlapOS Master non-zero knowledge END
[caddy-wrapper]
recipe = slapos.cookbook:wrapper
environment =
CADDYPATH=${directory:frontend_cluster}
command-line = {{ parameter_dict['caddy'] }}
-conf ${dynamic-caddy-frontend-template:rendered}
-log ${caddy-configuration:error-log}
-log-roll-mb 0
recipe = slapos.recipe.template:jinja2
template = inline:
#!/bin/sh
export CADDYPATH=${directory:frontend_cluster}
ulimit -n $(ulimit -Hn)
exec {{ parameter_dict['caddy'] }} \
-conf ${dynamic-caddy-frontend-template:rendered} \
-log ${caddy-configuration:error-log} \
-log-roll-mb 0 \
{% if instance_parameter['configuration.global-disable-http2'].lower() in TRUE_VALUES %}
-http2=false
-http2=false \
{% else %}
-http2=true
-http2=true \
{% endif %}
{% if instance_parameter['configuration.enable-quic'].lower() in TRUE_VALUES %}
-quic
-quic \
{% endif %}
-grace {{ instance_parameter['configuration.mpm-graceful-shutdown-timeout'] }}s
-disable-http-challenge
-disable-tls-alpn-challenge
wrapper-path = ${directory:bin}/caddy-wrapper
-grace {{ instance_parameter['configuration.mpm-graceful-shutdown-timeout'] }}s \
-disable-http-challenge \
-disable-tls-alpn-challenge \
"$@"
rendered = ${directory:bin}/caddy-wrapper
mode = 0755
[caddy-frontend]
recipe = slapos.cookbook:wrapper
command-line = ${caddy-wrapper:wrapper-path} -pidfile ${caddy-configuration:pid-file}
command-line = ${caddy-wrapper:rendered} -pidfile ${caddy-configuration:pid-file}
wrapper-path = ${directory:service}/frontend_caddy
hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
hash-files = ${caddy-wrapper:wrapper-path}
hash-files = ${caddy-wrapper:rendered}
[not-found-html]
recipe = slapos.cookbook:symbolic.link
......@@ -594,7 +598,7 @@ rendered = ${directory:bin}/frontend-caddy-validate
mode = 0700
last_state_file = ${directory:run}/caddy_configuration_last_state
extra-context =
key wrapper caddy-wrapper:wrapper-path
key wrapper caddy-wrapper:rendered
key caddy_configuration_state frontend-caddy-configuration-state-validate:rendered
key last_state_file :last_state_file
......
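
The wrapper template above raises the file-descriptor soft limit to the hard limit (ulimit -n $(ulimit -Hn)) before exec'ing Caddy. A hedged Python equivalent of that single line, shown only to make the intent explicit:

import resource

# Raise RLIMIT_NOFILE's soft limit to the hard limit, like the wrapper does
# with "ulimit -n $(ulimit -Hn)" before starting the server.
soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (hard, hard))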
......@@ -27,4 +27,3 @@ smmap = 0.9.0
numpy = 1.16.4
websockify = 0.8.0
collective.recipe.shelloutput = 0.1
......@@ -534,7 +534,7 @@ class SlaveHttpFrontendTestCase(SlapOSInstanceTestCase):
cls.startServerProcess()
except BaseException:
cls.logger.exception("Error during setUpClass")
cls._cleanup()
cls._cleanup("{}.{}.setUpClass".format(cls.__module__, cls.__name__))
cls.setUp = lambda self: self.fail('Setup Class failed.')
raise
......@@ -559,8 +559,9 @@ class SlaveHttpFrontendTestCase(SlapOSInstanceTestCase):
cls.waitForCaddy()
except BaseException:
cls.logger.exception("Error during setUpClass")
cls._storeSnapshot("{}.setUpClass".format(cls.__name__))
cls._cleanup()
# "{}.{}.setUpClass".format(cls.__module__, cls.__name__) is already used by SlapOSInstanceTestCase.setUpClass
# so we use another name for snapshot, to make sure we don't store another snapshot in same directory.
cls._cleanup("{}.SlaveHttpFrontendTestCase.{}.setUpClass".format(cls.__module__, cls.__name__))
cls.setUp = lambda self: self.fail('Setup Class failed.')
raise
......@@ -854,9 +855,9 @@ class SlaveHttpFrontendTestCase(SlapOSInstanceTestCase):
break
@classmethod
def _cleanup(cls):
def _cleanup(cls, snapshot_name):
cls.stopServerProcess()
super(SlaveHttpFrontendTestCase, cls)._cleanup()
super(SlaveHttpFrontendTestCase, cls)._cleanup(snapshot_name)
def patchRequests(self):
HTTPResponse = requests.packages.urllib3.response.HTTPResponse
......
......@@ -4,9 +4,7 @@
"additionalProperties": false,
"definitions": {
"tcpv4port": {
"minimum": 0,
"maximum": 65535,
"type": "integer"
"$ref": "./schemas-definitions.json#/tcpv4port"
}
},
"properties": {
......@@ -194,16 +192,34 @@
},
"kumofs": {
"description": "Persistent memcached service",
"additionalProperties": {
"$ref": "./instance-kumofs-schema.json"
},
"allOf": [
{
"$ref": "./instance-kumofs-schema.json"
},
{
"properties": {
"tcpv4-port": {
"default": 2000
}
}
}
],
"type": "object"
},
"memcached": {
"description": "Volatile memcached service",
"additionalProperties": {
"$ref": "./instance-kumofs-schema.json"
},
"allOf": [
{
"$ref": "./instance-kumofs-schema.json"
},
{
"properties": {
"tcpv4-port": {
"default": 2010
}
}
}
],
"type": "object"
},
"cloudooo-url": {
......@@ -218,16 +234,34 @@
},
"smtp": {
"description": "Mail queuing and relay service",
"additionalProperties": {
"$ref": "./instance-smtp-schema.json"
},
"allOf": [
{
"$ref": "./instance-smtp-schema.json"
},
{
"properties": {
"tcpv4-port": {
"default": 2010
}
}
}
],
"type": "object"
},
"mariadb": {
"description": "Relational database service",
"additionalProperties": {
"$ref": "./instance-mariadb-schema.json"
},
"allOf": [
{
"$ref": "./instance-mariadb-schema.json"
},
{
"properties": {
"tcpv4-port": {
"default": 2099
}
}
}
],
"type": "object"
},
"zodb-zeo": {
......@@ -257,7 +291,7 @@
"type": "object"
},
"zodb": {
"description": "Zope Object DataBase mountpoints. See https://github.com/zopefoundation/ZODB/blob/3.10/src/ZODB/component.xml for extra options.",
"description": "Zope Object DataBase mountpoints. See https://github.com/zopefoundation/ZODB/blob/4/src/ZODB/component.xml for extra options.",
"items": {
"required": [
"type"
......@@ -282,8 +316,13 @@
"type": "boolean"
}
},
"patternProperties": {
".!$": {
"$ref": "#/properties/zodb/items/patternProperties/.!$"
}
},
"additionalProperties": {
"type": "string"
"$ref": "#/properties/zodb/items/additionalProperties"
},
"type": "object"
},
......@@ -313,13 +352,38 @@
},
"server": {
"description": "Instantiate a server. If missing, 'storage-dict' must contain the necessary properties to mount the ZODB. Partitions references are 'neo-0', 'neo-1', ...",
"$ref": "../neoppod/instance-neo-input-schema.json"
"$ref": "../neoppod/instance-neo-input-schema.json#/definitions/neo-cluster"
}
}
}
],
"patternProperties": {
".!$": {
"description": "Override with the value of the first item whose zope id matches against the pattern.",
"items": {
"items": [
{
"description": "Override pattern (Python regular expression).",
"type": "string"
},
{
"description": "Override value (parameter for maching nodes).",
"type": [
"integer",
"string"
]
}
],
"type": "array"
},
"type": "array"
}
},
"additionalProperties": {
"type": "string"
"type": [
"integer",
"string"
]
},
"type": "object"
},
......
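
The ".!$" pattern properties above describe per-node overrides: a parameter "name!" holds a list of [pattern, value] pairs and the first pattern matching the node id wins, falling back to the plain "name" value. A hedged Python sketch of that resolution, consistent with the expectations in the test further below but not taken from the actual recipe code:

import re

def resolve(parameters, name, node_id):
    # First matching override pattern wins; otherwise use the plain value.
    for pattern, value in parameters.get(name + "!", ()):
        if re.match(pattern, node_id):
            return value
    return parameters.get(name)

zodb = {
    "cache-size-bytes": "20MB",
    "cache-size-bytes!": [("bb-0", 1 << 20), ("bb-.*", "500MB")],
}
assert resolve(zodb, "cache-size-bytes", "bb-0") == 1 << 20
assert resolve(zodb, "cache-size-bytes", "bb-3") == "500MB"
assert resolve(zodb, "cache-size-bytes", "a-1") == "20MB"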
......@@ -8,7 +8,7 @@
"tcpv4-port": {
"allOf": [
{
"$ref": "#/definitions/tcpv4port"
"$ref": "./schemas-definitions.json#/tcpv4port"
},
{
"description": "Start allocating ports at this value, going upward"
......
{
"$schema": "http://json-schema.org/draft-04/schema#",
"required": [
"tcpv4-port"
],
"type": "object",
"properties": {
"tcpv4-port": {
"allOf": [
{
"$ref": "#/definitions/tcpv4port"
"$ref": "./schemas-definitions.json#/tcpv4port"
},
{
"description": "Start allocating ports at this value, going downward"
......
{
"$schema": "http://json-schema.org/draft-04/schema#",
"extends": "./schema-definitions.json#",
"required": [
"tcpv4-port"
],
......@@ -9,7 +8,7 @@
"tcpv4-port": {
"allOf": [
{
"$ref": "#/definitions/tcpv4port"
"$ref": "./schemas-definitions.json#/tcpv4port"
},
{
"description": "Start allocating ports at this value, going upward"
......
{
"$schema": "http://json-schema.org/draft-07/schema#",
"tcpv4port": {
"minimum": 0,
"maximum": 65535,
"type": "integer"
}
}
......@@ -46,3 +46,13 @@ class ERP5InstanceTestCase(SlapOSInstanceTestCase):
"""Return the output paramters from the root partition"""
return json.loads(
self.computer_partition.getConnectionParameterDict()['_'])
def getComputerPartition(self, partition_reference):
for computer_partition in self.slap.computer.getComputerPartitionList():
if partition_reference == computer_partition.getInstanceParameter(
'instance_title'):
return computer_partition
def getComputerPartitionPath(self, partition_reference):
partition_id = self.getComputerPartition(partition_reference).getId()
return os.path.join(self.slap._instance_root, partition_id)
......@@ -195,8 +195,8 @@ class TestDisableTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMix
"""
# self.computer_partition_root_path is the path of root partition.
# we want to assert that no scripts exist in any partition.
bin_programs = [os.path.basename(path) for path in
glob.glob("{}/../*/bin/*".format(self.computer_partition_root_path))]
bin_programs = map(os.path.basename,
glob.glob(self.computer_partition_root_path + "/../*/bin/*"))
self.assertTrue(bin_programs) # just to check the glob was correct.
self.assertNotIn('runUnitTest', bin_programs)
......@@ -210,8 +210,99 @@ class TestDisableTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMix
apache_process = psutil.Process(process_info['pid'])
self.assertEqual(
sorted([socket.AF_INET, socket.AF_INET6]),
sorted([
sorted(
c.family
for c in apache_process.connections()
if c.status == 'LISTEN'
]))
))
class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
"""Test override zope node parameters
"""
__partition_reference__ = 'override'
@classmethod
def getInstanceParameterDict(cls):
# The following example includes the most commonly used options,
# but not necessarily in a meaningful way.
return {'_': json.dumps({
"zodb": [{
"type": "zeo",
"server": {},
"cache-size-bytes": "20MB",
"cache-size-bytes!": [
("bb-0", 1<<20),
("bb-.*", "500MB"),
],
"pool-timeout": "10m",
"storage-dict": {
"cache-size!": [
("a-.*", "50MB"),
],
},
}],
"zope-partition-dict": {
"a": {
"instance-count": 3,
},
"bb": {
"instance-count": 5,
"port-base": 2300,
},
},
})}
def test_zope_conf(self):
zeo_addr = json.loads(
self.getComputerPartition('zodb').getConnectionParameter('_')
)["storage-dict"]["root"]["server"]
def checkParameter(line, kw):
k, v = line.split()
self.assertFalse(k.endswith('!'), k)
try:
expected = kw.pop(k)
except KeyError:
if k == 'server':
return
self.assertIsNotNone(expected)
self.assertEqual(str(expected), v)
def checkConf(zodb, storage):
zodb["mount-point"] = "/"
zodb["pool-size"] = 4
zodb["pool-timeout"] = "10m"
storage["storage"] = "root"
storage["server"] = zeo_addr
with open('%s/etc/zope-%s.conf' % (partition, zope)) as f:
conf = map(str.strip, f.readlines())
i = conf.index("<zodb_db root>") + 1
conf = iter(conf[i:conf.index("</zodb_db>", i)])
for line in conf:
if line == '<zeoclient>':
for line in conf:
if line == '</zeoclient>':
break
checkParameter(line, storage)
for k, v in storage.iteritems():
self.assertIsNone(v, k)
del storage
else:
checkParameter(line, zodb)
for k, v in zodb.iteritems():
self.assertIsNone(v, k)
partition = self.getComputerPartitionPath('zope-a')
for zope in xrange(3):
checkConf({
"cache-size-bytes": "20MB",
}, {
"cache-size": "50MB",
})
partition = self.getComputerPartitionPath('zope-bb')
for zope in xrange(5):
checkConf({
"cache-size-bytes": "500MB" if zope else 1<<20,
}, {
"cache-size": None,
})
......@@ -14,7 +14,7 @@
# not need these here).
[deploy-script-controller-script]
filename = deploy-script-controller
md5sum = f0f5dd379361eb37f84e0bc7639f645f
md5sum = 8288e59eb442c662544daffbf446a033
[template-deploy-test]
filename = instance.cfg.in
......
......@@ -72,6 +72,8 @@ function add_checks ()
echo 'iptables-save' >> $LOG_FILE 2>&1
iptables-save >> $LOG_FILE 2>&1
for f in /tmp/playbook-* ; do echo $f ; cat $f; echo; done >> $LOG_FILE 2>&1
echo 'slapos node status' >> $LOG_FILE 2>&1
slapos node status >> $LOG_FILE 2>&1
}
function upload ()
{
......
[buildout]
extends =
buildout.hash.cfg
https://lab.nexedi.com/nexedi/slapos/raw/1.0.112/software/kvm/software.cfg
# PyPI has been serving from a new host for some time and the change is not incorporated in the extended KVM SR
allow-hosts +=
files.pythonhosted.org
https://lab.nexedi.com/nexedi/slapos/raw/1.0.134/software/kvm/software.cfg
parts =
eggs
......
......@@ -66,7 +66,7 @@ md5sum = a56a44e96f65f5ed20211bb6a54279f4
[nginx-gitlab-http.conf.in]
_update_hash_filename_ = template/nginx-gitlab-http.conf.in
md5sum = abcc5eda03e10b26c74619f299a7f6a8
md5sum = e74695aa1be60f0ffac64ddbe1c8eaf1
[nginx.conf.in]
_update_hash_filename_ = template/nginx.conf.in
......
......@@ -121,6 +121,13 @@ server {
# Set CORS header
add_header 'Access-Control-Allow-Origin' {{ cfg('nginx_header_allow_origin') }};
add_header 'Access-Control-Allow-Credentials' true;
if ($request_method = OPTIONS ) {
add_header Allow "GET, OPTIONS";
add_header Content-Type text/plain;
add_header 'Access-Control-Allow-Origin' $http_origin;
add_header Access-Control-Allow-Headers "Origin, X-Requested-With, Authorization, Content-Type, Accept";
return 200;
}
## If you use HTTPS make sure you disable gzip compression
## to be safe against BREACH attack.
{{ 'gzip off;' if cfg_https else ''}}
......
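
The added block above answers CORS preflight (OPTIONS) requests directly in nginx. A hedged example of such a preflight using the requests library; the URL and origin are placeholders:

import requests

GITLAB_URL = "https://gitlab.example.com/"     # placeholder instance URL
response = requests.options(GITLAB_URL, headers={
    "Origin": "https://app.example.com",       # placeholder origin
    "Access-Control-Request-Method": "GET",
})
# The new nginx block returns 200 and echoes the origin in the CORS header.
print(response.status_code, response.headers.get("Access-Control-Allow-Origin"))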
......@@ -14,4 +14,4 @@
# not need these here).
[instance-profile]
filename = instance.cfg.in
md5sum = cc8902e44c1d50804b570775633b8c2a
md5sum = dcb9d2f540e0e397c9346c8b0c05f233
......@@ -26,11 +26,11 @@ extends = {{ template_monitor }}
# Always the same. Just copy/paste.
# See docstring of slapos.cookbook:slapconfiguration for more information.
recipe = slapos.cookbook:slapconfiguration
computer = ${slap_connection:computer_id}
partition = ${slap_connection:partition_id}
url = ${slap_connection:server_url}
key = ${slap_connection:key_file}
cert = ${slap_connection:cert_file}
computer = ${slap-connection:computer-id}
partition = ${slap-connection:partition-id}
url = ${slap-connection:server-url}
key = ${slap-connection:key-file}
cert = ${slap-connection:cert-file}
# Define default parameter(s) that will be used later, in case user didn't
# specify it.
......
Tests for htmlvalidator software release
##############################################################################
#
# Copyright (c) 2019 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from setuptools import setup, find_packages
version = '0.0.1.dev0'
name = 'slapos.test.htmlvalidatorserver'
long_description = open("README.md").read()
setup(name=name,
version=version,
description="Test for SlapOS' htmlvalidatorserver",
long_description=long_description,
long_description_content_type='text/markdown',
maintainer="Nexedi",
maintainer_email="info@nexedi.com",
url="https://lab.nexedi.com/nexedi/slapos",
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.libnetworkcache',
'erp5.util',
'requests',
],
zip_safe=True,
test_suite='test',
)
##############################################################################
#
# Copyright (c) 2019 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import httplib
import json
import os
import requests
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
setUpModule, InstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'software.cfg')))
class TestHtmlValidatorServer(InstanceTestCase):
def test(self):
parameter_dict = self.computer_partition.getConnectionParameterDict()
# Check that the vnu-url connection parameter is published
self.assertTrue('vnu-url' in parameter_dict)
self.assertEqual(
'https://[%s]:8899/' % (self._ipv6_address, ),
parameter_dict['vnu-url']
)
result = requests.get(
parameter_dict['vnu-url'], verify=False, allow_redirects=False)
self.assertEqual(
[httplib.OK, False, 'Apache-Coyote/1.1'],
[result.status_code, result.is_redirect, result.headers['Server']]
)
# Check monitor
self.assertTrue('monitor-base-url' in parameter_dict)
self.assertTrue('monitor-setup-url' in parameter_dict)
result = requests.get(
parameter_dict['monitor-base-url'], verify=False, allow_redirects=False)
self.assertEqual(
[httplib.UNAUTHORIZED, False],
[result.status_code, result.is_redirect]
)
......@@ -27,4 +27,4 @@ md5sum = 9f22db89a2679534aa8fd37dbca86782
[template-runTestSuite]
filename = runTestSuite.in
md5sum = b44268d46a41042a879f47babb66c922
md5sum = 5cac160fd6f14cd69cc8d63f87cc9726
......@@ -92,7 +92,7 @@ def main():
firefox_capabilities['marionette'] = True
browser = webdriver.Firefox(
capabilities=firefox_capabilities,
firefox_binary='${firefox-wrapper:location}',
firefox_binary='${firefox-wrapper-68:location}',
executable_path='${geckodriver:location}')
else:
assert target == 'selenium-server', "Unsupported target {}".format(test_runner['target'])
......
......@@ -18,7 +18,7 @@ parts =
git
eggs
xserver
firefox
firefox-68
xwd
renderjs-install
jio-install
......
......@@ -15,11 +15,11 @@
[template]
filename = instance.cfg.in
md5sum = 2cbfd6b08c65369c1d45cf3ba2ff335a
md5sum = a236b719aaac61ac342ada0ce569151a
[template-kvm]
filename = instance-kvm.cfg.jinja2
md5sum = 36e7a8656e52d3aace8d9e52dcb3864e
md5sum = d604d8696815716ac51af770164e86d5
[template-kvm-cluster]
filename = instance-kvm-cluster.cfg.jinja2.in
......@@ -27,7 +27,7 @@ md5sum = 2e743132ba4e001f784791311df9ba6a
[template-kvm-resilient]
filename = instance-kvm-resilient.cfg.jinja2
md5sum = e50e45c3097ed5a7115817fbc967f173
md5sum = 7de5756f59ef7d823cd8ed33e6d15230
[template-kvm-import]
filename = instance-kvm-import.cfg.jinja2.in
......@@ -46,8 +46,8 @@ filename = template/kvm-export.sh.jinja2
md5sum = b617d64de73de1eed518185f310bbc82
[template-nbd]
filename = instance-nbd.cfg.in
md5sum = a05b581d65768ac55faf3b06d4aec447
filename = instance-nbd.cfg.jinja2
md5sum = 003112c44ef8d536861a46e3dfc67a37
[template-ansible-promise]
filename = template/ansible-promise.in
......@@ -55,7 +55,7 @@ md5sum = 2036bf145f472f62ef8dee5e729328fd
[template-kvm-run]
filename = template/template-kvm-run.in
md5sum = 71e0c6c8a0b9a46498f25b67a3ab6894
md5sum = 08af4ed0e2a53e76c844e3d7325aac09
[template-kvm-controller]
filename = template/kvm-controller-run.in
......
......@@ -384,8 +384,7 @@
"title": "NBD hostname or IP",
"description": "hostname (or IP) of the NBD server containing the boot image.",
"type": "string",
"format": "internet-address",
"default": "debian.nbd.vifib.net"
"format": "internet-address"
},
"nbd-port": {
"title": "NBD port",
......
......@@ -64,8 +64,7 @@
"title": "NBD hostname or IP",
"description": "hostname (or IP) of the NBD server containing the boot image.",
"type": "string",
"format": "internet-address",
"default": "debian.nbd.vifib.net"
"format": "internet-address"
},
"nbd-port": {
"title": "NBD port",
......
......@@ -172,8 +172,7 @@
"title": "NBD hostname",
"description": "hostname (or IP) of the NBD server containing the boot image.",
"type": "string",
"format": "internet-address",
"default": "debian.nbd.vifib.net"
"format": "internet-address"
},
"nbd-port": {
"title": "NBD port",
......
......@@ -13,6 +13,7 @@
{% set monitor_dict = {'parameter': monitor_parameter, 'return': monitor_return} -%}
{% endif -%}
{% set monitor_interface_url = slapparameter_dict.pop('monitor-interface-url', 'https://monitor.app.officejs.com') -%}
{% set additional_frontend = (slapparameter_dict.get('frontend-additional-instance-guid', '').strip() != '') %}
[buildout]
eggs-directory = {{ eggs_directory }}
......@@ -66,6 +67,9 @@ return =
url ssh-public-key resilient-ssh-url notification-id ip {{ monitor_return | join(' ') }}
# KVM related parameters
# XXX: return ALL parameters (like nat rules), through jinja
{% if additional_frontend %}
url-additional
{% endif %}
backend-url url ip
{{ ' ' }}ipv6-network-info
......@@ -81,6 +85,9 @@ monitor-user = ${monitor-publish-parameters:monitor-user}
monitor-base-url = ${monitor-publish-parameters:monitor-base-url}
monitor-setup-url = {{ monitor_interface_url }}/#page=settings_configurator&url=${monitor-publish-parameters:monitor-url}&username=${monitor-publish-parameters:monitor-user}&password=${publish-early:monitor-password}
{% endif -%}
{% if additional_frontend %}
url-additional = ${request-kvm:connection-url-additional}
{% endif %}
[kvm-frontend-url-bin]
recipe = collective.recipe.template
......
......@@ -88,6 +88,7 @@ nbd2-host = ${slap-parameter:nbd2-host}
nbd2-port = ${slap-parameter:nbd2-port}
tap-interface = {{ slap_configuration.get('tap-name', '') }}
tap-ipv6-addr = {{ slap_configuration.get('tap-ipv6-addr', '') }}
disk-size = ${slap-parameter:disk-size}
disk-type = ${slap-parameter:disk-type}
......@@ -532,7 +533,7 @@ key_info = Get the public key file in your VM with the command: wget {{ kvm_htt
{% if use_tap == 'true' and slap_configuration.get('tap-ipv4-addr') -%}
ipv4-network-info =
Use this configuration in /etc/network/interfaces to configure IPv4 on interface {{ iface }} in your VM.
PERMANENT SOLUTION: in your VM, add the lines below to /etc/network/interfaces and then run: "ifup {{ iface }}"
auto {{ iface }}
iface {{ iface }} inet static
address {{ slap_configuration.get('tap-ipv4-addr') }}
......@@ -540,13 +541,14 @@ ipv4-network-info =
gateway {{ slap_configuration.get('tap-ipv4-gateway') }}
{% if enable_http == 'true' %}
${helper:blank-line}
Or run in your VM the command: wget -O- {{ kvm_http }}/${network-config-ipv4:filename} | /bin/sh -
TEMPORARY SOLUTION: run the following command in your VM: "wget -O- {{ kvm_http }}/${network-config-ipv4:filename} | /bin/sh -"
(the configuration will be gone after the next reboot)
{% endif %}
{% endif %}
ipv6-network-info =
{% if use_tap == 'true' and slap_configuration.get('tap-ipv6-addr') %}
Use this configuration in /etc/network/interfaces to configure IPv6 on interface {{ iface }} in your VM.
PERMANENT SOLUTION: in your VM, add the lines below to /etc/network/interfaces and then run: "ifup {{ iface }}"
auto {{ iface }}
iface {{ iface }} inet6 static
address {{ slap_configuration.get('tap-ipv6-gateway') }}
......@@ -554,7 +556,8 @@ ipv6-network-info =
gateway {{ slap_configuration.get('tap-ipv6-addr') }}
{% if enable_http == 'true' %}
${helper:blank-line}
Or run in your VM the command: wget -O- {{ kvm_http }}/${network-config-ipv6:filename} | /bin/sh -
TEMPORARY SOLUTION: run the following command in your VM: "wget -O- {{ kvm_http }}/${network-config-ipv6:filename} | /bin/sh -"
(the configuration will be gone after the next reboot)
{% endif %}
{% endif %}
......@@ -820,6 +823,14 @@ template = inline:
rendered = ${buildout:directory}/.slapos-disk-permission
context =
raw disk_device_path {{disk_device_path}}
{% do part_list.append('wipe-disk-device-wrapper') -%}
[wipe-disk-device-wrapper]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:prerm}/slapos_wipe_device_disk
command-line =
dd if=/dev/zero of={{disk_device_path}} bs=4096 count=500k
{% endif -%}
[buildout]
......
#############################
#
# Instantiate nbdserver
#
#############################
[buildout]
parts =
nbd-promise
onetimeupload-promise
publish-connection-information
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
srv = $${buildout:directory}/srv
log = $${buildout:directory}/log
[basedirectory]
recipe = slapos.cookbook:mkdirectory
services = $${rootdirectory:etc}/run
watched-services = $${rootdirectory:etc}/service
[nbd-instance]
recipe = slapos.cookbook:nbdserver
ip = $${slap-network-information:global-ipv6}
port = 1024
image-path = $${onetimeupload-instance:image-path}
qemu-path = ${kvm:location}/bin/qemu-nbd
shell-path = ${dash:location}/bin/dash
# XXX TODO: Wait for the iso to be uploaded (execute_wait)
path = $${basedirectory:services}/nbdserver
[nbd-promise]
<= monitor-promise-base
module = check_port_listening
name = nbd_promise.py
config-hostname = $${nbd-instance:ip}
config-port = $${nbd-instance:port}
[gen-passwd]
recipe = slapos.cookbook:generate.password
storage-path = $${rootdirectory:srv}/passwd
bytes = 24
[onetimeupload-instance]
recipe = slapos.cookbook:onetimeupload
ip = $${slap-network-information:global-ipv6}
port = 8080
image-path = $${rootdirectory:srv}/cdrom.iso
log-path = $${rootdirectory:log}/onetimeupload.log
shell-path = ${dash:location}/bin/dash
onetimeupload-path = ${buildout:bin-directory}/onetimeupload
path = $${basedirectory:watched-services}/onetimeupload
key = $${gen-passwd:passwd}
[onetimeupload-promise]
<= monitor-promise-base
module = check_port_listening
name = onetimeupload_promise.py
config-hostname = $${onetimeupload-instance:ip}
config-port = $${onetimeupload-instance:port}
[publish-connection-information]
recipe = slapos.cookbook:publish
nbd_url = nbd://[$${nbd-instance:ip}]:$${nbd-instance:port}
upload_url = http://[$${onetimeupload-instance:ip}]:$${onetimeupload-instance:port}/
upload_key = $${onetimeupload-instance:key}
#############################
#
# Instantiate nbdserver
#
#############################
[buildout]
parts =
nbd-promise
onetimeupload-promise
publish-connection-information
extends = {{ template_monitor }}
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
offline = true
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
bin = ${buildout:directory}/bin
etc = ${buildout:directory}/etc
srv = ${buildout:directory}/srv
log = ${buildout:directory}/log
[basedirectory]
recipe = slapos.cookbook:mkdirectory
services = ${rootdirectory:etc}/run
watched-services = ${rootdirectory:etc}/service
[nbd-instance]
recipe = slapos.cookbook:nbdserver
ip = ${slap-network-information:global-ipv6}
port = 1024
image-path = ${onetimeupload-instance:image-path}
qemu-path = {{ qemu_nbd_executable_location }}
shell-path = {{ dash_executable_location }}
# XXX TODO: Wait for the iso to be uploaded (execute_wait)
path = ${basedirectory:services}/nbdserver
[nbd-checker-bin]
recipe = slapos.recipe.template:jinja2
template = inline:#!/bin/sh
[ ! -f ${onetimeupload-instance:image-path} ] ||
${buildout:executable} -c 'import socket ; socket.create_connection(("${nbd-instance:ip}","${nbd-instance:port}")).close()'
rendered = ${rootdirectory:bin}/check-nbd-running.sh
[nbd-promise]
<= monitor-promise-base
module = check_command_execute
name = nbd_promise.py
config-command = ${nbd-checker-bin:rendered}
[gen-passwd]
recipe = slapos.cookbook:generate.password
storage-path = ${rootdirectory:srv}/passwd
bytes = 24
[onetimeupload-instance]
recipe = slapos.cookbook:onetimeupload
ip = ${slap-network-information:global-ipv6}
port = {{ slapparameter_dict.get('otu-port', 8080) }}
image-path = ${rootdirectory:srv}/cdrom.iso
log-path = ${rootdirectory:log}/onetimeupload.log
shell-path = {{ dash_executable_location }}
onetimeupload-path = {{ onetimeupload_executable_location }}
path = ${basedirectory:watched-services}/onetimeupload
key = ${gen-passwd:passwd}
[onetimeupload-promise]
<= monitor-promise-base
module = check_port_listening
name = onetimeupload_promise.py
config-hostname = ${onetimeupload-instance:ip}
config-port = ${onetimeupload-instance:port}
[publish-connection-information]
recipe = slapos.cookbook:publish
nbd_hostname = ${nbd-instance:ip}
nbd_port = ${nbd-instance:port}
upload_url = http://[${onetimeupload-instance:ip}]:${onetimeupload-instance:port}
upload_key = ${onetimeupload-instance:key}
status_message = ${detect-if-cdrom-present:status}
[detect-if-cdrom-present]
recipe = collective.recipe.shelloutput
commands =
status = [ -f ${onetimeupload-instance:image-path} ] && echo "image already uploaded, you can't upload it again" || echo "WARNING: no image yet, the NBD server doesn't work"
......@@ -12,7 +12,7 @@ recipe = slapos.cookbook:softwaretype
default = $${:kvm}
kvm-cluster = $${dynamic-template-kvm-cluster:rendered}
kvm = $${dynamic-template-kvm:rendered}
nbd = ${template-nbd:output}
nbd = $${dynamic-template-nbd:rendered}
kvm-resilient = $${dynamic-template-kvm-resilient:rendered}
kvm-import = $${dynamic-template-kvm-import:rendered}
......@@ -149,3 +149,16 @@ context =
raw gzip_binary ${gzip:location}/bin/gzip
mode = 0644
[dynamic-template-nbd]
<= jinja2-template-base
template = ${template-nbd:location}/instance-nbd.cfg.jinja2
filename = template-nbd.cfg
context =
section slap_configuration slap-configuration
key slapparameter_dict slap-configuration:configuration
key eggs_directory buildout:eggs-directory
key develop_eggs_directory buildout:develop-eggs-directory
raw qemu_nbd_executable_location ${kvm:location}/bin/qemu-nbd
raw dash_executable_location ${dash:location}/bin/dash
raw onetimeupload_executable_location ${buildout:bin-directory}/onetimeupload
raw template_monitor ${monitor2-template:rendered}
......@@ -144,8 +144,8 @@ filename = kvm-export.sh.jinja2
mode = 0755
[template-nbd]
<= template-file-base
output = ${buildout:directory}/template-nbd.cfg
<= download-file-base
on-update = true
[template-ansible-promise]
<= download-template-base
......@@ -196,7 +196,6 @@ context =
websockify = 0.5.1
collective.recipe.environment = 0.2.0
collective.recipe.shelloutput = 0.1
gitdb = 0.6.4
pycurl = 7.43.0
slapos.recipe.template = 4.3
......
......@@ -44,6 +44,7 @@ tap_interface = '{{ parameter_dict.get("tap-interface") }}'
listen_ip = '{{ parameter_dict.get("ipv4") }}'
mac_address = '{{ parameter_dict.get("mac-address") }}'
tap_mac_address = '{{ parameter_dict.get("tap-mac-address") }}'
tap_ipv6_addr = '{{ parameter_dict.get("tap-ipv6-addr") }}'
numa_list = '{{ parameter_dict.get("numa", "") }}'.split()
ram_size = {{ parameter_dict.get("ram-size") }}
ram_max_size = '{{ parameter_dict.get("ram-max-size") }}'
......@@ -278,6 +279,8 @@ if use_nat == 'true':
cluster_doc_host, cluster_doc_port)
if set_nat_restrict == 'true':
rules += ',restrict=on'
if use_tap == 'true' and tap_ipv6_addr != '':
rules += ',ipv6=off'
nat_network_parameter = ['-netdev', rules,
'-device', 'virtio-net-pci,netdev=lan%s,mac=%s' % (number, mac_address)]
if use_tap == 'true':
......
......@@ -308,3 +308,63 @@ class TestInstanceResilient(InstanceTestCase):
'takeover-kvm-1-password',
'takeover-kvm-1-url',
'url']))
@unittest.skipIf(not sanityCheck(), 'missing kvm_intel module')
class TestAccessResilientAdditional(InstanceTestCase):
__partition_reference__ = 'ara'
expected_partition_with_monitor_base_url_count = 1
@classmethod
def getInstanceSoftwareType(cls):
return 'kvm-resilient'
@classmethod
def getInstanceParameterDict(cls):
return {
'frontend-additional-instance-guid': 'SOMETHING'
}
def test(self):
connection_parameter_dict = self.computer_partition\
.getConnectionParameterDict()
result = requests.get(connection_parameter_dict['url'], verify=False)
self.assertEqual(
httplib.OK,
result.status_code
)
self.assertIn('<title>noVNC</title>', result.text)
result = requests.get(
connection_parameter_dict['url-additional'], verify=False)
self.assertEqual(
httplib.OK,
result.status_code
)
self.assertIn('<title>noVNC</title>', result.text)
class TestInstanceNbdServer(InstanceTestCase):
__partition_reference__ = 'ins'
instance_max_retry = 5
@classmethod
def getInstanceSoftwareType(cls):
return 'nbd'
@classmethod
def getInstanceParameterDict(cls):
# port 8080 is used by testnode, use another one
return {
'otu-port': '8090'
}
def test(self):
connection_parameter_dict = self.computer_partition\
.getConnectionParameterDict()
result = requests.get(connection_parameter_dict['upload_url'].strip(), verify=False)
self.assertEqual(
httplib.OK,
result.status_code
)
self.assertIn('<title>Upload new File</title>', result.text)
self.assertIn("WARNING", connection_parameter_dict['status_message'])
============
Edge testing
============
``edgetest`` is a special software type of the monitor software release, used for website monitoring with bots.
It uses `surykatka <https://lab.nexedi.com/nexedi/surykatka>`_ and `check_surykatka_json <https://lab.nexedi.com/nexedi/slapos.toolbox/blob/master/slapos/promise/plugin/check_surykatka_json.py>`_ to monitor websites.
``surykatka`` provides a bot that queries a list of hosts and a JSON reporting system.
``check_surykatka_json`` is used in promises to provide monitoring information about the websites.
In order to monitor a URL, one needs to (see the example after this list):
* request a monitor software release with ``edgetest`` software type, configured as described in ``instance-edgetest-input-schema.json``,
* request a slave to monitor with ``edgetest`` software type, configured as described in ``instance-edgetest-slave-input-schema.json``.
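For illustration only, here is what the per-slave parameters could look like, written as a Python dict in the style of the software release tests; the keys come from ``instance-edgetest-slave-input-schema.json`` and the values are hypothetical::

    # hypothetical parameters for one monitored URL; any omitted check
    # falls back to the defaults configured on the edgetest master
    slave_parameter_dict = {
        'url': 'https://www.example.com',
        'check-status-code': '200',
        'check-certificate-expiration-days': '15',
        'check-frontend-ip': '',
    }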
......@@ -14,16 +14,32 @@
# not need these here).
[template]
filename = instance.cfg
md5sum = 1b7d2d097f208f6641bf98a17df079c8
md5sum = d778b6f436ae6864819eb2ff2d12a86f
[template-monitor]
_update_hash_filename_ = instance-monitor.cfg.jinja2
md5sum = 373c79480e6425c20480fc911a56c3fd
[template-monitor-distributor]
_update_hash_filename_ = instance-monitor-distributor.cfg.jinja2
md5sum = 61c0bfdfc0a2b51ba15fe4a49baf6091
md5sum = 165a15672fc85981f68b9af2d6253254
[json-test-template]
_update_hash_filename_ = json-test-template.json.in.jinja2
md5sum = 2eb5596544d9c341acf653d4f7ce2680
[template-monitor-edgetest]
_update_hash_filename_ = instance-monitor-edgetest.cfg.jinja2
md5sum = 9e237dbdda59e788202f0da194a57d41
[template-monitor-edgebot]
_update_hash_filename_ = instance-monitor-edgebot.cfg.jinja2
md5sum = 8786e4245db0d27dfa4815222d970e52
[network-bench-cfg]
filename = network_bench.cfg.in
md5sum = cfcbf2002b8eff5153e2bf68ed24b720
[monitor-collect-csv-dump]
filename = script/collect_csv_dump.py
md5sum = cad2402bbd21907cfed6bc5af8c5d3ab
[template-surykatka-ini]
_update_hash_filename_ = surykatka.ini.jinja2
md5sum = 40870921e05d93b5843ab34abd7e3902
{
"$schema": "http://json-schema.org/draft-04/schema#",
"properties": {
"dummy": {
"title": "dummy",
"description": "Dummy",
"type": "string"
}
}
}
{
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "Values returned by Re6st Master instanciation",
"properties": {
"re6stry-url": {
"description": "ipv6 url to access your re6st registry service",
"type": "string"
}
},
"type": "object"
}
{
"$schema": "http://json-schema.org/draft-04/schema#",
"properties": {
"nameserver": {
"default": "",
"title": "Nameserver",
"description": "Space separated list of name servers to use.",
"type": "string"
},
"check-status-code": {
"default": "200",
"title": "Default Check HTTP Code",
"description": "Default HTTP code to check against (default: 200).",
"type": "string"
},
"check-frontend-ip": {
"default": "",
"title": "Default space separated list of Frontend IPs to check",
"description": "Default list of Frontend IPs to check, if empty no constraint is used.",
"type": "string"
},
"check-certificate-expiration-days": {
"default": "15",
"title": "Default certificate expiration days check",
"description": "Default amount of days to consider certitifcate as being to-be-expired (default: 15).",
"type": "string"
}
}
}
{
"$schema": "http://json-schema.org/draft-04/schema#",
"properties": {
"url": {
"title": "URL to check",
"description": "URL to check, like https://example.com",
"type": "string"
},
"check-status-code": {
"default": "Master default",
"title": "Default Check HTTP Code.",
"description": "HTTP code to check against (default: comes from master partition).",
"type": "string"
},
"check-frontend-ip": {
"default": "Master default",
"title": "Space separated list of Frontend IPs to check",
"description": "List of Frontend IPs to check, if empty no constraint is used (default: comes from master partition).",
"type": "string"
},
"check-certificate-expiration-days": {
"default": "Master default",
"title": "Certificate expiration days check",
"description": "Default amount of days to consider certitifcate as being to-be-expired (default: comes from master partition).",
"type": "string"
}
}
}
[jinja2-template-base]
recipe = slapos.recipe.template:jinja2
rendered = ${buildout:directory}/${:filename}
extra-context =
context =
import json_module json
${:extra-context}
[slave-test-configuration]
<=jinja2-template-base
template = {{ template_json_distributor_test }}
filename = srv/monitor/private/test.json
extensions = jinja2.ext.do
extra-context =
section slave_information slap-configuration
{% set part_list = [] -%}
# Publish information for each slave
{% set directory_list = [] -%}
{% for slave_instance in slave_instance_list -%}
{% set publish_section_title = 'publish-%s' % slave_instance.get('slave_reference') -%}
{% do part_list.append(publish_section_title) -%}
[{{ publish_section_title }}]
recipe = slapos.cookbook:publish
-slave-reference = {{ slave_instance.get('slave_reference') }}
log-access-url = ${monitor-frontend:connection-site_url}/{{ slave_instance.get('slave_reference') }}
log-access-url-v6 = ${monitor-httpd-conf-parameter:url}/{{ slave_instance.get('slave_reference') }}
{% endfor %}
{% set data_source_dict = slapparameter_dict.get('data-source', None) -%}
{% set cron_min_count = 0 -%}
{% if data_source_dict %}
{% for entry in data_source_dict -%}
{% set cron_min_count = cron_min_count + 1 -%}
{% set cron_min = cron_min_count%60 -%}
{% do part_list.append('cron-crawl-' + entry) -%}
{% do directory_list.append(entry) -%}
[cron-crawl-{{ entry }}]
<= cron
recipe = slapos.cookbook:cron.d
name = cron-crawler-{{ entry }}
frequency = * * * * *
command = cd ${monitor-directory:crawl-log}/{{ entry }} && ${crawler-bin:wrapper-path} {{ data_source_dict.get(entry) }}
{% endfor %}
{% endif %}
[monitor-directory]
crawl-log = ${:srv}/crawlog
network-user-logs = ${:private}/network-user-logs/
{% for slave_instance in slave_instance_list -%}
user-log-{{ slave_instance.get('slave_reference') }}-folder = ${:private}/network-user-logs/{{ slave_instance.get('slave_reference') }}
user-log-{{ slave_instance.get('slave_reference') }}-ping-folder = ${:private}/network-user-logs/{{ slave_instance.get('slave_reference') }}/ping
user-log-{{ slave_instance.get('slave_reference') }}-ping6-folder = ${:private}/network-user-logs/{{ slave_instance.get('slave_reference') }}/ping6
{% endfor -%}
{% for directory in directory_list %}
{{ '%s = ${:crawl-log}/%s' % (directory, directory) }}
{% endfor %}
[crawler-bin]
recipe = slapos.cookbook:wrapper
command-line =
{{ wget_bin }} --no-check-certificate -l1 -r -nd --timestamp
wrapper-path = ${monitor-directory:bin}/log-crawler
[buildout]
extends = {{ instance_base_monitor }}
parts +=
slave-test-configuration
{% for part in part_list %}
{{ ' %s' % part }}
{% endfor %}
{%- if slap_software_type == software_type %}
{%- set CONFIGURATION = {} %}
{%- for k, v in sorted(slap_configuration.items()) %}
{%- if k.startswith('configuration.') %}
{%- do CONFIGURATION.__setitem__(k[14:], v) %}
{%- endif %}
{%- endfor %}
{%- set slave_instance_list = [] %}
{%- set extra_slave_instance_list = slapparameter_dict.get('extra_slave_instance_list') %}
{%- if extra_slave_instance_list %}
{#- Create slaves to process with setting up defaults #}
{%- for slave in sorted(json_module.loads(extra_slave_instance_list)) %}
{%- if 'check-status-code' not in slave %}
{%- do slave.__setitem__('check-status-code', CONFIGURATION['check-status-code']) %}
{%- endif %}
{%- if 'check-certificate-expiration-days' not in slave %}
{%- do slave.__setitem__('check-certificate-expiration-days', CONFIGURATION['check-certificate-expiration-days']) %}
{%- endif %}
{%- if 'check-frontend-ip' not in slave %}
{%- do slave.__setitem__('check-frontend-ip', CONFIGURATION['check-frontend-ip']) %}
{%- endif %}
{%- if 'url' in slave %}
{%- do slave_instance_list.append(slave) %}
{%- endif %}
{%- endfor %}
{%- endif %}
{%- set part_list = [] %}
{%- for slave in sorted(slave_instance_list) %}
{%- set part_id = 'http-query-' ~ slave['slave_reference'] ~ '-promise' %}
{%- do part_list.append(part_id) %}
{%- set safe_name = part_id.replace('_', '').replace('.', '-').replace(' ', '-') %}
[{{part_id}}]
<= monitor-promise-base
module = check_surykatka_json
name = {{ safe_name }}.py
config-report = http_query
config-url = {{ slave['url'] }}
config-status-code = {{ slave['check-status-code'] }}
config-certificate-expiration-days = {{ slave['check-certificate-expiration-days'] }}
config-ip-list = {{ slave['check-frontend-ip'] }}
config-json-file = ${surykatka-config:json}
{% endfor %}
[surykatka-bot-promise]
<= monitor-promise-base
module = check_surykatka_json
name = surykatka-bot-promise.py
config-report = bot_status
config-json-file = ${surykatka-config:json}
[buildout]
extends = {{ monitor_template_output }}
parts =
cron
cron-entry-surykatka-status
monitor-base
publish-connection-information
surykatka
surykatka-bot-promise
{% for part_id in sorted(part_list) %}
{{ part_id }}
{% endfor %}
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
offline = true
[surykatka-config]
recipe = slapos.recipe.template:jinja2
db = ${directory:srv}/surykatka.db
rendered = ${directory:etc}/surykatka.ini
template = {{ template_surykatka_ini }}
slave_instance_list = {{ dumps(slave_instance_list) }}
nameserver = {{ dumps(CONFIGURATION['nameserver']) }}
json = ${directory:srv}/surykatka.json
context =
import json_module json
key db :db
key nameserver :nameserver
key slave_instance_list :slave_instance_list
[surykatka]
recipe = slapos.cookbook:wrapper
config = ${surykatka-config:rendered}
command-line =
{{ surykatka_binary }} --run crawl --reload --configuration ${:config}
wrapper-path = ${monitor-directory:service}/${:_buildout_section_name_}
hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
[surykatka-status-json]
recipe = slapos.recipe.template:jinja2
template = inline:#!/bin/sh
if {{ surykatka_binary }} --run status --configuration ${surykatka:config} --output json > ${surykatka-config:json}.tmp ; then
mv -f ${surykatka-config:json}.tmp ${surykatka-config:json}
else
rm -f ${surykatka-config:json}.tmp
fi
rendered = ${monitor-directory:bin}/${:_buildout_section_name_}
mode = 0755
[cron-entry-surykatka-status]
recipe = slapos.cookbook:cron.d
cron-entries = ${directory:etc}/cron.d
name = surykatka-status
frequency = */2 * * * *
command = ${surykatka-status-json:rendered}
[publish-connection-information]
recipe = slapos.cookbook:publish.serialised
monitor-base-url = ${monitor-publish-parameters:monitor-base-url}
monitor-url = ${monitor-publish-parameters:monitor-url}
monitor-user = ${monitor-publish-parameters:monitor-user}
monitor-password = ${monitor-publish-parameters:monitor-password}
[monitor-instance-parameter]
monitor-httpd-port = {{ slapparameter_dict['monitor-httpd-port'] }}
cors-domains = {{ slapparameter_dict.get('monitor-cors-domains', 'monitor.app.officejs.com') }}
{% if slapparameter_dict.get('monitor-username', '') -%}
username = {{ slapparameter_dict['monitor-username'] }}
{% endif -%}
{% if slapparameter_dict.get('monitor-password', '') -%}
password = {{ slapparameter_dict['monitor-password'] }}
{% endif -%}
interface-url = {{ slapparameter_dict.get('monitor-interface-url', 'https://monitor.app.officejs.com') }}
[monitor-directory]
service = ${buildout:directory}/etc/service
var = ${buildout:directory}/var
srv = ${buildout:directory}/srv
server-log = ${:private}/server-log
monitor-log = ${:private}/monitor-log
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = ${slap-connection:computer-id}
partition = ${slap-connection:partition-id}
url = ${slap-connection:server-url}
key = ${slap-connection:key-file}
cert = ${slap-connection:cert-file}
[slap-parameter]
{%- endif %}
{%- if slap_software_type == software_type %}
[jinja2-template-base]
recipe = slapos.recipe.template:jinja2
rendered = ${buildout:directory}/${:filename}
extra-context =
context =
import json_module json
${:extra-context}
[slave-test-configuration]
<=jinja2-template-base
template = {{ template_json_edgetest_test }}
filename = srv/monitor/private/test.json
extensions = jinja2.ext.do
extra-context =
section slave_information slap-configuration
{% set part_list = [] -%}
# Publish information for each slave
{%- set edgebot_software_type = 'edgebot' %}
{%- set edgebot_quantity = slapparameter_dict.pop('edgebot-quantity', '1') | int %}
{%- set edgebot_list = [] %}
{%- set edgebot_section_list = [] %}
{%- set slave_list_name = 'extra_slave_instance_list' %}
{%- set request_dict = {} %}
{%- set namebase = "edgebot" %}
{%- set authorized_slave_list = [] %}
{%- set monitor_base_url_dict = {} -%}
{%- for slave in sorted(slave_instance_list) %}
{%- do authorized_slave_list.append(slave) %}
{%- endfor %}
{%- set monitor_base_port = int(slap_configuration['configuration.monitor-base-port']) %}
{%- for i in range(1, edgebot_quantity + 1) %}
{%- set edgebot_name = "%s-%s" % (namebase, i) %}
{%- set request_section_title = 'request-%s' % edgebot_name %}
{%- do edgebot_list.append(edgebot_name) %}
{%- do edgebot_section_list.append(request_section_title) %}
{%- do part_list.append(request_section_title) %}
{%- do request_dict.__setitem__(request_section_title,
{
'config': {'monitor-httpd-port': monitor_base_port + i},
'name': edgebot_name,
'sla': {},
'state': 'started',
}) %}
{%- endfor %}
[replicate]
<= slap-connection
recipe = slapos.cookbook:request.serialised
config-monitor-cors-domains = {{ slapparameter_dict.get('monitor-cors-domains', 'monitor.app.officejs.com') }}
config-monitor-username = ${monitor-instance-parameter:username}
config-monitor-password = ${monitor-htpasswd:passwd}
software-url = ${slap-connection:software-release-url}
software-type = {{edgebot_software_type}}
return = monitor-base-url
{% for section, edgebot_request in request_dict.iteritems() %}
[{{section}}]
<= replicate
name = {{ edgebot_request.get('name') }}
{%- if edgebot_request.get('state') %}
state = {{ edgebot_request.get('state') }}
{%- endif%}
{%- set slave_configuration_dict = slapparameter_dict %}
{%- do slave_configuration_dict.update(edgebot_request.get('config')) %}
{%- do slave_configuration_dict.__setitem__(slave_list_name, json_module.dumps(authorized_slave_list)) %}
{%- for config_key, config_value in slave_configuration_dict.iteritems() %}
config-{{ config_key }} = {{ dumps(config_value) }}
{% endfor -%}
{%- if edgebot_request.get('sla') %}
{%- for parameter, value in edgebot_request.get('sla').iteritems() %}
sla-{{ parameter }} = {{ value }}
{%- endfor %}
{%- else %}
# As no SLA was provided, by default it is requested on the same computer
sla-computer_guid = ${slap-connection:computer-id}
{% endif %}
{%- do monitor_base_url_dict.__setitem__(section, '${' ~ section ~ ':connection-monitor-base-url}') -%}
{%- endfor %}
{%- set directory_list = [] -%}
{%- for slave_instance in slave_instance_list -%}
{%- set publish_section_title = 'publish-%s' % slave_instance.get('slave_reference') -%}
{%- do part_list.append(publish_section_title) %}
[{{ publish_section_title }}]
recipe = slapos.cookbook:publish
-slave-reference = {{ slave_instance.get('slave_reference') }}
{% endfor %}
[monitor-conf-parameters]
monitor-title = Monitor
password = ${monitor-htpasswd:passwd}
[monitor-base-url-dict]
{% for key, value in monitor_base_url_dict.items() -%}
{{ key }} = {{ value }}
{% endfor %}
[buildout]
extends = {{ instance_base_monitor }}
parts +=
slave-test-configuration
{% for part in part_list %}
{{ ' %s' % part }}
{%- endfor %}
{%- endif %}
......@@ -57,14 +57,15 @@ recipe = slapos.cookbook:generate.password
user = admin
bytes = 16
[monitor-instance-parameter]
monitor-httpd-port = {{ slap_configuration['configuration.monitor-base-port'] }}
[monitor-directory]
service = ${buildout:directory}/etc/service
var = ${buildout:directory}/var
srv = ${buildout:directory}/srv
server-log = ${:private}/server-log
monitor-log = ${:private}/monitor-log
cache = ${:var}/cache
mod-ssl = ${:cache}/httpd_mod_ssl
system-log = ${:private}/system-log
consumption = ${:log}/consumption
......
......@@ -8,38 +8,65 @@ develop-eggs-directory = ${buildout:develop-eggs-directory}
[switch_softwaretype]
recipe = slapos.cookbook:softwaretype
default = $${instance-base-monitor:rendered}
distributor = $${instance-base-distributor:rendered}
edgetest = $${instance-base-edgetest:rendered}
edgebot = $${instance-base-edgebot:rendered}
[instance-base-monitor]
recipe = slapos.recipe.template:jinja2
template = ${template-monitor:destination}
template = ${template-monitor:target}
rendered = $${buildout:directory}/template-base-monitor.cfg
extensions = jinja2.ext.do
context = key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key slapparameter_dict slap-configuration:configuration
section slap_configuration slap-configuration
raw buildout_bin ${buildout:bin-directory}
raw monitor_template_output ${monitor-template:output}
raw network_benck_cfg_output ${network-bench-cfg:output}
raw monitor_collect_csv_dump ${monitor-collect-csv-dump:output}
mode = 0644
[instance-base-distributor]
[instance-base-edgetest]
recipe = slapos.recipe.template:jinja2
template = ${template-monitor-distributor:destination}
rendered = $${buildout:directory}/template-monitor-base-distributor.cfg
template = ${template-monitor-edgetest:target}
rendered = $${buildout:directory}/template-monitor-base-edgetest.cfg
extensions = jinja2.ext.do
context = import json_module json
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key slapparameter_dict slap-configuration:configuration
key slap_software_type slap-configuration:slap-software-type
section slap_configuration slap-configuration
raw software_type edgetest
key instance_base_monitor instance-base-monitor:rendered
key slave_instance_list slap-configuration:slave-instance-list
raw buildout_bin ${buildout:bin-directory}
raw template_json_distributor_test ${json-test-template:destination}
raw wget_bin ${wget:location}/bin/wget
raw template_json_edgetest_test ${json-test-template:target}
mode = 0644
[instance-base-edgebot]
recipe = slapos.recipe.template:jinja2
template = ${template-monitor-edgebot:target}
rendered = $${buildout:directory}/template-monitor-edgebot.cfg
extensions = jinja2.ext.do
surykatka-binary = ${surykatka:executable}
template-surykatka-ini = ${template-surykatka-ini:target}
context = import json_module json
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
section slap_configuration slap-configuration
key slapparameter_dict slap-configuration:configuration
key slap_software_type slap-configuration:slap-software-type
raw software_type edgebot
key surykatka_binary :surykatka-binary
key template_surykatka_ini :template-surykatka-ini
raw buildout_bin ${buildout:bin-directory}
raw monitor_template_output ${monitor-template:output}
raw monitor_collect_csv_dump ${monitor-collect-csv-dump:output}
mode = 0644
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = $${slap-connection:computer-id}
......@@ -47,3 +74,13 @@ partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
# Defaults
configuration.check-status-code = 200
configuration.nameserver =
configuration.check-frontend-ip =
configuration.check-certificate-expiration-days = 15
# use monitor-base-port to have the monitor listen on a different port on each
# instance, and also on a different port than the other services;
# this makes it possible to instantiate it correctly on a single IP, for
# example in the case of webrunner
configuration.monitor-base-port = 9700
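As an aside (editorial illustration, not part of the profile): instance-monitor-edgetest.cfg.jinja2 derives one monitor port per requested edgebot from this base port, roughly equivalent to the following Python sketch (the quantity value is illustrative):

    monitor_base_port = 9700      # configuration.monitor-base-port above
    edgebot_quantity = 2          # edgebot-quantity parameter (illustrative value)
    for i in range(1, edgebot_quantity + 1):
        edgebot_name = 'edgebot-%s' % i
        # each requested edgebot gets its own monitor-httpd-port, so
        # several monitor instances can coexist on a single IP
        print(edgebot_name, 'monitor-httpd-port =', monitor_base_port + i)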
......@@ -4,17 +4,16 @@ extends =
buildout.hash.cfg
../../component/pycurl/buildout.cfg
../../component/python-cryptography/buildout.cfg
../../component/wget/buildout.cfg
../../component/surykatka/buildout.cfg
../../stack/monitor/buildout.cfg
../../stack/slapos.cfg
parts =
wget
slapos-cookbook
network-bench-cfg
json-test-template
template
template-monitor-distributor
template-monitor-edgetest
template-monitor
monitor-collect-csv-dump
......@@ -27,26 +26,31 @@ mode = 0644
[template-monitor]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_update_hash_filename_}
destination = ${buildout:directory}/template-base-monitor.cfg
mode = 0644
[template-monitor-distributor]
[template-monitor-edgetest]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_update_hash_filename_}
destination = ${buildout:directory}/template-monitor-base-distributor.cfg
mode = 0644
[template-monitor-edgebot]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_update_hash_filename_}
mode = 0644
[template-surykatka-ini]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_update_hash_filename_}
[json-test-template]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_update_hash_filename_}
destination = ${buildout:directory}/json-test-template.json.in.jinja2
mode = 0644
[network-bench-cfg]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/network_bench.cfg.in
md5sum = cfcbf2002b8eff5153e2bf68ed24b720
output = ${buildout:directory}/template-network-bench-cfg.in
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:parts-directory}/${:_buildout_section_name_}
mode = 0644
[monitor-collect-csv-dump]
......@@ -54,7 +58,6 @@ mode = 0644
url = ${:_profile_base_location_}/script/${:filename}
filename = collect_csv_dump.py
output = ${:destination}/${:filename}
md5sum = cad2402bbd21907cfed6bc5af8c5d3ab
[extra-eggs]
<= monitor-eggs
......@@ -83,6 +86,5 @@ eggs +=
hexagonit.recipe.download
plone.recipe.command
[versions]
slapos.recipe.template = 4.3
{
"name": "Monitor",
"description": "Software release for Monitoring purpose",
"serialisation": "xml",
"software-type": {
"default": {
"title": "Default",
"description": "Standalone Monitor",
"request": "instance-default-input-schema.json",
"response": "instance-default-output-schema.json",
"index": 0
},
"edgetest": {
"title": "Edge Test",
"description": "Cluster of bots to perform a distributed monitoring ",
"request": "instance-edgetest-input-schema.json",
"response": "instance-default-output-schema.json",
"index": 1
},
"edgetest-slave": {
"title": "Edge Test Slave",
"software-type": "edgetest",
"description": "Cluster of bots to perform a distributed monitoring ",
"request": "instance-edgetest-slave-input-schema.json",
"response": "instance-default-output-schema.json",
"index": 2
}
}
}
[SURYKATKA]
INTERVAL = 120
SQLITE = {{ db }}
{%- set nameserver_list = nameserver.split() %}
{%- if len(nameserver_list) > 0 %}
NAMESERVER =
{%- for nameserver_entry in sorted(nameserver_list) %}
{{ nameserver_entry }}
{%- endfor %}
{% endif %}
URL =
{%- for slave in sorted(slave_instance_list) %}
{%- if 'url' in slave %}
{{ slave['url'] }}
{%- endif -%}
{% endfor %}
......@@ -14,4 +14,4 @@
# not need these here).
[template-instance]
filename = instance.cfg
md5sum = ae26da39d6a156a6164782086d6aa00d
md5sum = aa91245be4b946d0537f6e007f0d72ea