Commit e3318a1e authored by Rafael Monnerat

Update Release Candidate

parents 2af51b35 958a5fe1
......@@ -4,8 +4,8 @@ parts = attr
[attr]
recipe = slapos.recipe.cmmi
url = ftp://ftp.igh.cnrs.fr/pub/nongnu/attr/attr-2.4.46.src.tar.gz
md5sum = db557c17fdfa4f785333ecda08654010
url = http://download.savannah.gnu.org/releases/attr/attr-2.4.47.src.tar.gz
md5sum = 84f58dec00b60f2dc8fd1c9709291cc7
configure-options =
--enable-shared=yes
--enable-gettext=no
......
......@@ -62,7 +62,7 @@ script =
download = lambda x, dl=Download(self.buildout['buildout']): (
dl("http://downloads.sf.net/corefonts/%%s32.exe" %% name, md5sum=md5sum)
for md5sum, name in (x.split() for x in x.splitlines() if x))
extract = lambda x, d=d, p7z="${p7zip:location}/bin/7za": any(
extract = lambda x, d=d, p7z="${p7zip:location}/bin/7z": any(
subprocess.check_call((p7z, "x", "-ssc-", path, "*.ttf"), cwd=d)
for path, is_temp in x)
try: fonts += download(self.options['fonts']); os.makedirs(d); extract(fonts)
......
[buildout]
extends =
../dash/buildout.cfg
../curl/buildout.cfg
parts +=
onlyoffice-x2t
[onlyoffice-x2t]
recipe = slapos.recipe.build
url = https://lab.nexedi.com/tc/bin/raw/fc3af671d3b19e9d25b40326373222b601f23edc/onlyoffice-x2t-part.tar.gz
md5sum = 3e08a8b1345c301078cdce3a7f7360b2
# script to install.
script =
location = %(location)r
self.failIfPathExists(location)
import sys
extract_dir = self.extract(self.download(self.options['url'], self.options.get('md5sum')))
shutil.move(extract_dir, location)
wrapper_location = os.path.join("%(location)s", "x2t")
wrapper = open(wrapper_location, 'w')
wrapper.write('''#!${dash:location}/bin/dash
cd %(location)s
export LD_LIBRARY_PATH=%(location)s/lib:${curl:location}/lib
exec %(location)s/bin/x2t "$@"''')
wrapper.close()
os.chmod(wrapper_location, 0755)
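For illustration only: once buildout substitutes ${dash:location}, ${curl:location} and %(location)s, the generated wrapper is a small dash script along these lines (the part paths below are hypothetical):

    #!/srv/slapgrid/parts/dash/bin/dash
    cd /srv/slapgrid/parts/onlyoffice-x2t
    export LD_LIBRARY_PATH=/srv/slapgrid/parts/onlyoffice-x2t/lib:/srv/slapgrid/parts/curl/lib
    exec /srv/slapgrid/parts/onlyoffice-x2t/bin/x2t "$@"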
......@@ -8,5 +8,7 @@ url = http://downloads.sf.net/project/p7zip/p7zip/${:version}/p7zip_${:version}_
version = 9.38.1
md5sum = 6cba8402ccab2370d3b70c5e28b3d651
configure-command = rm -r DOC
make-binary =
make-options =
make-targets = make -j1 7z install
DEST_HOME=${buildout:parts-directory}/${:_buildout_section_name_}
[buildout]
extends =
../../component/gnutls/buildout.cfg
../../component/libpng/buildout.cfg
../../component/libuuid/buildout.cfg
../../component/pkgconfig/buildout.cfg
../../component/xorg/buildout.cfg
../../component/zlib/buildout.cfg
../../component/libaio/buildout.cfg
../attr/buildout.cfg
../gnutls/buildout.cfg
../libaio/buildout.cfg
../libcap/buildout.cfg
../libpng/buildout.cfg
../libuuid/buildout.cfg
../ncurses/buildout.cfg
../pkgconfig/buildout.cfg
../xorg/buildout.cfg
../zlib/buildout.cfg
# XXX Change all references to the kvm section to the qemu section, then
# use qemu as the main section name.
......@@ -25,30 +28,46 @@ configure-options =
--disable-sdl
--disable-xen
--disable-vnc-sasl
--disable-curses
--disable-curl
--enable-kvm
--enable-linux-aio
--enable-virtfs
--disable-docs
--enable-vnc
--enable-vnc-png
--disable-vnc-jpeg
--extra-cflags="-I${gnutls:location}/include -I${libuuid:location}/include -I${zlib:location}/include -I${libpng:location}/include -I${libaio:location}/include"
--extra-ldflags="-Wl,-rpath -Wl,${glib:location}/lib -L${glib:location}/lib -Wl,-rpath -Wl,${gnutls:location}/lib -L${gnutls:location}/lib -Wl,-rpath -Wl,${gpg-error:location}/lib -L${gpg-error:location}/lib -L${gettext:location}/lib -Wl,-rpath -Wl,${gettext:location}/lib -Wl,-rpath -Wl,${libpng:location}/lib -L${libpng:location}/lib -L${libuuid:location}/lib -Wl,-rpath -Wl,${libuuid:location}/lib -L${libaio:location}/lib -Wl,-rpath=${libaio:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -lpng -lz -lgnutls"
--extra-cflags="-I${gnutls:location}/include -I${libuuid:location}/include -I${ncurses:location}/include -I${zlib:location}/include -I${libpng:location}/include -I${libaio:location}/include -I${attr:location}/include -I${libcap:location}/usr/include"
--extra-ldflags="-Wl,-rpath -Wl,${glib:location}/lib -L${glib:location}/lib -Wl,-rpath -Wl,${gnutls:location}/lib -L${gnutls:location}/lib -Wl,-rpath -Wl,${gpg-error:location}/lib -L${gpg-error:location}/lib -L${gettext:location}/lib -Wl,-rpath -Wl,${gettext:location}/lib -Wl,-rpath -Wl,${libpng:location}/lib -L${libpng:location}/lib -L${libuuid:location}/lib -Wl,-rpath -Wl,${libuuid:location}/lib -L${libaio:location}/lib -Wl,-rpath=${libaio:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -lpng -lz -lgnutls -L${attr:location}/lib -Wl,-rpath=${attr:location}/lib -L${libcap:location}/lib -Wl,-rpath=${libcap:location}/lib"
--disable-werror
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig:${gnutls:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig:${pixman:location}/lib/pkgconfig
LDFLAGS=-L${pixman:location}/lib -Wl,-rpath=${pixman:location}/lib
[debian-amd64-netinst.iso]
# Download the installer of Debian 8 (Jessie)
[debian-netinst-base]
recipe = hexagonit.recipe.download
ignore-existing = true
filename = ${:_buildout_section_name_}
url = http://cdimage.debian.org/debian-cd/${:version}/amd64/iso-cd/debian-${:version}-amd64-netinst.iso
version = 8.6.0
md5sum = e9f61bf327db6d8f7cee05a99f2353cc
url = http://cdimage.debian.org/cdimage/${:release}/${:arch}/iso-cd/debian-${:version}-${:arch}-netinst.iso
release = archive/${:version}
download-only = true
mode = 0644
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[debian-amd64-netinst-base]
<= debian-netinst-base
arch = amd64
[debian-amd64-netinst.iso]
# Download the installer of Debian 8 (Jessie)
<= debian-amd64-netinst-base
release = release/current
version = 8.6.0
md5sum = e9f61bf327db6d8f7cee05a99f2353cc
[debian-amd64-testing-netinst.iso]
# Download the installer of Debian Stretch
<= debian-amd64-netinst-base
release = stretch_di_alpha7
version = stretch-DI-alpha7
md5sum = 3fe53635b904553b26588491e1473e99
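For illustration, the macro chain above expands the jessie entry's download URL to http://cdimage.debian.org/cdimage/release/current/amd64/iso-cd/debian-8.6.0-amd64-netinst.iso, and the stretch alpha entry's to http://cdimage.debian.org/cdimage/stretch_di_alpha7/amd64/iso-cd/debian-stretch-DI-alpha7-amd64-netinst.iso.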
......@@ -27,7 +27,7 @@ script =
shutil.move(download_file, auto_extract_bin)
os.chmod(auto_extract_bin, 0755)
subprocess.call([auto_extract_bin])
self.cleanup_dir_list.append(extract_dir)
self.cleanup_list.append(extract_dir)
workdir = guessworkdir(extract_dir)
import pdb; pdb.set_trace()
self.copyTree(os.path.join(workdir, "jre1.6.0_27"), "%(location)s")
......
......@@ -7,15 +7,15 @@ extends =
parts = rina-tools
[rina-tools-repository]
[irati-stack]
recipe = slapos.recipe.build:gitclone
repository = https://github.com/jmuchemb/irati-stack.git
revision = 47e2b6a5f32d1090ec75206ccdb797b78a1a3330
revision = af3cd8350ec43ab2d309c785daf0dd16d3835005
git-executable = ${git:location}/bin/git
[rina-tools]
recipe = slapos.recipe.cmmi
path = ${rina-tools-repository:location}/rina-tools
path = ${irati-stack:location}/rina-tools
pre-configure =
autoreconf -fisv -I ${libtool:location}/share/aclocal -I ${pkgconfig:location}/share/aclocal
environment =
......
......@@ -36,11 +36,6 @@ extensions +=
show-picked-versions = true
# separate from system python
include-site-packages = false
exec-sitecustomize = false
allowed-eggs-from-site-packages =
[environment]
# Note: For now original PATH is appended to the end, as not all tools are
# provided by SlapOS
......
[buildout]
extends =
../file/buildout.cfg
../openssh/buildout.cfg
../p7zip/buildout.cfg
../qemu-kvm/buildout.cfg
parts = vm-debian
[vm-run-environment]
PATH = ${openssh:location}/bin:${qemu:location}/bin:%(PATH)s
[vm-install-environment]
PATH = ${file:location}/bin:${p7zip:location}/bin:${vm-run-environment:PATH}
[vm-run-base]
recipe = slapos.recipe.build:vm.run
environment = vm-run-environment
vm = ${vm-debian:location}
dist = ${vm-debian:dists}
[vm-debian]
recipe = slapos.recipe.build:vm.install-debian
environment = vm-install-environment
dists = debian-jessie
size = 1Gi
late-command =
# rdnssd causes too much trouble with recent QEMU, because the latter acts as
# a DNS proxy on both IPv4 and IPv6 without translating queries to what the
# host supports.
dpkg -P rdnssd
debconf.debconf =
debconf/frontend noninteractive
debconf/priority critical
# minimal size
preseed.recommends = false
preseed.tasks =
packages = localepurge ssh
[debian-jessie]
x86_64.iso = debian-amd64-netinst.iso
x86_64.kernel = install.amd/vmlinuz
x86_64.initrd = install.amd/initrd.gz
[debian-stretch]
<= debian-jessie
x86_64.iso = debian-amd64-testing-netinst.iso
......@@ -57,14 +57,11 @@ setup(name=name,
install_requires=[
'jsonschema',
'hexagonit.recipe.download',
'lxml', # for full blown python interpreter
'netaddr', # to manipulate on IP addresses
'setuptools', # namespaces
'inotifyx', # to watch filesystem changes (used in lockfile)
'lock_file', #another lockfile implementation for multiprocess
'slapos.core', # uses internally
# 'slapos.toolbox', # needed for libcloud, cloudmgr, disabled for now
'xml_marshaller', # need to communication with slapgrid
'zc.buildout', # plays with buildout
'zc.recipe.egg', # for scripts generation
'pytz', # for timezone database
......
......@@ -156,47 +156,15 @@ class Request(Recipe):
return path_list
def _checkCertificateKeyConsistency(self, key, certificate, ca=""):
def _checkCertificateKeyConsistency(self, key, certificate):
openssl_binary = self.options.get('openssl-binary', 'openssl')
tmpdir = tempfile.mkdtemp()
with open(tmpdir + "/ca", "w") as f:
f.write(ca)
with open(tmpdir + "/key", "w") as f:
f.write(key)
with open(tmpdir + "/cert", "w") as f:
f.write(certificate)
try:
# Simple test if the user/certificates are readable and don't raise
popenCommunicate([openssl_binary, 'x509', '-noout', '-text', '-in', tmpdir + "/cert"])
popenCommunicate([openssl_binary, 'rsa', '-noout', '-text', '-in', tmpdir + "/key"])
# Get md5 to check if the key and certificate matches
modulus_cert = popenCommunicate([openssl_binary, 'x509', '-noout', '-modulus', '-in', tmpdir + "/cert"])
modulus_key = popenCommunicate([openssl_binary, 'rsa', '-noout', '-modulus', '-in', tmpdir + "/key"])
md5sum_cert = popenCommunicate([openssl_binary, 'md5'], modulus_cert)
md5sum_key = popenCommunicate([openssl_binary, 'md5'], modulus_key)
if md5sum_cert != md5sum_key:
raise ValueError("The key and certificate provided don't patch each other. Please check your parameters")
except:
try:
file_list = [tmpdir + "/ca", tmpdir + "/key", tmpdir + "/cert"]
for f in file_list:
if os.path.exists(f):
os.unlink(f)
if os.path.exists(tmpdir):
os.rmdir(tmpdir)
except:
# do not raise during cleanup
pass
raise
else:
pass
# Simple check that the key and certificate are readable and don't raise
popenCommunicate((openssl_binary, 'x509', '-noout', '-text'), certificate)
popenCommunicate((openssl_binary, 'rsa', '-noout', '-text'), key)
# Check if the key and certificate match
modulus_cert = popenCommunicate((openssl_binary, 'x509', '-noout', '-modulus'), certificate)
modulus_key = popenCommunicate((openssl_binary, 'rsa', '-noout', '-modulus'), key)
if modulus_cert != modulus_key:
raise ValueError("The key and certificate provided don't patch each other. Please check your parameters")
......@@ -26,6 +26,8 @@
##############################################################################
import os
from random import randint
from slapos.recipe.librecipe import GenericBaseRecipe
from zc.buildout import UserError
......@@ -54,21 +56,36 @@ class Recipe(GenericBaseRecipe):
class Part(GenericBaseRecipe):
def install(self):
try:
periodicity = self.options['frequency']
except KeyError:
periodicity = self.options['time']
def _options(self, options):
periodicity = None
if options.get('frequency', '') != '':
periodicity = options['frequency']
elif 'time' in options:
periodicity = options['time']
try:
periodicity = systemd_to_cron(periodicity)
except Exception:
raise UserError("Invalid systemd calendar spec %r" % periodicity)
if periodicity is None and self.isTrueValue(options.get('once-a-day', False)):
# Migration code, to force a random value for already instantiated software
previous_periodicity = self.getValueFromPreviousRun(self.name, 'periodicity')
if previous_periodicity in ("0 0 * * *", '', None):
periodicity = "%d %d * * *" % (randint(0, 59), randint(0, 23))
else:
periodicity = previous_periodicity
if periodicity is None:
raise UserError("Missing one of 'frequency', 'once-a-day' or 'time' parameter")
options['periodicity'] = periodicity
def install(self):
cron_d = self.options['cron-entries']
name = self.options['name']
filename = os.path.join(cron_d, name)
with open(filename, 'w') as part:
part.write('%s %s\n' % (periodicity, self.options['command']))
part.write('%s %s\n' % (self.options['periodicity'], self.options['command']))
return [filename]
......@@ -138,4 +155,3 @@ def systemd_to_cron(spec):
continue
raise ValueError
return ' '.join(spec)
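For illustration, with hypothetical options frequency = '0 1 * * *', name = 'backup' and command = '/srv/backup.sh', the install step above writes a one-line crontab fragment to <cron-entries>/backup:

    0 1 * * * /srv/backup.sh

When only once-a-day is set, the minute and hour come from randint, and the generated value is then reused on later runs via getValueFromPreviousRun.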
......@@ -26,6 +26,7 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import ConfigParser
import io
import logging
import os
......@@ -155,9 +156,11 @@ class GenericBaseRecipe(object):
lines.append(dedent("""
# Check for other instances
pidfile=%s
if pid=`pgrep -F $pidfile -f "$COMMAND" 2>/dev/null`; then
echo "Already running with pid $pid."
exit 1
if [ -s $pidfile ]; then
if pid=`pgrep -F $pidfile -f "$COMMAND" 2>/dev/null`; then
echo "Already running with pid $pid."
exit 1
fi
fi
echo $$ > $pidfile""" % shlex.quote(pidfile)))
......@@ -280,3 +283,18 @@ class GenericBaseRecipe(object):
except:
shutil.rmtree(destination)
raise
def getValueFromPreviousRun(self, section, parameter):
"""
Returns the value of a parameter from a previous run, if it exists.
Otherwise, returns None
"""
if os.path.exists(self.buildout['buildout']['installed']):
with open(self.buildout['buildout']['installed']) as config_file:
try:
parser = ConfigParser.RawConfigParser()
parser.readfp(config_file)
return parser.get(section, parameter)
except:
pass
return None
\ No newline at end of file
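For reference, buildout's .installed.cfg read by this helper is an INI file with one section per installed part; after a previous run the cron entry above might contain something like (value invented for illustration):

    [cron-entry-test]
    periodicity = 37 14 * * *

in which case getValueFromPreviousRun('cron-entry-test', 'periodicity') returns '37 14 * * *'.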
......@@ -247,7 +247,7 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
print 'Processing PBS slave %s with type %s' % (slave_id, slave_type)
promise_path = os.path.join(self.options['promises-directory'], slave_id)
promise_path = os.path.join(self.options['promises-directory'], "ssh-to-%s" % slave_id)
promise_dict = dict(ssh_client=self.options['sshclient-binary'],
user=parsed_url.username,
host=parsed_url.hostname,
......
......@@ -160,9 +160,10 @@ class Recipe(object):
# Note: SlapOS Master does not support it for slave instances
if not slave:
try:
options['instance-guid'] = self.instance.getInstanceGuid()
options['instance-guid'] = self.instance.getInstanceGuid() \
.encode('UTF-8')
# XXX: deprecated, to be removed
options['instance_guid'] = self.instance.getInstanceGuid()
options['instance_guid'] = options['instance-guid']
options['instance-state'] = self.instance.getState()
options['instance-status'] = self.instance.getStatus()
except (slapmodule.ResourceNotReady, AttributeError):
......
......@@ -155,7 +155,8 @@ class Recipe(object):
'UNKNOWN Instance').encode('UTF-8')
options['root-instance-title'] = parameter_dict.pop('root_instance_title',
'UNKNOWN').encode('UTF-8')
options['instance-guid'] = computer_partition.getInstanceGuid()
options['instance-guid'] = computer_partition.getInstanceGuid() \
.encode('UTF-8')
ipv4_set = set()
v4_add = ipv4_set.add
......
......@@ -35,6 +35,7 @@ import slapos.slap
import netaddr
import logging
import errno
import re
import zc.buildout
......@@ -59,14 +60,16 @@ class SlapConfigParser(ConfigParser, object):
if sys.version_info[0] > 2:
return super(SlapConfigParser, self).write(fp)
regex = re.compile(r'^(.*)\s+([+-]{1})$')
if self._defaults:
fp.write("[%s]\n" % DEFAULTSECT)
for (key, value) in self._defaults.items():
if key.endswith(" +") or key.endswith(" -"):
line = "%s += %s\n" % (key.replace(' +', '').replace(' -', ''),
str(value).replace('\n', '\n\t'))
else:
line = "%s = %s\n" % (key, str(value).replace('\n', '\n\t'))
op = ""
result = regex.match(key)
if result is not None:
key, op = result.groups()
line = "%s %s= %s\n" % (key, op, str(value).replace('\n', '\n\t'))
fp.write(line)
fp.write("\n")
for section in self._sections:
......@@ -75,11 +78,12 @@ class SlapConfigParser(ConfigParser, object):
if key == "__name__":
continue
if (value is not None) or (self._optcre == self.OPTCRE):
if key.endswith(" +") or key.endswith(" -"):
key = " += ".join((key.replace(' +', '').replace(' -', ''),
str(value).replace('\n', '\n\t')))
else:
key = " = ".join((key, str(value).replace('\n', '\n\t')))
op = ""
result = regex.match(key)
if result is not None:
key, op = result.groups()
key = "%s %s= %s" % (key, op, str(value).replace('\n', '\n\t'))
fp.write("%s\n" % key)
fp.write("\n")
......
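The rewrite replaces the endswith()-based handling with a single regex in both the defaults loop and the per-section loop; a small standalone sketch of the resulting behavior (keys and values invented):

    import re

    # Keys stored as "<name> +" or "<name> -" become "+=" / "-=" assignments.
    regex = re.compile(r'^(.*)\s+([+-]{1})$')

    for key, value in (('parts +', 'extra-part'), ('eggs -', 'obsolete'), ('parts', 'plain')):
        op = ''
        result = regex.match(key)
        if result is not None:
            key, op = result.groups()
        print('%s %s= %s' % (key, op, value))
    # -> parts += extra-part
    # -> eggs -= obsolete
    # -> parts = plain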
import os
import sys
import unittest
from textwrap import dedent
from slapos.recipe import dcron
from slapos.recipe.dcron import systemd_to_cron
class TestDcron(unittest.TestCase):
......@@ -36,3 +40,72 @@ class TestDcron(unittest.TestCase):
_("1-0"); _("1-32"); _("1-14/18")
_("24:0"); _("8/16:0")
_("0:60"); _("0:15/45")
def setUp(self):
self.installed_file = './.installed.cfg'
def tearDown(self):
if os.path.exists(self.installed_file):
os.unlink(self.installed_file)
def new_recipe(self, extra_options=None, **kw):
buildout = {
'buildout': {
'bin-directory': '',
'find-links': '',
'allow-hosts': '',
'develop-eggs-directory': '',
'eggs-directory': '',
'python': 'testpython',
'installed': '.installed.cfg',
},
'testpython': {
'executable': sys.executable,
},
'slap-connection': {
'computer-id': '',
'partition-id': '',
'server-url': '',
'software-release-url': '',
}
}
options = {
'cron-entries': '.cron',
'name': 'test',
'command': 'true',
}
if isinstance(extra_options, dict):
options.update(extra_options)
options.update(kw)
return dcron.Part(buildout=buildout, name='cron-entry-test', options=options)
def test_onceADayIsOverwrittenIfGivenFrequency(self):
parameter_dict = {'once-a-day': True}
recipe = self.new_recipe(parameter_dict)
random_periodicity = recipe.options['periodicity']
parameter_dict['frequency'] = '0 1 * * *'
recipe = self.new_recipe(parameter_dict)
new_periodicity = recipe.options['periodicity']
self.assertEqual(new_periodicity, '0 1 * * *')
self.assertNotEqual(random_periodicity, new_periodicity)
def test_periodicityNeverChangeIfOnceADay(self):
parameter_dict = {'once-a-day': True}
periodicity = None
for _ in range(5):
recipe = self.new_recipe(parameter_dict)
recipe_periodicity = recipe.options['periodicity']
if periodicity is not None:
self.assertEqual(periodicity, recipe_periodicity)
else:
periodicity = recipe_periodicity
with open(recipe.buildout['buildout']['installed'], 'w') as file:
file.write(dedent("""
[cron-entry-test]
periodicity = %s
""" % periodicity))
if __name__ == '__main__':
unittest.main()
......@@ -123,7 +123,7 @@ class PBSTest(unittest.TestCase):
recipe._install()
self.assertItemsEqual(os.listdir(promises_directory),
['pulltest', 'pushtest'])
['ssh-to-pulltest', 'ssh-to-pushtest'])
self.assertItemsEqual(os.listdir(wrappers_directory),
['pulltest_raw', 'pulltest', 'pushtest_raw', 'pushtest'])
......
......@@ -69,7 +69,7 @@ mode = 0644
[template-apache-frontend]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-apache-frontend.cfg
md5sum = cc64151e4fe953f15f7ea8cf20718d84
md5sum = 3e52cdd1fba381cdb98b438589d1c4ab
output = ${buildout:directory}/template-apache-frontend.cfg
mode = 0644
......@@ -82,7 +82,7 @@ mode = 0644
[template-slave-list]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/apache-custom-slave-list.cfg.in
md5sum = 8b278b34a4fb063ba94e10186f725fcb
md5sum = ee18e498f2868735e0c0ddf209454c37
mode = 640
[template-slave-configuration]
......@@ -106,7 +106,7 @@ mode = 640
[template-apache-cached-configuration]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/apache_cached.conf.in
md5sum = a1c744e48b465a63c2d6f0f384466013
md5sum = 2c6c09390a9d778aecba75f569fb486c
mode = 640
[template-rewrite-cached]
......@@ -137,13 +137,13 @@ mode = 640
[template-default-slave-virtualhost]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/default-virtualhost.conf.in
md5sum = c22de91b25d0597fecc7082a153b87bc
md5sum = c50959e7c38b307f1a1c274505a7e1c1
mode = 640
[template-cached-slave-virtualhost]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/cached-virtualhost.conf.in
md5sum = b1fd5f2b94f026ccca5ff47167015f23
md5sum = 432e55df3b42243a98b564cca57e2396
mode = 640
[template-log-access]
......
......@@ -23,6 +23,7 @@ parts =
promise-apache-frontend-v6-https
promise-apache-frontend-v6-http
promise-apache-cached
promise-apache-ssl-cached
trafficserver-launcher
trafficserver-reload
......@@ -104,6 +105,7 @@ configuration.extra_slave_instance_list =
configuration.disk-cache-size = 8G
configuration.trafficserver-autoconf-port = 8083
configuration.trafficserver-mgmt-port = 8084
configuration.re6st-verification-url = http://[2001:67c:1254:4::1]/index.html
[frontend-configuration]
template-log-access = ${template-log-access:target}
......@@ -143,6 +145,7 @@ extra-context =
key apache_configuration_directory apache-directory:slave-configuration
key apache_cached_configuration_directory apache-directory:slave-with-cache-configuration
key cached_port apache-configuration:cache-through-port
key ssl_cached_port apache-configuration:ssl-cache-through-port
key http_port instance-parameter:configuration.plain_http_port
key https_port instance-parameter:configuration.port
key public_ipv4 instance-parameter:configuration.public-ipv4
......@@ -224,6 +227,7 @@ extra-context =
key instance_home buildout:directory
key ipv4_addr instance-parameter:ipv4-random
key cached_port apache-configuration:cache-through-port
key ssl_cached_port apache-configuration:ssl-cache-through-port
key server_admin instance-parameter:configuration.server-admin
key protected_path apache-configuration:protected-path
key access_control_string apache-configuration:access-control-string
......@@ -270,9 +274,9 @@ pid-file = $${directory:run}/httpd.pid
protected-path = /
access-control-string = none
cached-rewrite-file = $${directory:etc}/apache_rewrite_cached.txt
frontend-configuration-verification = ${apache-2.2:location}/bin/httpd -Sf $${:frontend-configuration}
frontend-configuration-verification = ${apache-2.2:location}/bin/httpd -Sf $${:frontend-configuration} > /dev/null
frontend-graceful-command = $${:frontend-configuration-verification}; if [ $? -eq 0 ]; then kill -USR1 $(cat $${:pid-file}); fi
cached-configuration-verification = ${apache-2.2:location}/bin/httpd -Sf $${:cached-configuration}
cached-configuration-verification = ${apache-2.2:location}/bin/httpd -Sf $${:cached-configuration} > /dev/null
cached-graceful-command = $${:cached-configuration-verification}; if [ $? -eq 0 ]; then kill -USR1 $(cat $${apache-configuration:cache-pid-file}); fi
# Apache for cache configuration
......@@ -283,6 +287,7 @@ cache-pid-file = $${directory:run}/httpd-cached.pid
# Comunication with ats
cache-port = $${trafficserver-variable:input-port}
cache-through-port = 26011
ssl-cache-through-port = 26012
# Create wrapper for "apachectl conftest" in bin
[configtest]
......@@ -404,7 +409,9 @@ reload-path = $${directory:etc-run}/trafficserver-reload
local-ip = $${instance-parameter:ipv4-random}
input-port = 23432
hostname = $${instance-parameter:configuration.frontend-name}
remap = map / http://$${instance-parameter:ipv4-random}:$${apache-configuration:cache-through-port}
remap = map /HTTPS/ http://$${instance-parameter:ipv4-random}:$${apache-configuration:ssl-cache-through-port}
map / http://$${instance-parameter:ipv4-random}:$${apache-configuration:cache-through-port}
plugin-config = ${trafficserver:location}/libexec/trafficserver/rfc5861.so
cache-path = $${trafficserver-directory:cache-path}
disk-cache-size = $${instance-parameter:configuration.disk-cache-size}
......@@ -534,6 +541,12 @@ path = $${directory:promise}/apache_cached
hostname = $${instance-parameter:ipv4-random}
port = $${apache-configuration:cache-through-port}
[promise-apache-ssl-cached]
recipe = slapos.cookbook:check_port_listening
path = $${directory:promise}/apache_ssl_cached
hostname = $${instance-parameter:ipv4-random}
port = $${apache-configuration:ssl-cache-through-port}
[slap_connection]
# Kept for backward compatibility
computer_id = $${slap-connection:computer-id}
......@@ -600,9 +613,6 @@ context =
[monitor-verify-re6st-connectivity]
recipe = slapos.cookbook:check_url_available
path = $${directory:promise}/re6st-connectivity
url = $${public:re6st-verification-url}
url = $${instance-parameter:configuration.re6st-verification-url}
dash_path = ${dash:location}/bin/dash
curl_path = ${curl:location}/bin/curl
[public]
re6st-verification-url = http://[2001:67c:1254:4::1]/index.html
......@@ -15,6 +15,11 @@
"type": "integer",
"default": 10
},
"re6st-verification-url": {
"title": "Test Verification URL",
"description": "Url to verify if the internet and/or re6stnet is working.",
"type": "string"
},
"-frontend-authorized-slave-string": {
"title": "Authorized Slave String",
"description": "List of SOFTINST-XXX of Slaves, separated by space which is allowed to use custom configuration.",
......
......@@ -10,6 +10,12 @@
"type": "string",
"pattern": "^(http|https|ftp)://"
},
"https-url": {
"title": "HTTPS Backend URL",
"description": "Url of the backend if it is diferent from url parameter",
"type": "string",
"pattern": "^(http|https|ftp)://"
},
"custom_domain": {
"title": "Custom Domain",
"description": "Custom Domain to use for the website",
......@@ -23,7 +29,6 @@
"type": "string",
"default": ""
},
"type": {
"title": "Backend Type",
"description": "Type of slave. If redirect, the slave will redirect to the given url. If zope, the rewrite rules will be compatible with Virtual Host Monster",
......
......@@ -3,6 +3,7 @@
{% set cached_server_dict = {} -%}
{% set part_list = [] -%}
{% set cache_access = "http://%s:%s" % (local_ipv4, cache_port) -%}
{% set ssl_cache_access = "http://%s:%s/HTTPS" % (local_ipv4, cache_port) -%}
{% set TRUE_VALUES = ['y', 'yes', '1', 'true'] -%}
{% set generic_instance_parameter_dict = {'cache_access': cache_access,} -%}
{% set slave_log_dict = {} -%}
......@@ -178,6 +179,7 @@ extra-context =
{% if 'enable_cache' in slave_instance and 'url' in slave_instance and 'domain' in slave_instance -%}
{% do slave_instance.__setitem__('custom_domain', slave_instance.get('domain')) -%}
{% do slave_instance.__setitem__('backend_url', slave_instance.get('url')) -%}
{% do slave_instance.__setitem__('https_backend_url', slave_instance.get('https-url', slave_instance.get('url'))) -%}
{% do cached_server_dict.__setitem__(slave_reference, slave_configuration_section_name) -%}
{% endif -%}
......@@ -208,7 +210,9 @@ apache_custom_https = {{ dumps(apache_custom_https) }}
{% set enable_cache = (('' ~ slave_instance.get('enable_cache', '')).lower() in TRUE_VALUES and slave_instance.get('type', '') != 'redirect') -%}
{% if enable_cache -%}
{% do slave_instance.__setitem__('backend_url', slave_instance.get('url')) -%}
{% do slave_instance.__setitem__('https_backend_url', slave_instance.get('https-url', slave_instance.get('url'))) -%}
{% do slave_instance.__setitem__('url', cache_access) -%}
{% do slave_instance.__setitem__('https-url', ssl_cache_access) -%}
{% do cached_server_dict.__setitem__(slave_reference, slave_configuration_section_name) -%}
{% endif -%}
{% do part_list.append(slave_section_title) -%}
......@@ -251,6 +255,7 @@ extensions = jinja2.ext.do
extra-context =
section slave_parameter {{ slave_configuration_section_name }}
raw cached_port {{ cached_port }}
raw ssl_cached_port {{ ssl_cached_port }}
{{ '\n' }}
{% endfor %}
......
......@@ -8,6 +8,8 @@ DocumentRoot {{ document_root }}
ServerRoot {{ instance_home }}
{{ "Listen %s:%s" % (ipv4_addr, cached_port) }}
{{ "Listen %s:%s" % (ipv4_addr, ssl_cached_port) }}
ServerAdmin {{ server_admin }}
DefaultType text/plain
......
......@@ -2,6 +2,12 @@
# Only accept generic (i.e not Zope) backends on http
<VirtualHost *:{{ cached_port }}>
ServerName {{ slave_parameter.get('custom_domain') }}
{%- if 'server-alias' in slave_parameter -%}
{% set server_alias_list = slave_parameter.get('server-alias', '').split() %}
{%- for server_alias in server_alias_list %}
ServerAlias {{ server_alias }}
{% endfor %}
{%- endif %}
SSLProxyEngine on
{% set ssl_proxy_verify = ('' ~ slave_parameter.get('ssl-proxy-verify', '')).lower() in TRUE_VALUES -%}
......@@ -21,3 +27,31 @@
RewriteRule ^/(.*)$ {{ slave_parameter.get('backend_url', '') }}/$1 [L,P]
</VirtualHost>
<VirtualHost *:{{ ssl_cached_port }}>
ServerName {{ slave_parameter.get('custom_domain') }}
{%- if 'server-alias' in slave_parameter -%}
{% set server_alias_list = slave_parameter.get('server-alias', '').split() %}
{%- for server_alias in server_alias_list %}
ServerAlias {{ server_alias }}
{% endfor %}
{%- endif %}
SSLProxyEngine on
{% set ssl_proxy_verify = ('' ~ slave_parameter.get('ssl-proxy-verify', '')).lower() in TRUE_VALUES -%}
{% if ssl_proxy_verify -%}
{% if 'ssl_proxy_ca_crt' in slave_parameter -%}
SSLProxyCACertificateFile {{ slave_parameter.get('path_to_ssl_proxy_ca_crt', '') }}
{% endif %}
SSLProxyVerify require
#SSLProxyCheckPeerCN on
SSLProxyCheckPeerExpire on
{% endif %}
# Rewrite part
ProxyPreserveHost On
ProxyTimeout 600
RewriteEngine On
RewriteRule ^/(.*)$ {{ slave_parameter.get('https_backend_url', '') }}/$1 [L,P]
</VirtualHost>
......@@ -78,14 +78,14 @@
# First, we check if we have a zope backend server
# If so, let's use Virtual Host Monster rewrite
# We suppose that Apache listens to 443 (even indirectly thanks to things like iptables)
RewriteRule ^/(.*)$ {{ slave_parameter.get('url', '') }}/VirtualHostBase/https//%{SERVER_NAME}:443/{{ slave_parameter.get('path', '') }}/VirtualHostRoot/$1 [L,P]
RewriteRule ^/(.*)$ {{ slave_parameter.get('https-url', '') }}/VirtualHostBase/https//%{SERVER_NAME}:443/{{ slave_parameter.get('path', '') }}/VirtualHostRoot/$1 [L,P]
{% elif slave_parameter.get('type', '') == 'redirect' -%}
RewriteRule (.*) {{slave_parameter.get('url', '')}}$1 [R,L]
RewriteRule (.*) {{ slave_parameter.get('https-url', '')}}$1 [R,L]
{% else -%}
{% if 'default-path' in slave_parameter %}
RewriteRule ^/?$ {{ slave_parameter.get('default-path') }} [R=301,L]
{% endif -%}
RewriteRule ^/(.*)$ {{ slave_parameter.get('url', '') }}/$1 [L,P]
RewriteRule ^/(.*)$ {{ slave_parameter.get('https-url', '') }}/$1 [L,P]
{% endif -%}
</VirtualHost>
......
[buildout]
extends =
../../component/vm-img/debian.cfg
parts =
debuild-rina-tools
test-rina
[vm-run-base]
smp = 4
# estimate the amount of RAM needed depending on the number of CPUs
mem = 2048
apt-sources =
apt-update =
for x in ${:apt-sources}
do echo deb [trusted=yes] file:`map $x` ./
done >>/etc/apt/sources.list
apt-get update
[debuild-rina-base]
<= vm-run-base
mount.rina = ${irati-stack:location}
cd-source =
git clone -snc core.sparseCheckout=true /mnt/rina /rina
cd /rina
echo /${:component} > .git/info/sparse-checkout
git reset --hard
cd ${:component}
# following lines are for debchange
export NAME=`git show -s --format=%cn`
export EMAIL=`git show -s --format=%ce`
dist=${:suite}
[ "$dist" ] && dist=`lsb_release -sc`/$dist || dist=UNRELEASED
prepare =
build-deps =
mk-build-deps -irt 'apt-get -y'
build =
dpkg-buildpackage -uc -b -jauto
finalize =
df
cd ..
mv *.changes *.deb $PARTDIR
cd $PARTDIR
dpkg-scanpackages -m . /dev/null |gzip --best >Packages.gz
r=`apt-ftparchive release .`
echo "$r" > Release
command =
${:apt-update}
${:cd-source}
( ${:prepare}
${:build-deps}
${:build}
) 2>&1 |tee $PARTDIR/build.log
${:finalize}
[debuild-rina-kernel]
<= debuild-rina-base
component = linux
prepare =
debian/dch-snapshot --force-distribution -D $dist
apt-get -y install kernel-wedge quilt python-six
QUILT_PATCHES=$PWD/debian/patches QUILT_PC=.pc quilt push --quiltrc - -a -q --fuzz=0
debian/rules debian/rules.gen || :
build =
arch=`dpkg-architecture -qDEB_HOST_ARCH`
make -j${:smp} -f debian/rules.gen binary-arch_$${arch}_none binary-libc-dev_$${arch}
dpkg-genchanges -b -UBinary -UDescription \
>../linux_`dpkg-parsechangelog -S Version`_$${arch}.changes
[debuild-librina]
<= debuild-rina-base
component = librina
apt-sources = ${debuild-rina-kernel:location}
prepare =
debian/rules DIST=$dist
[debuild-rinad]
<= debuild-librina
component = rinad
apt-sources = ${debuild-librina:location}
[rina-tools-deps]
apt-sources = ${debuild-librina:location} ${debuild-rinad:location}
[debuild-rina-tools]
<= debuild-librina
rina-tools-deps
component = rina-tools
# check that the built packages install
finalize =
dpkg -i ../*.deb
${debuild-librina:finalize}
[test-rina]
<= vm-run-base
rina-tools-deps
mount.rina = ${irati-stack:location}
mount.slapos = ${:_profile_base_location_}/../..
mount.slapos.package = ${slapos.package-repository:location}
commands = install test
install =
${:apt-update}
apt-get -y install librinad-dev rinad
: || {
mkdir rina-tools
cd rina-tools
cat <<EOF >buildout.cfg
[buildout]
extends = /mnt/slapos/software/hellorina/software.cfg
develop = /mnt/slapos
[slapos-cookbook-develop]
recipe =
setup =
[irati-stack]
repository = /mnt/rina
[versions]
slapos.cookbook =
EOF
sed s,${buildout:directory}/,/mnt/buildout/, /mnt/buildout/bin/buildout |
python - bootstrap
MAKEFLAGS=-j${:smp} bin/buildout
}
/mnt/slapos.package/playbook/roles/rina/gen-ipcm-conf
systemctl enable ipcm-re6st
arch=`dpkg-architecture -qDEB_HOST_ARCH`
dpkg -i `map ${debuild-rina-kernel:location}`/linux-image-*-$${arch}_*.deb
reboot
test = ( set -x
# TODO: more tests
grep re6st/1// /sys/rina/ipcps/1/name
) 2>&1 |tee $PARTDIR/result
{
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "Parameters to build/publish RINA packages and test hellorina SR.",
"type": "object",
"additionalProperties": false,
"properties": {
"publish": {
"description": "Upload built packages automatically to a Debian repository when successful.",
"type": "object",
"required": ["suite", "host", "key"],
"properties": {
"suite": {
"type": "string"
},
"host": {
"type": "string"
},
"port": {
"type": "integer",
"default": 22
},
"key": {
"type": "string"
}
}
}
}
}
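A hypothetical request parameter that validates against this schema (all values invented):

    {
      "publish": {
        "suite": "jessie",
        "host": "packages.example.com",
        "port": 22,
        "key": "..."
      }
    }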
#!{{parameter_dict['runTestSuite_py']}}
from __future__ import print_function
import argparse, os, subprocess, sys, traceback
from time import gmtime, strftime, time
# These are the 2 modules to reuse when using ERP5 for managing test bots.
# What we do here is currently too new to reuse more from testsuite.
from erp5.util import taskdistribution
from erp5.util.testsuite import format_command
{% set vm = parameter_dict['vm'] -%}
dist_list = {{vm['dists'].split()}}
publish = {{slapparameter_dict.get('publish')}}
# ERP5 must be changed to only distinguish SKIP/EXPECTED/UNEXPECTED,
# instead of SKIP/FAIL/ERROR. Unlike NEO, we categorize XFAIL as SKIP
# so that the overall status is PASS if there's no FAIL/XPASS/ERROR.
STAT_MAP = dict(
TOTAL = 'test_count',
PASS = None,
SKIP = 'skip_count',
XFAIL = 'skip_count',
FAIL = 'failure_count',
XPASS = 'failure_count',
ERROR = 'error_count',
)
class DummyTestResult:
class DummyTestResultLine:
def stop(self, duration, stdout='', **kw):
print('\n' + stdout)
print('Ran in %.3fs' % duration)
done = 0
def __init__(self, test_name_list):
self.test_name_list = test_name_list
def start(self):
test_result_line = self.DummyTestResultLine()
try:
test_result_line.name = self.test_name_list[self.done]
except IndexError:
return
self.done += 1
return test_result_line
def main():
os.environ.update({k: v.strip() % os.environ
for k, v in {{parameter_dict['environment'].items()}}})
parser = argparse.ArgumentParser(description='Run a test suite.')
parser.add_argument('--test_suite', help='The test suite name')
parser.add_argument('--test_suite_title', help='The test suite title')
parser.add_argument('--test_node_title', help='The test node title')
parser.add_argument('--project_title', help='The project title')
parser.add_argument('--revision', help='The revision to test',
default='dummy_revision')
parser.add_argument('--node_quantity', type=int,
help='Number of CPUs to use for the VM')
parser.add_argument('--master_url',
help='The Url of Master controling many suites')
args = parser.parse_args()
test_title = args.test_suite_title or args.test_suite
if args.master_url:
tool = taskdistribution.TaskDistributionTool(args.master_url)
test_result = tool.createTestResult(args.revision,
dist_list,
args.test_node_title,
test_title=test_title,
project_title=args.project_title)
if test_result is None:
return
else:
test_result = DummyTestResult(dist_list)
fd = os.open('buildout.cfg', os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0666)
try:
os.write(fd, """\
[buildout]
extends = {{parameter_dict['profile_base_location']}}/build.cfg
offline = true
develop-eggs-directory = {{buildout['develop-eggs-directory']}}
eggs-directory = {{buildout['eggs-directory']}}
[vm-run-base]
environment =
vm = {{vm['location']}}
[irati-stack]
location = {{parameter_dict['irati_stack']}}
[slapos.package-repository]
location = {{parameter_dict['slapos_package']}}
""")
finally:
os.close(fd)
librina_log = os.path.join('parts', 'debuild-librina', 'build.log')
stderr_write = sys.stderr.write
while 1:
test_result_line = test_result.start()
if not test_result_line:
break
dist = test_result_line.name
cmd = [{{repr(parameter_dict['buildout'])}},
'vm-run-base:dist=' + dist,
'debuild-rina-base:suite=' + (publish['suite'] if publish else ''),
]
if args.node_quantity:
cmd.append('vm-run-base:smp=%s' % args.node_quantity)
status_dict = {'command': format_command(*cmd)}
print('$', status_dict['command'])
# Wanted on test result lines:
# status: UNKNOWN in case of buildout failure
# (even if the test suite could be run)
# output: test suite summary if any
# error: buildout traceback or test suite log
start = time()
try:
try:
p = subprocess.Popen(cmd, stderr=subprocess.PIPE)
stderr = []
while 1:
line = p.stderr.readline()
if not line:
break
stderr_write(line)
stderr.append(line)
returncode = p.wait()
finally:
end = time()
del p
if returncode:
iter_err = enumerate(reversed(stderr), 1)
for i, line in iter_err:
if line == "Traceback (most recent call last):\n":
for i, line in iter_err:
if line == '\n':
break
for i, line in iter_err:
if line[0] != ' ':
break
break
if line == "While:\n":
del stderr[:-i]
status_dict['stderr'] = ''.join(stderr)
with open(librina_log) as f:
log = f.readlines()
del log[:log.index('make check-TESTS\n')]
for i, line in enumerate(log):
if line.startswith('Testsuite summary'):
del log[log.index(log[i+1], i+2):]
status_dict['stdout'] = ''.join(log[i:])
stat = {}
for line in log[i+2:]:
k, v = line[2:].split(':')
k = STAT_MAP[k]
if k:
stat[k] = stat.get(k, 0) + int(v.strip())
if not returncode:
status_dict.update(stat)
status_dict.setdefault('stderr', ''.join(log[:i-1]))
break
except Exception:
status_dict.setdefault('stderr', traceback.format_exc())
test_result_line.stop(
date = strftime("%Y/%m/%d %H:%M:%S", gmtime(end)),
duration = end - start,
**status_dict)
# TODO: upload packages if 'publish' parameter is given
if __name__ == "__main__":
main()
[buildout]
extends =
../../stack/slapos.cfg
../../component/vm-img/debian.cfg
../../component/rina-tools/buildout.cfg
parts =
slapos-cookbook
template
[template]
recipe = slapos.recipe.template:jinja2
# XXX: "template.cfg" is hardcoded in instanciation recipe
rendered = ${buildout:directory}/template.cfg
template = inline:
[buildout]
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
parts = runTestSuite
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = $${slap-connection:computer-id}
partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
[directory]
recipe = slapos.cookbook:mkdirectory
bin = $${buildout:directory}/bin
[runTestSuite]
recipe = slapos.recipe.template:jinja2
rendered = $${directory:bin}/$${:_buildout_section_name_}
template = ${:_profile_base_location_}/$${:_buildout_section_name_}.in
mode = 0755
context =
{##} section buildout buildout
{##} section parameter_dict runTestSuite-parameters
{##} key slapparameter_dict slap-configuration:configuration
[runTestSuite-parameters]
environment = {{dumps(environment)}}
vm = {{dumps(vm)}}
runTestSuite_py = ${buildout:bin-directory}/${runTestSuite_py:interpreter}
buildout = ${buildout:bin-directory}/buildout
profile_base_location = ${:_profile_base_location_}
irati_stack = ${irati-stack:location}
slapos_package = ${slapos.package-repository:location}
context =
section environment vm-run-environment
section vm vm-debian
[runTestSuite_py]
recipe = zc.recipe.egg
eggs = erp5.util
interpreter = ${:_buildout_section_name_}
[irati-stack]
sparse-checkout = /.gitignore
[slapos.package-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.package.git
git-executable = ${git:location}/bin/git
sparse-checkout = /playbook/roles/rina
[vm-debian]
# building a generic Debian kernel uses a lot of space
size = 16Gi
packages +=
# generic (another SR that builds packages automatically would use the same list)
apt-utils build-essential devscripts equivs lsb-release
# specific
git ca-certificates python
# biggest and common build-deps for RINA
dh-autoreconf pkg-config doxygen maven xmlto
......@@ -250,6 +250,7 @@ link-binary =
{{ parameter_dict['poppler'] }}/bin/pdfinfo
{{ parameter_dict['poppler'] }}/bin/pdftotext
{{ parameter_dict['poppler'] }}/bin/pdftohtml
{{ parameter_dict['onlyoffice-x2t'] }}/x2t
# rest of parts are candidates for some generic stuff
[directory]
......
......@@ -69,6 +69,7 @@ libreoffice-bin = ${libreoffice-bin:location}
libxcb = ${libxcb:location}
mesa = ${mesa:location}
openssl = ${openssl:location}
onlyoffice-x2t = ${onlyoffice-x2t:location}
poppler = ${poppler:location}
pixman = ${pixman:location}
wkhtmltopdf = ${wkhtmltopdf:location}
......@@ -82,5 +83,5 @@ template-logrotate-base = ${template-logrotate-base:rendered}
[template-cloudooo-instance]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-cloudooo.cfg.in
md5sum = 84b9da151545039f6755c969b1532ec2
md5sum = 3de0b4732e5132448027c1387a4de3c9
mode = 640
......@@ -20,10 +20,12 @@ ssl-dict-parameter-name = ssl
mimetype-entry-addition-parameter-name = mimetype-entry-addition
mimetype-entry-addition =
text/html application/pdf wkhtmltopdf
# Release Increments
[cloudooo-software-parameter-dict]
mimetype-entry-addition =
application/vnd.openxmlformats-officedocument.wordprocessingml.document application/x-asc-text x2t
application/vnd.openxmlformats-officedocument.spreadsheetml.sheet application/x-asc-spreadsheet x2t
application/vnd.openxmlformats-officedocument.presentationml.presentation application/x-asc-presentation x2t
application/x-asc-text application/vnd.openxmlformats-officedocument.wordprocessingml.document x2t
application/x-asc-spreadsheet application/vnd.openxmlformats-officedocument.spreadsheetml.sheet x2t
application/x-asc-presentation application/vnd.openxmlformats-officedocument.presentationml.presentation x2t
[cloudooo-repository]
branch = master
......@@ -31,4 +33,3 @@ revision = da0308870ce2b3dde192b0613f456d83e0b04afd
[slapos-cookbook.repository]
revision = 0af8127c34e691f04b6307acd7db4b8e82b82620
......@@ -26,6 +26,7 @@ context =
<= jinja2-template-base
template = {{ instance_root }}
extra-context =
import urlparse urlparse
key ipv6 slap-configuration:ipv6-random
raw python_location {{ python_location }}
raw rina_proxy {{ rina_proxy }}
......
......@@ -9,7 +9,7 @@ parts =
[template]
recipe = slapos.recipe.template:jinja2
template = ${:_profile_base_location_}/instance.cfg.in
md5sum = d2fd3ed7df0e3082608b9caf72e4f700
md5sum = e513082329961dfbbcec94e817f1e3bc
# XXX: "template.cfg" is hardcoded in instanciation recipe
rendered = ${buildout:directory}/template.cfg
context =
......
......@@ -87,8 +87,6 @@ def main():
'html_test_result': elt.find_element_by_tag_name('ol').get_attribute('innerHTML')
}
browser.quit()
tool = taskdistribution.TaskDistributionTool(portal_url=args.master_url)
test_result = tool.createTestResult(revision = revision,
test_name_list = test_line_dict.keys(),
......@@ -119,5 +117,8 @@ def main():
# XXX: inform test node master of error
raise EnvironmentError(result)
finally:
browser.quit()
if __name__ == "__main__":
main()
\ No newline at end of file
......@@ -106,7 +106,7 @@ mode = 0644
[template-runTestSuite]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/runTestSuite.in
md5sum = bc1eca131cfa4e9f424d52788a6e051e
md5sum = 13a56b1b6b2d54dc27ed6570e4b5f1d7
output = ${buildout:directory}/runTestSuite.in
mode = 0644
......
......@@ -92,7 +92,7 @@ command =
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg.in
md5sum = 92c62e050aac47097f5ca81cb7f2acec
md5sum = 110df709a7c8a5c749f93663f6ab0d28
output = ${buildout:directory}/template.cfg
mode = 0644
......@@ -101,7 +101,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm.cfg.jinja2
mode = 644
md5sum = 7abec10f8e24e7a75935a0637a006329
md5sum = d09d37dc3bcf34da8d31b11215866b27
download-only = true
on-update = true
......@@ -110,7 +110,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm-cluster.cfg.jinja2.in
mode = 644
md5sum = d29f02443f48096f176c8ae78cc5596c
md5sum = 6ef7e38526f024356743e77973f0fb5f
download-only = true
on-update = true
......@@ -119,7 +119,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm-resilient.cfg.jinja2
mode = 644
md5sum = c420aae56ecd64c94814647b74d9a4fb
md5sum = 700676dff4a3835a9d6cde015d91922e
download-only = true
on-update = true
......@@ -127,7 +127,7 @@ on-update = true
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm-resilient-test.cfg.jinja2
md5sum = e3d275621420f0b8c081228caeb571f9
md5sum = 6b40e280201aaf9258c4cb5de7c1f5b4
mode = 0644
download-only = true
on-update = true
......@@ -136,7 +136,7 @@ on-update = true
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm-import.cfg.jinja2.in
md5sum = 3177381b65b4b95ba29190a6ac03b771
md5sum = d5d3b7debf3141913eac926830ea166d
mode = 0644
download-only = true
on-update = true
......@@ -155,7 +155,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm-export.cfg.jinja2
mode = 644
md5sum = 4c71fbd0f27616d945df9669c57d302e
md5sum = 13276ead8cf5f9eda28e8dbda35a9bcf
download-only = true
on-update = true
......@@ -186,7 +186,7 @@ mode = 0644
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/template/ansible-promise.in
md5sum = d0bd07d5a7799b7aea7720ffdf46f322
md5sum = 2036bf145f472f62ef8dee5e729328fd
mode = 0644
download-only = true
filename = ansible-promise.in
......@@ -207,7 +207,7 @@ ignore-existing = true
url = ${:_profile_base_location_}/template/kvm-controller-run.in
mode = 644
filename = kvm-controller-run.in
md5sum = b61ef9c54d912fdbfed3899fa985f79c
md5sum = 71afd2d13f6e56993ae413a168e012d7
download-only = true
on-update = true
......@@ -231,6 +231,16 @@ md5sum = 822737e483864bf255ad1259237bef2a
download-only = true
on-update = true
[template-qemu-ready]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/template/qemu-is-ready.in
mode = 644
filename = qemu-is-ready.in
md5sum = 0066fa0f5f3dd47bded5e5924df2550d
download-only = true
on-update = true
[file-download-script]
recipe = hexagonit.recipe.download
ignore-existing = true
......
......@@ -63,6 +63,7 @@ context =
raw python_executable ${buildout:executable}
raw qemu_executable_location ${kvm:location}/bin/qemu-system-x86_64
raw qemu_img_executable_location ${kvm:location}/bin/qemu-img
raw qemu_start_promise_tpl ${template-qemu-ready:location}/${template-qemu-ready:filename}
raw sixtunnel_executable_location ${6tunnel:location}/bin/6tunnel
raw template_httpd_cfg ${template-httpd:rendered}
raw template_content ${template-content:location}/${template-content:filename}
......
......@@ -157,17 +157,24 @@
"description": "Text content which will be written in a file data of cluster http server. All VM will be able to download that file via the static URL of cluster HTTP server: https://10.0.2.101/FOLDER_HASH/data.",
"type": "string"
},
"enable-monitor": {
"title": "Enable Monitoring on this cluster",
"description": "Deploy monitor instance to this kvm instance. It help to check instance status, log and promise results.",
"type": "boolean",
"default": true
},
"monitor-interface-url": {
"title": "Monitor Web Interface URL",
"description": "Give Url of HTML web interface that will be used to render this monitor instance.",
"type": "string",
"format": "uri"
"format": "uri",
"default": "https://monitor.app.officejs.com"
},
"monitor-cors-domains": {
"title": "Monitor CORS domains",
"description": "List of cors domains separated with space. Needed for ajax query on this monitor instance from a different domain.",
"type": "string",
"default": ""
"default": "monitor.app.officejs.com"
},
"kvm-partition-dict": {
"title": "kvm instances definition",
......@@ -377,11 +384,11 @@
"description": "Text content which will be written in a file 'data' of http server of this virtual machine instance. The file will be available via URL: http://10.0.2.100/data in the VM.",
"type": "string"
},
"enable-monitor": {
"title": "Deploy monitoring tools",
"description": "Deploy monitor instance to this kvm instance. It help to check instance status, log and promise results.",
"disable-ansible-promise": {
"title": "Desactivate Ansible promise check",
"description": "If the VM of cluster doesn't run Ansible and report status to this SlapOS instances, then this allow to disable ansible promise so your instance will not fail to check ansible promise.",
"type": "boolean",
"default": true
"default": false
}
},
"type": "object"
......
......@@ -9,6 +9,7 @@
{% set kvm_instance_dict = {} -%}
{% set kvm_hostname_list = [] -%}
{% set monitor_url_list = [] -%}
{% set enable_monitoring = slapparameter_dict.get('enable-monitor', True) -%}
[request-common]
recipe = slapos.cookbook:request
......@@ -76,7 +77,8 @@ config-httpd-port = {{ dumps(kvm_parameter_dict.get('httpd-port', 8081)) }}
{% if kvm_parameter_dict.get('data-to-vm', '') -%}
config-data-to-vm = {{ dumps(kvm_parameter_dict.get('data-to-vm', '')) }}
{% endif -%}
config-enable-monitor = {{ dumps(kvm_parameter_dict.get('enable-monitor', True)) }}
config-disable-ansible-promise = {{ dumps(kvm_parameter_dict.get('disable-ansible-promise', False)) }}
config-enable-monitor = {{ enable_monitoring }}
config-monitor-cors-domains = {{ slapparameter_dict.get('monitor-cors-domains', 'monitor.app.officejs.com') }}
config-monitor-username = ${monitor-htpasswd:username}
config-monitor-password = ${monitor-htpasswd:passwd}
......@@ -102,21 +104,21 @@ sla-fw_restricted_access = {{ dumps(slapparameter_dict.get('fw-restricted-access
return =
url
backend-url
monitor-base-url
{% if str(use_nat).lower() == 'true' -%}
{% for port in nat_rules_list -%}
{{ ' ' }}nat-rule-url-{{ port }}
{% endfor -%}
{% endif -%}
{% if enable_monitoring -%}
{{ ' ' }}monitor-base-url
{% do monitor_url_list.append('${' ~ section ~ ':connection-monitor-base-url}') -%}
{% endif -%}
{% if str(kvm_parameter_dict.get('use-tap', 'True')).lower() == 'true' -%}
{{ ' ' }}tap-ipv4
{% do publish_dict.__setitem__('lan-' ~ instance_name, '${' ~ section ~ ':connection-tap-ipv4}') -%}
{% do kvm_hostname_list.append(instance_name ~ ' ' ~ '${' ~ section ~ ':connection-tap-ipv4}') -%}
{% endif -%}
{% if str(kvm_parameter_dict.get('enable-monitor', 'True')).lower() == 'true' -%}
{% do monitor_url_list.append('${' ~ section ~ ':connection-monitor-base-url}') -%}
{% endif -%}
{% do publish_dict.__setitem__(instance_name ~ '-backend-url', '${' ~ section ~ ':connection-backend-url}') -%}
{% do publish_dict.__setitem__(instance_name ~ '-url', '${' ~ section ~ ':connection-url}') -%}
{% do kvm_instance_dict.__setitem__(instance_name, (use_nat, nat_rules_list)) -%}
......@@ -232,7 +234,6 @@ username = admin
[monitor-instance-parameter]
monitor-httpd-port = 8060
monitor-title = KVM Cluster Main Instance
cors-domains = {{ slapparameter_dict.get('monitor-cors-domains', 'monitor.app.officejs.com') }}
username = ${monitor-htpasswd:username}
password = ${monitor-htpasswd:passwd}
......@@ -250,13 +251,18 @@ recipe = slapos.cookbook:publish
{% for name, value in publish_dict.items() -%}
{{ name }} = {{ value }}
{% endfor %}
{% set monitor_interface_url = slapparameter_dict.get('monitor-interface-url', 'https://monitor.app.officejs.com') -%}
{% if enable_monitoring -%}
{% set monitor_interface_url = slapparameter_dict.get('monitor-interface-url', 'https://monitor.app.officejs.com') -%}
{% do part_list.append('monitor-base') -%}
monitor-setup-url = {{ monitor_interface_url }}/#page=settings_configurator&url=${publish:monitor-url}&username=${publish:monitor-user}&password=${publish:monitor-password}
{% endif -%}
[buildout]
extends =
{{ template_httpd_cfg }}
{{ template_monitor }}
{% if enable_monitoring -%}
{{ ' ' ~ template_monitor }}
{% endif -%}
parts =
httpd
......@@ -265,9 +271,6 @@ parts =
publish-connection-information
directory-doc
monitor-base
# End monitor
# Complete parts with sections
{{ part_list | join('\n ') }}
......
{% set monitor = True -%}
{% if slapparameter_dict.get('enable-monitor', 'True').lower() == 'false' -%}
{% set monitor = False -%}
{% endif -%}
{% set monitor = str(slapparameter_dict.get('enable-monitor', False)).lower() == 'true' -%}
[buildout]
extends =
......
......@@ -8,7 +8,7 @@ offline = true
# even extend the kvm instance profile.
extends =
{{ pbsready_import_template }}
{% if slapparameter_dict.get('enable-monitor', 'True').lower() == 'true' -%}
{% if str(slapparameter_dict.get('enable-monitor', True)).lower() == 'true' -%}
{{ ' ' ~ template_monitor }}
[resilient-publish-connection-parameter]
......
......@@ -60,7 +60,7 @@ config-virtual-hard-drive-url = ${slap-parameter:virtual-hard-drive-url}
config-virtual-hard-drive-md5sum = ${slap-parameter:virtual-hard-drive-md5sum}
config-resiliency-backup-periodicity = */5 * * * *
config-resilient-clone-number = 1
config-ignore-known-hosts-file = true
config-ignore-known-hosts-file = false
return = ipv6
# XXX What to do?
sla-computer_guid = ${slap-connection:computer-id}
......
......@@ -5,8 +5,9 @@
{% set backup_amount = slapparameter_dict.pop('resilient-clone-number', "1")|int + 1 -%}
{% set monitor_dict = {} -%}
{% set monitor_return = [] -%}
{% if slapparameter_dict.get('enable-monitor', 'True').lower() == 'true' -%}
{% if slapparameter_dict.get('enable-monitor', True) == True -%}
{% set monitor_return = ['monitor-base-url'] -%}
{% set monitor_parameter = {'monitor-cors-domains': slapparameter_dict.pop('monitor-cors-domains', "monitor.app.officejs.com")} -%}
{% set monitor_dict = {'parameter': monitor_parameter, 'return': monitor_return} -%}
......@@ -25,7 +26,7 @@ parts +=
kvm-frontend-url-promise
kvm-backend-url-promise
{% if slapparameter_dict.get('enable-monitor', 'True').lower() == 'true' -%}
{% if monitor_dict -%}
extends = {{ template_monitor }}
......@@ -35,6 +36,11 @@ storage-path = ${directory:etc}/.monitor_user
bytes = 8
username = admin
# XXX Monitoring Main Instance
[monitor-instance-parameter]
monitor-httpd-port = 8160
cors-domains = {{ monitor_parameter.get('monitor-cors-domains', '') }}
{% do monitor_parameter.__setitem__('monitor-username', slapparameter_dict.get('monitor-username', 'admin'))%}
{% do monitor_parameter.__setitem__('monitor-password', slapparameter_dict.get('monitor-password', '${monitor-htpasswd:passwd}'))%}
{% endif -%}
......@@ -56,18 +62,15 @@ return =
# XXX: return ALL parameters (like nat rules), through jinja
backend-url url ip
# XXX Monitoring Main Instance
[monitor-instance-parameter]
monitor-httpd-port = 8160
cors-domains = {{ monitor_parameter.get('monitor-cors-domains', '') }}
[publish-connection-information]
recipe = slapos.cookbook:publish
backend-url = ${request-kvm:connection-backend-url}
url = ${request-kvm:connection-url}
ipv6 = ${request-kvm:connection-ip}
{% if monitor_dict -%}
monitor-base-url = ${publish:monitor-base-url}
monitor-setup-url = {{ monitor_interface_url }}/#page=settings_configurator&url=${publish:monitor-url}&username=${publish:monitor-user}&password=${publish:monitor-password}
{% endif -%}
[kvm-frontend-url-promise]
# Check that url parameter is complete
......
......@@ -3,17 +3,13 @@
{% set use_nat = slapparameter_dict.get('use-nat', 'True').lower() -%}
{% set nat_restrict = slapparameter_dict.get('nat-restrict-mode', 'False').lower() -%}
{% set name = slapparameter_dict.get('name', 'localhost') -%}
{% set disable_ansible_promise = slapparameter_dict.get('disable-ansible-promise', 'False').lower() -%}
{% set disable_ansible_promise = slapparameter_dict.get('disable-ansible-promise', 'True').lower() -%}
{% set instance_type = slapparameter_dict.get('type', 'standalone') -%}
{% set nat_rule_list = slapparameter_dict.get('nat-rules', '22 80 443') -%}
{% set frontend_software_type = 'default' -%}
{% set extends_list = [] -%}
{% set part_list = [] -%}
{% set monitor = True -%}
{% if slapparameter_dict.get('enable-monitor', 'True').lower() == 'false' -%}
{% set monitor = False -%}
{% endif -%}
{% set monitor = str(slapparameter_dict.get('enable-monitor', True)).lower() == 'true' -%}
{% set bootstrap_url = '' -%}
{% set bootstrap_url_md5sum = '' -%}
......@@ -34,7 +30,6 @@
{% do extends_list.append(template_monitor) -%}
{% endif -%}
{% do extends_list.append(logrotate_cfg) -%}
[directory]
recipe = slapos.cookbook:mkdirectory
etc = ${buildout:directory}/etc
......@@ -70,6 +65,7 @@ bytes = 8
python-path = {{ python_executable }}
vnc-passwd = ${gen-passwd:passwd}
socket-path = ${directory:var}/qmp_socket
kvm-status-path = ${directory:var}/qemu-vm-is-ready
[kvm-parameter-dict]
python-path = {{ python_executable }}
......@@ -217,6 +213,15 @@ input = inline:#!/bin/sh
output = ${directory:promises}/kvm-disk-image-corruption
mode = 700
[kvm-started-promise]
recipe = slapos.recipe.template:jinja2
template = {{ qemu_start_promise_tpl }}
rendered = ${directory:promises}/qemu-virtual-machine-is-ready
mode = 700
context =
raw dash {{ dash_executable_location }}
raw qemu_ready_path ${kvm-controller-parameter-dict:kvm-status-path}
raw qemu_service_log_file ${buildout:directory}/.${slap-connection:partition-id}_kvm.log
[novnc-instance]
recipe = slapos.cookbook:novnc
......@@ -606,7 +611,7 @@ keyboard-layout-language = fr
{% if slapparameter_dict.get('data-to-vm', '') %}
{% do part_list.append('vm-data-content') -%}
{% endif -%}
{% if use_tap == 'true' and tap_network_dict.has_key('ipv4') and disable_ansible_promise == 'false' %}
{% if disable_ansible_promise == 'false' %}
{% do part_list.extend(['ansible-vm-promise', 'logrotate-vm-bootstrap']) -%}
{% endif -%}
{% if slapparameter_dict.get('authorized-key', '') and slapparameter_dict.get('type', '') == 'cluster' %}
......@@ -629,7 +634,7 @@ parts =
kvm-disk-image-corruption-promise
websockify-sighandler
novnc-promise
# kvm-monitor
kvm-started-promise
cron
cron-entry-logrotate
frontend-promise
......
......@@ -96,6 +96,7 @@ context =
raw python_executable ${buildout:executable}
raw qemu_executable_location ${kvm:location}/bin/qemu-system-x86_64
raw qemu_img_executable_location ${kvm:location}/bin/qemu-img
raw qemu_start_promise_tpl ${template-qemu-ready:location}/${template-qemu-ready:filename}
raw sixtunnel_executable_location ${6tunnel:location}/bin/6tunnel
raw template_httpd_cfg ${template-httpd:rendered}
raw template_content ${template-content:location}/${template-content:filename}
......
......@@ -5,6 +5,6 @@ extends = development.cfg
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-for-erp5testnode.cfg.in
md5sum = 71b730997a6e3ce7f0579901401eab9c
md5sum = 61414eefb6641f74d5f2b4ffc23af393
output = ${buildout:directory}/template.cfg
mode = 0644
\ No newline at end of file
......@@ -36,6 +36,8 @@ def check_result():
success_file = os.path.join(ansible_log_dir, result_OK)
error_file = os.path.join(ansible_log_dir, result_failed)
if not os.path.exists(ansible_log_dir):
return (0, "File %s doesn't exist.\nThis mean that ansible report log is not uploaded yet!!!" % ansible_log_dir)
if not len(os.listdir(ansible_log_dir)):
return (0, 'No Ansible promise uploaded!')
if os.path.exists(error_file):
......@@ -54,4 +56,4 @@ if __name__ == "__main__":
result = check_result()
if not result[0]:
raise Exception('Failed to run Ansible in %s, result is: \n%s' % (vm_name,
result[1]))
result[1]))
\ No newline at end of file
......@@ -5,11 +5,16 @@
# Echo client program
import socket
import time
import os
# XXX: to be factored with slapos.toolbox qemu qmp wrapper.
socket_path = '{{ parameter_dict.get("socket-path") }}'
vnc_password = '{{ parameter_dict.get("vnc-passwd") }}'
status_path = '{{ parameter_dict.get("kvm-status-path") }}'
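# Remove any stale status file so only this run's result is reported.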
if os.path.exists(status_path):
os.unlink(status_path)
# Connect to KVM qmp socket
so = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
......@@ -35,3 +40,6 @@ data = so.recv(1024)
# Finish
so.close()
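# All qmp exchanges succeeded: write the status file checked by the kvm-started promise.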
with open(status_path, 'w') as status_file:
status_file.write("OK")
#!{{ dash }}
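# Promise: succeeds only once the qmp wrapper has written the qemu-vm-is-ready status file.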
if [ -f "{{ qemu_ready_path }}" ]; then
echo "VM correctly started."
else
log_file="{{ qemu_service_log_file }}"
>&2 echo "Qemu process is not correctly started."
if [ -f "$log_file" ]; then
>&2 echo "** Latest ouput logs **"
>&2 echo
>&2 echo "$(tail $log_file)"
exit 1
fi
fi
\ No newline at end of file
......@@ -10,6 +10,7 @@ parts =
monitor-collect-csv-wrapper
fluentd-wrapper
monitor-base
promise-check-slapgrid
publish-connection-information
......@@ -105,6 +106,16 @@ recipe = slapos.cookbook:publish
monitor-setup-url = https://monitor.app.officejs.com/#page=settings_configurator&url=${publish:monitor-url}&username=${publish:monitor-user}&password=${publish:monitor-password}
server_log_url = ${publish:monitor-base-url}/${slap-configuration:private-hash}/
[promise-check-slapgrid]
recipe = collective.recipe.template
error-log-file = ${buildout:directory}/.slapgrid-${slap-connection:partition-id}-error.log
input = inline:#!/bin/sh
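# Fail the promise if slapgrid recorded an error for this partition.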
if [ -f "${:error-log-file}" ]; then
>&2 cat ${:error-log-file}
exit 1
fi
output = ${monitor-directory:promises}/slapgrid-${slap-connection:partition-id}-status
mode = 700
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
......
......@@ -32,7 +32,7 @@ mode = 0644
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-monitor.cfg.jinja2
destination = ${buildout:directory}/template-base-monitor.cfg
md5sum = 4649ce8bc496cf7560c2e994f0b332ee
md5sum = ec16fdaa0e6e13dcc7d3007908182c20
mode = 0644
[template-monitor-distributor]
......
......@@ -10,17 +10,8 @@ parts +=
template-instance
install-eggs-for-the-instance
#add eggs needed by the instance
find-links +=
https://pypi.python.org/packages/source/s/slapos.recipe.build/slapos.recipe.build-0.21.tar.gz
https://pypi.python.org/packages/source/p/plone.recipe.command/plone.recipe.command-1.1.zip
versions = versions
[versions]
plone.recipe.command = 1.1
slapos.recipe.template = 2.8
slapos.recipe.build = 0.21
[verify-packages-exist-on-host]
recipe = plone.recipe.command
......
......@@ -68,7 +68,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/template/runner-import.sh.jinja2
download-only = true
md5sum = d3ce78b35cb47dcb647772891a1bf814
md5sum = 3cebc5d793ff1b5c786392723babc510
filename = runner-import.sh.jinja2
mode = 0644
......@@ -76,7 +76,7 @@ mode = 0644
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-runner-import.cfg.in
output = ${buildout:directory}/instance-runner-import.cfg
md5sum = 4db13f6fad1483bf7c663a1485e56098
md5sum = 91c34a55b7a45b14b0fac8b7faa202fe
mode = 0644
[template-runner-export-script]
......@@ -92,7 +92,7 @@ mode = 0644
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-runner-export.cfg.in
output = ${buildout:directory}/instance-runner-export.cfg
md5sum = a898117fe5b001747ef6a273fd03b42c
md5sum = ec92773be8f8a2ad20dc0661d58d7717
mode = 0644
[template-resilient]
......@@ -107,7 +107,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-resilient-test.cfg.jinja2
download-only = true
md5sum = c31de887459dfb45fe1213f2837cadcb
md5sum = 09a955bde79fb11565d689b0415e135c
filename = instance-resilient-test.cfg.jinja2
mode = 0644
......@@ -218,7 +218,10 @@ eggs =
plone.recipe.command
slapos.recipe.build
slapos.toolbox[flask_auth]
Gunicorn
# gunicorn downgraded because of a bug in later versions
# setting the version here allows using an older version in tests
# see more here: https://lab.nexedi.com/nexedi/slapos/commit/391b026e22b05e9a6fba5e063f64f18544a34b92
gunicorn==19.4.5
futures
${slapos-cookbook:eggs}
......
......@@ -42,7 +42,7 @@ mode = 755
# Used to manually run the resilient test if we don't have a running testnode.
recipe = slapos.cookbook:wrapper
test-suite-title = slaprunner
command-line = {{ bin_directory }}/runStandaloneResiliencyTestSuite --test-suite-title=${:test-suite-title} ${deploy-scalability-test:test-parameters}
command-line = {{ bin_directory }}/runStandaloneResiliencyTest --test-suite-title=${:test-suite-title} ${deploy-scalability-test:test-parameters}
wrapper-path = ${directory:bin}/runStandaloneResiliencyTestSuite
[request-resilient-instance]
......@@ -56,7 +56,7 @@ config-{{ key }} = {{ dumps(value) }}
{% endfor -%}
config-resiliency-backup-periodicity = */10 * * * *
config-resilient-clone-number = 1
config-ignore-known-hosts-file = true
config-ignore-known-hosts-file = false
# XXX hardcoded
#config-frontend-domain = google.com
# XXX Hack to deploy Root Instance on the same computer as the type-test Instance
......
......@@ -41,7 +41,7 @@ parts +=
recipe = slapos.cookbook:free_port
minimum = 49980
maximum = 49989
ip = $${slap-network-information:global-ipv6}
ip = $${slap-network-information:local-ipv4}
[runner-free-port]
recipe = slapos.cookbook:free_port
......
......@@ -42,7 +42,7 @@ parts +=
recipe = slapos.cookbook:free_port
minimum = 49990
maximum = 49999
ip = $${slap-network-information:global-ipv6}
ip = $${slap-network-information:local-ipv4}
[runner-free-port]
recipe = slapos.cookbook:free_port
......@@ -63,11 +63,15 @@ ip = $${slaprunner:ipv4}
[supervisord]
port = $${supervisord-free-port:port}
# Idem for some other services
[runner-sshd-port]
minimum = 22232
maximum = 22241
# Deactivate the call to prepareSoftware, and let the importer script
# handle the build&run of the instance.
[cron-entry-prepare-software]
recipe =
[importer]
recipe = slapos.recipe.template:jinja2
template = ${template-runner-import-script:location}/${template-runner-import-script:filename}
......@@ -78,7 +82,9 @@ mode = 700
restore-exit-code-file=$${directory:srv}/importer-exit-code-file
context =
key backend_url slaprunner:access-url
key proxy_host slaprunner:ipv4
key ipv4 slaprunner:ipv4
key ipv6 slaprunner:ipv6
key proxy_port slaprunner:proxy_port
section directory directory
raw output_log_file $${directory:log}/resilient.log
raw shell_binary ${bash:location}/bin/bash
......
......@@ -13,7 +13,7 @@ cns.recipe.symlink = 0.2.3
collective.recipe.environment = 0.2.0
futures = 3.0.5
gitdb = 0.6.4
gunicorn = 19.6.0
gunicorn = 19.4.5
prettytable = 0.7.2
pycurl = 7.43.0
slapos.recipe.template = 2.9
......
......@@ -20,7 +20,7 @@ restore_element () {
do
cd $backup_path;
if [ -f $element ] || [ -d $element ]; then
command="{{ rsync_binary }} --stats -av --delete --exclude *.sock --exclude *.pid --exclude .installed.cfg --exclude .installed-switch-softwaretype.cfg $backup_path/$element $restore_path"
command="{{ rsync_binary }} --stats -av --delete --exclude *.sock --exclude *.socket --exclude *.pid --exclude .installed.cfg --exclude .installed-switch-softwaretype.cfg $backup_path/$element $restore_path"
echo "Running: \"$command\""
$command
fi
......@@ -60,31 +60,29 @@ SOFTWARE_RELEASE=$(echo $OLD_SOFTWARE_RELEASE | sed -e 's/\(.*\)\(slappart\|test
$SQLITE3 $DATABASE "update partition11 set software_release='$SOFTWARE_RELEASE' where software_release NOT NULL;"
$SQLITE3 $DATABASE "update software11 set url='$SOFTWARE_RELEASE' where url='$OLD_SOFTWARE_RELEASE';" || $SQLITE3 $DATABASE "delete from software11 where url='$OLD_SOFTWARE_RELEASE';"
# Change slapproxy database to have all instances stopped
$SQLITE3 $DATABASE "update partition11 set slap_state='stopped';"
$SQLITE3 $DATABASE "update partition11 set requested_state='stopped';"
# Change slapproxy database to get correct IPs
IPV4={{ ipv4 }}
IPV6={{ ipv6 }}
$SQLITE3 $DATABASE "update partition_network11 set address='$IPV4' where netmask='255.255.255.255';"
$SQLITE3 $DATABASE "update partition_network11 set address='$IPV6' where netmask='ffff:ffff:ffff::';"
# Run slapproxy on different port (in case of running inside of erp5testnode
# with only one IP and port 50000 already taken by slapproxy of main instance)
HOST="{{ proxy_host }}"
PORT="50001"
URL="http://$HOST:$PORT"
$SLAPOS proxy start --cfg $HOME/etc/slapos.cfg --port $PORT >/dev/null 2>&1 &
SLAPPROXY_PID=$!
trap "kill $SLAPPROXY_PID" EXIT TERM INT
sleep 5
MASTERURL="http://{{ ipv4 }}:{{ proxy_port }}"
echo "Building newest software..."
$SLAPOS node software --cfg $HOME/etc/slapos.cfg --all --master-url=$URL --logfile $HOME/srv/runner/software.log --pidfile $HOME/var/run/slapos-node-software.pid >/dev/null 2>&1 ||
$SLAPOS node software --cfg $HOME/etc/slapos.cfg --all --master-url=$URL --logfile $HOME/srv/runner/software.log --pidfile $HOME/var/run/slapos-node-software.pid >/dev/null 2>&1 ||
$SLAPOS node software --cfg $HOME/etc/slapos.cfg --all --master-url=$URL --logfile $HOME/srv/runner/software.log --pidfile $HOME/var/run/slapos-node-software.pid >/dev/null 2>&1
$SLAPOS node software --cfg $HOME/etc/slapos.cfg --all --master-url=$MASTERURL --logfile $HOME/srv/runner/software.log --pidfile $HOME/var/run/slapos-node-software.pid >/dev/null 2>&1 ||
$SLAPOS node software --cfg $HOME/etc/slapos.cfg --all --master-url=$MASTERURL --logfile $HOME/srv/runner/software.log --pidfile $HOME/var/run/slapos-node-software.pid >/dev/null 2>&1 ||
$SLAPOS node software --cfg $HOME/etc/slapos.cfg --all --master-url=$MASTERURL --logfile $HOME/srv/runner/software.log --pidfile $HOME/var/run/slapos-node-software.pid >/dev/null 2>&1
# Remove defined scripts to force buildout to recreate them with updated paths
rm $srv_directory/runner/instance/slappart*/srv/runner-import-restore || true
echo "Running slapos node instance..."
# XXX hardcoded
$SLAPOS node instance --cfg $HOME/etc/slapos.cfg --master-url=$URL --logfile $HOME/srv/runner/instance.log --pidfile $HOME/var/run/slapos-node-instance.pid >/dev/null 2>&1 || true
$SLAPOS node instance --cfg $HOME/etc/slapos.cfg --master-url=$URL --logfile $HOME/srv/runner/instance.log --pidfile $HOME/var/run/slapos-node-instance.pid >/dev/null 2>&1 || true
$SLAPOS node instance --cfg $HOME/etc/slapos.cfg --master-url=$URL --logfile $HOME/srv/runner/instance.log --pidfile $HOME/var/run/slapos-node-instance.pid >/dev/null 2>&1 || true
$SLAPOS node instance --cfg $HOME/etc/slapos.cfg --master-url=$MASTERURL --logfile $HOME/srv/runner/instance.log --pidfile $HOME/var/run/slapos-node-instance.pid >/dev/null 2>&1 ||
$SLAPOS node instance --cfg $HOME/etc/slapos.cfg --master-url=$MASTERURL --logfile $HOME/srv/runner/instance.log --pidfile $HOME/var/run/slapos-node-instance.pid >/dev/null 2>&1 ||
$SLAPOS node instance --cfg $HOME/etc/slapos.cfg --master-url=$MASTERURL --logfile $HOME/srv/runner/instance.log --pidfile $HOME/var/run/slapos-node-instance.pid >/dev/null 2>&1
# Invoke defined scripts for each partition inside of slaprunner
echo "Invoke custom import scripts defined by each instances..."
for partition in $srv_directory/runner/instance/slappart*/
do
script=$partition/srv/runner-import-restore
......@@ -95,9 +93,11 @@ do
done
# Change back slapproxy database to have all instances started
$SQLITE3 $DATABASE "update partition11 set slap_state='started';"
echo "Start instances..."
$SQLITE3 $DATABASE "update partition11 set requested_state='started';"
# Write exit code to an arbitrary file that will be checked by promise/monitor
echo "Write status file... End"
RESTORE_EXIT_CODE_FILE="{{ restore_exit_code_file }}"
echo $RESTORE_EXIT_CODE > $RESTORE_EXIT_CODE_FILE
exit $RESTORE_EXIT_CODE
# Dummy Software Release
The main purpose of this Software Release is to be used in tests!
As a consequence, any change to it must be tied to corresponding changes in
tests using this Software Release.
Dummy has been written with the purpose of being fast, so it embeds the
minimum set of needed features. Please do not change anything in it
without a strong motivation.
[buildout]
parts =
log-writer
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[directory]
recipe = slapos.cookbook:mkdirectory
home = $${buildout:directory}
bin = $${:home}/bin
etc = $${:home}/etc
service = $${:etc}/service
script = $${:etc}/run
var = $${:home}/var
log = $${:var}/log
[log-writer]
recipe = slapos.recipe.template:jinja2
template = inline:#!/bin/sh
echo "Hello : $(date)" >> $${directory:log}/log.log
rendered = $${directory:script}/log-writer
mode = 700
\ No newline at end of file
[buildout]
extends =
../../../../stack/slapos.cfg
parts =
slapos-cookbook
instance-template
[instance-template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
output = ${buildout:directory}/template.cfg
md5sum = efd3b712a2294207f265a9c45648d5cf
mode = 0644
[versions]
slapos.recipe.template = 2.9
[buildout]
extends = development.cfg
extends = ../development.cfg
parts += template-erp5testnode
# Change default software-type to be "test", so that it can be run using erp5testnode.
[template-erp5testnode]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-for-erp5testnode.cfg.in
url = ${:_profile_base_location_}/instance.cfg.in
md5sum = 59a13145d3df30f38442ce172330dfb3
output = ${buildout:directory}/template.cfg
mode = 0644
......
......@@ -58,7 +58,7 @@ eggs = collective.recipe.template
[versions]
collective.recipe.template = 1.11
plone.recipe.command = 1.1
slapos.recipe.build = 0.13
slapos.recipe.build = 0.25
slapos.recipe.template = 2.7
# Replicate slapos stack, but without shacache to not have to compile the entire world for a simple test.
......
[buildout]
# Separate from site eggs
allowed-eggs-from-site-packages =
include-site-packages = false
exec-sitecustomize = false
extends =
../stack/slapos.cfg
../component/apache/buildout.cfg
......@@ -27,6 +22,7 @@ extends =
../component/logrotate/buildout.cfg
../component/lxml-python/buildout.cfg
../component/mesa/buildout.cfg
../component/onlyoffice-x2t/buildout.cfg
../component/poppler/buildout.cfg
../component/python-2.7/buildout.cfg
../component/rdiff-backup/buildout.cfg
......
......@@ -93,7 +93,7 @@ recipe = slapos.recipe.template:jinja2
filename = template-monitor.cfg
template = ${:_profile_base_location_}/instance-monitor.cfg.jinja2.in
rendered = ${buildout:directory}/template-monitor.cfg
md5sum = 61734742e3a01a8a1d512f491b2190cc
md5sum = 0955530291105a322f2972244caf307d
context =
key apache_location apache:location
key gzip_location gzip:location
......
......@@ -137,6 +137,7 @@ crond-folder = ${logrotate-directory:cron-entries}
logrotate-folder = ${logrotate:logrotate-entries}
promise-runner = {{ monitor_runpromise }}
promise-folder = ${directory:promises}
monitor-promise-folder = ${monitor-directory:promises}
pid-file = ${monitor-directory:pids}/monitor-bootstrap.pid
public-path-list =
......
......@@ -50,7 +50,7 @@ mode = 0644
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/pbsready-import.cfg.in
output = ${buildout:directory}/pbsready-import.cfg
md5sum = a13be3bd76d6a52b6527c7035ba33a06
md5sum = 10264fe1cfb7ebe567d50ebabbd93a43
mode = 0644
[pbsready-export]
......@@ -59,14 +59,14 @@ mode = 0644
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/pbsready-export.cfg.in
output = ${buildout:directory}/pbsready-export.cfg
md5sum = cb2537598981a1a4af0dea05cc6ac6bd
md5sum = 031fb7785cd5e99c2d794fa7e00e5190
mode = 0644
[template-pull-backup]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-pull-backup.cfg.in
output = ${buildout:directory}/instance-pull-backup.cfg
md5sum = 3892f86fcf850511dc0e6e4983b1961a
md5sum = 3ef8f98ff013f06fcd81bba18872e561
mode = 0644
[template-replicated]
......
......@@ -8,7 +8,7 @@ parts =
cron-entry-logrotate
pbs-sshkeys-authority
sshkeys-openssh
backup-transfer-integrity-promise
backup-checksum-integrity-promise
resilient-genstatrss-wrapper
pbs-push-history-log
backup-signature-link
......@@ -264,7 +264,7 @@ monitor-username = $${htpasswd:username}
#--
#-- Resiliency promises.
[backup-transfer-integrity-promise]
[backup-checksum-integrity-promise]
recipe = slapos.recipe.template:jinja2
template = inline:
#!${dash:location}/bin/dash
......@@ -276,9 +276,15 @@ template = inline:
if [ -z "$backup_signature" ]; then
exit 0;
else
diff "proof.signature" "$backup_signature";
diff -q "proof.signature" "$backup_signature";
if [ "$?" -eq 0 ]; then
exit 0;
else
echo "Signature file is not the same before and after transfer"
exit 1
fi
fi
rendered = $${basedirectory:promises}/backup-transfer-integrity-promise
rendered = $${basedirectory:promises}/backup-checksum-integrity
mode = 700
[resilient-genstatrss-wrapper]
......
......@@ -77,11 +77,9 @@ rendered = $${basedirectory:promises}/exporter-status
recipe = slapos.cookbook:cron.d
name = backup
frequency = $${slap-parameter:resiliency-backup-periodicity}
# Sleep from 1 to 6 hours before backing up (disks/network IO optimization)
sleep-command = ${bash:location}/bin/bash -c "sleep $((RANDOM%(60*60*6)))"
command = ($${:sleep-command}; $${notifier-exporter:wrapper} --transaction-id `date +%s`)
once-a-day = true
command = $${notifier-exporter:wrapper} --transaction-id `date +%s`
[slap-parameter]
# In cron.d format (i.e things like */15 * * * * are accepted).
resiliency-backup-periodicity = 0 0 * * *
resiliency-backup-periodicity =
......@@ -26,7 +26,7 @@ parts =
check-backup-integrity-on-notification
import-on-notification
backup-transfer-integrity-promise
backup-checksum-integrity-promise
resilient-publish-connection-parameter
backup-signature-link
......@@ -78,7 +78,7 @@ recipe = slapos.cookbook:notifier.callback
on-notification-id = $${slap-parameter:on-notification}
callback = $${post-notification-run:output}
[backup-transfer-integrity-promise]
[backup-checksum-integrity-promise]
recipe = slapos.recipe.template:jinja2
template = inline:
#!/${bash:location}/bin/bash
......@@ -90,10 +90,10 @@ template = inline:
exit 1;
fi
else
# If file doesn't exist, promise should raise false positive
# If file doesn't exist, promise shouldn't raise false positive
exit 0;
fi
rendered = $${basedirectory:promises}/backup-transfer-integrity-promise
rendered = $${basedirectory:promises}/backup-checksum-integrity
mode = 700
###########
......
......@@ -99,7 +99,7 @@ eggs =
[versions]
# Use SlapOS patched zc.buildout
zc.buildout = 1.7.1.post13
zc.buildout = 1.7.1.post14
# Use SlapOS patched zc.recipe.egg (zc.recipe.egg 2.x is for Buildout 2)
zc.recipe.egg = 1.3.2.post5
# Use own version of h.r.download to be able to open .xz and .lz archives
......@@ -137,7 +137,7 @@ slapos.cookbook = 1.0.35
slapos.core = 1.3.16
slapos.extension.strip = 0.1
slapos.libnetworkcache = 0.14.5
slapos.recipe.build = 0.23
slapos.recipe.build = 0.25
slapos.recipe.cmmi = 0.2
stevedore = 1.17.1
unicodecsv = 0.14.1
......