Commit a4cd4d27, authored Nov 24, 2016 by Rafael Monnerat

Update Release Candidate

Parents: acb4cab8, cf57b1f4

Showing 72 changed files with 1476 additions and 844 deletions.
component/curl/buildout.cfg (+2, -2)
component/firewalld/buildout.cfg (+3, -2)
component/leptonica/buildout.cfg (+5, -2)
component/leptonica/leptonica-1.69-zlib-include.patch (+22, -0)
component/mariadb/buildout.cfg (+2, -2)
component/nginx/buildout.cfg (+2, -0)
component/qemu-kvm/buildout.cfg (+10, -0)
component/slapos/buildout.cfg (+1, -1)
component/tesseract/buildout.cfg (+8, -2)
component/tesseract/tesseract-3.00-gcc-4.7-build.patch (+10, -0)
component/tesseract/tesseract-3.01-gcc6-ftbs.patch (+100, -0)
component/tesseract/tesseract-3.01-remove-bom.patch (+15, -0)
component/vm-img/debian.cfg (+26, -1)
setup.py (+3, -2)
slapos/recipe/dcron.py (+7, -23)
slapos/recipe/erp5testnode/__init__.py (+1, -0)
slapos/recipe/erp5testnode/template/erp5testnode.cfg.in (+1, -0)
slapos/recipe/erp5testnode/template/httpd.conf.in (+7, -7)
slapos/recipe/generatemac.py (+0, -50)
slapos/recipe/librecipe/generic.py (+0, -16)
slapos/recipe/notifier.py (+3, -0)
slapos/recipe/pbs.py (+60, -61)
slapos/recipe/random.py (+47, -3)
slapos/recipe/seleniumrunner/erp5functionaltestreporthandler.py (+1, -0)
slapos/test/recipe/test_dcron.py (+0, -73)
software/apache-frontend/common.cfg (+29, -53)
software/apache-frontend/instance-apache-frontend.cfg (+108, -76)
software/apache-frontend/instance-slave-apache-input-schema.json (+1, -1)
software/apache-frontend/software.cfg (+15, -0)
software/apache-frontend/templates/apache-custom-slave-list.cfg.in (+115, -135)
software/apache-frontend/templates/apache.conf.in (+6, -0)
software/apache-frontend/templates/apache_cached.conf.in (+0, -128)
software/apache-frontend/templates/apache_cached_rewrite.txt.in (+0, -3)
software/apache-frontend/templates/custom-virtualhost.conf.in (+7, -0)
software/apache-frontend/templates/default-virtualhost.conf.in (+28, -39)
software/apache-frontend/templates/nginx-eventsource-slave.conf.in (+115, -0)
software/apache-frontend/templates/nginx-notebook-slave.conf.in (+106, -0)
software/apache-frontend/templates/nginx.cfg.in (+103, -0)
software/build-rina/build.cfg (+7, -1)
software/build-rina/software.cfg (+2, -1)
software/erp5testnode/instance-default.cfg (+14, -0)
software/erp5testnode/software.cfg (+2, -2)
software/ipython_notebook/software.cfg (+1, -1)
software/ipython_notebook/template/ERP5kernel.py.jinja (+5, -11)
software/jstestnode/instance.cfg.in (+2, -2)
software/jstestnode/runTestSuite.in (+101, -17)
software/jstestnode/software.cfg (+3, -2)
software/kvm/common.cfg (+1, -1)
software/kvm/instance.cfg.in (+3, -1)
software/neoppod/software-common.cfg (+3, -3)
software/slapos-testing/software.cfg (+1, -0)
software/slaprunner/common.cfg (+9, -8)
software/slaprunner/httpd_conf.in (+0, -3)
software/slaprunner/instance-runner-export.cfg.in (+20, -20)
software/slaprunner/instance-runner-import.cfg.in (+18, -8)
software/slaprunner/instance.cfg (+18, -2)
software/slaprunner/template/resilient_software_release_information.py.in (+3, -1)
software/slaprunner/template/runner-import.sh.jinja2 (+49, -16)
software/test-slave-instance-deployment/software.cfg (+1, -1)
stack/erp5/buildout.cfg (+16, -15)
stack/monitor/buildout.cfg (+7, -4)
stack/monitor/templates/httpd-cors.cfg.in (+4, -1)
stack/monitor/templates/monitor-httpd.conf.in (+0, -3)
stack/resilient/buildout.cfg (+22, -7)
stack/resilient/instance-pull-backup.cfg.in (+28, -3)
stack/resilient/pbsready-export.cfg.in (+38, -18)
stack/resilient/pbsready-import.cfg.in (+6, -1)
stack/resilient/pbsready.cfg.in (+61, -0)
stack/resilient/resilient-web-takeover-cgi-script.py.in (+6, -0)
stack/resilient/template-resilient-templates.cfg.in (+10, -0)
stack/resilient/templates/notifier-feed-promise.py.in (+37, -0)
stack/slapos.cfg (+9, -9)
component/curl/buildout.cfg

@@ -12,8 +12,8 @@ parts =
 [curl]
 recipe = slapos.recipe.cmmi
-url = http://curl.haxx.se/download/curl-7.50.3.tar.bz2
-md5sum = bd177fd6deecce00cfa7b5916d831c5e
+url = http://curl.haxx.se/download/curl-7.51.0.tar.bz2
+md5sum = 09a7c5769a7eae676d5e2c86d51f167e
 configure-options =
   --disable-static
   --disable-ldap
component/firewalld/buildout.cfg

@@ -129,11 +129,12 @@ command =
   sed -i 's#/usr/share/#${firewalld:location}/share#' ${firewalld:location}/lib/python2.7/site-packages/firewall/config/__init__.py
   sed -i "s#import sys#import sys, os\n\nos.environ['GI_TYPELIB_PATH'] = '${gobject-introspection:location}/lib/girepository-1.0/'#" ${:python}
   sed -i 's#<syslog/>#<!-- no syslog -->#' ${dbus:location}/etc/dbus-1/system.conf
-  sed -i 's#<user>messagebus</user>#<user>slapsoft</user>#' ${dbus:location}/etc/dbus-1/system.conf
+  sed -i 's#<user>messagebus</user>#<user>slapsoft</user>#' ${dbus:location}/share/dbus-1/system.conf
   cp -f ${firewalld:location}/lib/firewalld/zones/trusted.xml ${firewalld:location}/etc/firewalld/zones/
-  cp -f ${firewalld:location}/etc/dbus-1/system.d/FirewallD.conf ${dbus:location}/etc/dbus-1/system.d/
+  cp -f ${firewalld:location}/etc/dbus-1/system.d/FirewallD.conf ${dbus:location}/share/dbus-1/system.d/
   mkdir -p ${buildout:directory}/sbin
 update-command = ${:command}
 stop-on-error = true

 [firewalld-eggs]
 recipe = zc.recipe.egg:scripts
component/leptonica/buildout.cfg

@@ -11,10 +11,13 @@ extends =
 [leptonica]
 recipe = slapos.recipe.cmmi
-url = http://www.leptonica.org/source/leptonica-1.73.tar.gz
-md5sum = 092cea2e568cada79fff178820397922
+url = http://leptonica.googlecode.com/files/leptonica-1.68.tar.gz
+md5sum = 5cd7092f9ff2ca7e3f3e73bfcd556403
 configure-options =
   --disable-static
+patch-options = -p1
+patches =
+  ${:_profile_base_location_}/leptonica-1.69-zlib-include.patch#cff3dc942075190939b407c38e0d3201
 environment =
   CPPFLAGS=-I${zlib:location}/include -I${libjpeg:location}/include -I${libpng:location}/include -I${libtiff:location}/include -I${webp:location}/include -I${giflib:location}/include
   LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${libjpeg:location}/lib -Wl,-rpath=${libjpeg:location}/lib -L${libpng:location}/lib -Wl,-rpath=${libpng:location}/lib -L${libtiff:location}/lib -Wl,-rpath=${libtiff:location}/lib -L${webp:location}/lib -Wl,-rpath=${webp:location}/lib -L${giflib:location}/lib -Wl,-rpath=${giflib:location}/lib
component/leptonica/leptonica-1.69-zlib-include.patch (new file)
This patch is originally taken from:
http://leptonica.googlecode.com/issues/attachment?aid=560001000&name=zlib-include.patch&token=m2sugSYxB4xwAuNgrKXyHTxBYNg%3A1337345966091
To fix the following issue with leptonica:
http://code.google.com/p/leptonica/issues/detail?id=56
diff -Nurd -x'*~' leptonica-1.68.orig/src/pngio.c leptonica-1.68/src/pngio.c
--- leptonica-1.68.orig/src/pngio.c 2011-02-01 00:41:12.000000000 -0500
+++ leptonica-1.68/src/pngio.c 2011-07-09 09:17:17.000000000 -0400
@@ -108,6 +108,10 @@
#include "png.h"
+#ifdef HAVE_LIBZ
+#include "zlib.h"
+#endif
+
/* ----------------Set defaults for read/write options ----------------- */
/* strip 16 bpp --> 8 bpp on reading png; default is for stripping */
static l_int32 var_PNG_STRIP_16_TO_8 = 1;
component/mariadb/buildout.cfg

@@ -22,9 +22,9 @@ parts =
 [mariadb]
 recipe = slapos.recipe.cmmi
-version = 10.1.18
+version = 10.1.19
 url = https://downloads.mariadb.org/f/mariadb-${:version}/source/mariadb-${:version}.tar.gz/from/http:/ftp.osuosl.org/pub/mariadb/?serve
-md5sum = 173b88ab54bdd1fc51483b6b26bef849
+md5sum = e22e491eb3766bc3151cc0e96a7531c2
 location = ${buildout:parts-directory}/${:_buildout_section_name_}
 patch-options = -p0
 patches =
component/nginx/buildout.cfg

@@ -87,6 +87,8 @@ git-executable = ${git:location}/bin/git
 configure-options=
   --with-ipv6
   --with-http_ssl_module
+  --with-http_v2_module
+  --with-http_gzip_static_module
   --with-ld-opt="-L ${zlib:location}/lib -L ${openssl:location}/lib -L ${pcre:location}/lib -Wl,-rpath=${pcre:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${openssl:location}/lib"
   --with-cc-opt="-I ${pcre:location}/include -I ${openssl:location}/include -I ${zlib:location}/include"
   --add-module=${nginx-push-stream-module:location}
component/qemu-kvm/buildout.cfg

@@ -58,6 +58,16 @@ location = ${buildout:parts-directory}/${:_buildout_section_name_}
 <= debian-netinst-base
 arch = amd64

+[debian-amd64-squeeze-netinst.iso]
+<= debian-amd64-netinst-base
+version = 6.0.10
+md5sum = 7f82d341561035f65933da43f94d5b52
+
+[debian-amd64-wheezy-netinst.iso]
+<= debian-amd64-netinst-base
+version = 7.11.0
+md5sum = 096c1c18b44c269808bd815d58c53c8f
+
 [debian-amd64-netinst.iso]
 # Download the installer of Debian 8 (Jessie)
 <= debian-amd64-netinst-base
component/slapos/buildout.cfg

@@ -131,7 +131,7 @@ interpreter = py
 scripts = py

 [versions]
-setuptools = 19.6.2
+setuptools = 28.8.0
 slapos.rebootstrap = 3.7
 zc.buildout = 2.5.2+slapos005
 zc.recipe.egg = 2.0.3+slapos002
component/tesseract/buildout.cfg

@@ -14,8 +14,14 @@ parts =
 [tesseract]
 recipe = slapos.recipe.cmmi
-url = https://github.com/tesseract-ocr/tesseract/archive/3.04.01.tar.gz
-md5sum = 645a21effcf2825a3473849d72a7fd90
+url = http://tesseract-ocr.googlecode.com/files/tesseract-3.01.tar.gz
+md5sum = 1ba496e51a42358fb9d3ffe781b2d20a
+patch-options =
+  -p1
+patches =
+  ${:_profile_base_location_}/tesseract-3.00-gcc-4.7-build.patch#ca80db3ec489c547b03f3ee48879c1b1
+  ${:_profile_base_location_}/tesseract-3.01-remove-bom.patch#2e691858cb492b7c17d23bf0912b3d24
+  ${:_profile_base_location_}/tesseract-3.01-gcc6-ftbs.patch#f7a6140c0fe390b96fe753a70e9d59fd
 pre-configure =
   libtoolize -f -c
   aclocal -I ${libtool:location}/share/aclocal -I config
component/tesseract/tesseract-3.00-gcc-4.7-build.patch (new file)
--- tesseract-3.00/viewer/svutil.cpp.old 2012-04-11 09:34:26.168608940 +0200
+++ tesseract-3.00/viewer/svutil.cpp 2012-04-11 09:34:46.108565692 +0200
@@ -21,6 +21,7 @@
// thread/process creation & synchronization and network connection.
#include <stdio.h>
+#include <unistd.h>
#ifdef WIN32
#include <windows.h>
#include <winsock.h>
component/tesseract/tesseract-3.01-gcc6-ftbs.patch (new file)
commit 58e79a222e12280984ed19ab4d3bcac654e121fa
Author: Arnaud Fontaine <arnaud.fontaine@nexedi.com>
Date: Fri Nov 4 19:10:13 2016 +0900
Fix compilation errors.
diff --git a/cube/char_set.cpp b/cube/char_set.cpp
index 3cf4798..6b29883 100644
--- a/cube/char_set.cpp
+++ b/cube/char_set.cpp
@@ -65,13 +65,13 @@
CharSet *CharSet::Create(TessdataManager *tessdata_manager,
!tessdata_manager->SeekToStart(TESSDATA_UNICHARSET)) {
fprintf(stderr, "Cube ERROR (CharSet::Create): could not find "
"either cube or tesseract unicharset\n");
- return false;
+ return NULL;
}
FILE *charset_fp = tessdata_manager->GetDataFilePtr();
if (!charset_fp) {
fprintf(stderr, "Cube ERROR (CharSet::Create): could not load "
"a unicharset\n");
- return false;
+ return NULL;
}
// If we found a cube unicharset separate from tesseract's, load it and
@@ -84,7 +84,7 @@
CharSet *CharSet::Create(TessdataManager *tessdata_manager,
loaded = char_set->LoadSupportedCharList(charset_fp, NULL);
if (!loaded) {
delete char_set;
- return false;
+ return NULL;
}
char_set->init_ = true;
diff --git a/cube/cube_line_segmenter.cpp b/cube/cube_line_segmenter.cpp
index deee573..3f0b762 100644
--- a/cube/cube_line_segmenter.cpp
+++ b/cube/cube_line_segmenter.cpp
@@ -124,7 +124,7 @@
Pixa *CubeLineSegmenter::CrackLine(Pix *cracked_line_pix,
if (line_con_comps == NULL) {
delete []lines_pixa;
- return false;
+ return NULL;
}
// assign each conn comp to the a line based on its centroid
@@ -142,7 +142,7 @@
Pixa *CubeLineSegmenter::CrackLine(Pix *cracked_line_pix,
delete []lines_pixa;
boxaDestroy(&line_con_comps);
pixaDestroy(&line_con_comps_pix);
- return false;
+ return NULL;
}
}
@@ -413,14 +413,14 @@
Pix *CubeLineSegmenter::Pixa2Pix(Pixa *pixa, Box **dest_box,
(*dest_box) = boxCreate(min_x, min_y, max_x - min_x, max_y - min_y);
if ((*dest_box) == NULL) {
- return false;
+ return NULL;
}
// create the union pix
Pix *union_pix = pixCreate((*dest_box)->w, (*dest_box)->h, img_->d);
if (union_pix == NULL) {
boxDestroy(dest_box);
- return false;
+ return NULL;
}
// create a pix corresponding to the union of all pixs
diff --git a/cube/cube_object.cpp b/cube/cube_object.cpp
index 48bce64..b9a7113 100644
--- a/cube/cube_object.cpp
+++ b/cube/cube_object.cpp
@@ -165,7 +165,7 @@
WordAltList *CubeObject::Recognize(LangModel *lang_mod, bool word_mode) {
if (deslanted_beam_obj_ == NULL) {
fprintf(stderr, "Cube ERROR (CubeObject::Recognize): could not "
"construct deslanted BeamSearch\n");
- return false;
+ return NULL;
}
}
diff --git a/cube/word_list_lang_model.cpp b/cube/word_list_lang_model.cpp
index 18f85c1..0f7f562 100644
--- a/cube/word_list_lang_model.cpp
+++ b/cube/word_list_lang_model.cpp
@@ -74,7 +74,7 @@
LangModEdge **WordListLangModel::GetEdges(CharAltList *alt_list,
// initialize if necessary
if (init_ == false) {
if (Init() == false) {
- return false;
+ return NULL;
}
}
component/tesseract/tesseract-3.01-remove-bom.patch (new file)
The patch below removes a utf-8 BOM mark.
Avoid touching it as the BOM is invisible, and copy/pasting might not work.
It is needed because old compilers treat the BOM as garbage instead of
whitespace.
--- tesseract-3.01/ccutil/strngs.h.orig 2012-05-24 15:13:22.743808379 +0200
+++ tesseract-3.01/ccutil/strngs.h 2012-05-24 15:16:54.468858282 +0200
@@ -1,4 +1,4 @@
-/**********************************************************************
+/**********************************************************************
* File: strngs.h (Formerly strings.h)
* Description: STRING class definition.
* Author: Ray Smith
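For reference, the BOM being removed is the three-byte sequence EF BB BF at the very start of the file. A small illustrative sketch (not part of this commit) of detecting and stripping it programmatically, which is what the patch above does to ccutil/strngs.h by hand:

# Illustrative only: strip a leading UTF-8 BOM (EF BB BF) from a source file.
import codecs

def strip_utf8_bom(path):
    with open(path, 'rb') as f:
        data = f.read()
    if data.startswith(codecs.BOM_UTF8):          # b'\xef\xbb\xbf'
        with open(path, 'wb') as f:
            f.write(data[len(codecs.BOM_UTF8):])
        return True                               # BOM found and removed
    return False                                  # file was already clean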
component/vm-img/debian.cfg

@@ -29,13 +29,38 @@ late-command =
 # a DNS proxy on both IPv4 and IPv6 without translating queries to what the
 # host supports.
   dpkg -P rdnssd
+  [ $DIST != debian-squeeze ] || {
+    # Squeeze kernel (2.6.32) is too old for 9p mount points.
+    set squeeze-backports /etc/apt/sources.list.d/backports.list `
+      aptitude -F %p search '?source-package("^linux-latest-2.6$")~i'`
+    echo 'Acquire::Check-Valid-Until "false";' >/etc/apt/apt.conf.d/00AcceptExpired
+    echo deb http://archive.debian.org/debian-backports/ $1 main >$2
+    apt-get update --no-list-cleanup -o Dir::Etc::SourceList=/dev/null
+    apt-get install -y -t $1 $3
+  }
+  mount |grep -q 'on / .*\bdiscard\b' || ! type fstrim || {
+    apt-get clean
+    sync
+    fstrim -v /
+  }
 debconf.debconf =
   debconf/frontend noninteractive
   debconf/priority critical
+debian-squeeze/preseed.mirror/country = manual
+debian-squeeze/preseed.mirror/http/hostname = archive.debian.org
+debian-squeeze/preseed.mirror/http/directory = /debian
 # minimal size
 preseed.apt-setup/enable-source-repositories = false
 preseed.recommends = false
 preseed.tasks =
 packages = localepurge
+
+[debian-squeeze]
+<= debian-jessie
+x86_64.iso = debian-amd64-squeeze-netinst.iso
+
+[debian-wheezy]
+<= debian-jessie
+x86_64.iso = debian-amd64-wheezy-netinst.iso
+
 [debian-jessie]
 x86_64.iso = debian-amd64-netinst.iso
setup.py

@@ -109,8 +109,8 @@ setup(name=name,
           'firefox = slapos.recipe.firefox:Recipe',
           'fontconfig = slapos.recipe.fontconfig:Recipe',
           'free_port = slapos.recipe.free_port:Recipe',
-          'generate.mac = slapos.recipe.generatemac:Recipe',
-          'generate.password = slapos.recipe.generatepassword:Recipe',
+          'generate.mac = slapos.recipe.random:Mac',
+          'generate.password = slapos.recipe.random:Password',
           'generic.cloudooo = slapos.recipe.generic_cloudooo:Recipe',
           'generic.kumofs = slapos.recipe.generic_kumofs:Recipe',
           'generic.memcached = slapos.recipe.generic_memcached:Recipe',

@@ -162,6 +162,7 @@ setup(name=name,
           'publish-early = slapos.recipe.publish_early:Recipe',
           'publishsection = slapos.recipe.publish:PublishSection',
           'publishurl = slapos.recipe.publishurl:Recipe',
+          'random.time = slapos.recipe.random:Time',
           'readline = slapos.recipe.readline:Recipe',
           'redis.server = slapos.recipe.redis:Recipe',
           'request = slapos.recipe.request:Recipe',
slapos/recipe/dcron.py

@@ -26,8 +26,6 @@
 ##############################################################################

 import os
-from random import randint
 from slapos.recipe.librecipe import GenericBaseRecipe
 from zc.buildout import UserError

@@ -56,36 +54,21 @@ class Recipe(GenericBaseRecipe):
 class Part(GenericBaseRecipe):

-  def _options(self, options):
-    periodicity = None
-    if options.get('frequency', '') != '':
-      periodicity = options['frequency']
-    elif 'time' in options:
-      periodicity = options['time']
-    if periodicity is None and self.isTrueValue(options.get('once-a-day', False)):
-      # Migration code, to force a random value for already instanciated softwares
-      previous_periodicity = self.getValueFromPreviousRun(self.name, 'periodicity')
-      if previous_periodicity in ("0 0 * * *", '', None):
-        periodicity = "%d %d * * *" % (randint(0, 59), randint(0, 23))
-      else:
-        periodicity = previous_periodicity
-    if periodicity is None:
-      raise UserError("Missing one of 'frequency', 'once-a-day' or 'time' parameter")
-    options['periodicity'] = periodicity
-
-  def install(self):
+  def install(self):
+    try:
+      periodicity = self.options['frequency']
+    except KeyError:
+      periodicity = self.options['time']
+      try:
+        periodicity = systemd_to_cron(periodicity)
+      except Exception:
+        raise UserError("Invalid systemd calendar spec %r" % periodicity)
     cron_d = self.options['cron-entries']
     name = self.options['name']
     filename = os.path.join(cron_d, name)
     with open(filename, 'w') as part:
-      part.write('%s %s\n' % (self.options['periodicity'], self.options['command']))
+      part.write('%s %s\n' % (periodicity, self.options['command']))
     return [filename]

@@ -155,3 +138,4 @@ def systemd_to_cron(spec):
       continue
     raise ValueError
   return ' '.join(spec)
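A minimal usage sketch (illustrative, not taken from this diff; option values are hypothetical) of what the rewritten Part.install() expects: frequency is a ready-made crontab line written verbatim, while time would instead be passed through systemd_to_cron(), which joins the five cron fields with spaces and raises on specs it cannot express (the deleted tests exercised invalid cases such as "24:0" and "0:60").

# Hypothetical option values, standing in for self.options in Part.install().
options = {
    'cron-entries': '/tmp/cron.d',   # directory receiving one file per entry
    'name': 'backup',
    'command': '/opt/backup.sh',
    'frequency': '0 3 * * *',        # raw crontab spec, used as-is
}

# 'frequency' takes precedence; a 'time' value would go through
# systemd_to_cron() and UserError would be raised if conversion fails.
periodicity = options.get('frequency') or options['time']
entry = '%s %s\n' % (periodicity, options['command'])
print(entry)  # -> "0 3 * * * /opt/backup.sh"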
slapos/recipe/erp5testnode/__init__.py

@@ -58,6 +58,7 @@ class Recipe(GenericBaseRecipe):
         "\npath_list = %s" % ",".join(software_path_list)
     CONFIG['computer_id'] = self.buildout['slap-connection']['computer-id']
     CONFIG['server_url'] = self.buildout['slap-connection']['server-url']
+    CONFIG['frontend_url'] = self.buildout['testnode-frontend']['connection-secure_access']
     configuration_file = self.createFile(
       self.options['configuration-file'],
       self.substituteTemplate(
slapos/recipe/erp5testnode/template/erp5testnode.cfg.in

@@ -18,6 +18,7 @@ httpd_port = %(httpd_port)s
 httpd_software_access_port = %(httpd_software_access_port)s
 computer_id = %(computer_id)s
 server_url = %(server_url)s
+frontend_url = %(frontend_url)s

 # Binaries
 git_binary = %(git_binary)s
slapos/recipe/erp5testnode/template/httpd.conf.in

@@ -22,7 +22,6 @@ Header set Access-Control-Allow-Origin "*"
 # List of modules
 LoadModule unixd_module modules/mod_unixd.so
 LoadModule access_compat_module modules/mod_access_compat.so
 LoadModule authz_core_module modules/mod_authz_core.so
 LoadModule authz_host_module modules/mod_authz_host.so
 LoadModule log_config_module modules/mod_log_config.so

@@ -68,20 +67,21 @@ SSLProxyEngine On
   <Directory />
     Options Indexes FollowSymLinks
     IndexOptions FancyIndexing
-    order allow,deny
-    Allow from All
+    Require all granted
   </Directory>
 </VirtualHost>

+# IMPORTANT NOTE: It is very important to deny access by default, otherwise this
+# open access to private repositories. If someone would like to open access to
+# some code, it would be needed to explicitely add a .htaccess file.
 Listen [%(ip)s]:%(software_access_port)s
 <VirtualHost *:%(software_access_port)s>
   SSLEngine on
   RewriteRule (.*) http://[%(ip)s]:%(software_access_port)s/VirtualHostBase/https/[%(ip)s]:%(software_access_port)s/VirtualHostRoot/$1 [L,P]
-  DocumentRoot "%(testnode_software_directory)s"
+  DocumentRoot "%(testnode_srv_directory)s"
   <Directory />
     AllowOverride FileInfo AuthConfig
     Options FollowSymLinks
     IndexOptions FancyIndexing
-    order allow,deny
-    Allow from All
+    Require all denied
   </Directory>
 </VirtualHost>
slapos/recipe/generatemac.py (deleted)

##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import random
import os

from slapos.recipe.librecipe import GenericBaseRecipe


class Recipe(GenericBaseRecipe):

  def __init__(self, buildout, name, options):
    if os.path.exists(options['storage-path']):
      open_file = open(options['storage-path'], 'r')
      options['mac-address'] = open_file.read()
      open_file.close()

    if options.get('mac-address', '') == '':
      # First octet has to represent a locally administered address
      octet_list = [254] + [random.randint(0x00, 0xff) for x in range(5)]
      options['mac-address'] = ':'.join(['%02x' % x for x in octet_list])

    return GenericBaseRecipe.__init__(self, buildout, name, options)

  def install(self):
    open_file = open(self.options['storage-path'], 'w')
    open_file.write(self.options['mac-address'])
    open_file.close()

    return [self.options['storage-path']]
slapos/recipe/librecipe/generic.py

@@ -26,7 +26,6 @@
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 #
 ##############################################################################
-import ConfigParser
 import io
 import logging
 import os

@@ -283,18 +282,3 @@ class GenericBaseRecipe(object):
     except:
       shutil.rmtree(destination)
       raise
-
-  def getValueFromPreviousRun(self, section, parameter):
-    """
-    Returns the value of a parameter from a previous run, if it exists.
-    Otherwise, returns None
-    """
-    if os.path.exists(self.buildout['buildout']['installed']):
-      with open(self.buildout['buildout']['installed']) as config_file:
-        try:
-          parser = ConfigParser.RawConfigParser()
-          parser.readfp(config_file)
-          return parser.get(section, parameter)
-        except:
-          pass
-    return None
\ No newline at end of file
slapos/recipe/notifier.py

@@ -90,6 +90,9 @@ class Notify(GenericBaseRecipe):
       parameters.extend(notification_url.split(' '))
       parameters.extend(['--executable', executable])
     # For a more verbose mode, writing feed items for any action
+    instance_root_name = instance_root_name or self.options.get('instance-root-name', None)
+    log_url = log_url or self.options.get('log-url', None)
+    status_item_directory = status_item_directory or self.options.get('status-item-directory', None)
     if instance_root_name and log_url and status_item_directory:
       parameters.extend(['--instance-root-name', instance_root_name,
slapos/recipe/pbs.py

@@ -82,7 +82,7 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
          LC_ALL=C
          export LC_ALL
          RDIFF_BACKUP=%(rdiffbackup_binary)s
-         until $RDIFF_BACKUP \\
+         $RDIFF_BACKUP \\
               --remote-schema %(remote_schema)s \\
               --restore-as-of now \\
               --force \\

@@ -90,7 +90,6 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
               %(remote_dir)s; do
            echo "repeating rdiff-backup..."
            sleep 10
          done
          """)

        template_dict = {

@@ -122,7 +121,7 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
            exit 1
          }
-         trap sigint SIGINT  # we can CTRL-C for ease of debugging
+         trap sigint INT  # we can CTRL-C for ease of debugging

          LC_ALL=C
          export LC_ALL

@@ -137,76 +136,75 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
(re-indentation of the retry/verification block; the shell logic below is otherwise unchanged)
          CANTFIND_FILE=$TMPDIR/$$.rdiff_cantfind
          SUCCEEDED=false
          while ! $SUCCEEDED; do
            # not using --fix-corrupted can lead to an infinite loop
            # in case of manual changes to the backup repository.
            CORRUPTED_ARGS=""
            if [ "$1" = "--fix-corrupted" ]; then
              VERIFY=$($RDIFF_BACKUP --verify $BACKUP_DIR 2>&1 >/dev/null)
              echo "$VERIFY" | egrep "$CORRUPTED_MSG" | sed "s/$CORRUPTED_MSG//g" > $CORRUPTED_FILE
              # Sometimes --verify reports this spurious warning:
              echo "$VERIFY" | egrep "$CANTFIND_MSG" | sed "s/$CANTFIND_MSG\(.*\),/--always-snapshot \'\\1'/g" > $CANTFIND_FILE
              # There can be too many files, better not to provide them through separate command line parameters
              CORRUPTED_ARGS="--always-snapshot-fromfile $CORRUPTED_FILE --always-snapshot-fromfile $CANTFIND_FILE"
              if [ -s "$CORRUPTED_FILE" -o -s "$CANTFIND_FILE" ]; then
                echo Retransmitting $(cat "$CORRUPTED_FILE" "$CANTFIND_FILE" | wc -l) corrupted/missing files
              else
                echo "No corrupted or missing files to retransmit"
              fi
            fi
            $RDIFF_BACKUP \\
              $CORRUPTED_ARGS \\
              --remote-schema %(remote_schema)s \\
              %(remote_dir)s \\
              $BACKUP_DIR
            RDIFF_BACKUP_STATUS=$?
            [ "$CORRUPTED_ARGS" ] && rm -f "$CORRUPTED_FILE" "$CANTFIND_FILE"
            if [ ! $RDIFF_BACKUP_STATUS -eq 0 ]; then
              # Check the backup, go to the last consistent backup, so that next
              # run will be okay.
              echo "Checking backup directory..."
              $RDIFF_BACKUP --check-destination-dir $BACKUP_DIR
              if [ ! $? -eq 0 ]; then
                # Here, two possiblities:
                if [ is_first_backup ]; then
                  continue
                  # The first backup failed, and check-destination as well.
                  # we may want to remove the backup.
                else
                  continue
                  # The backup command has failed, while transferring an increment, and check-destination as well.
                  # XXX We may need to publish the failure and ask the the equeue, re-run this script again,
                  # instead do a push to the clone.
                fi
            else
              # Everything's okay, cleaning up...
              $RDIFF_BACKUP --remove-older-than %(remove_backup_older_than)s --force $BACKUP_DIR
            fi

            SUCCEEDED=true

            if [ -e %(backup_signature)s ]; then
              cd $BACKUP_DIR
              find -type f ! -name backup.signature ! -wholename "./rdiff-backup-data/*" -print0 | xargs -P4 -0 sha256sum | LC_ALL=C sort -k 66 > ../proof.signature
              cmp backup.signature ../proof.signature || SUCCEEDED=false
              diff -ruw backup.signature ../proof.signature > ../backup.diff
              # XXX If there is a difference on the backup, we should publish the
              # failure and ask the equeue, re-run this script again,
              # instead do a push it to the clone.
            fi

            $SUCCEEDED || find $BACKUP_DIR -name rdiff-backup.tmp.* -exec rm -rf {} \;
          done
          """)

        template_dict = {

@@ -305,6 +303,7 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
              title=entry.get('title', slave_id),
              notification_url=entry['notify'] or '',
              feed_url='%s/get/%s' % (self.options['notifier-url'], entry['notification-id']),
+             max_run=self.options.get('pull-push-maximum-run', 1),
              pidfile=os.path.join(self.options['run-directory'], '%s.pid' % slave_id),
              instance_root_name=self.options.get('instance-root-name', None),
              log_url=self.options.get('log-url', None),
slapos/recipe/generatepassword.py → slapos/recipe/random.py

 # vim: set et sts=2:
 ##############################################################################
 #
-# Copyright (c) 2012 Vifib SARL and Contributors. All Rights Reserved.
+# Copyright (c) 2016 Vifib SARL and Contributors. All Rights Reserved.
 #
 # WARNING: This program as such is intended to be used by professional
 # programmers who take the whole responsibility of assessing all potential

@@ -26,16 +25,61 @@
 #
 ##############################################################################
+"""
+Collects various random generators to be used in
+buildout Software Releases and Instances developments.
+"""
+from __future__ import absolute_import

 import errno
 import os
 import random
 import string

+from slapos.recipe.librecipe import GenericBaseRecipe
+
+
+class Time(object):
+  """Generate a random time from a 24h time clock"""
+
+  def __init__(self, buildout, name, options):
+    self.name = name
+    self.buildout = buildout
+    self.options = options
+    self.options['time'] = "%d:%d" % (random.randint(0, 23), random.randint(0, 59))
+
+  def install(self):
+    pass
+
+  update = install
+
+
+class Mac(GenericBaseRecipe):
+
+  def __init__(self, buildout, name, options):
+    if os.path.exists(options['storage-path']):
+      open_file = open(options['storage-path'], 'r')
+      options['mac-address'] = open_file.read()
+      open_file.close()
+
+    if options.get('mac-address', '') == '':
+      # First octet has to represent a locally administered address
+      octet_list = [254] + [random.randint(0x00, 0xff) for x in range(5)]
+      options['mac-address'] = ':'.join(['%02x' % x for x in octet_list])
+
+    return GenericBaseRecipe.__init__(self, buildout, name, options)
+
+  def install(self):
+    open_file = open(self.options['storage-path'], 'w')
+    open_file.write(self.options['mac-address'])
+    open_file.close()
+
+    return [self.options['storage-path']]
+
+
 def generatePassword(length):
   return ''.join(random.SystemRandom().sample(string.ascii_lowercase, length))


-class Recipe(object):
+class Password(object):
   """Generate a password that is only composed of lowercase letters

   This recipe only makes sure that ${:passwd} does not end up in `.installed`
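As an aside, a short sketch (not from the commit) of why 254 (0xfe) is used as the first octet in the Mac recipe above: bit 0x02 of the first octet marks the address as locally administered and bit 0x01 cleared keeps it unicast, so the generated address cannot clash with a vendor-assigned MAC.

# Illustrative re-implementation of the address generation done by the Mac
# recipe above; not part of the commit itself.
import random

def random_local_mac():
    # 0xfe = 1111 1110: locally administered (0x02 set), unicast (0x01 clear)
    octets = [0xfe] + [random.randint(0x00, 0xff) for _ in range(5)]
    return ':'.join('%02x' % o for o in octets)

print(random_local_mac())  # e.g. "fe:3a:91:0c:77:25"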
slapos/recipe/seleniumrunner/erp5functionaltestreporthandler.py

@@ -24,6 +24,7 @@
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 #
 #############################################################################
+from __future__ import absolute_import

 import re
 import urlparse
slapos/test/recipe/test_dcron.py

-import os
-import sys
 import unittest
-from textwrap import dedent

-from slapos.recipe import dcron
 from slapos.recipe.dcron import systemd_to_cron


 class TestDcron(unittest.TestCase):

@@ -40,72 +36,3 @@ class TestDcron(unittest.TestCase):
     _("1-0"); _("1-32"); _("1-14/18")
     _("24:0"); _("8/16:0")
     _("0:60"); _("0:15/45")
-
-  def setUp(self):
-    self.installed_file = './.installed.cfg'
-
-  def tearDown(self):
-    if os.path.exists(self.installed_file):
-      os.unlink(self.installed_file)
-
-  def new_recipe(self, extra_options=None, **kw):
-    buildout = {
-      'buildout': {
-        'bin-directory': '',
-        'find-links': '',
-        'allow-hosts': '',
-        'develop-eggs-directory': '',
-        'eggs-directory': '',
-        'python': 'testpython',
-        'installed': '.installed.cfg',
-      },
-      'testpython': {
-        'executable': sys.executable,
-      },
-      'slap-connection': {
-        'computer-id': '',
-        'partition-id': '',
-        'server-url': '',
-        'software-release-url': '',
-      }
-    }
-    options = {
-      'cron-entries': '.cron',
-      'name': 'test',
-      'command': 'true',
-    }
-    if isinstance(extra_options, dict):
-      options.update(extra_options)
-    options.update(kw)
-    return dcron.Part(buildout=buildout, name='cron-entry-test', options=options)
-
-  def test_onceADayIsOverwrittenIfGivenFrequency(self):
-    parameter_dict = {'once-a-day': True}
-    recipe = self.new_recipe(parameter_dict)
-    random_periodicity = recipe.options['periodicity']
-
-    parameter_dict['frequency'] = '0 1 * * *'
-    recipe = self.new_recipe(parameter_dict)
-    new_periodicity = recipe.options['periodicity']
-
-    self.assertEqual(new_periodicity, '0 1 * * *')
-    self.assertNotEqual(random_periodicity, new_periodicity)
-
-  def test_periodicityNeverChangeIfOnceADay(self):
-    parameter_dict = {'once-a-day': True}
-    periodicity = None
-    for _ in range(5):
-      recipe = self.new_recipe(parameter_dict)
-      recipe_periodicity = recipe.options['periodicity']
-      if periodicity is not None:
-        self.assertEqual(periodicity, recipe_periodicity)
-      else:
-        periodicity = recipe_periodicity
-        with open(recipe.buildout['buildout']['installed'], 'w') as file:
-          file.write(dedent("""
-            [cron-entry-test]
-            periodicity = %s
-            """ % periodicity))
-
-if __name__ == '__main__':
-  unittest.main()
software/apache-frontend/common.cfg

@@ -21,9 +21,6 @@ extends =
   ../../stack/monitor/buildout.cfg

 parts +=
   slapos-cookbook
-  slapos-toolbox
-  eggs
   template
   template-apache-frontend
   template-apache-replicate

@@ -40,39 +37,11 @@ parts +=
   proxy-by-url
   http-proxy

-[slapos-toolbox]
-recipe = zc.recipe.egg
-eggs =
-  ${lxml-python:egg}
-  ${python-cryptography:egg}
-  ${pycurl:egg}
-  slapos.toolbox
-scripts =
-  killpidfromfile
-  onetimedownload
-
-[eggs]
-recipe = zc.recipe.egg
-eggs =
-  ${python-cffi:egg}
-  ${python-cryptography:egg}
-  ${lxml-python:egg}
-
 # Extent extra-eggs.
 [extra-eggs]
 eggs +=
   websockify
   slapos.cookbook
   slapos.toolbox
   erp5.util
   cns.recipe.symlink
   collective.recipe.template
   plone.recipe.command
   ${pycurl:egg}

-[check-recipe]
-recipe = plone.recipe.command
-stop-on-error = true
-update-command = ${:command}
-command =
-  grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link &&
-
 [template]
 recipe = slapos.recipe.template

@@ -84,7 +53,7 @@ mode = 0644
 [template-apache-frontend]
 recipe = slapos.recipe.template
 url = ${:_profile_base_location_}/instance-apache-frontend.cfg
-md5sum = 3e52cdd1fba381cdb98b438589d1c4ab
+md5sum = 6d437f8a3836484d42bf9bf8d747e201
 output = ${buildout:directory}/template-apache-frontend.cfg
 mode = 0644

@@ -97,13 +66,13 @@ mode = 0644
 [template-slave-list]
 recipe = slapos.recipe.build:download
 url = ${:_profile_base_location_}/templates/apache-custom-slave-list.cfg.in
-md5sum = ee18e498f2868735e0c0ddf209454c37
+md5sum = 6828096d9ec4333b8c72a2e2ab768ea0
 mode = 640

 [template-slave-configuration]
 recipe = slapos.recipe.build:download
-url = ${:_profile_base_location_}/templates/slave-virtualhost.conf.in
-md5sum = a7ad2e83b7f919fc45a7ef1e64344dcb
+url = ${:_profile_base_location_}/templates/custom-virtualhost.conf.in
+md5sum = d103143e5d50682bd5ad43117d82e2fa
 mode = 640

 [template-replicate-publish-slave-information]

@@ -115,25 +84,13 @@ mode = 640
 [template-apache-frontend-configuration]
 recipe = slapos.recipe.build:download
 url = ${:_profile_base_location_}/templates/apache.conf.in
-md5sum = 8ff17b2a0d0495ec935e378f3976de71
-mode = 640
-
-[template-apache-cached-configuration]
-recipe = slapos.recipe.build:download
-url = ${:_profile_base_location_}/templates/apache_cached.conf.in
-md5sum = 86dcbdc0874d3c1b8971b76b70c4df65
-mode = 640
-
-[template-rewrite-cached]
-recipe = slapos.recipe.build:download
-url = ${:_profile_base_location_}/templates/apache_cached_rewrite.txt.in
-md5sum = 2f30af4f9da340c2b0618599da03ed4b
+md5sum = 7d5f0f38e4dd81ff26f2499890b13315
 mode = 640

 [template-custom-slave-list]
 recipe = slapos.recipe.build:download
 url = ${:_profile_base_location_}/templates/apache-default-slave-list.cfg.in
-md5sum = 9362384cd80727987b34c7746a6de196
+md5sum = 5252c0db72b57ce6debb9d4fb4e706a1
 mode = 640

 [template-not-found-html]

@@ -152,7 +109,7 @@ mode = 640
 [template-default-slave-virtualhost]
 recipe = slapos.recipe.build:download
 url = ${:_profile_base_location_}/templates/default-virtualhost.conf.in
-md5sum = 8975fd41fae2dcac92e18df3c6375f9a
+md5sum = e5ed71c5e22ab91e33a71bd09879e23c
 mode = 640

 [template-cached-slave-virtualhost]

@@ -200,6 +157,25 @@ filename = storage.config.jinja2
 download-only = true
 mode = 0644

+# NGINX Configuration
+[template-nginx-configuration]
+recipe = slapos.recipe.template
+url = ${:_profile_base_location_}/templates/nginx.cfg.in
+md5sum = 18633ce55e53340efa1ba7693aac4152
+output = ${buildout:directory}/template-nginx.cfg.in
+mode = 0644
+
+[template-nginx-eventsource-slave-virtualhost]
+recipe = slapos.recipe.build:download
+url = ${:_profile_base_location_}/templates/nginx-eventsource-slave.conf.in
+md5sum = a5186f666acb2f040ede04c91e60408f
+mode = 0644
+
+[template-nginx-notebook-slave-virtualhost]
+recipe = slapos.recipe.build:download
+url = ${:_profile_base_location_}/templates/nginx-notebook-slave.conf.in
+md5sum = 82d74a7f2aceb2b4a7acc6259291b7f2
+mode = 0644
+
 # Migrated from KVM recipe
 [http-proxy]
software/apache-frontend/instance-apache-frontend.cfg
(diff collapsed, not shown)
software/apache-frontend/instance-slave-apache-input-schema.json

@@ -34,7 +34,7 @@
       "description": "Type of slave. If redirect, the slave will redirect to the given url. If zope, the rewrite rules will be compatible with Virtual Host Monster",
       "type": "string",
       "default": "",
-      "enum": ["", "zope", "redirect"]
+      "enum": ["", "zope", "redirect", "notebook", "eventsource"]
     },
     "path": {
software/apache-frontend/software.cfg

@@ -13,6 +13,21 @@ rdiff-backup = 1.0.5
 slapos.recipe.template = 2.8
 slapos.toolbox = 0.61
 smmap = 0.9.0
+numpy = 1.11.2
+websockify = 0.8.0
+
+# Required by:
+# slapos.toolbox==0.61
+dnspython = 1.15.0
+
+# Required by:
+# slapos.toolbox==0.61
+erp5.util = 0.4.46
+
+# Required by:
+# slapos.toolbox==0.61
+passlib = 1.6.5

 # Required by:
 # slapos.toolbox = 0.61
software/apache-frontend/templates/apache-custom-slave-list.cfg.in
(diff collapsed, not shown)
software/apache-frontend/templates/apache.conf.in

@@ -7,6 +7,8 @@ ServerName {{ domain }}
 DocumentRoot {{ document_root }}
 ServerRoot {{ instance_home }}

+{{ "Listen %s:%s" % (ipv4_addr, cached_port) }}
+{{ "Listen %s:%s" % (ipv4_addr, ssl_cached_port) }}
 {% for ip in (ipv4_addr, "[%s]" % ipv6_addr) -%}
 {% for port in (http_port, https_port) -%}
 {{ "Listen %s:%s" % (ip, port) }}

@@ -138,5 +140,9 @@ NameVirtualHost *:{{ http_port }}
 NameVirtualHost *:{{ https_port }}
 include {{ slave_configuration_directory }}/*.conf

+NameVirtualHost *:{{ cached_port }}
+NameVirtualHost *:{{ ssl_cached_port }}
+include {{ slave_with_cache_configuration_directory }}/*.conf
+
 ErrorDocument 404 /notfound.html
 RewriteRule (.*) /notfound.html [R=404,L]
software/apache-frontend/templates/apache_cached.conf.in (deleted)
# Apache configuration file for Zope
# Automatically generated
# Basic server configuration
PidFile "{{ pid_file }}"
ServerName {{ domain }}
DocumentRoot {{ document_root }}
ServerRoot {{ instance_home }}
{{ "Listen %s:%s" % (ipv4_addr, cached_port) }}
{{ "Listen %s:%s" % (ipv4_addr, ssl_cached_port) }}
ServerAdmin {{ server_admin }}
DefaultType text/plain
TypesConfig {{ httpd_home }}/conf/mime.types
AddType application/x-compress .Z
AddType application/x-gzip .gz .tgz
# As backend is trusting REMOTE_USER header unset it always
RequestHeader unset REMOTE_USER
ServerTokens Prod
# Log configuration
ErrorLog "{{ error_log }}"
LogLevel info
# LogFormat "%h %{REMOTE_USER}i %{Host}i %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
# LogFormat "%h %{REMOTE_USER}i %{Host}i %l %u %t \"%r\" %>s %b" common
# CustomLog "{{ access_log }}" common
LogFormat "%h %l %{REMOTE_USER}i %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %D" combined
CustomLog "{{ access_log }}" combined
<Directory {{ protected_path }}>
Order Deny,Allow
Allow from {{ access_control_string }}
</Directory>
<Directory {{ document_root }}>
Order Allow,Deny
Allow from All
</Directory>
# List of modules
#LoadModule unixd_module modules/mod_unixd.so
#LoadModule access_compat_module modules/mod_access_compat.so
#LoadModule authz_core_module modules/mod_authz_core.so
LoadModule authz_host_module {{ httpd_home }}/modules/mod_authz_host.so
LoadModule log_config_module {{ httpd_home }}/modules/mod_log_config.so
LoadModule deflate_module {{ httpd_home }}/modules/mod_deflate.so
LoadModule setenvif_module {{ httpd_home }}/modules/mod_setenvif.so
LoadModule version_module {{ httpd_home }}/modules/mod_version.so
LoadModule proxy_module {{ httpd_home }}/modules/mod_proxy.so
LoadModule proxy_http_module {{ httpd_home }}/modules/mod_proxy_http.so
LoadModule ssl_module {{ httpd_home }}/modules/mod_ssl.so
LoadModule mime_module {{ httpd_home }}/modules/mod_mime.so
LoadModule dav_module {{ httpd_home }}/modules/mod_dav.so
LoadModule dav_fs_module {{ httpd_home }}/modules/mod_dav_fs.so
LoadModule negotiation_module {{ httpd_home }}/modules/mod_negotiation.so
LoadModule rewrite_module {{ httpd_home }}/modules/mod_rewrite.so
LoadModule headers_module {{ httpd_home }}/modules/mod_headers.so
LoadModule cache_module {{ httpd_home }}/modules/mod_cache.so
LoadModule mem_cache_module {{ httpd_home }}/modules/mod_mem_cache.so
LoadModule antiloris_module {{ httpd_home }}/modules/mod_antiloris.so
# The following directives modify normal HTTP response behavior to
# handle known problems with browser implementations.
BrowserMatch "Mozilla/2" nokeepalive
BrowserMatch ".*MSIE.*" nokeepalive ssl-unclean-shutdown \
downgrade-1.0 force-response-1.0
BrowserMatch "RealPlayer 4\.0" force-response-1.0
BrowserMatch "Java/1\.0" force-response-1.0
BrowserMatch "JDK/1\.0" force-response-1.0
# The following directive disables redirects on non-GET requests for
# a directory that does not include the trailing slash. This fixes a
# problem with Microsoft WebFolders which does not appropriately handle
# redirects for folders with DAV methods.
# Same deal with Apple's DAV filesystem and Gnome VFS support for DAV.
BrowserMatch "Microsoft Data Access Internet Publishing Provider" redirect-carefully
BrowserMatch "MS FrontPage" redirect-carefully
BrowserMatch "^WebDrive" redirect-carefully
BrowserMatch "^WebDAVFS/1.[0123]" redirect-carefully
BrowserMatch "^gnome-vfs" redirect-carefully
BrowserMatch "^XML Spy" redirect-carefully
BrowserMatch "^Dreamweaver-WebDAV-SCM1" redirect-carefully
# Cache directives
CacheEnable mem /
CacheDefaultExpire 3600
MCacheSize 8192
MCacheMaxObjectCount 1000
MCacheMaxObjectSize 8192
MCacheRemovalAlgorithm LRU
# Deflate
AddOutputFilterByType DEFLATE text/html text/plain text/xml text/css text/javascript application/x-javascript application/javascript
BrowserMatch ^Mozilla/4 gzip-only-text/html
BrowserMatch ^Mozilla/4\.0[678] no-gzip
BrowserMatch \bMSIE !no-gzip !gzip-only-text/html
# SSL Configuration
SSLCertificateFile {{ login_certificate }}
SSLCertificateKeyFile {{ login_key }}
SSLRandomSeed startup builtin
SSLRandomSeed connect builtin
SSLSessionCache shmcb:/{{ httpd_mod_ssl_cache_directory }}/ssl_scache(512000)
SSLSessionCacheTimeout 300
SSLRandomSeed startup /dev/urandom 256
SSLRandomSeed connect builtin
SSLProtocol all -SSLv2 -SSLv3
SSLCipherSuite ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:HIGH:!aNULL:!MD5
SSLHonorCipherOrder on
<FilesMatch "\.(cgi|shtml|phtml|php)$">
SSLOptions +StdEnvVars
</FilesMatch>
# Accept proxy to sites using self-signed SSL certificates
SSLProxyCheckPeerCN off
SSLProxyCheckPeerExpire off
NameVirtualHost *:{{ cached_port }}
NameVirtualHost *:{{ ssl_cached_port }}
include {{ slave_with_cache_configuration_directory }}/*.conf
ErrorDocument 404 /notfound.html
RewriteRule (.*) /notfound.html [R=404,L]
software/apache-frontend/templates/apache_cached_rewrite.txt.in (deleted)
{% for server_tuple in server_dict.items() -%}
{{ "%s %s" % server_tuple }}
{% endfor -%}
software/apache-frontend/templates/slave-virtualhost.conf.in → software/apache-frontend/templates/custom-virtualhost.conf.in

 <VirtualHost *:{{ https_port }}>
-{{ apache_custom_https }}
+{{ slave_parameter.get('apache_custom_https', '') }}
 </VirtualHost>

 <VirtualHost *:{{ http_port }}>
-{{ apache_custom_http }}
+{{ slave_parameter.get('apache_custom_https', '') }}
 </VirtualHost>
software/apache-frontend/templates/default-virtualhost.conf.in
View file @
a4cd4d27
{% set TRUE_VALUES = ['y', 'yes', '1', 'true'] -%}
{% set disable_no_cache_header = ('' ~ slave_parameter.get('disable-no-cache-request', '')).lower() in TRUE_VALUES -%}
{% set disable_via_header = ('' ~ slave_parameter.get('disable-via-header', '')).lower() in TRUE_VALUES -%}
{%
-
set TRUE_VALUES = ['y', 'yes', '1', 'true'] -%}
{%
-
set disable_no_cache_header = ('' ~ slave_parameter.get('disable-no-cache-request', '')).lower() in TRUE_VALUES -%}
{%
-
set disable_via_header = ('' ~ slave_parameter.get('disable-via-header', '')).lower() in TRUE_VALUES -%}
{%- set prefer_gzip = ('' ~ slave_parameter.get('prefer-gzip-encoding-to-backend', '')).lower() in TRUE_VALUES -%}
{%- set server_alias_list = slave_parameter.get('server-alias', '').split() -%}
{%- set ssl_proxy_verify = ('' ~ slave_parameter.get('ssl-proxy-verify', '')).lower() in TRUE_VALUES -%}
{%- set disabled_cookie_list = slave_parameter.get('disabled-cookie-list', '').split() -%}
{%- set https_only = ('' ~ slave_parameter.get('https-only', '')).lower() in TRUE_VALUES -%}
{%- set slave_type = slave_parameter.get('type', '') -%}
{%- set ssl_configuration_list = [('SSLCertificateFile', 'path_to_ssl_crt'),
('SSLCertificateKeyFile', 'path_to_ssl_key'),
('SSLCACertificateFile', 'path_to_ssl_ca_crt'),
('SSLCertificateChainFile', 'path_to_ssl_ca_crt')] -%}
<VirtualHost *:{{ https_port }}>
ServerName {{ slave_parameter.get('custom_domain') }}
ServerAlias {{ slave_parameter.get('custom_domain') }}
{%- if 'server-alias' in slave_parameter -%}
{% set server_alias_list = slave_parameter.get('server-alias', '').split() %}
{%- for server_alias in server_alias_list %}
{%- for server_alias in server_alias_list %}
ServerAlias {{ server_alias }}
{% endfor %}
{%- endif %}
{% endfor %}
SSLEngine on
SSLProxyEngine on
{% set ssl_proxy_verify = ('' ~ slave_parameter.get('ssl-proxy-verify', '')).lower() in TRUE_VALUES -%}
{% if ssl_proxy_verify -%}
{% if 'ssl_proxy_ca_crt' in slave_parameter -%}
SSLProxyCACertificateFile {{ slave_parameter.get('path_to_ssl_proxy_ca_crt', '') }}
...
...
@@ -29,18 +34,12 @@
SSLCipherSuite ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:HIGH:!aNULL:!MD5
SSLHonorCipherOrder on
{% set ssl_configuration_list = [('SSLCertificateFile', 'path_to_ssl_crt'),
('SSLCertificateKeyFile', 'path_to_ssl_key'),
('SSLCACertificateFile', 'path_to_ssl_ca_crt'),
('SSLCertificateChainFile', 'path_to_ssl_ca_crt')] -%}
{% for key, value in ssl_configuration_list -%}
{% if value in slave_parameter -%}
{{ ' %s' % key }} {{ slave_parameter.get(value) }}
{% endif -%}
{% endfor -%}
# One Slave two logs
ErrorLog "{{ slave_parameter.get('error_log') }}"
LogLevel info
...
...
@@ -50,28 +49,26 @@
# Rewrite part
ProxyPreserveHost On
ProxyTimeout 600
RewriteEngine On
{% if disable_via_header %}
Header unset Via
{% endif -%}
RewriteEngine On
{% if disable_no_cache_header %}
RequestHeader unset Cache-Control
RequestHeader unset Pragma
{% endif -%}
{% if 'disabled-cookie-list' in slave_parameter -%}
{% set disabled_cookie_list = slave_parameter.get('disabled-cookie-list', '').split() %}
{%- for disabled_cookie in disabled_cookie_list %}
{%- for disabled_cookie in disabled_cookie_list %}
{{' RequestHeader edit Cookie "(^%(disabled_cookie)s=[^;]*; |; %(disabled_cookie)s=[^;]*|^%(disabled_cookie)s=[^;]*$)" ""' % dict(disabled_cookie=disabled_cookie) }}
{% endfor -%}
{% endif %}
{% endfor -%}
{%- if prefer_gzip %}
RequestHeader edit Accept-Encoding "(^gzip,.*|.*, gzip,.*|.*, gzip$|^gzip$)" "gzip"
{% endif %}
{% if slave_
parameter.get('type', '')
== 'zope' -%}
{% if slave_
type
== 'zope' -%}
{% if 'default-path' in slave_parameter %}
RewriteRule ^/?$ {{ slave_parameter.get('default-path') }} [R=301,L]
{% endif -%}
...
...
@@ -79,8 +76,8 @@
# If so, let's use Virtual Host Monster rewrite
# We suppose that Apache listens to 443 (even indirectly thanks to things like iptables)
RewriteRule ^/(.*)$ {{ slave_parameter.get('https-url', slave_parameter.get('url', '')) }}/VirtualHostBase/https//%{SERVER_NAME}:443/{{ slave_parameter.get('path', '') }}/VirtualHostRoot/$1 [L,P]
{% elif slave_
parameter.get('type', '')
== 'redirect' -%}
RewriteRule
(.*) {{ slave_parameter.get('https-url', slave_parameter.get('url', ''))}}$1 [R,L]
{% elif slave_
type
== 'redirect' -%}
RewriteRule (.*) {{ slave_parameter.get('https-url', slave_parameter.get('url', ''))}}$1 [R,L]
{% else -%}
{% if 'default-path' in slave_parameter %}
RewriteRule ^/?$ {{ slave_parameter.get('default-path') }} [R=301,L]
...
...
@@ -93,15 +90,11 @@
ServerName {{ slave_parameter.get('custom_domain') }}
ServerAlias {{ slave_parameter.get('custom_domain') }}
{%- if 'server-alias' in slave_parameter %}
{% set server_alias_list = slave_parameter.get('server-alias', '').split() %}
{%- for server_alias in server_alias_list %}
{%- for server_alias in server_alias_list %}
ServerAlias {{ server_alias }}
{% endfor -%}
{% endif %}
{% endfor -%}
SSLProxyEngine on
{% set ssl_proxy_verify = ('' ~ slave_parameter.get('ssl-proxy-verify', '')).lower() in TRUE_VALUES -%}
{% if ssl_proxy_verify -%}
{% if 'ssl_proxy_ca_crt' in slave_parameter -%}
SSLProxyCACertificateFile {{ slave_parameter.get('path_to_ssl_proxy_ca_crt', '') }}
...
...
@@ -132,28 +125,24 @@
RequestHeader unset Pragma
{% endif -%}
{% if 'disabled-cookie-list' in slave_parameter -%}
{% set disabled_cookie_list = slave_parameter.get('disabled-cookie-list', '').split() %}
{%- for disabled_cookie in disabled_cookie_list %}
{%- for disabled_cookie in disabled_cookie_list %}
{{' RequestHeader edit Cookie "(^%(disabled_cookie)s=[^;]*; |; %(disabled_cookie)s=[^;]*|^%(disabled_cookie)s=[^;]*$)" ""' % dict(disabled_cookie=disabled_cookie) }}
{% endfor -%}
{% endif %}
{% endfor -%}
{%- if prefer_gzip %}
RequestHeader edit Accept-Encoding "(^gzip,.*|.*, gzip,.*|.*, gzip$|^gzip$)" "gzip"
{% endif %}
# Next line is forbidden and people who copy it will be hanged short
{% set https_only = ('' ~ slave_parameter.get('https-only', '')).lower() in TRUE_VALUES -%}
{% if https_only -%}
# Not using HTTPS? Ask that guy over there.
# Dummy redirection to https. Note: will work only if https listens
# on standard port (443).
RewriteCond %{SERVER_PORT} !^{{ https_port }}$
RewriteRule ^/(.*) https://%{SERVER_NAME}/$1 [NC,R,L]
{% elif slave_parameter.get('type', '') == 'redirect' -%}
{% elif slave_type == 'redirect' -%}
RewriteRule (.*) {{slave_parameter.get('url', '')}}$1 [R,L]
{% elif slave_parameter.get('type', '') == 'zope' -%}
{% elif slave_type == 'zope' -%}
{% if 'default-path' in slave_parameter %}
RewriteRule ^/?$ {{ slave_parameter.get('default-path') }} [R=301,L]
{% endif -%}
...
...
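The Cookie-stripping rule above is easier to verify outside Apache. A minimal Python sketch of the same regular expression (the cookie name "secret" is hypothetical, and re.sub with count=1 mirrors the single replacement done by RequestHeader edit):

import re

def strip_cookie(header, name):
    # Same alternation as the template: the cookie at the start of the header,
    # in the middle, or as the only cookie.
    pattern = r'(^%(c)s=[^;]*; |; %(c)s=[^;]*|^%(c)s=[^;]*$)' % {'c': re.escape(name)}
    return re.sub(pattern, '', header, count=1)

print(strip_cookie('secret=abc; foo=1; bar=2', 'secret'))  # 'foo=1; bar=2'
print(strip_cookie('foo=1; secret=abc; bar=2', 'secret'))  # 'foo=1; bar=2'
print(strip_cookie('secret=abc', 'secret'))                # ''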
software/apache-frontend/templates/nginx-eventsource-slave.conf.in
0 → 100644
{% set url = slave_parameter.get('url') -%}
{% set https_url = slave_parameter.get('https-url', url) -%}
{% if url.startswith("http://") or url.startswith("https://") -%}
{% set upstream = url.split("/")[2] -%}
{% set https_upstream = https_url.split("/")[2] -%}
{% set protocol = url.split("/")[0] -%}
{% set https_protocol = https_url.split("/")[0] -%}
{% set proxy_pass = '%s//%s' % (protocol, slave_parameter.get('slave_reference')) -%}
{% set https_proxy_pass = '%s//https_%s' % (protocol, slave_parameter.get('slave_reference')) -%}
{%- set ssl_configuration_list = [('ssl_certificate', 'path_to_ssl_crt'),
('ssl_certificate_key', 'path_to_ssl_key')] -%}
upstream {{ slave_parameter.get('slave_reference') }} {
server {{ upstream }};
}
upstream https_{{ slave_parameter.get('slave_reference') }} {
server {{ https_upstream }};
}
server {
listen [{{ global_ipv6 }}]:{{ nginx_http_port }};
listen {{ local_ipv4 }}:{{ nginx_http_port }};
server_name {{ slave_parameter.get('custom_domain') }};
error_log {{ slave_parameter.get('error_log') }} error;
access_log {{ slave_parameter.get('access_log') }} custom;
location /pub {
push_stream_publisher;
push_stream_channels_path $arg_id;
# store messages in memory
push_stream_store_messages off;
# Message size limit
# client_max_body_size MUST be equal to client_body_buffer_size or
# you will be sorry.
client_max_body_size 16k;
client_body_buffer_size 16k;
}
location ~ /sub/(.*) {
# activate subscriber mode for this location
add_header "Access-Control-Allow-Origin" "*";
add_header 'Access-Control-Allow-Credentials' 'false';
add_header 'Access-Control-Allow-Methods' 'GET, HEAD, OPTIONS';
add_header 'Access-Control-Allow-Headers' 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since';
push_stream_subscriber eventsource;
# positional channel path
push_stream_channels_path $1;
# content-type
default_type "text/event-stream; charset=utf-8";
}
}
server {
listen [{{ global_ipv6 }}]:{{ nginx_https_port }} ssl;
listen {{ local_ipv4 }}:{{ nginx_https_port }} ssl;
server_name {{ slave_parameter.get('custom_domain') }};
error_log {{ slave_parameter.get('error_log') }} error;
access_log {{ slave_parameter.get('access_log') }} custom;
ssl on;
ssl_session_timeout 5m;
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
ssl_ciphers 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:HIGH:!aNULL:!MD5';
ssl_prefer_server_ciphers on;
ssl_session_cache shared:SSL:10m;
{% for key, value in ssl_configuration_list -%}
{% if value in slave_parameter -%}
{{ ' %s' % key }} {{ slave_parameter.get(value) }};
{% endif -%}
{% endfor %}
location /pub {
push_stream_publisher;
push_stream_channels_path $arg_id;
# store messages in memory
push_stream_store_messages off;
# Message size limit
# client_max_body_size MUST be equal to client_body_buffer_size or
# you will be sorry.
client_max_body_size 16k;
client_body_buffer_size 16k;
}
location ~ /sub/(.*) {
# activate subscriber mode for this location
add_header "Access-Control-Allow-Origin" "*";
add_header 'Access-Control-Allow-Credentials' 'false';
add_header 'Access-Control-Allow-Methods' 'GET, HEAD, OPTIONS';
add_header 'Access-Control-Allow-Headers' 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since';
push_stream_subscriber eventsource;
# positional channel path
push_stream_channels_path $1;
# content-type
default_type "text/event-stream; charset=utf-8";
}
}
{% endif -%}
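For reference, a minimal Python sketch of how a client could exercise the /pub and /sub locations rendered by this template (the host name and channel below are hypothetical, and the "requests" package is assumed). Because push_stream_store_messages is off, a subscriber has to be connected before anything is published:

import requests

BASE = 'https://eventsource.example.com'   # hypothetical rendered frontend
CHANNEL = 'demo'

def subscribe():
    # /sub/<channel> is served as text/event-stream by push_stream_subscriber
    resp = requests.get('%s/sub/%s' % (BASE, CHANNEL), stream=True)
    for line in resp.iter_lines():
        if line:
            print(line)          # e.g. "data: hello"

def publish(message):
    # /pub?id=<channel>; keep the body under the 16k client_max_body_size
    requests.post('%s/pub' % BASE, params={'id': CHANNEL}, data=message)

# run subscribe() in one process, then publish('hello') from another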
software/apache-frontend/templates/nginx-notebook-slave.conf.in
0 → 100644
{% set url = slave_parameter.get('url') -%}
{% set https_url = slave_parameter.get('https-url', url) -%}
{% if url.startswith("http://") or url.startswith("https://") -%}
{% set upstream = url.split("/")[2] -%}
{% set https_upstream = https_url.split("/")[2] -%}
{% set protocol = url.split("/")[0] -%}
{% set https_protocol = https_url.split("/")[0] -%}
{% set proxy_pass = '%s//%s' % (protocol, slave_parameter.get('slave_reference')) -%}
{% set https_proxy_pass = '%s//https_%s' % (protocol, slave_parameter.get('slave_reference')) -%}
{%- set ssl_configuration_list = [('ssl_certificate', 'path_to_ssl_crt'),
('ssl_certificate_key', 'path_to_ssl_key')] -%}
upstream {{ slave_parameter.get('slave_reference') }} {
server {{ upstream }};
}
upstream https_{{ slave_parameter.get('slave_reference') }} {
server {{ https_upstream }};
}
server {
listen [{{ global_ipv6 }}]:{{ nginx_http_port }};
listen {{ local_ipv4 }}:{{ nginx_http_port }};
server_name {{ slave_parameter.get('custom_domain') }};
error_log {{ slave_parameter.get('error_log') }} error;
access_log {{ slave_parameter.get('access_log') }} custom;
location / {
proxy_pass {{ proxy_pass }};
proxy_set_header Host $host;
}
location ~ /api/kernels/ {
proxy_pass {{ proxy_pass }};
proxy_set_header Host $host;
# websocket support
proxy_http_version 1.1;
proxy_set_header Upgrade "websocket";
proxy_set_header Connection "Upgrade";
proxy_read_timeout 86400;
}
location ~ /terminals/ {
proxy_pass {{ proxy_pass }};
proxy_set_header Host $host;
# websocket support
proxy_http_version 1.1;
proxy_set_header Upgrade "websocket";
proxy_set_header Connection "Upgrade";
proxy_read_timeout 86400;
}
}
server {
listen [{{ global_ipv6 }}]:{{ nginx_https_port }} ssl;
listen {{ local_ipv4 }}:{{ nginx_https_port }} ssl;
server_name {{ slave_parameter.get('custom_domain') }};
error_log {{ slave_parameter.get('error_log') }} error;
access_log {{ slave_parameter.get('access_log') }} custom;
ssl on;
ssl_session_timeout 5m;
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
ssl_ciphers 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:HIGH:!aNULL:!MD5';
ssl_prefer_server_ciphers on;
ssl_session_cache shared:SSL:10m;
{% for key, value in ssl_configuration_list -%}
{% if value in slave_parameter -%}
{{ ' %s' % key }} {{ slave_parameter.get(value) }};
{% endif -%}
{% endfor %}
location / {
proxy_pass {{ https_proxy_pass }};
proxy_set_header Host $host;
}
location ~ /api/kernels/ {
proxy_pass {{ https_proxy_pass }};
proxy_set_header Host $host;
# websocket support
proxy_http_version 1.1;
proxy_set_header Upgrade "websocket";
proxy_set_header Connection "Upgrade";
proxy_read_timeout 86400;
}
location ~ /terminals/ {
proxy_pass {{ https_proxy_pass }};
proxy_set_header Host $host;
# websocket support
proxy_http_version 1.1;
proxy_set_header Upgrade "websocket";
proxy_set_header Connection "Upgrade";
proxy_read_timeout 86400;
}
}
{% endif -%}
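The upstream and protocol values used by both nginx slave templates come from naive string splitting of the backend URL; a small illustration (hypothetical URL and slave reference):

# Sketch of the parsing done at the top of both nginx slave templates.
url = 'http://10.0.0.1:8080/some/path'            # hypothetical backend url
protocol = url.split('/')[0]                      # 'http:'
upstream = url.split('/')[2]                      # '10.0.0.1:8080' -> upstream block
proxy_pass = '%s//%s' % (protocol, 'SLAVE-REF')   # 'http://SLAVE-REF' -> named upstream
print('%s %s %s' % (protocol, upstream, proxy_pass))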
software/apache-frontend/templates/nginx.cfg.in
0 → 100644
daemon off; # run in the foreground so supervisord can look after it
worker_processes $${nginx-configuration:worker_processes};
pid $${nginx-configuration:pid-file};
events {
worker_connections $${nginx-configuration:worker_connections};
# multi_accept on;
}
error_log $${nginx-configuration:error_log};
http {
##
# Basic Settings
##
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
server_tokens off;
error_log $${nginx-configuration:error_log};
log_format custom '$remote_addr - $remote_user $time_local $status $body_bytes_sent "$http_referer" "$http_user_agent" $request_time';
access_log $${nginx-configuration:access_log} custom;
# server_names_hash_bucket_size 64;
# server_name_in_redirect off;
default_type application/octet-stream;
ssl_certificate $${ca-frontend:cert-file};
ssl_certificate_key $${ca-frontend:key-file};
##
# Gzip Settings
##
gzip on;
gzip_disable "msie6";
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_buffers 16 8k;
gzip_http_version 1.1;
gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript;
##
# Push stream Settings
##
push_stream_shared_memory_size 32m;
fastcgi_temp_path $${directory:varnginx} 1 2;
uwsgi_temp_path $${directory:varnginx} 1 2;
scgi_temp_path $${directory:varnginx} 1 2;
client_body_temp_path $${directory:varnginx} 1 2;
proxy_temp_path $${directory:varnginx} 1 2;
include $${nginx-configuration:slave-configuration-directory}/*.conf;
server {
listen [$${nginx-configuration:ip}]:$${nginx-configuration:plain_port};
listen $${nginx-configuration:local_ip}:$${nginx-configuration:plain_port};
## Serve an error 204 (No Content) for favicon.ico
location = /favicon.ico {
return 204;
}
location / {
root $${apache-directory:document-root};
index notfound.html;
}
}
server {
listen [$${nginx-configuration:ip}]:$${nginx-configuration:port} ssl;
listen $${nginx-configuration:local_ip}:$${nginx-configuration:port} ssl;
ssl on;
ssl_session_timeout 5m;
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
ssl_ciphers 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:HIGH:!aNULL:!MD5';
ssl_prefer_server_ciphers on;
ssl_session_cache shared:SSL:10m;
## Serve an error 204 (No Content) for favicon.ico
location = /favicon.ico {
return 204;
}
location / {
root $${apache-directory:document-root};
index notfound.html;
}
}
}
software/build-rina/build.cfg
...
...
@@ -109,6 +109,9 @@ install =
extends = /mnt/slapos/software/hellorina/software.cfg
develop = /mnt/slapos
download-cache = /mnt/download-cache
# Required for recent buildout.
extensions -=
buildout-versions
[slapos-cookbook-develop]
recipe =
setup =
...
...
@@ -118,7 +121,10 @@ install =
[versions]
slapos.cookbook =
EOF
MAKEFLAGS=-j${:smp} python -S /mnt/buildout/bin/buildout
:||{ # https://github.com/pypa/setuptools/pull/846
python -S /mnt/buildout/bin/buildout bootstrap
MAKEFLAGS=-j${:smp} bin/buildout
}
arch=`dpkg-architecture -qDEB_HOST_ARCH`
sudo sh -c "/mnt/slapos.package/playbook/roles/rina/gen-ipcm-conf
...
...
software/build-rina/software.cfg
...
...
@@ -6,7 +6,7 @@ extends =
parts =
slapos-cookbook
template
download-cache = download-cache
download-cache = ${:directory}/download-cache
# Required for recent buildout.
extensions -=
...
...
@@ -79,5 +79,6 @@ packages +=
dh-autoreconf pkg-config doxygen maven xmlto
[versions]
setuptools = 28.8.0
zc.buildout = 2.5.2+slapos005
zc.recipe.egg = 2.0.3+slapos002
software/erp5testnode/instance-default.cfg
...
...
@@ -16,11 +16,13 @@ parts =
ca-httpd-testnode
monitor-base
monitor-publish
testnode-frontend
[monitor-publish]
recipe = slapos.cookbook:publish
url = http://[$${shellinabox:ipv6}]:$${shellinabox:port}/
password = $${pwgen:passwd}
frontend-url = $${testnode-frontend:connection-secure_access}
[pwgen]
recipe = slapos.cookbook:generate.password
...
...
@@ -160,6 +162,18 @@ software = $${rootdirectory:srv}/software
shellinabox = $${rootdirectory:srv}/shellinabox
ca-dir = $${rootdirectory:srv}/ca
[testnode-frontend]
<= slap-connection
recipe = slapos.cookbook:requestoptional
name = Test Node Frontend $${testnode:test-node-title}
# XXX We have hardcoded SR URL here.
software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
slave = true
config-url = https://[$${testnode:httpd-ip}]:$${testnode:httpd-software-access-port}
config-https-only = true
#software-type = custom-personal
return = domain secure_access
[slap-parameter]
node-quantity = 1
test-suite-master-url =
...
...
software/erp5testnode/software.cfg
...
...
@@ -61,12 +61,12 @@ recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-default.cfg
output = ${buildout:directory}/template-default.cfg
mode = 0644
md5sum = 7fb3f4177dccac601d8fafd342af3c38
md5sum = 8e171816b6caef52ac75c2f8f6a69fc3
[versions]
PyXML = 0.8.5
erp5.util = 0.4.46
slapos.recipe.template = 2.7
setuptools = 19.6.2
setuptools = 28.8.0
zc.buildout = 2.5.2+slapos005
zc.recipe.egg = 2.0.3+slapos002
software/ipython_notebook/software.cfg
...
...
@@ -43,7 +43,7 @@ md5sum = d7d4a7e19d55bf14007819258bf42100
[erp5-kernel]
<= download-file-base
filename = ERP5kernel.py.jinja
md5sum = eb07520fc189dcac2d3bd2db1368b1ba
md5sum = a0d13f4f5a9cd22a7b5d2c901aa9faae
[kernel-json]
<= download-file-base
...
...
software/ipython_notebook/template/ERP5kernel.py.jinja
...
...
@@ -2,9 +2,7 @@
from ipykernel.kernelbase import Kernel
from ipykernel.kernelapp import IPKernelApp
from IPython.core.display import HTML
import requests
import json
...
...
@@ -46,8 +44,7 @@ MAGICS = {
'erp5_url': MagicInfo('erp5_url', 'url', True, False, True),
'notebook_set_reference': MagicInfo('notebook_set_reference', 'reference', True, False, True),
'notebook_set_title': MagicInfo('notebook_set_title', 'title', False, False, True),
'my_notebooks': MagicInfo('my_notebooks', '', True, True, False)
}
'my_notebooks': MagicInfo('my_notebooks', '', True, True, False)}
class ERP5Kernel(Kernel):
"""
...
...
@@ -105,7 +102,6 @@ class ERP5Kernel(Kernel):
"""
# Set attributes only for magics which have a variable to set a value to
if magic_info.variable_name:
try:
# Get the magic value recived via code from frontend
magic_value = code.split()[1]
...
...
@@ -181,8 +177,7 @@ class ERP5Kernel(Kernel):
'reference': self.reference,
'title': self.title,
'request_reference': request_reference,
},
)
})
# Set value for status_code for self object which would later be used to
# display response after statement check
...
...
@@ -311,8 +306,7 @@ class ERP5Kernel(Kernel):
data = {
'data': {mime_type: code_result},
'metadata': {}
}
'metadata': {}}
self.send_response(self.iopub_socket, 'display_data', data)
reply_content = {
...
...
@@ -320,8 +314,8 @@ class ERP5Kernel(Kernel):
# The base class increments the execution count
'execution_count': self.execution_count,
'payload': [],
'user_expressions': {},
}
'user_expressions': {}
}
return reply_content
if __name__ == '__main__':
...
...
software/jstestnode/instance.cfg.in
...
...
@@ -2,7 +2,6 @@
parts =
nginx-service
runTestSuite-instance
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
...
...
@@ -36,6 +35,7 @@ framebuffer = $${:srv}/framebuffer
recipe = slapos.recipe.template
url = ${template-runTestSuite:output}
output = $${directory:bin}/runTestSuite
buildout-directory = $${buildout:directory}
mode = 0700
[firefox-instance]
...
...
@@ -91,4 +91,4 @@ computer = $${slap_connection:computer_id}
partition = $${slap_connection:partition_id}
url = $${slap_connection:server_url}
key = $${slap_connection:key_file}
cert = $${slap_connection:cert_file}
\ No newline at end of file
cert = $${slap_connection:cert_file}
software/jstestnode/runTestSuite.in
...
...
@@ -7,6 +7,7 @@
import argparse, os, re, shutil, subprocess, sys, traceback
from erp5.util import taskdistribution
from time import gmtime, strftime
from lxml import etree
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
...
...
@@ -30,9 +31,44 @@ def main():
parser.add_argument('--node_quantity', help='ignored', type=int)
parser.add_argument('--master_url',
help='The Url of Master controling many suites')
parser.add_argument('--frontend_url',
help='The url of frontend of the test suite')
parser.add_argument('--target',
help='Target OS to run tests on',
type=str)
parser.add_argument('--target_version',
help='Target OS version to use',
type=str,)
parser.add_argument('--target_browser',
help='The desired browser of the target OS to be used. Example: Firefox if target is Android.',
type=str,)
parser.add_argument('--target_device',
help='The desired device running the target OS. Example: iPad Simulator, if target is iOS.',
type=str,)
parser.add_argument('--appium_server_auth',
help='Combination of user and token to access SauceLabs service. (i.e. user:token)',
type=str)
args = parser.parse_args()
import json
parsed_parameters = json.loads('$${instance-parameter:configuration._}')
if not getattr(args, 'target', None):
args.target = parsed_parameters.get('target', 'firefox')
if not getattr(args, 'test_suite', None):
args.test_suite = parsed_parameters.get('test-suite')
if not getattr(args, 'target_version', None):
args.target_version = parsed_parameters.get('target-version')
if not getattr(args, 'appium_server_auth', None):
args.appium_server_auth = parsed_parameters.get('appium-server-auth')
if not getattr(args, 'target_browser', None):
args.target_browser = parsed_parameters.get('target-browser')
if not getattr(args, 'target_device', None):
args.target_device = parsed_parameters.get('target-device')
is_browser_running = False
try:
test_suite_title = args.test_suite_title or args.test_suite
test_suite = args.test_suite
...
...
@@ -47,12 +83,48 @@ def main():
date = strftime("%Y/%m/%d %H:%M:%S", gmtime())
##########################
# Run all tests
##########################
firefox_binary = webdriver.firefox.firefox_binary.FirefoxBinary(firefox_path=FIREFOX_EXECUTABLE)
browser = webdriver.Firefox(firefox_binary=firefox_binary)
is_appium = False
if args.target == 'firefox':
firefox_binary = webdriver.firefox.firefox_binary.FirefoxBinary(firefox_path=FIREFOX_EXECUTABLE)
browser = webdriver.Firefox(firefox_binary=firefox_binary)
elif args.target in ['iOS', 'Android']:
# parameters for mobile emulators have different names than parameters for
# desktop OSes
is_appium = True
capabilities = {
'platformName': args.target,
'platformVersion': args.target_version,
'deviceName': args.target_device,
'browserName': args.target_browser
}
elif 'Windows' in args.target or 'OS X' in args.target:
# parameters for mobile emulators have different names than parameters for
# desktop OSes
is_appium = True
capabilities = {
'browserName': args.target_browser,
'platform': args.target,
'version': args.target_version
}
if is_appium:
if not args.appium_server_auth:
raise RuntimeError('--appium_server_auth is required.')
appium_url = "http://%s@ondemand.saucelabs.com/wd/hub" % (args.appium_server_auth)
browser = webdriver.Remote(appium_url, capabilities)
# adjust make path to testnode's frontend
full_path = '$${runTestSuite-instance:buildout-directory}/software_release/parts/%s' % parsed_parameters['test-url']
full_path = full_path.split('srv')[-1]
url = "%s%s" % (args.frontend_url, full_path)
is_browser_running = True
agent = browser.execute_script("return navigator.userAgent")
print agent
print url
...
...
@@ -62,38 +134,47 @@ def main():
By.XPATH, '//p[@id="qunit-testresult" and contains(text(), "completed")]')
))
browser.title.encode('UTF-8')
print browser.find_element_by_id("qunit-testresult").text
html_parser = etree.HTMLParser(recover=True)
body = etree.fromstring(browser.page_source.encode('UTF-8'), html_parser)
for elt in browser.find_elements_by_xpath('//ol[@id="qunit-tests"]/li'):
print ' '.join(body.xpath('//*[@id="qunit-testresult"]//text()'))
for elt in body.xpath('.//ol[@id="qunit-tests"]/li'):
test_name = '%s: %s' % (
elt.find_element_by_xpath('.//span[@class="module-name"]').text,
elt.find_element_by_xpath('.//span[@class="test-name"]').text
elt.xpath('.//span[@class="module-name"]')[0].text,
elt.xpath('.//span[@class="test-name"]')[0].text
)
print elt.get_attribute('class'), elt.find_element_by_tag_name('strong').text
print elt.get('class'), ''.join(elt.xpath('.//strong')[0].itertext())
# print elt.find_element_by_tag_name('ol').get_attribute('innerHTML')
failure = int(elt.find_element_by_xpath('.//b[@class="failed"]').text)
success = int(elt.find_element_by_xpath('.//b[@class="passed"]').text)
failure = int(elt.xpath('.//b[@class="failed"]')[0].text)
success = int(elt.xpath('.//b[@class="passed"]')[0].text)
test_line_dict[test_name] = {
'test_count': success + failure,
'error_count': 0,
'failure_count': failure,
'skip_count': 0,
'duration': int(elt.find_element_by_xpath('.//span[@class="runtime"]').text.split()[0]),
'command': elt.find_element_by_xpath('.//a[text()="Rerun"]').get_attribute('href'),
'duration': int(elt.xpath('.//span[@class="runtime"]')[0].text.split()[0]),
'command': elt.xpath('.//a[text()="Rerun"]')[0].get('href'),
'stdout': agent,
'stderr': '',
'html_test_result': elt.find_element_by_tag_name('ol').get_attribute('innerHTML')
'html_test_result': etree.tostring(elt.xpath('.//ol')[0])
}
# do quit browser asap as we have results. this is required in case of timeout of
# remote appium service which will close the test session if no command is received within
# usually 90s and thus fail this script. And it costs processing time as well
# to keep test session needlessly opened.
browser.quit()
is_browser_running = False
tool = taskdistribution.TaskDistributionTool(portal_url=args.master_url)
test_result = tool.createTestResult(revision = revision,
test_name_list = test_line_dict.keys(),
node_title = args.test_node_title,
test_title = test_suite_title,
project_title = args.project_title)
if test_result is None:
if test_result is None or not hasattr(args, 'master_url'):
return
# report test results
while 1:
...
...
@@ -116,9 +197,12 @@ def main():
stdout='')
# XXX: inform test node master of error
raise EnvironmentError(result)
finally:
browser.quit()
if is_browser_running:
# if by any chance browser is still running due to
# traceback raised make sure we cleanup
browser.quit()
if __name__ == "__main__":
main()
\ No newline at end of file
main()
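The change above moves the QUnit result scraping from live Selenium lookups to lxml over the page source. A standalone sketch of that parsing, run on a hypothetical results fragment instead of browser.page_source:

from lxml import etree

html = '''
<ol id="qunit-tests">
  <li class="pass">
    <span class="module-name">mod</span> <span class="test-name">t1</span>
    <b class="passed">2</b> <b class="failed">0</b>
    <span class="runtime">12 ms</span>
    <ol><li>assertion ok</li></ol>
  </li>
</ol>'''

body = etree.fromstring(html, etree.HTMLParser(recover=True))
for elt in body.xpath('.//ol[@id="qunit-tests"]/li'):
    name = '%s: %s' % (elt.xpath('.//span[@class="module-name"]')[0].text,
                       elt.xpath('.//span[@class="test-name"]')[0].text)
    failed = int(elt.xpath('.//b[@class="failed"]')[0].text)
    passed = int(elt.xpath('.//b[@class="passed"]')[0].text)
    duration = int(elt.xpath('.//span[@class="runtime"]')[0].text.split()[0])
    print('%s passed=%d failed=%d duration=%dms' % (name, passed, failed, duration))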
software/jstestnode/software.cfg
...
...
@@ -25,7 +25,7 @@ parts =
[instance]
recipe = slapos.recipe.template
md5sum = 25a9c895fff279b71b0dbbad6647181b
md5sum = 929a2b6cf6bb16e22e49984563547ca9
url = ${:_profile_base_location_}/instance.cfg.in
output = ${buildout:directory}/instance.cfg
mode = 0644
...
...
@@ -35,6 +35,7 @@ recipe = zc.recipe.egg
eggs =
erp5.util
selenium
${lxml-python:egg}
interpreter = pythonwitheggs
[renderjs-repository.git]
...
...
@@ -106,7 +107,7 @@ mode = 0644
[template-runTestSuite]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/runTestSuite.in
md5sum = 13a56b1b6b2d54dc27ed6570e4b5f1d7
md5sum = ef4118cb653838bf5c875c6fcac1677f
output = ${buildout:directory}/runTestSuite.in
mode = 0644
...
...
software/kvm/common.cfg
...
...
@@ -92,7 +92,7 @@ command =
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg.in
md5sum = 110df709a7c8a5c749f93663f6ab0d28
md5sum = 061604d32cc626352dc3d221bdeaf804
output = ${buildout:directory}/template.cfg
mode = 0644
...
...
software/kvm/instance.cfg.in
...
...
@@ -5,6 +5,8 @@ parts =
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
extends = ${template-resilient-templates:output}
[switch-softwaretype]
recipe = slapos.cookbook:softwaretype
default = $${:kvm}
...
...
@@ -137,7 +139,7 @@ context =
key eggs_directory buildout:eggs-directory
raw kvm_template $${dynamic-template-kvm:rendered}
raw template_kvm_export ${template-kvm-export-script:location}/${template-kvm-export-script:filename}
raw pbsready_export_template ${pbsready-export:output}
key pbsready_export_template template-pbsready-export:rendered
raw gzip_binary ${gzip:location}/bin/gzip
key slapparameter_dict slap-configuration:configuration
mode = 0644
...
...
software/neoppod/software-common.cfg
...
...
@@ -111,12 +111,12 @@ atomize = 0.2.0
dnspython = 1.14.0
ecdsa = 0.13
feedparser = 5.2.1
GitPython = 2.0.8
gitdb = 0.6.4
GitPython = 2.1.0
gitdb2 = 2.0.0
lockfile = 0.12.2
mysqlclient = 1.3.9
paramiko = 2.0.2
passlib = 1.6.5
pycrypto = 2.6.1
pycurl = 7.43.0
smmap = 0.9.0
smmap2 = 2.0.1
software/slapos-testing/software.cfg
...
...
@@ -30,6 +30,7 @@ eggs =
${lxml-python:egg}
${python-cryptography:egg}
${pycurl:egg}
dnspython
Jinja2
erp5.util
slapos.cookbook
...
...
software/slaprunner/common.cfg
...
...
@@ -53,7 +53,7 @@ parts =
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
output = ${buildout:directory}/template.cfg
md5sum = bb7e0bf9959c4437ff1e23e645315ccf
md5sum = 06107f93ebe78905c957a4c4fc4edf16
mode = 0644
[template-runner]
...
...
@@ -68,7 +68,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/template/runner-import.sh.jinja2
download-only = true
md5sum = 3cebc5d793ff1b5c786392723babc510
md5sum = 275ae222cd9a560c08748d7502824885
filename = runner-import.sh.jinja2
mode = 0644
...
...
@@ -76,7 +76,7 @@ mode = 0644
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-runner-import.cfg.in
output = ${buildout:directory}/instance-runner-import.cfg
md5sum = 91c34a55b7a45b14b0fac8b7faa202fe
md5sum = 9db9957f452bda370cb2d5cc2e833e85
mode = 0644
[template-runner-export-script]
...
...
@@ -89,10 +89,10 @@ filename = runner-export.sh.jinja2
mode = 0644
[instance-runner-export]
recipe = slapos.recipe.template
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-runner-export.cfg.in
output = ${buildout:directory}/instance-runner-export.cfg
md5sum = ec92773be8f8a2ad20dc0661d58d7717
filename = instance-runner-export.cfg.in
md5sum = 852a2ed99af566d27e5e4403334a3376
mode = 0644
[template-resilient]
...
...
@@ -125,7 +125,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/httpd_conf.in
download-only = true
md5sum = 2e8440fa4b589be649a72108faec7745
md5sum = 112cf8ada9e5c4172fa6fc464df0fd3a
filename = httpd_conf.in
mode = 0644
...
...
@@ -201,7 +201,7 @@ mode = 0644
recipe = hexagonit.recipe.download
ignore-existing = true
download-only = true
md5sum = 922498a301ab3defe412602f626e02ec
md5sum = 2451072826a9ad9425d62c9e9c7f6284
url = ${:_profile_base_location_}/template/${:filename}
filename = resilient_software_release_information.py.in
mode = 0644
...
...
@@ -232,5 +232,6 @@ eggs +=
supervisor
[versions]
setuptools = 28.8.0
zc.buildout = 2.5.2+slapos005
zc.recipe.egg = 2.0.3+slapos002
software/slaprunner/httpd_conf.in
...
...
@@ -50,9 +50,6 @@ SSLHonorCipherOrder on
SSLEngine On
Include {{ parameters.httpd_cors_file }}
Header set Access-Control-Allow-Credentials "true"
Header set Access-Control-Allow-Methods "PROPFIND, PROPPATCH, COPY, MOVE, DELETE, MKCOL, LOCK, UNLOCK, PUT, GETLIB, VERSION-CONTROL, CHECKIN, CHECKOUT, UNCHECKOUT, REPORT, UPDATE, CANCELUPLOAD, HEAD, OPTIONS, GET, POST"
Header set Access-Control-Allow-Headers "Overwrite, Destination, Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Authorization"
DocumentRoot {{ parameters.runner_home }}/public
...
...
software/slaprunner/instance-runner-export.cfg.in
[buildout]
extends =
  template-runner.cfg
  ${pbsready-export:output}
extends =
  {{ template_runner_path }}
  {{ pbsready_export_template_path }}
parts +=
nginx_conf
...
...
@@ -41,51 +41,51 @@ parts +=
recipe = slapos.cookbook:free_port
minimum = 49980
maximum = 49989
ip = $${slap-network-information:local-ipv4}
ip = ${slap-network-information:local-ipv4}
[runner-free-port]
recipe = slapos.cookbook:free_port
minimum = 50005
maximum = 50014
ip = $${slap-network-information:global-ipv6}
ip = ${slap-network-information:global-ipv6}
[slaprunner]
proxy_port = $${proxy-free-port:port}
runner_port = $${runner-free-port:port}
proxy_port = ${proxy-free-port:port}
runner_port = ${runner-free-port:port}
[supervisord-free-port]
recipe = slapos.cookbook:free_port
minimum = 39986
maximum = 39995
ip = $${slaprunner:ipv4}
ip = ${slaprunner:ipv4}
[supervisord]
port = $${supervisord-free-port:port}
port = ${supervisord-free-port:port}
[exporter]
recipe = slapos.recipe.template:jinja2
template = ${template-runner-export-script:location}/${template-runner-export-script:filename}
rendered = $${directory:bin}/$${slap-parameter:namebase}-exporter
template = {{ exporter_script_path }}
rendered = ${directory:bin}/${slap-parameter:namebase}-exporter
# backward compatibility for resilient stack
wrapper = $${:rendered}
wrapper = ${:rendered}
mode = 700
context =
section directory directory
raw output_log_file $${directory:log}/resilient.log
raw shell_binary ${bash:location}/bin/bash
raw rsync_binary ${rsync:location}/bin/rsync
raw output_log_file ${directory:log}/resilient.log
raw shell_binary {{ bash_executable_location }}
raw rsync_binary {{ rsync_executable_location }}
[monitor-httpd-free-port]
recipe = slapos.cookbook:free_port
minimum = 8437
maximum = 8446
ip = $${slap-network-information:global-ipv6}
ip = ${slap-network-information:global-ipv6}
[monitor-instance-parameter]
monitor-httpd-port = $${monitor-httpd-free-port:port}
monitor-httpd-port = ${monitor-httpd-free-port:port}
# Pass some parameters to display in the monitoring interface
instance-configuration =
httpdcors cors-domain $${slaprunner-httpd-cors:location} $${httpd-graceful-wrapper:output}
httpdcors cors-domain ${slaprunner-httpd-cors:location} ${httpd-graceful-wrapper:output}
# Extends publish section with resilient parameters
[publish-connection-information]
...
...
@@ -93,11 +93,11 @@ instance-configuration =
[monitor-check-resilient-feed-file]
recipe = slapos.recipe.template:jinja2
template = ${template-monitor-check-resilient-feed:location}/${template-monitor-check-resilient-feed:filename}
rendered = $${monitor-directory:reports}/check-create-resilient-feed-files
template = {{ monitor_check_resilient_feed_template_path }}
rendered = ${monitor-directory:reports}/check-create-resilient-feed-files
mode = 700
context =
key input_feed_directory directory:notifier-feeds
key monitor_feed_directory monitor-directory:public
raw base_url http://[$${notifier:host}]:$${notifier:port}/get/
raw base_url http://[${notifier:host}]:${notifier:port}/get/
raw python_executable ${buildout:executable}
software/slaprunner/instance-runner-import.cfg.in
...
...
@@ -79,34 +79,43 @@ rendered = $${directory:bin}/$${slap-parameter:namebase}-importer
# backward compatibility for resilient stack
wrapper = $${:rendered}
mode = 700
restore-exit-code-file=$${directory:srv}/importer-exit-code-file
restore-exit-code-file = $${directory:srv}/$${:restore-exit-code-file-basename}
restore-exit-code-file-basename = importer-exit-code-file
restore-error-message-file = $${directory:srv}/$${:restore-error-message-file-basename}
restore-error-message-file-basename = importer-error-message-file
resilient-log-basename = resilient.log
context =
key backend_url slaprunner:access-url
key ipv4 slaprunner:ipv4
key ipv6 slaprunner:ipv6
key proxy_port slaprunner:proxy_port
key instance_folder slaprunner:instance_root
section directory directory
raw output_log_file $${directory:log}/resilient.log
section supervisord supervisord
raw output_log_file $${directory:log}/$${:resilient-log-basename}
raw shell_binary ${bash:location}/bin/bash
raw rsync_binary ${rsync:location}/bin/rsync
raw restore_exit_code_file $${:restore-exit-code-file}
raw restore_error_message_file $${:restore-error-message-file}
[importer-consistency-promise]
# Test that the importer script and "after-import" subscripts
# are not older than 1 day (24h), and have succeeded
# are not older than 2 days (1 day + some slack), and have succeeded
recipe = collective.recipe.template
input = inline: #!/bin/sh
EXIT_CODE_FILE=$(find "$${importer:restore-exit-code-file}")
RECENT_EXIT_CODE_FILE=$(find "$${importer:restore-exit-code-file}" -mtime -1)
if [ -z "$EXIT_CODE_FILE" ]; then
EXIT_CODE_FILE="$${importer:restore-exit-code-file}"
RECENT_EXIT_CODE_FILE=$(find $${directory:srv} -maxdepth 1 -name "$${importer:restore-exit-code-file-basename}" -mtime -2)
RESILIENT_LOG_URL=$${publish:monitor-base-url}/log/$${importer:resilient-log-basename}
if [ ! -f "$EXIT_CODE_FILE" ]; then
exit 0;
else
if [ -z "$RECENT_EXIT_CODE_FILE" ]; then
echo "Consistency check is too old.";
exit 1;
else
EXIT_CODE=$(cat $EXIT_CODE_FILE)
exit $EXIT_CODE
cat $${importer:restore-error-message-file}
echo "More information can be found here : $RESILIENT_LOG_URL";
exit $(cat $EXIT_CODE_FILE);
fi
fi
exit 1; # Something else went wrong
...
...
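The reworked consistency promise above boils down to a freshness-and-content check on the importer exit-code file; a rough Python equivalent for illustration (the paths below are hypothetical, the real promise stays in shell):

import os, sys, time

EXIT_CODE_FILE = '/srv/importer-exit-code-file'           # hypothetical path
ERROR_MESSAGE_FILE = '/srv/importer-error-message-file'   # hypothetical path
MAX_AGE = 2 * 24 * 3600

if not os.path.isfile(EXIT_CODE_FILE):
    sys.exit(0)                      # the importer has never run yet: fine
if time.time() - os.path.getmtime(EXIT_CODE_FILE) > MAX_AGE:
    print('Consistency check is too old.')
    sys.exit(1)
with open(EXIT_CODE_FILE) as f:
    code = int(f.read().strip() or 1)
if code and os.path.isfile(ERROR_MESSAGE_FILE):
    print(open(ERROR_MESSAGE_FILE).read())
sys.exit(code)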
@@ -135,6 +144,7 @@ mode = 755
recipe = slapos.recipe.template
url = ${template-resilient-software-release-information:destination}/${template-resilient-software-release-information:filename}
output = $${directory:cgi-bin}/resilient_software_release_information.py
resilient-log-url = $${publish:monitor-base-url}/log/$${importer:resilient-log-basename}
mode = 0600
[slap-parameter]
...
...
software/slaprunner/instance.cfg
...
...
@@ -5,6 +5,8 @@ parts =
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
extends = ${template-resilient-templates:output}
[switch_softwaretype]
recipe = slapos.cookbook:softwaretype
default = $${instance-base-runner:rendered}
...
...
@@ -12,7 +14,7 @@ resilient = $${instance-resilient:rendered}
test = $${instance-resilient-test:rendered}
runner = $${instance-base-runner:rendered}
runner-import = ${instance-runner-import:output}
runner-export = ${instance-runner-export:output}
runner-export = $${template-runner-export:rendered}
frozen = ${instance-frozen:output}
pull-backup = ${template-pull-backup:output}
...
...
@@ -58,10 +60,24 @@ context =
key slapparameter_dict slap-configuration:configuration
mode = 0644
[template-runner-export]
recipe = slapos.recipe.template:jinja2
template = ${instance-runner-export:target}
rendered = $${buildout:directory}/instance-runner-export.cfg
mode = 640
context =
key pbsready_export_template_path template-pbsready-export:rendered
key template_runner_path instance-base-runner:rendered
raw exporter_script_path ${template-runner-export-script:location}/${template-runner-export-script:filename}
raw monitor_check_resilient_feed_template_path ${template-monitor-check-resilient-feed:location}/${template-monitor-check-resilient-feed:filename}
raw buildout_executable_location ${buildout:executable}
raw bash_executable_location ${bash:location}/bin/bash
raw rsync_executable_location ${rsync:location}/bin/rsync
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration
computer = $${slap-connection:computer-id}
partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
cert = $${slap-connection:cert-file}
\ No newline at end of file
software/slaprunner/template/resilient_software_release_information.py.in
...
...
@@ -2,4 +2,6 @@
# takeover interface of the Resilient stack
def main():
return {}
\ No newline at end of file
return {
'Read the log from the importer': '<a href="${:resilient-log-url}">${:resilient-log-url}</a>',
}
\ No newline at end of file
software/slaprunner/template/runner-import.sh.jinja2
...
...
@@ -8,7 +8,23 @@ umask 077
exec > >(tee -ai {{ output_log_file }})
exec 2>&1
echo -e "\n\nrunner-import run at : $(date)"
RESTORE_EXIT_CODE_FILE="{{ restore_exit_code_file }}"
RESTORE_ERROR_MESSAGE_FILE="{{ restore_error_message_file }}"
ERROR_MESSAGE=""
fail_with_exit_code () {
echo 1 > $RESTORE_EXIT_CODE_FILE
echo -e "Failure during step : $ERROR_MESSAGE" > $RESTORE_ERROR_MESSAGE_FILE
}
trap fail_with_exit_code ERR
log_message () {
ERROR_MESSAGE=$1
echo -e $1
}
# Delete the error message file, to not keep it even after a successful build
rm $RESTORE_ERROR_MESSAGE_FILE || true
srv_directory={{ directory['srv'] }}
restore_element () {
...
...
@@ -27,7 +43,12 @@ restore_element () {
done
}
echo -e "\n\nrunner-import run at : $(date)"
log_message "Restoring WebRunner content..."
restore_element {{ directory['backup'] }}/runner/ $srv_directory/runner instance project proxy.db
log_message "Restoring WebRunner config (etc directory)..."
restore_element {{ directory['backup'] }}/etc/ {{ directory['etc'] }} config.json
cp -r {{ directory['backup'] }}/etc/.??* {{ directory['etc'] }};
...
...
@@ -40,10 +61,17 @@ if [ ! -e "$runner_import_restore" ]; then
touch $runner_import_restore
chmod +x $runner_import_restore
fi
echo "Running $runner_import_restore script
..."
log_message "Running $runner_import_restore
..."
$srv_directory/runner-import-restore || RESTORE_EXIT_CODE=$?
echo "Updating slapproxy database, software release and instances..."
# If neither "etc/.project" nor "srv/runner/proxy.db" exists, we can safely assume
# that there is no instance deployed on runner0
if [ ! -f "directory['etc']/.project" ] && [ ! -f "$srv_directory/runner/proxy.db" ]; then
echo 0 > $RESTORE_EXIT_CODE_FILE
exit 0
fi
log_message "Updating slapproxy database..."
HOME="{{ directory['home'] }}"
# XXX Hardcoded
export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
...
...
@@ -69,35 +97,40 @@ $SQLITE3 $DATABASE "update partition_network11 set address='$IPV6' where netmask
MASTERURL="http://{{ ipv4 }}:{{ proxy_port }}"
echo "Building newest software..."
$SLAPOS node software --cfg $HOME/etc/slapos.cfg --all --master-url=$MASTERURL --logfile $HOME/srv/runner/software.log --pidfile $HOME/var/run/slapos-node-software.pid >/dev/null 2>&1 ||
$SLAPOS node software --cfg $HOME/etc/slapos.cfg --all --master-url=$MASTERURL --logfile $HOME/srv/runner/software.log --pidfile $HOME/var/run/slapos-node-software.pid >/dev/null 2>&1 ||
$SLAPOS node software --cfg $HOME/etc/slapos.cfg --all --master-url=$MASTERURL --logfile $HOME/srv/runner/software.log --pidfile $HOME/var/run/slapos-node-software.pid >/dev/null 2>&1
log_message "Removing old supervisord service description files..."
# XXX: Path hardcoded in slapos.core
rm {{ instance_folder }}/etc/supervisord.conf.d/* || true
log_message "Building newest Software Release..."
$SLAPOS node software --cfg {{ supervisord['slapos-cfg'] }} --all --master-url=$MASTERURL --logfile {{ supervisord['slapgrid-sr-log'] }} --pidfile {{ supervisord['slapgrid-sr-pid'] }} >/dev/null 2>&1 ||
$SLAPOS node software --cfg {{ supervisord['slapos-cfg'] }} --all --master-url=$MASTERURL --logfile {{ supervisord['slapgrid-sr-log'] }} --pidfile {{ supervisord['slapgrid-sr-pid'] }} >/dev/null 2>&1 ||
$SLAPOS node software --cfg {{ supervisord['slapos-cfg'] }} --all --master-url=$MASTERURL --logfile {{ supervisord['slapgrid-sr-log'] }} --pidfile {{ supervisord['slapgrid-sr-pid'] }} >/dev/null 2>&1 ||
(tail -n 200 {{ supervisord['slapgrid-sr-log'] }} && false)
# Remove defined scripts to force buildout to recreate them to have updated paths
rm $srv_directory/runner/instance/slappart*/srv/runner-import-restore || true
echo "Running slapos node instance
..."
log_message "Fixing Instances as needed after import
..."
# XXX hardcoded
$SLAPOS node instance --cfg $HOME/etc/slapos.cfg --master-url=$MASTERURL --logfile $HOME/srv/runner/instance.log --pidfile $HOME/var/run/slapos-node-instance.pid >/dev/null 2>&1 ||
$SLAPOS node instance --cfg $HOME/etc/slapos.cfg --master-url=$MASTERURL --logfile $HOME/srv/runner/instance.log --pidfile $HOME/var/run/slapos-node-instance.pid >/dev/null 2>&1 ||
$SLAPOS node instance --cfg $HOME/etc/slapos.cfg --master-url=$MASTERURL --logfile $HOME/srv/runner/instance.log --pidfile $HOME/var/run/slapos-node-instance.pid >/dev/null 2>&1
$SLAPOS node instance --cfg {{ supervisord['slapos-cfg'] }} --master-url=$MASTERURL --logfile {{ supervisord['slapgrid-cp-log'] }} --pidfile {{ supervisord['slapgrid-cp-pid'] }} >/dev/null 2>&1 ||
$SLAPOS node instance --cfg {{ supervisord['slapos-cfg'] }} --master-url=$MASTERURL --logfile {{ supervisord['slapgrid-cp-log'] }} --pidfile {{ supervisord['slapgrid-cp-pid'] }} >/dev/null 2>&1 ||
$SLAPOS node instance --cfg {{ supervisord['slapos-cfg'] }} --master-url=$MASTERURL --logfile {{ supervisord['slapgrid-cp-log'] }} --pidfile {{ supervisord['slapgrid-cp-pid'] }} >/dev/null 2>&1 ||
(tail -n 200 {{ supervisord['slapgrid-cp-log'] }} && false)
# Invoke defined scripts for each partition inside of slaprunner
echo "Invoke custom import scripts defined by each instances..."
log_message "Invoke custom import scripts defined by each instances..."
for partition in $srv_directory/runner/instance/slappart*/
do
script=$partition/srv/runner-import-restore
if [ -e "$script" ]; then
echo "Running $script
script..."
log_message "Running $
script..."
$script || RESTORE_EXIT_CODE=$?
fi
done
# Change back slapproxy database to have all instances started
echo "Start instances
..."
log_message "Set instances as to start after takeover
..."
$SQLITE3 $DATABASE "update partition11 set requested_state='started';"
# Write exit code to an arbitrary file that will be checked by promise/monitor
echo "Write status file... End"
RESTORE_EXIT_CODE_FILE="{{ restore_exit_code_file }}"
log_message "Writing status file... End"
echo $RESTORE_EXIT_CODE > $RESTORE_EXIT_CODE_FILE
exit $RESTORE_EXIT_CODE
software/test-slave-instance-deployment/software.cfg
...
...
@@ -58,7 +58,7 @@ eggs = collective.recipe.template
[versions]
collective.recipe.template = 1.11
plone.recipe.command = 1.1
slapos.recipe.build = 0.27
slapos.recipe.build = 0.28
slapos.recipe.template = 2.7
# Replicate slapos stack, but without shacache to not have to compile the entire world for a simple test.
...
...
stack/erp5/buildout.cfg
...
...
@@ -665,13 +665,14 @@ zope.app.publication = 3.14.0
zope.app.testing = 3.8.1
# Pinned versions
Pillow = 3.3.1
Pillow = 3.4.2
Products.CMFActionIcons = 2.1.3
Products.DCWorkflowGraph = 0.4.1
# Products.ExternalEditor 2.0.0's dtml is not based on Zope2 OFS's one.
Products.ExternalEditor = 1.1.1
Products.GenericSetup = 1.8.4
Products.GenericSetup = 1.8.5
Products.LongRequestLogger = 2.0.0
# Products.MimetypesRegistry 2.1 requires AccessControl>=3.0.0Acquisition.
Products.MimetypesRegistry = 2.0.10
Products.PloneHotfix20160830 = 1.3
Products.PluginRegistry = 1.4
...
...
@@ -706,13 +707,13 @@ logilab-common = 1.2.2
matplotlib = 1.5.3
mistune = 0.7.3
notebook = 4.2.3
numpy = 1.11.1
numpy = 1.11.2
objgraph = 3.0.1
pandas = 0.18.1
pandas = 0.19.1
ply = 3.9
polib = 1.0.7
pprofile = 1.9.2
prompt-toolkit = 1.0.7
pprofile = 1.10.0
prompt-toolkit = 1.0.8
ptyprocess = 0.5.1
pycountry = 1.20
pyflakes = 1.3.0
...
...
@@ -720,7 +721,7 @@ pyflakes = 1.3.0
pylint = 1.4.4
python-memcached = 1.58
pytracemalloc = 1.2
pyzmq = 15.4.0
pyzmq = 16.0.0
qrcode = 5.3
restkit = 4.2.2
rtjp-eventlet = 0.3.2
...
...
@@ -728,16 +729,16 @@ scikit-learn = 0.18
scipy = 0.18.1
simplegeneric = 0.8.1
socketpool = 0.5.3
spyne = 2.12.13
spyne = 2.12.14
suds = 0.4
terminado = 0.6
threadframe = 0.2
timerserver = 2.0.2
tornado = 4.4.1
tornado = 4.4.2
traitlets = 4.3.1
urlnorm = 1.1.4
uuid = 1.30
validictory = 1.0.2
validictory = 1.1.0
widgetsnbextension = 1.2.6
xfw = 0.10
xupdate-processor = 0.4
...
...
@@ -751,11 +752,11 @@ Products.ZSQLMethods = 2.13.4
backports.shutil-get-terminal-size = 1.0.0
# Required by:
# tornado==4.4.1
# tornado==4.4.2
backports.ssl-match-hostname = 3.5.0.1
# Required by:
# tornado==4.4.1
# tornado==4.4.2
certifi = 2016.9.26
# Required by:
...
...
@@ -801,15 +802,15 @@ pickleshare = 0.7.4
# Required by:
# matplotlib==1.5.3
# pandas==0.18.1
# pandas==0.19.1
python-dateutil = 2.5.3
# Required by:
# tornado==4.4.1
# tornado==4.4.2
singledispatch = 3.4.0.3
# Required by:
# prompt-toolkit==1.0.3
# prompt-toolkit==1.0.8
wcwidth = 0.1.7
# Required by:
...
...
stack/monitor/buildout.cfg
...
...
@@ -35,7 +35,6 @@ url = ${:_profile_base_location_}/scripts/${:filename}
destination = ${buildout:parts-directory}/monitor-scripts
on-update = true
[monitor-eggs]
recipe = zc.recipe.egg
eggs =
...
...
@@ -47,10 +46,14 @@ eggs =
cns.recipe.symlink
slapos.toolbox
# Do not generate any scripts here as all of them are generated by extra-eggs
scripts =
[extra-eggs]
<= monitor-eggs
recipe = zc.recipe.egg
interpreter = pythonwitheggs
eggs =
${monitor-eggs:eggs}
psutil
PyRSS2Gen
Jinja2
...
...
@@ -59,7 +62,7 @@ eggs =
# Monitor templates files
[monitor-httpd-conf]
<= monitor-template-base
md5sum = 40dc51fc156f1ad7eb94be7f3cbf08b4
md5sum = f2d6951670733de3b37c0ebe9eee343b
filename = monitor-httpd.conf.in
[monitor-template-wrapper]
...
...
@@ -80,7 +83,7 @@ md5sum = 1bdb4e05c6be04f4e5766c64467fbcec
[monitor-httpd-cors]
<= monitor-template-base
filename = httpd-cors.cfg.in
md5sum = 5afad2bb6e088e080e907f1d837effbb
md5sum = 683ea85fc054094248baf5752dd089bf
# End templates files
# XXX keep compatibility (with software/ipython_notebook/software.cfg )
...
...
stack/monitor/templates/httpd-cors.cfg.in
...
...
@@ -2,4 +2,7 @@
{% set allow_domain = '|'.join(domain.replace('.', '\.').split()) -%}
SetEnvIf Origin "^http(s)?://(.+\.)?({{ allow_domain }})$" ORIGIN_DOMAIN=$0
Header always set Access-Control-Allow-Origin "%{ORIGIN_DOMAIN}e" env=ORIGIN_DOMAIN
{% endif -%}
\ No newline at end of file
Header always set Access-Control-Allow-Credentials "true" env=ORIGIN_DOMAIN
Header always set Access-Control-Allow-Methods "PROPFIND, PROPPATCH, COPY, MOVE, DELETE, MKCOL, LOCK, UNLOCK, PUT, GETLIB, VERSION-CONTROL, CHECKIN, CHECKOUT, UNCHECKOUT, REPORT, UPDATE, CANCELUPLOAD, HEAD, OPTIONS, GET, POST" env=ORIGIN_DOMAIN
Header always set Access-Control-Allow-Headers "Overwrite, Destination, Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Authorization" env=ORIGIN_DOMAIN
{% endif -%}
stack/monitor/templates/monitor-httpd.conf.in
...
...
@@ -56,9 +56,6 @@ Include {{ parameter_dict.get('httpd-cors-config-file') }}
Header set Vary Origin
Header set Cache-Control "private, max-age=40"
Header set Access-Control-Max-Age "40"
Header set Access-Control-Allow-Credentials "true"
Header set Access-Control-Allow-Methods "PROPFIND, PROPPATCH, COPY, MOVE, DELETE, MKCOL, LOCK, UNLOCK, PUT, GETLIB, VERSION-CONTROL, CHECKIN, CHECKOUT, UNCHECKOUT, REPORT, UPDATE, CANCELUPLOAD, HEAD, OPTIONS, GET, POST"
Header set Access-Control-Allow-Headers "Overwrite, Destination, Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Authorization"
DavLockDB {{ directory.get('monitor-var') }}/DavLock
Alias /share {{ directory.get('webdav') }}
...
...
stack/resilient/buildout.cfg
...
...
@@ -14,6 +14,7 @@ parts =
pbsready
pbsready-import
pbsready-export
notifier-feed-promise-template
template-replicated
template-parts
instance-frozen
...
...
@@ -41,7 +42,7 @@ eggs =
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/pbsready.cfg.in
output = ${buildout:directory}/pbsready.cfg
md5sum = 3dddf84daf5db8ff4ffc3878e206b467
md5sum = 0df8fe9b69f7943c3d5a2d30d4640557
mode = 0644
[pbsready-import]
...
...
@@ -50,23 +51,23 @@ mode = 0644
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/pbsready-import.cfg.in
output = ${buildout:directory}/pbsready-import.cfg
md5sum = 10264fe1cfb7ebe567d50ebabbd93a43
md5sum = 5d5e4ad35c1a97ea5f7a15a4f5f766a8
mode = 0644
[pbsready-export]
# An export instance has an exporter script, and communicates
# to parent PBS instances to deliver the exported dump.
recipe = slapos.recipe.template
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/pbsready-export.cfg.in
output = ${buildout:directory}/pbsready-export.cfg
md5sum = 793f1843a643b3c91b658eca2bad5abc
filename = pbsready-export.cfg.in
md5sum = 1b38292c42702f91f620cb99d1a88952
mode = 0644
[template-pull-backup]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-pull-backup.cfg.in
output = ${buildout:directory}/instance-pull-backup.cfg
md5sum = 3ef8f98ff013f06fcd81bba18872e561
md5sum = 7b4f8ac1a62680d624ac632f9601dab5
mode = 0644
[template-replicated]
...
...
@@ -83,6 +84,14 @@ md5sum = 071b1034ee8f5cc14f79b16fdeba2813
mode = 0644
destination = ${buildout:directory}/template-parts.cfg.in
[template-resilient-templates]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}.in
output = ${buildout:directory}/${:filename}
md5sum = be2ebf91faa6b5b131995a05a907707f
mode = 0644
filename = template-resilient-templates.cfg
[instance-frozen]
# When an instance is detected as broken, its software type is changed to "frozen".
# On the next run of slapgrid-cp, the buildout profile is replaced by instance-frozen.cfg,
...
...
@@ -95,7 +104,7 @@ output = ${buildout:directory}/instance-frozen.cfg
[resilient-web-takeover-cgi-script-download]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/resilient-web-takeover-cgi-script.py.in
md5sum = c46c8e3e4ce4376c98ad2fc0e2ff0fe4
md5sum = 9d258d41eeef66f44f361adaa15cbd71
mode = 0644
destination = ${buildout:directory}/resilient-web-takeover-cgi-script.py.in
...
...
@@ -107,6 +116,12 @@ output = ${buildout:directory}/template-wrapper.cfg
mode = 0644
md5sum = 8cde04bfd0c0e9bd56744b988275cfd8
[notifier-feed-promise-template]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/notifier-feed-promise.py.in
md5sum = d75346911dbc4cfcdb39a21e56cd5016
mode = 0644
##################
# Monitor element
#
...
...
stack/resilient/instance-pull-backup.cfg.in
...
...
@@ -4,6 +4,7 @@ parts =
publish-connection-information
pbs
logrotate
logrotate-entry-notifier
cron
cron-entry-logrotate
pbs-sshkeys-authority
...
...
@@ -14,6 +15,7 @@ parts =
backup-signature-link
cron-pbs-status-feed
pull-push-stalled-promise
notifier-feed-status-promise
## Monitor for pbs
monitor-base
...
...
@@ -128,6 +130,7 @@ directory = $${directory:pbs-backup}
cron-entries = $${cron:cron-entries}
wrappers-directory = $${directory:pbs-wrappers}
run-directory = $${basedirectory:run}
pull-push-maximum-run = 5
# XXX: this should be named "notifier-host"
notifier-url = http://[$${notifier:host}]:$${notifier:port}
slave-instance-list = $${slap-parameter:slave_instance_list}
...
...
@@ -147,7 +150,7 @@ wrapper-path = $${rootdirectory:bin}/resilient-genstatusrss.py
<= cron
recipe = slapos.cookbook:cron.d
name = resilient-pbs-status-feed
frequency = 5 * * * *
frequency = */5 * * * *
command = $${pbs-resilient-status-feed:wrapper-path}
#----------------
...
...
@@ -201,6 +204,18 @@ log = $${cron-simplelogger:log}
frequency = daily
rotate-num = 30
[logrotate-entry-notifier]
recipe = collective.recipe.template
mode = 600
input = inline:
$${directory:notifier-feeds}/* {
rotate 5
weekly
nocompress
missingok
olddir $${directory:logrotate-backup}
}
output = $${logrotate:logrotate-entries}/notifier
#----------------
#--
...
...
@@ -303,10 +318,20 @@ symlink = $${directory:pbs-backup}/proof.signature = $${directory:monitor-resili
[pull-push-stalled-promise]
recipe = slapos.cookbook:wrapper
# time-buffer is 18h : cron for backup is run once a day - 6h of random sleep
command-line = ${buildout:bin-directory}/check-feed-as-promise --feed-path $${pbs-resilient-status-feed:feed-path} --title --ok-pattern 'OK' --time-buffer 64800
# time-buffer is 24h (+1h of latitude)
command-line = ${buildout:bin-directory}/check-feed-as-promise --feed-path $${pbs-resilient-status-feed:feed-path} --title --ok-pattern 'OK' --time-buffer 90000
wrapper-path = $${basedirectory:promises}/stalled-pull-push
[notifier-feed-status-promise]
recipe = slapos.recipe.template:jinja2
template = ${notifier-feed-promise-template:target}
rendered = $${basedirectory:promises}/notifier-feed-check-malformed-or-failure.py
mode = 700
context =
key notifier_feed_directory directory:notifier-feeds
raw base_url http://[$${notifier:host}]:$${notifier:port}/get/
raw python_executable ${buildout:executable}
#----------------
#--
#-- Publish instance parameters.
...
...
stack/resilient/pbsready-export.cfg.in
[buildout]
extends =
  ${pbsready:output}
extends =
  {{ pbsready_template_path }}
# Explicitely define extended parts from pbsready
# then add local parts
...
...
@@ -8,8 +8,12 @@ parts =
logrotate
logrotate-entry-cron
logrotate-entry-equeue
logrotate-entry-resilient
cron
cron-entry-logrotate
cron-entry-notifier-status-feed
notifier-feed-status-promise
notifier-stalled-promise
resilient-sshkeys-authority
sshd-raw-server
sshd-graceful
...
...
@@ -24,9 +28,9 @@ parts =
[resilient-directory]
recipe = slapos.cookbook:mkdirectory
home = $${buildout:directory}
var = $${:home}/var
pid = $${:var}/pid
home = ${buildout:directory}
var = ${:home}/var
pid = ${:var}/pid
# Define port of ssh server. It has to be different from import so that it
# supports export/import using same IP (slaprunner, slapos-in-partition,
...
...
@@ -35,16 +39,17 @@ pid = $${:var}/pid
recipe = slapos.cookbook:free_port
minimum = 22200
maximum = 22209
ip = $${slap-network-information:global-ipv6}
ip = ${slap-network-information:global-ipv6}
[notifier-port]
recipe = slapos.cookbook:free_port
minimum = 65526
maximum = 65535
ip = $${notifier:host}
ip = ${notifier:host}
[resilient-publish-connection-parameter]
notification-id = http://[$${notifier:host}]:$${notifier:port}/get/$${notifier-exporter:name}
notification-id = http://[${notifier:host}]:${notifier:port}/get/${notifier-exporter:name}
-extends = publish-early
[notifier-exporter]
# notifier.notify launches an (exporter) executable, and when finished,
...
...
@@ -52,24 +57,27 @@ notification-id = http://[$${notifier:host}]:$${notifier:port}/get/$${notifier-e
<= notifier
recipe = slapos.cookbook:notifier.notify
name = exporter
title = Dumping $${slap-parameter:namebase}
executable = $${exporter:wrapper}
wrapper = $${rootdirectory:bin}/exporter
notify = $${slap-parameter:notify}
pidfile = $${resilient-directory:pid}/$${:name}.pid
title = Dumping ${slap-parameter:namebase}
executable = ${exporter:wrapper}
wrapper = ${rootdirectory:bin}/exporter
notify = ${slap-parameter:notify}
pidfile = ${resilient-directory:pid}/${:name}.pid
max-run = 3
[logrotate-entry-notifier]
output = ${rootdirectory:etc}/logrotate_notifier.conf
[notifier-exporter-promise]
recipe = slapos.recipe.template:jinja2
mode = 700
template = inline:
#!${bash:location}/bin/bash
EXPORTER_FEED="$${notifier-exporter:log-file}"
#!{{ bash_executable_location }}
EXPORTER_FEED="${notifier-exporter:log-file}"
FAILURE_PATTERN="FAILURE"
if [ -s "$EXPORTER_FEED" ]; then
tail -n 1 $EXPORTER_FEED | grep -vq FAILURE_PATTERN
fi
rendered = $${basedirectory:promises}/exporter-status
rendered = ${basedirectory:promises}/exporter-status
[cron-entry-backup]
# Schedule the periodic database dump.
...
...
@@ -77,9 +85,21 @@ rendered = $${basedirectory:promises}/exporter-status
<= cron
recipe = slapos.cookbook:cron.d
name = backup
frequency = $${slap-parameter:resiliency-backup-periodicity}
once-a-day = true
command = $${notifier-exporter:wrapper} --transaction-id `date +%s`
{% set resiliency_backup_periodicity = slapparameter_dict.get('resiliency-backup-periodicity') %}
{% if resiliency_backup_periodicity %}
frequency = {{ resiliency_backup_periodicity }}
{% else %}
time = ${publish-early:resiliency-backup-periodicity}
{% endif %}
command = {{ logrotate_executable_location }} -s ${basedirectory:run}/logrotate.status ${logrotate-entry-notifier:output}; ${notifier-exporter:wrapper} --transaction-id `date +%s`
[gen-resiliency-backup-periodicity]
recipe = slapos.cookbook:random.time
[publish-early]
recipe = slapos.cookbook:publish-early
-init =
resiliency-backup-periodicity gen-resiliency-backup-periodicity:time
[slap-parameter]
# In cron.d format (i.e things like */15 * * * * are accepted).
...
...
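The new conditional around frequency/time in [cron-entry-backup] can be checked in isolation by rendering a simplified excerpt of the section with Jinja2 (the excerpt and the parameter values below are illustrative only):

import jinja2

excerpt = '''
{% set resiliency_backup_periodicity = slapparameter_dict.get('resiliency-backup-periodicity') %}
{% if resiliency_backup_periodicity %}
frequency = {{ resiliency_backup_periodicity }}
{% else %}
time = ${publish-early:resiliency-backup-periodicity}
{% endif %}
'''

template = jinja2.Template(excerpt)
# explicit cron expression supplied by the requester
print(template.render(slapparameter_dict={'resiliency-backup-periodicity': '0 3 * * *'}))
# no parameter: fall back to the randomly generated, published backup time
print(template.render(slapparameter_dict={}))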
stack/resilient/pbsready-import.cfg.in
...
...
@@ -8,8 +8,13 @@ parts =
logrotate
logrotate-entry-cron
logrotate-entry-equeue
logrotate-entry-notifier
logrotate-entry-resilient
cron
cron-entry-logrotate
cron-entry-notifier-status-feed
notifier-feed-status-promise
notifier-stalled-promise
resilient-sshkeys-authority
sshd-raw-server
sshd-graceful
...
...
@@ -179,4 +184,4 @@ curl_path = ${curl:location}/bin/curl
###########
[backup-signature-link]
recipe = cns.recipe.symlink
symlink = $${post-notification-run:proof-signature-file} = $${directory:monitor-resilient}/backup.signature
\ No newline at end of file
symlink = $${post-notification-run:proof-signature-file} = $${directory:monitor-resilient}/backup.signature
stack/resilient/pbsready.cfg.in
...
...
@@ -4,8 +4,13 @@ parts =
logrotate
logrotate-entry-cron
logrotate-entry-equeue
logrotate-entry-notifier
logrotate-entry-resilient
cron
cron-entry-logrotate
cron-entry-notifier-status-feed
notifier-feed-status-promise
notifier-stalled-promise
resilient-sshkeys-authority
sshd-graceful
sshkeys-sshd
...
...
@@ -45,6 +50,7 @@ ssh = $${rootdirectory:etc}/ssh/
sshkeys = $${rootdirectory:srv}/sshkeys
notifier-feeds = $${basedirectory:notifier}/feeds
notifier-callbacks = $${basedirectory:notifier}/callbacks
notifier-status-items = $${basedirectory:notifier}/status-items
cron-entries = $${rootdirectory:etc}/cron.d
crontabs = $${rootdirectory:etc}/crontabs
cronstamps = $${rootdirectory:etc}/cronstamps
...
...
@@ -126,6 +132,26 @@ log = $${equeue:log} $${sshd-server:log}
frequency = daily
rotate-num = 30
[logrotate-entry-notifier]
recipe = collective.recipe.template
mode = 600
input = inline:
$${notifier:feeds}/* {
rotate 5
weekly
nocompress
missingok
olddir $${directory:logrotate-backup}
}
output = $${logrotate:logrotate-entries}/notifier
[logrotate-entry-resilient]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = resilient_log
log = $${basedirectory:log}/resilient.log
frequency = weekly
rotate-num = 7
#----------------
#--
...
...
@@ -166,9 +192,30 @@ command = ${buildout:bin-directory}/pubsubserver --callbacks $${directory:notifi
notifier-binary = ${buildout:bin-directory}/pubsubnotifier
host = $${slap-network-information:global-ipv6}
port = $${notifier-port:port}
instance-root-name = $${instance-info-parameters:root-name}
log-url = $${publish:monitor-base-url}/resilient/notifier-status-rss
status-item-directory = $${directory:notifier-status-items}
context =
key content notifier:command
[notifier-resilient-status-feed]
recipe = slapos.cookbook:wrapper
command-line = ${buildout:directory}/bin/generatefeed --output $${:feed-path} --status-item-path $${notifier:status-item-directory} --title "Status feed for $${notifier:instance-root-name}" --link $${notifier:log-url}
feed-path = $${directory:monitor-resilient}/notifier-status-rss
wrapper-path = $${rootdirectory:bin}/resilient-genstatusrss.py
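generatefeed (from slapos.toolbox) turns the individual files dropped into status-item-directory into one RSS document served as the resilient status feed. A hand-rolled sketch of that aggregation with a deliberately simplified item format, one plain-text file per item, only to show the data flow; the real tool defines its own item format and options:

# Minimal sketch of aggregating status items into an RSS feed; the actual
# generatefeed script in slapos.toolbox is the reference implementation.
import os
from xml.sax.saxutils import escape

def generate_feed(status_item_directory, title, link):
  items = []
  for name in sorted(os.listdir(status_item_directory)):
    with open(os.path.join(status_item_directory, name)) as f:
      body = f.read()
    items.append('<item><title>%s</title><description>%s</description></item>'
                 % (escape(name), escape(body)))
  return ('<?xml version="1.0"?><rss version="2.0"><channel>'
          '<title>%s</title><link>%s</link>%s</channel></rss>'
          % (escape(title), escape(link), ''.join(items)))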
[cron-entry-notifier-status-feed]
<= cron
recipe = slapos.cookbook:cron.d
name = resilient-notifier-status-feed
frequency = */5 * * * *
command = $${notifier-resilient-status-feed:wrapper-path}
[notifier-stalled-promise]
recipe = slapos.cookbook:wrapper
# time-buffer is 24h (+1h of latitude)
command-line = ${buildout:bin-directory}/check-feed-as-promise --feed-path $${notifier-resilient-status-feed:feed-path} --title --ok-pattern 'OK' --time-buffer 90000
wrapper-path = $${basedirectory:promises}/stalled-notifier-callbacks
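check-feed-as-promise acts as a staleness watchdog here: the promise should fail when the status feed no longer contains a recent entry matching the OK pattern within the 90000-second buffer (24h of expected activity plus 1h of latitude). A rough sketch of that kind of check over an RSS feed, assuming RFC 822 pubDate fields; the real tool's parsing and command-line flags are its own:

# Sketch of a feed-staleness promise, not the check-feed-as-promise implementation.
import time
import xml.etree.ElementTree as ET
from email.utils import mktime_tz, parsedate_tz

def feed_is_healthy(feed_path, ok_pattern='OK', time_buffer=90000):
  tree = ET.parse(feed_path)
  newest = None
  for item in tree.iter('item'):
    title = item.findtext('title') or ''
    pubdate = parsedate_tz(item.findtext('pubDate') or '')
    if pubdate is None or ok_pattern not in title:
      continue
    stamp = mktime_tz(pubdate)
    newest = stamp if newest is None else max(newest, stamp)
  # Healthy only if an OK entry exists and is younger than the buffer.
  return newest is not None and (time.time() - newest) <= time_buffer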
#----------------
#--
...
...
@@ -260,6 +307,20 @@ input = inline:#!${bash:location}/bin/bash
output = $${basedirectory:promises}/public-key-existence
mode = 700
#----------------
#--
#-- Promises
[notifier-feed-status-promise]
recipe = slapos.recipe.template:jinja2
template = ${notifier-feed-promise-template:target}
rendered = $${basedirectory:promises}/notifier-feed-check-malformed-or-failure.py
mode = 700
context =
key notifier_feed_directory directory:notifier-feeds
raw base_url http://[$${notifier:host}]:$${notifier:port}/get/
raw python_executable ${buildout:executable}
#----------------
#--
#-- Connection informations to re-use.
...
...
stack/resilient/resilient-web-takeover-cgi-script.py.in
View file @ a4cd4d27
...
...
@@ -4,6 +4,7 @@ equeue_database = '${equeue:database}'
equeue_lockfile = '${equeue:lockfile}'
takeover_script = '${resiliency-takeover-script:wrapper-takeover}'
import atexit
import cgi
import cgitb
import datetime
...
...
@@ -21,12 +22,17 @@ else:
cgitb.enable()
def deleteTemporaryDirectory(path):
  if os.path.exists(path):
    shutil.rmtree(path)

def getLatestBackupDate():
  """
  Get the date of the latest successful backup.
  """
  # Create a copy of the db (locked by equeue process)
  temporary_directory = tempfile.mkdtemp()
  atexit.register(deleteTemporaryDirectory, temporary_directory)
  equeue_database_copy = os.path.join(temporary_directory, 'equeue.db')
  shutil.copyfile(equeue_database, equeue_database_copy)
  db = gdbm.open(equeue_database_copy)
...
...
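The CGI script cannot read the live equeue database directly because the equeue process keeps it locked, hence the copy-into-tempdir-then-open steps above. The same pattern in isolation (Python 2, like the script; the key/value schema is equeue's own and is only printed here, not interpreted):

# Sketch of reading a gdbm database that another process keeps locked:
# copy it first, open the copy, clean up on exit.
import atexit
import gdbm
import os
import shutil
import tempfile

def open_database_copy(database_path):
  temporary_directory = tempfile.mkdtemp()
  atexit.register(shutil.rmtree, temporary_directory, True)  # ignore_errors=True
  copy_path = os.path.join(temporary_directory, os.path.basename(database_path))
  shutil.copyfile(database_path, copy_path)
  return gdbm.open(copy_path)

if __name__ == '__main__':
  db = open_database_copy('equeue.db')  # path is illustrative
  key = db.firstkey()
  while key is not None:
    print '%s -> %s' % (key, db[key])
    key = db.nextkey(key)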
stack/resilient/template-resilient-templates.cfg.in
0 → 100644
View file @ a4cd4d27
[template-pbsready-export]
recipe = slapos.recipe.template:jinja2
template = ${pbsready-export:target}
rendered = $${buildout:directory}/pbsready-exporter.cfg
mode = 640
context =
key slapparameter_dict slap-configuration:configuration
raw pbsready_template_path ${pbsready:output}
raw bash_executable_location ${bash:location}/bin/bash
raw logrotate_executable_location ${logrotate:location}/sbin/logrotate
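The key and raw context lines map buildout values into jinja2 variables before the template is rendered to pbsready-exporter.cfg. Outside buildout, the equivalent rendering step looks roughly like this; paths and context values are placeholders, not the recipe's actual API:

# Rough equivalent of what slapos.recipe.template:jinja2 does with its context:
# build a variable mapping, render the template, write the result.
from jinja2 import Template

def render(template_path, rendered_path, context):
  with open(template_path) as f:
    template = Template(f.read())
  with open(rendered_path, 'w') as f:
    f.write(template.render(**context))

render('pbsready-export.cfg.in', 'pbsready-exporter.cfg', {
  'slapparameter_dict': {'namebase': 'example'},            # key slapparameter_dict ...
  'pbsready_template_path': '/opt/slapgrid/.../pbsready.cfg',  # raw ... (placeholder)
  'bash_executable_location': '/bin/bash',
  'logrotate_executable_location': '/usr/sbin/logrotate',
})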
stack/resilient/templates/notifier-feed-promise.py.in
0 → 100644
View file @ a4cd4d27
#!{{ python_executable }}
import csv
import os
import sys
import urllib2

csv.field_size_limit(sys.maxsize)

notifier_feed_directory = '{{ notifier_feed_directory }}'
base_url = "{{ base_url }}"

feed_file_list = os.listdir(notifier_feed_directory)

for feed_file_name in feed_file_list:
  url = base_url + feed_file_name
  # Try feed consistency
  try:
    feed = urllib2.urlopen(url)
    body = feed.read()
  except urllib2.HTTPError as e:
    sys.exit("%s is unavailable: %s" % (feed_file_name, e))
  with open(os.path.join(notifier_feed_directory, feed_file_name)) as feed_file:
    reader = csv.reader(feed_file)
    # Get last row because we only care about last run
    for row in reader:
      pass
  try:
    timestamp, title, content, guid = row
    if content.startswith('OK'):
      continue
    else:
      sys.exit("Last run of %s failed" % feed_file_name)
  except ValueError:
    sys.exit("Notifier feed %s is malformed" % feed_file_name)
  except NameError:
    # row can be not defined if feed is empty
    pass
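For the unpacking above to succeed, each feed file must be a CSV whose rows carry (timestamp, title, content, guid), with the content of a successful run starting with 'OK'. A tiny sketch that writes one such row, only to illustrate the shape the promise expects; the real rows are produced by the notifier tooling:

# Writes one illustrative feed row in the shape the promise above unpacks:
# timestamp, title, content, guid -- not the notifier's actual writer.
import csv
import time
import uuid

with open('exporter', 'ab') as feed_file:  # file name is illustrative
  writer = csv.writer(feed_file)
  writer.writerow([int(time.time()), 'Dumping example',
                   'OK: backup finished', str(uuid.uuid4())])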
stack/slapos.cfg
View file @ a4cd4d27
...
...
@@ -112,7 +112,7 @@ buildout-versions = 1.7
cffi = 1.8.3
click = 6.6
cliff = 2.2.0
cmd2 = 0.6.8
cmd2 = 0.6.9
collective.recipe.template = 1.13
cryptography = 1.5.2
decorator = 4.0.10
...
...
@@ -125,11 +125,11 @@ netaddr = 0.7.18
pbr = 1.10.0
plone.recipe.command = 1.1
prettytable = 0.7.2
psutil = 4.3.1
pyOpenSSL = 16.1.0
psutil = 4.4.2
pyOpenSSL = 16.2.0
pyasn1 = 0.1.9
pyparsing = 2.1.9
pytz = 2016.6.1
pyparsing = 2.1.10
pytz = 2016.7
requests = 2.11.1
setuptools = 19.6.2
six = 1.10.0
...
...
@@ -137,9 +137,9 @@ slapos.cookbook = 1.0.41
slapos.core = 1.3.18
slapos.extension.strip = 0.1
slapos.libnetworkcache = 0.14.5
slapos.recipe.build = 0.27
slapos.recipe.build = 0.28
slapos.recipe.cmmi = 0.2
stevedore = 1.17.1
stevedore = 1.18.0
unicodecsv = 0.14.1
xml-marshaller = 0.9.7
...
...
@@ -178,7 +178,7 @@ netifaces = 0.10.4
# Required by:
# cffi==1.8.3
pycparser = 2.14
pycparser = 2.17
# Required by:
# slapos.core==1.3.18
...
...
@@ -190,7 +190,7 @@ uritemplate = 3.0.0
# Required by:
# slapos.core==1.3.18
zope.interface = 4.2.0
zope.interface = 4.3.2
[networkcache]
download-cache-url = http://www.shacache.org/shacache
...
...