Commit 5d65d899 authored by Alain Takoudjou

merge master into grid-computing-merge

parents 2fc0ece1 e76f9575
Changes
=======
0.72.0 (2013-02-11)
-------------------
* librecipe: correctly handle newline and missing file in addLineToFile(). [Marco Mariani]
* LAMP: Copy php application even if directory exists but is empty. This handles the new resilient LAMP stack. [Cedric de Saint Martin]
* LAMP: Don't even try to restart/reload/graceful Apache. This fixes the "Apache hangs" problem. [Cedric de Saint Martin]
0.71.4 (2013-02-01)
-------------------
* Enable IPv6 support in KumoFS. [Vincent Pelletier]
* Use a new connection and get the result when trying to create a new erp5 site. [Rafael Monnerat]
* Set up timezone database in mariadb's mysql table so that we can use timezone conversion functions. [Kazuhiko Shiozaki]
* Make erp5_bootstrap wait for the manage_addERP5Site response. [Rafael Monnerat]
0.71.3 (2013-01-31)
-------------------
* Add mysql_ip and mysql_port parameters in apachephp recipe [Cedric de Saint
Martin]
* Random password for postgres in standalone SR and lapp stack; accept
connections from the world. [Marco Mariani]
0.71.2 (2013-01-29)
-------------------
* revised postgres/lapp recipe. [Marco Mariani]
0.71.1 (2013-01-04)
-------------------
* Frontend: Sort instances by reference to avoid attacks. [Cedric de Saint
Martin]
* Frontend: Add public_ipv4 parameter support to ease deployment of slave
frontend. [Cedric de Saint Martin]
* Frontend: Move apache_frontend wrappers to watched directory (etc/service).
[Cedric de Saint Martin]
* Frontend: Add native path to varnish environment. [Cedric de Saint Martin]
0.71 (2012-12-20)
-----------------
* frontend: Add "path" parameter for Zope instances. [Cedric de Saint Martin]
0.70 (2012-11-05)
-----------------
* KVM: Add support for disk-type, second nbd and cpu-count. [Cedric de Saint
Martin]
0.69 (2012-10-30)
-----------------
* handle multiple notification_url values in notifier recipe [Marco Mariani]
* createWrapper() sh alternative to execute.execute() for simple cases
[Marco Mariani]
* fixed secret key generation in apachephp config [Marco Mariani]
0.68.1 (2012-10-03)
-------------------
......
[buildout]
parts = apache-perl perl-Apache2-Request
extends =
../apache/buildout.cfg
../perl/buildout.cfg
../libuuid/buildout.cfg
[apache-perl]
# Note: Should react to each build of apache and reinstall itself
recipe = hexagonit.recipe.cmmi
url = http://perl.apache.org/dist/mod_perl-2.0.5.tar.gz
md5sum = 03d01d135a122bd8cebd0cd5b185d674
configure-command =
${perl:location}/bin/perl Makefile.PL
configure-options =
MP_AP_PREFIX=${apache-2.2:location}
LIBS="-L${libuuid:location}/lib -Wl,-rpath=${libuuid:location}"
# for mod_apreq2 and Apache2::Request
[perl-Apache2-Request]
recipe = slapos.recipe.build:cpan
modules =
I/IS/ISAAC/libapreq2-2.13.tar.gz
perl = perl
cpan-configuration =
makepl_arg = ('--with-apache2-apxs=${apache-2.2:location}/bin/apxs')
environment =
APR_LDFLAGS = -L${libuuid:location}/lib -Wl,-R${libuuid:location}/lib -L${libexpat:location}/lib -Wl,-R${libexpat:location}/lib -L${apache-2.2:location}/lib -Wl,-R${apache-2.2:location}/lib
LDFLAGS=-L${libuuid:location}/lib -Wl,-rpath=${libuuid:location}/lib -L${libexpat:location}/lib -Wl,-rpath=${libexpat:location}/lib -L${apache-2.2:location}/lib -Wl,-rpath=${apache-2.2:location}/lib
......@@ -55,10 +55,50 @@ configure-options =
--enable-bz2
--enable-ftp
# Changing TMPDIR is required for PEAR installation.
# It will create a pear/temp directory under the SR instead of a shared /tmp/pear/temp.
# XXX we could mkdir tmp there
environment =
PKG_CONFIG_PATH=${libxml2:location}/lib/pkgconfig:${openssl:location}/lib/pkgconfig
PATH=${pkgconfig:location}/bin:${bzip2:location}/bin:${libxml2:location}/bin:%(PATH)s
LDFLAGS =-L${bzip2:location}/lib -Wl,-rpath -Wl,${bzip2:location}/lib -L${libtool:location}/lib -Wl,-rpath -Wl,${libtool:location}/lib -L${mariadb:location}/lib -Wl,-rpath -Wl,${mariadb:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -L${libmcrypt:location}/lib -Wl,-rpath -Wl,${libmcrypt:location}/libblkid
TMPDIR=${buildout:parts-directory}/${:_buildout_section_name_}
[apache-php-postgres]
<=apache-php
configure-options =
--with-apxs2=${apache:location}/bin/apxs
--with-libxml-dir=${libxml2:location}
--with-zlib-dir=${zlib:location}
--with-bz2-dir=${bzip2:location}
--with-mcrypt=${libmcrypt:location}
--with-gd
--with-jpeg-dir=${libjpeg:location}
--with-png-dir=${libpng:location}
--enable-gd-native-ttf
--with-ttf
--with-freetype-dir=${freetype:location}
--with-curl=${curl:location}
--with-zip-dir=${zip:location}
--with-imap=${cclient:location}
--with-iconv-dir=${libiconv:location}
--with-gettext=${gettext:location}
--with-ldap=${openldap:location}
--with-imap-ssl
--with-openssl=${openssl:location}
--enable-libxml
--enable-mbstring
--enable-session
--enable-exif
--enable-zip
--enable-bz2
--enable-ftp
--with-pgsql=${postgresql:location}
[libmcrypt]
......
......@@ -31,7 +31,7 @@ recipe = hexagonit.recipe.cmmi
depends =
${gdbm:version}
version = 2.4.3
revision = 1
revision = 2
url = http://mir2.ovh.net/ftp.apache.org/dist/httpd/httpd-${:version}.tar.bz2
md5sum = 87aaf7bc7e8715f0455997bb8c6791aa
configure-command = cp -ar ${apr:location}/apr-${apr:version} srclib/apr/; cp -ar ${apr-util:location}/apr-util-${apr-util:version} srclib/apr-util; ./configure
......@@ -87,7 +87,7 @@ configure-options = --prefix=${buildout:parts-directory}/${:_buildout_section_na
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig
CPPFLAGS =-I${libuuid:location}/include
CPPFLAGS =-I${libuuid:location}/include -I${openssl:location}/include
LDFLAGS =-Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${openssl:location}/lib -L${libuuid:location}/lib -Wl,-rpath=${libuuid:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${pcre:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${gdbm:location}/lib
[mod_antiloris-apache-2.4.patch]
......@@ -149,6 +149,7 @@ configure-options = --disable-static
--enable-proxy-scgi
--enable-dav
--enable-dav-fs
--enable-dav-lock
--enable-so
--enable-ssl
--with-included-apr
......
......@@ -5,7 +5,7 @@ parts = busybox
[busybox]
recipe = slapos.recipe.build
url = http://git.busybox.net/busybox/snapshot/busybox-1_20_1.tar.gz
md5sum = 15758fc37ae8051d6def1b8afb691821
md5sum = 2dcfee8add6b9c52d6a91e97ba705b66
script =
extract_dir = self.extract(self.download(%(url)r, %(md5sum)r))
workdir = guessworkdir(extract_dir)
......
......@@ -18,10 +18,10 @@ download-only = true
[ca-certificates]
recipe = hexagonit.recipe.cmmi
version = 20120623
version = 20130119
url = ftp://ftp.free.fr/mirrors/ftp.debian.org/pool/main/c/ca-certificates/ca-certificates_${:version}.tar.gz
patch-binary = ${patch:location}/bin/patch
md5sum = 5105d4cc086f0d4ecf7bf2e4c4667289
md5sum = 1fbbec2028a33cf865b79c204aa2e626
patches =
${ca-certificates-sbin-dir.patch:location}/${ca-certificates-sbin-dir.patch:filename}
patch-options = -p0
......
......@@ -10,10 +10,10 @@ parts =
cclient
[cclient-patch]
recipe = slapos.recipe.download
recipe = hexagonit.recipe.download
download-only = true
url = ${:_profile_base_location_}/imap-2007f.patch
md5sum = 42c77fdd5d7a976fc302b93aadb3da98
location = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = imap-2007f.patch
[cclient]
......
......@@ -16,7 +16,7 @@ parts =
[node-sm]
recipe = slapos.recipe.build:npm
packages = sm@0.2.7
packages = sm@0.2.11
node = nodejs
environment =
PATH=${nodejs:location}/bin:%(PATH)s
......@@ -25,10 +25,10 @@ environment =
# Online IDE written in javascript/node.js
# URL : c9.io
# You can use it using the following command :
# NODE_PATH=${:destination}/node_modules ${nodejs:node_location} ${:cloud9_js_location}
# NODE_PATH=${:destination}/node_modules ${nodejs:node_location} ${:cloud9_js_location}
recipe = plone.recipe.command
stop-on-error = true
commit = 97db1467c517d265438684bd2a70b0b76ee282f6
commit = 5d18344936baf1d86b0fa5fc2c690051b4c77cb2
repository = https://github.com/ajaxorg/cloud9.git
location = ${buildout:parts-directory}/${:_buildout_section_name_}
git-binary = ${git:location}/bin/git
......@@ -37,26 +37,33 @@ command = export GIT_SSL_NO_VERIFY=true; export HOME=${:location}; (${:git-binar
update-command =
executable = ${:location}/bin/cloud9.js
[cloud9-session-directory.patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename}
filename = cloud9-session-directory.patch
download-only = true
md5sum = 5dc8cc28447ed3747b8a53c768d872aa
[cloud9-git]
# Online IDE written in javascript/node.js
# URL : c9.io
# You can use it using the following command :
# NODE_PATH=${:destination}/node_modules ${nodejs:node_location} ${:cloud9_js_location}
# NODE_PATH=${:destination}/node_modules ${nodejs:node_location} ${:cloud9_js_location}
recipe = plone.recipe.command
stop-on-error = true
commit = c66284221143c175fc889418d499da6f37492a7c
commit = 9158bbf9e886e3b96e91239249aca66e420aa8d1
repository = https://github.com/ajaxorg/cloud9.git
location = ${buildout:parts-directory}/${:_buildout_section_name_}
environment = export GIT_SSL_NO_VERIFY=true; export PATH=${git:location}/bin:${nodejs:location}/bin:${node-sm:location}/node_modules/sm/bin:$PATH; export CPPFLAGS="-I${libxml2:location}/include -I${nodejs:location}/include"; export LDFLAGS="-L${libxml2:location}/lib -Wl,-rpath=${libxml2:location}/lib"; export HOME=${:location};
command = ${:environment} (git clone --quiet ${:repository} ${:location} && cd ${:location} && git reset --hard ${:commit} && ${node-sm:location}/node_modules/.bin/sm install) || (rm -fr ${:location}; exit 1)
command = ${:environment} (git clone --quiet ${:repository} ${:location} && cd ${:location} && git reset --hard ${:commit} && ${node-sm:location}/node_modules/.bin/sm install && patch -p1 < ${cloud9-session-directory.patch:location}/${cloud9-session-directory.patch:filename}) || (rm -fr ${:location}; exit 1)
update-command =
executable = ${:location}/server.js
[cloud9-npm]
# Online IDE written in javascript/node.js
# URL : c9.io
# URL : c9.io
# You can use it using the following command :
# NODE_PATH=${:destination}/node_modules ${nodejs:node_location} ${:cloud9_js_location}
# NODE_PATH=${:destination}/node_modules ${nodejs:node_location} ${:cloud9_js_location}
recipe = slapos.recipe.npm
# Node part has to be specified, otherwise system node is used.
node = nodejs
......
diff --git a/configs/default.js b/configs/default.js
index 6d1c85f..be35b37 100644
--- a/configs/default.js
+++ b/configs/default.js
@@ -22,6 +22,8 @@ var vfsUrl = "/vfs";
var port = argv.p || process.env.PORT || 3131;
var host = argv.l || process.env.IP || "localhost";
+var home = process.env['HOME']
+
var config = [
{
packagePath: "connect-architect/connect",
@@ -167,7 +169,7 @@ var config = [
},
{
packagePath: "connect-architect/connect.session.file",
- sessionsPath: __dirname + "/../.sessions"
+ sessionsPath: home + "/.sessions"
},
"./cloud9.permissions",
{
\ No newline at end of file
......@@ -3,6 +3,7 @@ parts =
liberation-fonts
ipaex-fonts
ipa-fonts
ocrb-fonts
[fonts]
location = ${buildout:parts-directory}/${:_buildout_section_name_}
......@@ -32,3 +33,10 @@ strip-top-level-dir = true
url = http://info.openlab.ipa.go.jp/ipafont/fontdata/IPAfont00303.zip
md5sum = 39a828acf27790adbe4944dfb4d94bb1
destination = ${fonts:location}/${:_buildout_section_name_}
[ocrb-fonts]
recipe = hexagonit.recipe.download
strip-top-level-dir = true
url = http://sourceforge.jp/frs/redir.php?m=jaist&f=%2Ftsukurimashou%2F56948%2Focr-0.2.zip
md5sum = 9f2acd83291a31dbe053912f4115db75
destination = ${fonts:location}/${:_buildout_section_name_}
......@@ -28,8 +28,8 @@ filename = imagemagick-6.6.6-1-no-gsx-gsc-probe.patch
[imagemagick]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.imagemagick.org/pub/ImageMagick/ImageMagick-6.7.8-8.tar.bz2
md5sum = 4e5c8f102f3e7401587c924f5b4bca15
url = ftp://ftp.imagemagick.org/pub/ImageMagick/ImageMagick-6.8.1-10.tar.bz2
md5sum = cde56988f9d2208d9d61815cc23665b4
depends =
${libtiff:version}
${librsvg:version}
......
......@@ -3,8 +3,10 @@
[buildout]
extends =
../bzip2/buildout.cfg
../imagemagick/buildout.cfg
../jbigkit/buildout.cfg
../zlib/buildout.cfg
parts =
libdmtx
dmtx-utils
......@@ -26,4 +28,4 @@ environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${imagemagick:location}/lib/pkgconfig:${libdmtx:location}/lib/pkgconfig
CPPFLAGS=-I${libdmtx:location}/include
LDFLAGS=-Wl,-rpath=${jbigkit:location}/lib
LDFLAGS=-Wl,-rpath=${jbigkit:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${zlib:location}/lib
......@@ -13,8 +13,8 @@ find-links =
recipe = slapos.recipe.build
# here, two %s are used, first one is for directory name (eg. x86_64), and second one is for filename (eg. x86-64).
version = 3.5.6
url = http://download.documentfoundation.org/libreoffice/stable/${:version}/rpm/%s/LibO_${:version}_Linux_%s_install-rpm_en-US.tar.gz
#url = http://download.documentfoundation.org/libreoffice/stable/${:version}/rpm/%s/LibO_${:version}_Linux_%s_install-rpm_en-US.tar.gz
url = http://ftp.psu.ac.th/pub/libreoffice/${:version}/rpm/%s/LibO_${:version}_Linux_%s_install-rpm_en-US.tar.gz
# supported architectures md5sums
md5sum_x86 = 079609188b86ede3b3eebe2b75862b31
md5sum_x86-64 = 6a559fa9c62c810464254d129bd2dc17
......
......@@ -14,7 +14,8 @@ url = http://ftp.gnome.org/pub/gnome/sources/libcroco/0.6/libcroco-0.6.3.tar.bz2
md5sum = e1e93eeff4367c896f3959af34ba20eb
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${pkgconfig:location}/lib/pkgconfig:${libxml2:location}/lib/pkgconfig:${glib:location}/lib/pkgconfig
PKG_CONFIG_PATH=${pkgconfig:location}/lib/pkgconfig:${libxml2:location}/lib/pkgconfig:${glib:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig
LDFLAGS=-Wl,-rpath=${zlib:location}/lib
[librsvg]
recipe = hexagonit.recipe.cmmi
......@@ -36,4 +37,4 @@ configure-options =
environment =
PATH=${gdk-pixbuf:location}/bin:${glib:location}/bin:${libxml2:location}/bin:${pkgconfig:location}/bin:${pango:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${cairo:location}/lib/pkgconfig:${fontconfig:location}/lib/pkgconfig:${freetype:location}/lib/pkgconfig:${gdk-pixbuf:location}/lib/pkgconfig:${glib:location}/lib/pkgconfig:${kbproto:location}/lib/pkgconfig:${libX11:location}/lib/pkgconfig:${libpng:location}/lib/pkgconfig:${libXau:location}/lib/pkgconfig:${libXrender:location}/lib/pkgconfig:${libcroco:location}/lib/pkgconfig:${libxcb:location}/lib/pkgconfig:${libxml2:location}/lib/pkgconfig:${pango:location}/lib/pkgconfig:${pixman:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig:${xorg-libpthread-stubs:location}/lib/pkgconfig:${xproto:location}/lib/pkgconfig
LDFLAGS=-L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib
LDFLAGS=-L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${zlib:location}/lib
......@@ -9,11 +9,9 @@ parts =
[libtiff]
recipe = hexagonit.recipe.cmmi
version = 4.0.2
#url = http://download.osgeo.org/libtiff/tiff-${:version}.tar.gz
# server is down - circumvent
version = 4.0.3
url = http://www.imagemagick.org/download/delegates/tiff-${:version}.tar.gz
md5sum = 04a08fa1e07e696e820a0c3f32465a13
md5sum = 051c1068e6a0627f461948c365290410
configure-options =
--disable-static
--without-x
......
......@@ -25,10 +25,10 @@ download-only = true
[mariadb]
recipe = hexagonit.recipe.cmmi
version = 5.5.25
version = 5.5.28a
revision = 1
url = http://downloads.askmonty.org/f/mariadb-${:version}/kvm-tarbake-jaunty-x86/mariadb-${:version}.tar.gz/from/http://ftp.osuosl.org/pub/mariadb
md5sum = 943f67c267d73a4080ab497e11740daf
md5sum = a2d20a040ef1e49944a4ffe65ed7fefa
# compile directory is required to build mysql plugins.
keep-compile-dir = true
patch-options = -p0
......
......@@ -3,6 +3,7 @@
extends =
../cmake/buildout.cfg
../glib/buildout.cfg
../pkgconfig/buildout.cfg
../openssl/buildout.cfg
../pcre/buildout.cfg
../mariadb/buildout.cfg
......@@ -22,6 +23,7 @@ buildout-bin-dir = ${buildout:bin-directory}
cmake-command = ${cmake:location}/bin/cmake
mysql-config = ${mariadb:location}/bin/mysql_config
mysqllib = ${mariadb:location}/lib
path = ${pkgconfig:location}/bin
pkg-config-path = ${glib:location}/lib/pkgconfig/:${pcre:location}/lib/pkgconfig/:${openssl:location}/lib/pkgconfig/
libraries = ${zlib:location}/lib/:${glib:location}/lib/:${pcre:location}/lib/:${mariadb:location}/lib/:${openssl:location}/lib/
includes = ${zlib:location}/include/:${glib:location}/include/:${pcre:location}/include/:${mariadb:location}/include:${openssl:location}/include/
......@@ -39,7 +41,7 @@ script =
extract_dir = self.extract(url)
workdir = guessworkdir(extract_dir)
self.applyPatchList(self.options['mydumper-patches'], cwd=workdir)
env['PATH'] = self.options['buildout-bin-dir'] + ':' + env.get('PATH', '')
env['PATH'] = self.options['path'] + ':' + self.options['buildout-bin-dir'] + ':' + env.get('PATH', '')
env['PKG_CONFIG_PATH'] = self.options['pkg-config-path'] + ':' + \
env.get('PKG_CONFIG_PATH', '')
env['CMAKE_INCLUDE_PATH'] = self.options['includes']
......
......@@ -3,7 +3,7 @@ parts =
noVNC
[noVNC]
recipe = slapos.recipe.build:download-unpacked
url = http://cloud.github.com/downloads/kanaka/noVNC/novnc-0.3.tar.gz
md5sum = 95d3c58921fa188c179491e8ef2acc12
recipe = hexagonit.recipe.download
url = http://cloud.github.com/downloads/kanaka/noVNC/novnc-0.4.tar.gz
md5sum = 5703d5d46022d8723796dcbbf821ee7f
strip-top-level-dir = true
......@@ -14,8 +14,8 @@ parts =
[nodejs-0.8]
# Server-side Javascript.
recipe = hexagonit.recipe.cmmi
url = http://nodejs.org/dist/v0.8.8/node-v0.8.8.tar.gz
md5sum = f4dae84e96a94b768404c14633bccd49
url = http://nodejs.org/dist/v0.8.14/node-v0.8.14.tar.gz
md5sum = 284fd2c7578064c339d9cf6a3a475ac7
configure-options =
--openssl-includes=${openssl:location}/include
--openssl-libpath=${openssl:location}/lib
......
[buildout]
extends =
../perl/buildout.cfg
../../component/openssl/buildout.cfg
../../component/zlib/buildout.cfg
parts =
perl-Crypt-SSLeay
[perl-Crypt-SSLeay]
recipe = slapos.recipe.build:cpan
modules =
G/GA/GAAS/URI-1.60.tar.gz
N/NA/NANIS/Crypt-SSLeay-0.64.tar.gz
cpan-configuration =
make_arg=('OTHERLDFLAGS="-L${zlib:location}/lib -Wl,-R${zlib:location}/lib -L${openssl:location}/lib -Wl,-R${openssl:location}/lib"')
makepl_arg=('INC=-I${openssl:location}/include')
environment =
OPENSSL_PREFIX=${openssl:location}
perl = perl
[buildout]
extends =
../perl/buildout.cfg
../postgresql/buildout.cfg
parts =
perl-DBD-Pg
[perl-DBD-Pg]
recipe = slapos.recipe.build:cpan
modules =
T/TU/TURNSTEP/DBD-Pg-2.19.3.tar.gz
environment =
POSTGRES_HOME=${postgresql:location}
perl = perl
[buildout]
extends =
../perl/buildout.cfg
../imagemagick/buildout.cfg
parts = perl-Image-Magick
[perl-Image-Magick_MakefilePL.patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename}
md5sum = 31043e2b79e725d3b251aa09b4549046
download-only = true
filename = ${:_buildout_section_name_}
[perl-Image-Magick]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
${imagemagick:location}
url = http://search.cpan.org/CPAN/authors/id/J/JC/JCRISTY/PerlMagick-6.77.tar.gz
md5sum = fa0f66fa0cabbd1b196254f94dec8e99
patch-options = -p0
patches =
${perl-Image-Magick_MakefilePL.patch:location}/${perl-Image-Magick_MakefilePL.patch:filename}
configure-command =
${perl:location}/bin/perl Makefile.PL LIBS="-L${imagemagick:location}/lib -Wl,-R${imagemagick:location}/lib -L${perl:location}/libs-c -Wl,-R${perl:location}/libs-c" INC="-I${imagemagick:location}/include/ImageMagick"
--- Makefile.PL.orig 2012-10-02 14:58:25.537661734 +0200
+++ Makefile.PL 2012-10-02 15:23:32.250928745 +0200
@@ -156,11 +156,26 @@
}
# defaults for LIBS & INC & CCFLAGS params that we later pass to Writemakefile
-my $INC_magick = '-I../ -I.. -pthread -I/usr/include/cairo -I/usr/include/glib-2.0 -I/usr/lib64/glib-2.0/include -I/usr/include/pixman-1 -I/usr/include/freetype2 -I/usr/include/libpng12 -pthread -I/usr/include/pango-1.0 -I/usr/include/freetype2 -I/usr/include/glib-2.0 -I/usr/lib64/glib-2.0/include -I/usr/include/graphviz -I/usr/include/freetype2 -I/usr/include/libxml2 -I"' . $Config{'usrinc'} . '/ImageMagick"';
-my $LIBS_magick = '-L../magick/.libs -lMagickCore -lperl -lm';
-my $CCFLAGS_magick = "$Config{'ccflags'} -fopenmp -g -O2 -Wall -pthread";
-my $LDFLAGS_magick = "-L../magick/.libs -lMagickCore $Config{'ldflags'} -L/usr/lib";
-my $LDDLFLAGS_magick = "-L../magick/.libs -lMagickCore $Config{'lddlflags'} -L/usr/lib";
+my %paths_magick = ();
+foreach my $section ('INC', 'LIBS', 'CCFLAGS', 'LDFLAGS', 'LDDLFLAGS') {
+ $paths_magick{$section} = '';
+}
+{
+ my $i = 0;
+ while ($i <= $#ARGV) {
+ my ($key, $val) = split(/=/, $ARGV[$i], 2);
+ if (exists $paths_magick{$key}) {
+ $paths_magick{$key} = $val;
+ delete $ARGV[$i];
+ }
+ $i++;
+ }
+}
+# Enforce standard components
+$paths_magick{LIBS} .= ' -lMagickCore -lperl -lm';
+$paths_magick{CCFLAGS} .= " $Config{'ccflags'} -fopenmp -g -O2 -Wall -pthread";
+$paths_magick{LDFLAGS} .= " -lMagickCore $Config{'ldflags'}";
+$paths_magick{LDDLFLAGS} .= " -lMagickCore $Config{'lddlflags'}";
if (($^O eq 'MSWin32') && ($Config{cc} =~ /gcc/)) {
my($Ipaths, $Lpaths) = AutodetectWin32gcc();
@@ -168,11 +183,11 @@
#
# Setup for strawberry perl.
#
- $INC_magick = "$Ipaths";
- $LIBS_magick = "-lMagickCore";
- $CCFLAGS_magick = "$Config{'ccflags'}";
- $LDFLAGS_magick = "$Config{'ldflags'} $Lpaths ";
- $LDDLFLAGS_magick = "$Config{'lddlflags'} $Lpaths ";
+ $paths_magick{INC} = "$Ipaths";
+ $paths_magick{LIBS} = "-lMagickCore";
+ $paths_magick{CCFLAGS} = "$Config{'ccflags'}";
+ $paths_magick{LDFLAGS} = "$Config{'ldflags'} $Lpaths ";
+ $paths_magick{LDDLFLAGS} = "$Config{'lddlflags'} $Lpaths ";
}
# See lib/ExtUtils/MakeMaker.pm for details of how to influence
@@ -195,7 +210,7 @@
'DEFINE' => ' -D_LARGE_FILES=1 -DHAVE_CONFIG_H', # e.g., '-DHAVE_SOMETHING'
# Header search specfication and preprocessor flags
- 'INC' => $INC_magick,
+ 'INC' => $paths_magick{INC},
# C compiler
#'CC' => 'gcc -std=gnu99 -std=gnu99',
@@ -204,22 +219,22 @@
# 'CPPFLAGS' => "$Config{'cppflags'} -pthread -I/usr/include/cairo -I/usr/include/glib-2.0 -I/usr/lib64/glib-2.0/include -I/usr/include/pixman-1 -I/usr/include/freetype2 -I/usr/include/libpng12 -pthread -I/usr/include/pango-1.0 -I/usr/include/freetype2 -I/usr/include/glib-2.0 -I/usr/lib64/glib-2.0/include -I/usr/include/graphviz -I/usr/include/freetype2 -I/usr/include/libxml2",
# C compiler flags (e.g. -O -g)
- 'CCFLAGS' => $CCFLAGS_magick,
+ 'CCFLAGS' => $paths_magick{CCFLAGS},
# Linker
#'LD' => $Config{'ld'} == $Config{'cc'} ? 'gcc -std=gnu99 -std=gnu99' : $Config{'ld'},
# Linker flags for building an executable
- 'LDFLAGS' => $LDFLAGS_magick,
+ 'LDFLAGS' => $paths_magick{LDFLAGS},
# Linker flags for building a dynamically loadable module
- 'LDDLFLAGS' => $LDDLFLAGS_magick,
+ 'LDDLFLAGS' => $paths_magick{LDDLFLAGS},
# Install PerlMagick binary into ImageMagick bin directory
'INSTALLBIN' => '/usr/local/bin',
# Library specification
- 'LIBS' => [ $LIBS_magick ],
+ 'LIBS' => [ $paths_magick{LIBS} ],
# Perl binary name (if a Perl binary is built)
'MAP_TARGET' => 'PerlMagick',
[buildout]
extends =
../perl/buildout.cfg
../xapian/buildout.cfg
parts =
perl-Search-Xapian
[perl-Search-Xapian]
recipe = slapos.recipe.build:cpan
modules =
O/OL/OLLY/Search-Xapian-1.2.10.0.tar.gz
environment =
XAPIAN_CONFIG=${xapian:location}/bin/xapian-config
perl = perl
[buildout]
extends =
../perl/buildout.cfg
../ncurses/buildout.cfg
../readline/buildout.cfg
parts = perl-Term-ReadLine-Gnu
[perl-Term-ReadLine-Gnu.patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename}
md5sum = d4e4624e717c8da63e4d153149d57b68
download-only = true
filename = ${:_buildout_section_name_}
[perl-Term-ReadLine-Gnu]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
url = http://search.cpan.org/CPAN/authors/id/H/HA/HAYASHI/Term-ReadLine-Gnu-1.20.tar.gz
md5sum = fa33510193b89a2ada74fcef00816322
patch-options = -p0
patches =
${perl-Term-ReadLine-Gnu.patch:location}/${perl-Term-ReadLine-Gnu.patch:filename}
configure-command =
${perl:location}/bin/perl Makefile.PL --libdir=${readline:location}/lib --includedir="${readline:location}/include" LIBS="-Wl,-R${readline:location}/lib -L${ncurses:location}/lib -Wl,-R${ncurses:location}/lib" INC="-I${ncurses:location}/include"
--- Makefile.PL.orig 2012-09-26 16:40:42.922821617 +0200
+++ Makefile.PL 2012-10-02 02:51:26.400659861 +0200
@@ -22,7 +22,22 @@
use Config;
use Getopt::Long;
use 5.007; use 5.7.0; # use version 1.09 for older Perl
-my ($defs, $libs, $lddflags, $RLLIB, $RLINC);
+my ($defs, $lddflags, %config);
+
+{
+ $config{LIBS} = '';
+ $config{INC} = '';
+
+ my $i = 0;
+ while ($i <= $#ARGV) {
+ my ($key, $val) = split(/=/, $ARGV[$i], 2);
+ $config{$key} = $val;
+ if ($key eq 'LIBS' || $key eq 'INC') {
+ delete $ARGV[$i];
+ }
+ $i++;
+ }
+}
# exit 0 before creating the Makefile to be CPAN Testers friendly
# see http://wiki.cpantester.org/wiki/CPANAuthorNotes
@@ -36,20 +51,23 @@
$defs = ($Config{strings} =~ m|/string.h$|) ? '-DHAVE_STRING_H' : '';
# Parse command line to specify paths for the GNU Readline Library
+# (if they have not already been included within LIBS or INC)
{
my ($prefix, $libdir, $incdir);
GetOptions("prefix=s" => \$prefix,
"libdir=s" => \$libdir,
"includedir=s" => \$incdir);
- $RLLIB = defined $libdir
+ my $RLLIB = defined $libdir
? "-L$libdir" : (defined $prefix ? "-L$prefix/lib" : '');
- $RLINC = defined $incdir
+ my $RLINC = defined $incdir
? "-I$incdir" : (defined $prefix ? "-I$prefix/include" : '');
+ $config{LIBS} .= " $RLLIB";
+ $config{INC} .= " $RLINC";
}
if ($Config{osname} eq 'os2') {
# Check ftp://ftp.math.ohio-state.edu/pub/users/ilya/os2/
- $libs = '-lreadline_import';
+ $config{LIBS} .= ' -lreadline_import';
$defs .= ' -DOS2_USEDLL';
$lddflags = '';
} else {
@@ -60,18 +78,27 @@
# Old Cygwin may require setting false (0).
my $PREFER_CURSES = $Config{osname} eq 'aix' || $Config{osname} eq 'hpux'
|| $Config{osname} eq 'cygwin';
- my $TERMCAP_LIB = (! $PREFER_CURSES && &search_lib('-ltermcap'))
- || &search_lib('-lncurses')
- || &search_lib('-lcurses');
+ my @lib_dirs = ();
+ # Turn -L<path> parameters into a list of <path>s
+ {
+ foreach my $libopt (split(' ',$config{LIBS})) {
+ if ($libopt =~ m#^-L(.*)$#) {
+ push @lib_dirs, $1;
+ }
+ }
+ }
+ my $TERMCAP_LIB = (! $PREFER_CURSES && &search_lib('-ltermcap', \@lib_dirs))
+ || &search_lib('-lncurses', \@lib_dirs)
+ || &search_lib('-lcurses', \@lib_dirs);
unless ($TERMCAP_LIB) {
warn "Could not find neither libtermcap.a, libncurses.a, or libcurses.\n";
exit $err;
}
- $libs = "-lreadline $TERMCAP_LIB";
+ $config{LIBS} .= " -lreadline $TERMCAP_LIB";
# Latest Perl in FreeBSD does not need this hack. (Dec.2002)
- $libs .= ' -lcrypt' if ($Config{osname} =~ /freebsd/i);
+ $config{LIBS} .= ' -lcrypt' if ($Config{osname} =~ /freebsd/i);
$lddflags = '';
# If you are using old Cygwin, enable the following line.
#$lddflags = ($Config{osname} =~ /cygwin/i) ? '-static' : '';
@@ -80,7 +107,7 @@
# Check version of GNU Readline Library (for version 4.2 and before)
{
my ($rlmajorver, $rlminorver) =
- check_readline_version($RLINC, $RLLIB, $defs, $lddflags, $libs);
+ check_readline_version($defs, $lddflags, \%config);
if ($rlmajorver < 4 || $rlmajorver == 4 && $rlminorver <= 2) {
$defs .= " -DRL_READLINE_VERSION=" .
@@ -95,15 +122,15 @@
(
NAME => 'Term::ReadLine::Gnu',
VERSION_FROM => 'Gnu.pm',
- LIBS => [ "$RLLIB $libs" ],
- LDDLFLAGS => "$RLLIB $Config{lddlflags}",
+ LIBS => $config{LIBS},
+ LDDLFLAGS => $Config{lddlflags},
dynamic_lib => { OTHERLDFLAGS => $lddflags },
DEFINE => $defs,
($Config{osname} eq 'os2' ?
(
IMPORTS => { xfree => 'emxlibcm.401' }, # Yuck!
) : () ),
- INC => $RLINC,
+ INC => $config{INC},
dist => { COMPRESS => 'gzip -9f', SUFFIX => 'gz' },
clean => { FILES => "rlver.c rlver$Config{_exe}" },
);
@@ -122,20 +149,20 @@
exit(0);
########################################################################
-# Search a library '$lib' in $Config{libpth} directories, and return
+# Search a library '$lib' in the given directories (listref), and return
# $lib if exist or undef unless exist.
# ExtUtils::Liblist::ext() do similar job as this subroutine, but it
# warns unnecessary messages.
sub search_lib {
- my ($lib) = @_;
+ my ($lib, $lib_dirs) = @_;
unless ($lib =~ /^-l/) {
warn "search_lib: illegal arguments, \`$lib\'.\n";
return undef;
}
my $libbase = 'lib' . substr($lib, 2) . $Config{lib_ext};
my $libbase_so = 'lib' . substr($lib, 2) . "." . $Config{so};
- foreach (split(' ', $Config{libpth})) {
+ foreach (split(' ', $Config{libpth}), @$lib_dirs) {
if (-f $_ . '/' . $libbase) {
# print "$_/$libbase\n";
print "Found \`$_/$libbase\'.\n";
@@ -158,7 +185,7 @@
# RL_VERSION_MINOR
# Someday we don't need this subroutine..
sub check_readline_version {
- my ($RLINC, $RLLIB, $defs, $lddflags, $libs) = @_;
+ my ($defs, $lddflags, $config) = @_;
my $frlver = 'rlver.c';
# make temp file
@@ -172,7 +199,7 @@
close(F);
# compile it
- my $comp_cmd = "$Config{cc} $RLINC $Config{ccflags} $defs $frlver -o rlver $RLLIB $lddflags $Config{ldflags} $libs";
+ my $comp_cmd = "$Config{cc} $config->{INC} $Config{ccflags} $defs $frlver -o rlver $lddflags $Config{ldflags} $config->{LIBS}";
print $comp_cmd, "\n";
system($comp_cmd);
if ($?) {
[buildout]
extends =
../perl/buildout.cfg
../libiconv/buildout.cfg
parts = perl-Text-Iconv
[perl-Text-Iconv]
recipe = slapos.recipe.build:cpan
cpan-configuration =
makepl_arg='LIBS="-L${libiconv:location}/lib -Wl,-R${libiconv:location}/lib" INC="-I${libiconv:location}/include"'
modules =
M/MP/MPIOTR/Text-Iconv-1.7.tar.gz
perl = perl
[buildout]
extends =
../perl/buildout.cfg
../libxslt/buildout.cfg
../libxml2/buildout.cfg
../zlib/buildout.cfg
parts = perl-XML-LibXSLT
[perl-XML-LibXSLT]
recipe = slapos.recipe.build:cpan
cpan-configuration =
makepl_arg='LIBS="-L${libxslt:location}/lib -Wl,-R${libxslt:location}/lib -L${zlib:location}/lib -Wl,-R${zlib:location}/lib -L${libxml2:location}/lib -Wl,-R${libxml2:location}/lib" INC="-I${libxslt:location}/include -I${libxml2:location}/include/libxml2"'
modules =
S/SH/SHLOMIF/XML-LibXSLT-1.78.tar.gz
perl = perl
......@@ -12,24 +12,37 @@ md5sum = 9873a89c969bd5a478434c3b8b2d57d8
download-only = true
filename = ${:_buildout_section_name_}
[perl-postmakehook-download]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename}
md5sum = 7fded8308c1676decf77575c6d6b325f
download-only = true
filename = create-libs-symlink.py
[perl]
recipe = hexagonit.recipe.cmmi
depends =
${gdbm:version}
version = 5.16.0
version = 5.14.2
url = http://www.cpan.org/src/5.0/perl-${:version}.tar.bz2
md5sum = 15a2f95fb27231e10998240f13acf961
md5sum = 04a4c5d3c1f9f19d77daff8e8cd19a26
siteprefix = ${buildout:parts-directory}/site_${:_buildout_section_name_}
patch-options = -p1
patches =
${perl-keep-linker-flags-in-ldflags.patch:location}/${perl-keep-linker-flags-in-ldflags.patch:filename}
# Viktor has adapted the following commands for AMD64 compilation
# TODO: find out how we can write a generic code that suits all architectures
configure-command =
sh Configure -des \
-A ccflags=-fPIC \
-Dprefix=${buildout:parts-directory}/${:_buildout_section_name_} \
-Dsiteprefix=${:siteprefix} \
-Dcflags=-I${gdbm:location}/include \
-Dldflags="-L${gdbm:location}/lib -Wl,-rpath=${gdbm:location}/lib" \
-Ui_db \
-Dnoextensions=ODBM_File
-Dnoextensions=ODBM_File \
-Dusethreads
environment =
PATH=${patch:location}/bin:%(PATH)s
CFLAGS='-m64 -mtune=nocona'
post-make-hook = ${perl-postmakehook-download:location}/${perl-postmakehook-download:filename}:post_make_hook
\ No newline at end of file
import fnmatch
import os
import pprint
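# Post-make hook referenced by the [perl] part above: locate the libperl.a built
# under the install prefix and expose its directory through a "libs-c" symlink,
# so other parts can link against it via -L${perl:location}/libs-c.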
def post_make_hook(options, buildout):
location = options['location']
matches = []
for root, dirnames, filenames in os.walk(location):
for filename in fnmatch.filter(filenames, 'libperl.a'):
matches.append(os.path.join(root, filename))
nr_matches = len(matches)
if nr_matches == 0:
print "ERROR - no libperl.* found!"
exit
elif nr_matches > 1:
print "WARNING - several libperl.a found, taking only the first one:", matches.join("\n")
# matches[0] is a prefix of "location"
# For the symlink, we want the relative path.
rel_link = os.path.relpath(os.path.dirname(matches[0]), location)
os.symlink(rel_link, os.path.join(location, "libs-c"))
print "Created symlink \"libs-c\" to", rel_link
......@@ -9,6 +9,9 @@ extends =
../glib/buildout.cfg
../popt/buildout.cfg
[pkg-config]
<= pkgconfig
[pkgconfig]
recipe = hexagonit.recipe.cmmi
url = ftp://mirror.ovh.net/gentoo-distfiles/distfiles/pkg-config-0.26.tar.gz
......
[buildout]
extends =
../openssl/buildout.cfg
../readline/buildout.cfg
../zlib/buildout.cfg
../ncurses/buildout.cfg
../perl/buildout.cfg
parts = postgresql
[postgresql]
<= postgresql92
[postgresql91]
recipe = hexagonit.recipe.cmmi
url = http://ftp.postgresql.org/pub/source/v9.1.7/postgresql-9.1.7.tar.bz2
md5sum = eaf7b67493d59d1a60767ffdfbd65ce9
configure-options = --with-openssl --with-perl
environment =
CPPFLAGS=-I${zlib:location}/include -I${readline:location}/include -I${openssl:location}/include -I${ncurses:location}/lib
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${readline:location}/lib -Wl,-rpath=${readline:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${perl:location}/libs-c -Wl,-rpath=${perl:location}/libs-c
[postgresql92]
recipe = hexagonit.recipe.cmmi
url = http://ftp.postgresql.org/pub/source/v9.2.2/postgresql-9.2.2.tar.bz2
md5sum = 1cc388988e69bf75c6b55d59070100f6
configure-options = --with-openssl
environment =
CPPFLAGS=-I${zlib:location}/include -I${readline:location}/include -I${openssl:location}/include -I${ncurses:location}/lib
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${readline:location}/lib -Wl,-rpath=${readline:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${perl:location}/libs-c -Wl,-rpath=${perl:location}/libs-c
......@@ -2,7 +2,8 @@
parts = python-setuptools
[setuptools-download]
recipe = slapos.recipe.build:download
recipe = hexagonit.recipe.download
download-only = true
filename = setuptools-0.6c11-py2.7.egg
url = http://pypi.python.org/packages/2.7/s/setuptools/${:filename}
md5sum = fe1f997bc722265116870bc7919059ea
......
[buildout]
parts =
rsync
[rsync]
recipe = hexagonit.recipe.cmmi
url = http://rsync.samba.org/ftp/rsync/src/rsync-3.0.9.tar.gz
md5sum = 5ee72266fe2c1822333c407e1761b92b
make-options =
PREFIX=${buildout:parts-directory}/${:_buildout_section_name_}
......@@ -27,6 +27,7 @@ parts =
slapos
cfg-environment
sh-environment
py
find-links =
http://www.nexedi.org/static/packages/source/slapos.buildout/
......@@ -48,13 +49,13 @@ exec-sitecustomize = false
allowed-eggs-from-site-packages =
[environment]
# Note: For now original PATH is appeneded to the end, as not all tools are
# Note: For now original PATH is appended to the end, as not all tools are
# provided by SlapOS
PATH=${bison:location}/bin:${bzip2:location}/bin:${gettext:location}/bin:${glib:location}/bin:${libxml2:location}/bin:${libxslt:location}/bin:${m4:location}/bin:${ncurses:location}/bin:${openssl:location}/bin:${pkgconfig:location}/bin:${python2.7:location}/bin:${readline:location}/bin:${sqlite3:location}/bin:${swig:location}/bin:${buildout:bin-directory}:${patch:location}/bin:$PATH
CFLAGS=-I${bzip2:location}/include -I${gdbm:location}/include -I${gettext:location}/include -I${glib:location}/include -I${libxml2:location}/include -I${libxslt:location}/include -I${ncurses:location}/include -I${openssl:location}/include -I${popt:location}/include -I${readline:location}/include -I${sqlite3:location}/include -I${zlib:location}/include
CPPFLAGS=${:CFLAGS}
LDFLAGS=-L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib -L${gdbm:location}/lib -Wl,-rpath=${gdbm:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -L${glib:location}/lib -Wl,-rpath=${glib:location}/lib -L${libxml2:location}/lib -Wl,-rpath=${libxml2:location}/lib -L${libxslt:location}/lib -Wl,-rpath=${libxslt:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${popt:location}/lib -Wl,-rpath=${popt:location}/lib -L${readline:location}/lib -Wl,-rpath=${readline:location}/lib -L${sqlite3:location}/lib -Wl,-rpath=${sqlite3:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig:${libxml2:location}/lib/pkgconfig:${libxslt:location}/lib/pkgconfig:${openssl:location}/lib/pkgconfig:${popt:location}/lib/pkgconfig:${python2.7:location}/lib/pkconfig:${sqlite3:location}/lib/pkconfig
PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig:${libxml2:location}/lib/pkgconfig:${libxslt:location}/lib/pkgconfig:${openssl:location}/lib/pkgconfig:${popt:location}/lib/pkgconfig:${python2.7:location}/lib/pkgconfig:${sqlite3:location}/lib/pkgconfig
LD_LIBRARY_PATH=${bzip2:location}/lib:${gdbm:location}/lib:${gettext:location}/lib:${glib:location}/lib:${libxml2:location}/lib:${libxslt:location}/lib:${ncurses:location}/lib:${openssl:location}/lib:${popt:location}/lib:${readline:location}/lib:${sqlite3:location}/lib:${zlib:location}/lib
[cfg-environment]
......@@ -113,50 +114,70 @@ scripts =
slapgrid-supervisorctl = slapos.grid.svcbackend:supervisorctl
slapgrid-supervisord = slapos.grid.svcbackend:supervisord
slapproxy = slapos.proxy:main
slapproxy-query = slapos.proxy.query:main
slapos = slapos.entry:main
slapos-watchdog = slapos.grid.watchdog:main
[py]
recipe = zc.recipe.egg
eggs =
${slapos:eggs}
python = python2.7
interpreter = py
scripts = py
[versions]
zc.buildout = 1.6.0-dev-SlapOS-007
# Use our own buildout version
zc.buildout = 1.6.0-dev-SlapOS-010
# Don't use beta
lxml = 3.0.2
Jinja2 = 2.6
Werkzeug = 0.8.3
buildout-versions = 1.7
collective.recipe.template = 1.9
hexagonit.recipe.cmmi = 1.6
lxml = 2.3.6
meld3 = 0.6.9
meld3 = 0.6.10
netaddr = 0.7.10
slapos.core = 0.31.1
slapos.libnetworkcache = 0.13.2
slapos.core = 0.35
slapos.libnetworkcache = 0.13.3
xml-marshaller = 0.9.7
z3c.recipe.scripts = 1.0.1
zc.recipe.egg = 1.3.2
# Required by:
# slapos.core==0.31.1
# slapos.core==0.35
Flask = 0.9
# Required by:
# hexagonit.recipe.cmmi==1.6
hexagonit.recipe.download = 1.5.1
hexagonit.recipe.download = 1.6
# Required by:
# slapos.core==0.31.1
# slapos.core==0.35
netifaces = 0.8
# Required by:
# slapos.core==0.31.1
# slapos.libnetworkcache==0.13.2
# slapos.core==0.35
pyflakes = 0.6.1
# Required by:
# slapos.core==0.35
# slapos.libnetworkcache==0.13.3
# supervisor==3.0b1
# zc.buildout==1.6.0-dev-SlapOS-007
# zope.interface==4.0.1
# zc.buildout==1.6.0-dev-SlapOS-010
# zope.interface==4.0.3
setuptools = 0.6c12dev-r88846
# Required by:
# slapos.core==0.31.1
# slapos.core==0.35
supervisor = 3.0b1
# Required by:
# slapos.core==0.31.1
zope.interface = 4.0.1
# slapos.core==0.35
unittest2 = 0.5.1
# Required by:
# slapos.core==0.35
zope.interface = 4.0.3
......@@ -17,8 +17,8 @@ filename = stunnel-4-hooks.py
[stunnel-4]
recipe = hexagonit.recipe.cmmi
url = http://mirror.bit.nl/stunnel/stunnel-4.53.tar.gz
md5sum = ab3bfc915357d67da18c73f73610d593
url = ftp://ftp.stunnel.org/stunnel/archive/4.x/stunnel-4.54.tar.gz
md5sum = c2b1db99e3ed547214568959a8ed18ac
pre-configure-hook = ${stunnel-4-hook-download:location}/${stunnel-4-hook-download:filename}:pre_configure_hook
configure-options =
--enable-ipv6
......
......@@ -12,11 +12,11 @@ parts =
[tomcat6]
recipe = hexagonit.recipe.download
strip-top-level-dir = true
url = http://apache.multidist.com/tomcat/tomcat-6/v6.0.35/bin/apache-tomcat-6.0.35.tar.gz
md5sum = 171d255cd60894b29a41684ce0ff93a8
url = http://apache.multidist.com/tomcat/tomcat-6/v6.0.36/bin/apache-tomcat-6.0.36.tar.gz
md5sum = 3dde098fd0b3a08d3f2867e4a95591ba
[tomcat7]
recipe = hexagonit.recipe.download
strip-top-level-dir = true
url = http://apache.multidist.com/tomcat/tomcat-7/v7.0.25/bin/apache-tomcat-7.0.25.tar.gz
md5sum = 2aa59d23555d641b20efad4aed86b693
url = http://apache.multidist.com/tomcat/tomcat-7/v7.0.34/bin/apache-tomcat-7.0.34.tar.gz
md5sum = 0f50494425c24450b4f66dfd4d2aecca
......@@ -45,6 +45,6 @@ patches =
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${garbage-collector:location}/lib/pkconfig
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${garbage-collector:location}/lib/pkgconfig
CPPFLAGS=-I${ncurses:location}/include/ -I${zlib:location}/include/ -I${garbage-collector:location}/include
LDFLAGS=-Wl,--as-needed -L${garbage-collector:location}/lib -Wl,-rpath=${garbage-collector:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
[buildout]
parts =
xapian
depends =
../zlib/buildout.cfg
../libuuid/buildout.cfg
[xapian]
recipe = hexagonit.recipe.cmmi
url = http://oligarchy.co.uk/xapian/1.2.12/xapian-core-1.2.12.tar.gz
md5sum = faf33a3945edbe4c848627750856cbeb
environment =
CPPFLAGS = -I${zlib:location}/include -I${libuuid:location}/include
LDFLAGS = -L${zlib:location}/lib -Wl,-R${zlib:location}/lib -L${libuuid:location}/lib -Wl,-R${libuuid:location}/lib
\ No newline at end of file
##############################################################################
#
# Copyright (c) 2010-2013 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from setuptools import setup, find_packages
import glob
import os
version = '0.68.2-dev'
version = '0.72.0'
name = 'slapos.cookbook'
long_description = open("README.txt").read() + "\n" + \
open("CHANGES.txt").read() + "\n"
......@@ -26,7 +52,6 @@ setup(name=name,
packages=find_packages(),
include_package_data=True,
install_requires=[
'PyXML', # for full blown python interpreter
'lxml', # for full blown python interpreter
'netaddr', # to manipulate on IP addresses
'setuptools', # namespaces
......@@ -45,40 +70,41 @@ setup(name=name,
'addresiliency = slapos.recipe.addresiliency:Recipe',
'agent = slapos.recipe.agent:Recipe',
'apache.frontend = slapos.recipe.apache_frontend:Recipe',
'apache.zope.backend = slapos.recipe.apache_zope_backend:Recipe',
'apacheperl = slapos.recipe.apacheperl:Recipe',
'apachephp = slapos.recipe.apachephp:Recipe',
'apacheproxy = slapos.recipe.apacheproxy:Recipe',
'apache.zope.backend = slapos.recipe.apache_zope_backend:Recipe',
'boinc = slapos.recipe.boinc:Recipe',
'boinc.app = slapos.recipe.boinc:App',
'boinc.client = slapos.recipe.boinc:Client',
'bonjourgrid = slapos.recipe.bonjourgrid:Recipe',
'bonjourgrid.client = slapos.recipe.bonjourgrid:Client',
'certificate_authority.request = slapos.recipe.certificate_authority:Request',
'certificate_authority = slapos.recipe.certificate_authority:Recipe',
'certificate_authority.request = slapos.recipe.certificate_authority:Request',
'check_page_content = slapos.recipe.check_page_content:Recipe',
'check_port_listening = slapos.recipe.check_port_listening:Recipe',
'check_url_available = slapos.recipe.check_url_available:Recipe',
'check_page_content = slapos.recipe.check_page_content:Recipe',
'cloud9 = slapos.recipe.cloud9:Recipe',
'cloudooo.test = slapos.recipe.erp5_test:CloudoooRecipe',
'condor = slapos.recipe.condor:Recipe',
'condor.submit = slapos.recipe.condor:AppSubmit',
'cron.d = slapos.recipe.dcron:Part',
'cron = slapos.recipe.dcron:Recipe',
'cron.d = slapos.recipe.dcron:Part',
'davstorage = slapos.recipe.davstorage:Recipe',
'downloader = slapos.recipe.downloader:Recipe',
'dropbear = slapos.recipe.dropbear:Recipe',
'dropbear.add_authorized_key = slapos.recipe.dropbear:AddAuthorizedKey',
'dropbear.client = slapos.recipe.dropbear:Client',
'dropbear = slapos.recipe.dropbear:Recipe',
'dumpmdb = slapos.recipe.dumpmdb:Recipe',
'duplicity = slapos.recipe.duplicity:Recipe',
'egg_test = slapos.recipe.erp5_test:EggTestRecipe',
'equeue = slapos.recipe.equeue:Recipe',
'erp5.bootstrap = slapos.recipe.erp5_bootstrap:Recipe',
'erp5.promise = slapos.recipe.erp5_promise:Recipe',
'erp5scalabilitytestbed = slapos.recipe.erp5scalabilitytestbed:Recipe',
'erp5testnode = slapos.recipe.erp5testnode:Recipe',
'erp5.test = slapos.recipe.erp5_test:Recipe',
'erp5.update = slapos.recipe.erp5_update:Recipe',
'erp5scalabilitytestbed = slapos.recipe.erp5scalabilitytestbed:Recipe',
'erp5testnode = slapos.recipe.erp5testnode:Recipe',
'firefox = slapos.recipe.firefox:Recipe',
'fontconfig = slapos.recipe.fontconfig:Recipe',
'generate.mac = slapos.recipe.generatemac:Recipe',
......@@ -96,29 +122,33 @@ setup(name=name,
'importmdb = slapos.recipe.importmdb:Recipe',
'java = slapos.recipe.java:Recipe',
'kumofs = slapos.recipe.kumofs:Recipe',
'kvm.frontend = slapos.recipe.kvm_frontend:Recipe',
'kvm = slapos.recipe.kvm:Recipe',
'kvm.frontend = slapos.recipe.kvm_frontend:Recipe',
'lamp = slapos.recipe.lamp:Request',
'lamp.request = slapos.recipe.lamp:Request',
'lamp.simple = slapos.recipe.lamp:Simple',
'lamp = slapos.recipe.lamp:Request',
'lamp.static = slapos.recipe.lamp:Static',
'libcloudrequest = slapos.recipe.libcloudrequest:Recipe',
'libcloud = slapos.recipe.libcloud:Recipe',
'libcloudrequest = slapos.recipe.libcloudrequest:Recipe',
'lockfile = slapos.recipe.lockfile:Recipe',
'logrotate.d = slapos.recipe.logrotate:Part',
'logrotate = slapos.recipe.logrotate:Recipe',
'logrotate.d = slapos.recipe.logrotate:Part',
'memcached = slapos.recipe.memcached:Recipe',
'mkdirectory = slapos.recipe.mkdirectory:Recipe',
'mioga.instantiate = slapos.recipe.mioga.instantiate:Recipe',
'mydumper = slapos.recipe.mydumper:Recipe',
'mysql = slapos.recipe.mysql:Recipe',
'nbdserver = slapos.recipe.nbdserver:Recipe',
'nosqltestbed = slapos.recipe.nosqltestbed:NoSQLTestBed',
'notifier = slapos.recipe.notifier:Recipe',
'notifier.callback = slapos.recipe.notifier:Callback',
'notifier.notify = slapos.recipe.notifier:Notify',
'notifier = slapos.recipe.notifier:Recipe',
'novnc = slapos.recipe.novnc:Recipe',
'onetimeupload = slapos.recipe.onetimeupload:Recipe',
'pbs = slapos.recipe.pbs:Recipe',
'postgres = slapos.recipe.postgres:Recipe',
'postgres.export = slapos.recipe.postgres.backup:ExportRecipe',
'postgres.import = slapos.recipe.postgres.backup:ImportRecipe',
'proactive = slapos.recipe.proactive:Recipe',
'publish = slapos.recipe.publish:Recipe',
'publishurl = slapos.recipe.publishurl:Recipe',
......@@ -126,27 +156,26 @@ setup(name=name,
'pwgen.stable = slapos.recipe.pwgen:StablePasswordGeneratorRecipe',
'redis.server = slapos.recipe.redis:Recipe',
'requestoptional = slapos.recipe.request:RequestOptional',
'request = slapos.recipe.request:Recipe',
'seleniumrunner = slapos.recipe.seleniumrunner:Recipe',
'sheepdogtestbed = slapos.recipe.sheepdogtestbed:SheepDogTestBed',
'shellinabox = slapos.recipe.shellinabox:Recipe',
'shell = slapos.recipe.shell:Recipe',
'shellinabox = slapos.recipe.shellinabox:Recipe',
'signalwrapper= slapos.recipe.signal_wrapper:Recipe',
'simplelogger = slapos.recipe.simplelogger:Recipe',
'siptester = slapos.recipe.siptester:SipTesterRecipe',
'slapconfiguration = slapos.recipe.slapconfiguration:Recipe',
'slapcontainer = slapos.recipe.container:Recipe',
'slapmonitor = slapos.recipe.slapmonitor:Recipe',
'slapmonitor = slapos.recipe.slapmonitor:MonitorRecipe',
'slapmonitor-xml = slapos.recipe.slapmonitor:MonitorXMLRecipe',
'slapreport = slapos.recipe.slapreport:Recipe',
'slaprunner = slapos.recipe.slaprunner:Recipe',
'slaprunner.test = slapos.recipe.slaprunner:Test',
'softwaretype = slapos.recipe.softwaretype:Recipe',
'sphinx= slapos.recipe.sphinx:Recipe',
'sshkeys_authority.request = slapos.recipe.sshkeys_authority:Request',
'sshkeys_authority = slapos.recipe.sshkeys_authority:Recipe',
'sshkeys_authority.request = slapos.recipe.sshkeys_authority:Request',
'stunnel = slapos.recipe.stunnel:Recipe',
'symbolic.link = slapos.recipe.symbolic_link:Recipe',
'testnode = slapos.recipe.testnode:Recipe',
'tidstorage = slapos.recipe.tidstorage:Recipe',
'urlparse = slapos.recipe._urlparse:Recipe',
'uuid = slapos.recipe._uuid:Recipe',
......
testnode
========
Generic testnode.
......@@ -26,56 +26,73 @@
##############################################################################
from slapos.recipe.librecipe import GenericSlapRecipe
import sys
import os
class Recipe(GenericSlapRecipe):
""" This class provides the installation of the resilience
script on the partition.
"""
def _install(self):
path_list = []
self_id = int(self.parameter_dict['number'])
ip = self.parameter_dict['ip-list'].split(' ')
print 'Creating bully script with ips : %s\n' % ip
slap_connection = self.buildout['slap-connection']
path_conf = os.path.join(self.options['script'], 'conf.in')
path_bully = os.path.join(self.options['script'], self.parameter_dict['script'])
path_bully_new = os.path.join(self.options['script'], 'new.py')
path_run = os.path.join(self.options['run'], self.parameter_dict['wrapper'])
print 'paths: %s\n%s\n' % (path_run, path_bully)
bully_conf = dict(self_id=self_id,
ip_list=ip,
executable=sys.executable,
syspath=sys.path,
server_url=slap_connection['server-url'],
key_file=slap_connection.get('key-file'),
cert_file=slap_connection.get('cert-file'),
computer_id=slap_connection['computer-id'],
partition_id=slap_connection['partition-id'],
software=slap_connection['software-release-url'],
namebase=self.parameter_dict['namebase'],
confpath=path_conf)
try:
conf = self.createFile(path_conf,
self.substituteTemplate(
self.getTemplateFilename('conf.in.in'),
bully_conf))
path_list.append(conf)
script = self.createExecutable(path_bully,
self.substituteTemplate(
self.getTemplateFilename('bully.py.in'),
bully_conf))
path_list.append(script)
wrapper = self.createPythonScript(
path_run,
'slapos.recipe.librecipe.execute.execute',
[path_bully])
path_list.append(wrapper)
except IOError:
pass
return path_list
""" This class provides the installation of the resilience
scripts on the partition.
bin/takeover will perform a rename (must be run manually).
bin/bully will monitor, run elections and perform renames when needed.
"""
def _install(self):
path_list = []
confpath = os.path.join(self.options['etc'], 'bully.conf')
ip_list = self.parameter_dict['ip-list']
print 'Creating bully configuration with ips : %s\n' % ip_list
conf = self.createFile(confpath,
self.substituteTemplate(
self.getTemplateFilename('bully.conf.in'),
{
'self_id': int(self.parameter_dict['number']),
'ip_list': ip_list
}
))
path_list.append(conf)
slap_connection = self.buildout['slap-connection']
if self.optionIsTrue('enable-bully-service', default=False):
bully_dir = self.options['services']
else:
bully_dir = self.options['bin']
bully_wrapper = self.createPythonScript(
name=os.path.join(bully_dir, self.options['wrapper-bully']),
absolute_function='slapos.recipe.addresiliency.bully.run',
arguments={
'confpath': confpath,
'server_url': slap_connection['server-url'],
'key_file': slap_connection.get('key-file'),
'cert_file': slap_connection.get('cert-file'),
'computer_id': slap_connection['computer-id'],
'partition_id': slap_connection['partition-id'],
'software': slap_connection['software-release-url'],
'namebase': self.parameter_dict['namebase'],
})
path_list.append(bully_wrapper)
takeover_wrapper = self.createPythonScript(
name=os.path.join(self.options['bin'], self.options['wrapper-takeover']),
absolute_function='slapos.recipe.addresiliency.takeover.run',
arguments={
'server_url': slap_connection['server-url'],
'key_file': slap_connection.get('key-file'),
'cert_file': slap_connection.get('cert-file'),
'computer_id': slap_connection['computer-id'],
'partition_id': slap_connection['partition-id'],
'software': slap_connection['software-release-url'],
'namebase': self.parameter_dict['namebase'],
})
path_list.append(takeover_wrapper)
return path_list
# -*- coding: utf-8 -*-
import logging
import Queue
import socket
import thread
import time
import slapos.recipe.addresiliency.renamer
import slapos
log = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)
BASE_PORT = 50000
SLEEPING_MINS = 2 # XXX was 30, increase after testing
MSG_PING = 'ping'
MSG_HALT = 'halt'
MSG_VICTORY = 'victory'
MSG_OK = 'ok'
STATE_NORMAL = 'normal'
STATE_WAITINGCONFIRM = 'waitingConfirm'
STATE_ELECTION = 'election'
STATE_REORGANIZATION = 'reorganization'
## Leader is always number 0
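# Bully-style election: an instance first pings every higher-numbered instance;
# if none of them answers, it halts the lower-numbered ones, broadcasts victory
# and calls renamer.failover() to take over as the new leader.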
class ResilientInstance(object):
def __init__(self, comm, renamer, confpath):
self.comm = comm
self.participant_id = 0
self.state = STATE_NORMAL
self.halter_id = 0
self.inElection = False
self.alive = True
self.mainCanal = self.comm.create_canal([MSG_PING, MSG_HALT, MSG_VICTORY])
self.renamer = renamer
self.okCanal = self.comm.create_canal([MSG_OK])
self.confpath = confpath
self.loadConnectionInfo()
def loadConnectionInfo(self):
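# The configuration file written by the addresiliency recipe holds the
# space-separated ip list on its first line and this participant's id on the second.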
params = open(self.confpath, 'r').readlines()
self.total_participants = len(params[0].split())
new_id = int(params[1])
if self.participant_id != new_id:
self.halter_id = new_id
self.participant_id = new_id
log.debug('I am {} of {}'.format(self.participant_id, self.total_participants))
## Needs to be changed to use the master
def aliveManagement(self):
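# Periodically ping the leader (participant 0); if it does not answer within
# the canal timeout, start an election.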
while self.alive:
log.info('XXX sleeping for %d minutes' % SLEEPING_MINS)
time.sleep(SLEEPING_MINS*60)
if self.participant_id == 0:
continue
self.comm.send(MSG_PING, 0)
message, sender = self.okCanal.get()
if message:
continue
self.election()
def listen(self):
while self.alive:
self.comm.recv()
def main(self):
while self.alive:
message, sender = self.mainCanal.get()
if message == MSG_PING:
self.comm.send(MSG_OK, sender)
elif message == MSG_HALT:
self.state = STATE_WAITINGCONFIRM
self.halter_id = int(sender)
self.comm.send(MSG_OK, sender)
elif message == MSG_VICTORY:
if int(sender) == self.halter_id and self.state == STATE_WAITINGCONFIRM:
log.info('{} thinks {} is the leader'.format(self.participant_id, sender))
self.comm.send(MSG_OK, sender)
self.state = STATE_NORMAL
def election(self):
self.inElection = True
self.loadConnectionInfo()
# Check if I'm the highest instance alive
for higher in range(self.participant_id + 1, self.total_participants):
self.comm.send(MSG_PING, higher)
message, sender = self.okCanal.get()
if message:
log.info('{} is alive ({})'.format(higher, self.participant_id))
self.inElection = False
return False
continue
if not self.alive:
return False
# I should be the new coordinator, halt those below me
log.info('Should be ME : {}'.format(self.participant_id))
self.state = STATE_ELECTION
self.halter_id = self.participant_id
ups = []
for lower in range(self.participant_id):
self.comm.send(MSG_HALT, lower)
message, sender = self.okCanal.get()
if message:
ups.append(lower)
#Broadcast Victory
self.state = STATE_REORGANIZATION
for up in ups:
self.comm.send(MSG_VICTORY, up)
message, sender = self.okCanal.get()
if message:
continue
log.info('Something is wrong... let\'s start over')
return self.election()
self.state = STATE_NORMAL
self.active = True
log.info('{} Is THE LEADER'.format(self.participant_id))
self.renamer.failover()
self.inElection = False
return True
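# FilteredCanal: a small queue that only accepts a given set of message types;
# get() blocks up to 'timeout' seconds and returns [None, None] when nothing arrived.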
class FilteredCanal(object):
def __init__(self, accept, timeout):
self.accept = accept
self.queue = Queue.Queue()
self.timeout = timeout
def append(self, message, sender):
if message in self.accept:
self.queue.put([message, sender])
def get(self):
try:
return self.queue.get(timeout=self.timeout)
except Queue.Empty:
return [None, None]
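# Wrapper: IPv6 TCP transport between participants; each instance listens on
# BASE_PORT + its own id and dispatches every received message to the registered canals.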
class Wrapper(object):
def __init__(self, confpath, timeout=20):
self.canals = []
self.ips = []
self.participant_id = 0
self.timeout = timeout
self.confpath = confpath
self.getConnectionInfo()
self.socket = None
def getConnectionInfo(self):
params = open(self.confpath, 'r').readlines()
self.ips = params[0].split()
self.participant_id = int(params[1])
log.debug('I am {} of {}'.format(self.participant_id, self.ips))
def start(self):
self.getConnectionInfo()
self.socket = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
self.socket.bind((self.ips[self.participant_id], BASE_PORT + self.participant_id))
self.socket.listen(5)
def send(self, message, number):
self.getConnectionInfo()
try:
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
s.connect((self.ips[number], BASE_PORT + number))
s.send(message + (' {}\n'.format(self.participant_id)))
except (socket.error, socket.herror, socket.gaierror, socket.timeout):
pass
finally:
s.close()
def create_canal(self, accept):
created = FilteredCanal(accept, self.timeout)
self.canals.append(created)
return created
def recv(self):
client, _ = self.socket.accept()
client_message = client.recv(1024)
if client_message:
message, sender = client_message.split()
for canal in self.canals:
canal.append(message, int(sender))
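# Wire format sketch (assumed from send()/recv() above): each TCP connection
# carries one line of the form "<message> <sender id>\n", e.g. "ping 2\n",
# which recv() fans out to every registered FilteredCanal.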
def run(args):
confpath = args.pop('confpath')
renamer = slapos.recipe.addresiliency.renamer.Renamer(server_url = args.pop('server_url'),
key_file = args.pop('key_file'),
cert_file = args.pop('cert_file'),
computer_guid = args.pop('computer_id'),
partition_id = args.pop('partition_id'),
software_release = args.pop('software'),
namebase = args.pop('namebase'))
if args:
raise ValueError('Unknown arguments: %s' % ', '.join(args))
wrapper = Wrapper(confpath=confpath, timeout=20)
computer = ResilientInstance(wrapper, renamer=renamer, confpath=confpath)
# Idle wait until the connection info file lists at least two participants
while computer.total_participants < 2:
computer.loadConnectionInfo()
time.sleep(30)
log.info('Starting')
computer.comm.start()
thread.start_new_thread(computer.listen, ())
thread.start_new_thread(computer.aliveManagement, ())
computer.main()
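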
# -*- coding: utf-8 -*-
import logging
import time
import slapos
log = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)
class Renamer(object):
def __init__(self, server_url, key_file, cert_file, computer_guid,
partition_id, software_release, namebase):
self.server_url = server_url
self.key_file = key_file
self.cert_file = cert_file
self.computer_guid = computer_guid
self.partition_id = partition_id
self.software_release = software_release
self.namebase = namebase
def _failover(self):
"""\
This method does
- retrieve the broken computer partition
- change its reference to 'broken-...' and its software type to 'frozen'
- retrieve the winner computer partition (attached to this process)
- change its reference to replace the broken one.
later, slapgrid will change its software_type as well.
Then, after running slapgrid-cp a few times, the winner takes over and
a new cp is created to replace it as an importer.
"""
slap = slapos.slap.slap()
slap.initializeConnection(self.server_url, self.key_file, self.cert_file)
# partition that will take over.
cp_winner = slap.registerComputerPartition(computer_guid=self.computer_guid,
partition_id=self.partition_id)
# XXX although we can already rename cp_winner, to change its software type we need to
# get hold of the root cp as well
cp_exporter_ref = self.namebase + '0' # this is ok. the boss is always number zero.
# partition to be deactivated
cp_broken = cp_winner.request(software_release=self.software_release,
software_type='frozen',
state='stopped',
partition_reference=cp_exporter_ref)
broken_new_ref = 'broken-{}'.format(time.strftime("%d-%b_%H:%M:%S", time.gmtime()))
log.debug("Renaming {}: {}".format(cp_broken.getId(), broken_new_ref))
cp_broken.rename(new_name=broken_new_ref)
cp_broken.stopped()
log.debug("Renaming {}: {}".format(cp_winner.getId(), cp_exporter_ref))
# update name (and later, software type) for the partition that will take over
cp_winner.rename(new_name=cp_exporter_ref)
cp_winner.bang(message='partitions have been renamed!')
def failover(self):
try:
self._failover()
log.info('Renaming done')
except slapos.slap.ServerError:
log.info('Internal server error')
# -*- coding: utf-8 -*-
import slapos.recipe.addresiliency.renamer
def run(args):
renamer = slapos.recipe.addresiliency.renamer.Renamer(server_url = args.pop('server_url'),
key_file = args.pop('key_file'),
cert_file = args.pop('cert_file'),
computer_guid = args.pop('computer_id'),
partition_id = args.pop('partition_id'),
software_release = args.pop('software'),
namebase = args.pop('namebase'))
renamer.failover()
......@@ -8,6 +8,7 @@ import sys
sys.path[:] = %(syspath)s
import slapos
from slapos import slap as slapmodule
port = 50000
......@@ -37,9 +38,11 @@ def rename_broken_and_stop():
slap.initializeConnection('%(server_url)s',
'%(key_file)s',
'%(cert_file)s')
computer_partition = slap.registerComputerPartition('%(computer_id)s',
'%(partition_id)s')
broken = computer_partition.request('%(software)s', 'frozen', '%(namebase)s0')
computer_partition = slap.registerComputerPartition(computer_guid='%(computer_id)s',
partition_id='%(partition_id)s')
broken = computer_partition.request(software_release='%(software)s',
software_type='frozen',
partition_reference='%(namebase)s0')
broken.rename('broken-%%s' %% (time.strftime("%%d-%%b_%%H:%%M:%%S", time.gmtime())))
broken.stopped()
......
#!%(executable)s
import logging
import os
import socket
import time
import sys
import thread
import time
import os
sys.path[:] = %(syspath)s
from slapos import slap as slapmodule
import slapos
BASE_PORT = 50000
SLEEPING_MINS = 2
log = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)
class Renamer(object):
def __init__(self, server_url, key_file, cert_file, computer_guid,
partition_id, software_release, namebase):
self.server_url = server_url
self.key_file = key_file
self.cert_file = cert_file
self.computer_guid = computer_guid
self.partition_id = partition_id
self.software_release = software_release
self.namebase = namebase
def _failover(self):
slap = slapmodule.slap()
slap.initializeConnection(self.server_url,
self.key_file,
self.cert_file)
computer_partition = slap.registerComputerPartition(computer_guid=self.computer_guid,
partition_id=self.partition_id)
broken = computer_partition.request(software_release=self.software_release,
software_type='frozen',
partition_reference=self.namebase+'0')
broken.rename('broken-{}'.format(time.strftime("%%d-%%b_%%H:%%M:%%S", time.gmtime())))
broken.stopped()
computer_partition.rename(self.namebase+'0')
def failover(self):
try:
log.info('renaming done')
except slapos.slap.slap.ServerError:
log.info('Internal server error')
port = 50000
size = 1024
def rename_broken_and_stop():
try:
slap = slapmodule.slap()
slap.initializeConnection('%(server_url)s',
'%(key_file)s',
'%(cert_file)s')
computer_partition = slap.registerComputerPartition('%(computer_id)s',
'%(partition_id)s')
broken = computer_partition.request('%(software)s', 'frozen', '%(namebase)s0')
broken.rename('broken-%%s' %% (time.strftime("%%d-%%b_%%H:%%M:%%S", time.gmtime())))
broken.stopped()
computer_partition.rename('%(namebase)s0')
print 'renaming done\n'
except slapos.slap.slap.ServerError:
print 'Internal server error\n'
## Leader is always number 0
class ResilientInstance(object):
def __init__(self, comm):
def __init__(self, comm, renamer, confpath):
self.comm = comm
self.id = 0
self.state = 'normal'
self.halter = 0
self.nbComp = nbComp
self.inElection = False
self.alive = True
self.lastPing = time.clock()
self.mainCanal = self.comm.canal(['ping', 'halt',
'victory'])
self.mainCanal = self.comm.canal(['ping', 'halt', 'victory'])
self.renamer = renamer
self.okCanal = self.comm.canal(['ok'])
self.confpath = confpath
self.loadConnectionInfos()
def loadConnectionInfos(self):
file = open('%(confpath)s', 'r')
file = open(self.confpath, 'r')
params = file.read().split('\n')
file.close()
self.nbComp = len([x.strip("' ") for x in params[0].strip('[],').split(',')])
......@@ -67,7 +87,8 @@ class ResilientInstance(object):
## Needs to be changed to use the master
def aliveManagement(self):
while self.alive:
time.sleep(30*60)
log.info('XXX sleeping for %%d minutes' %% SLEEPING_MINS)
time.sleep(SLEEPING_MINS*60)
if self.id == 0:
continue
self.comm.send('ping', 0)
......@@ -84,7 +105,7 @@ class ResilientInstance(object):
while self.alive:
message, sender = self.mainCanal.get()
if message == 'ping':
self.comm.send('ok', sender)
self.comm.send('ok', sender)
elif message == 'halt':
self.state = 'waitingConfirm'
......@@ -93,7 +114,7 @@ class ResilientInstance(object):
elif message == 'victory':
if int(sender) == int(self.halter) and self.state == 'waitingConfirm':
print '%s thinks %s is the leader\n' % (self.id, sender)
log.info('{} thinks {} is the leader'.format(self.id, sender))
self.comm.send('ok', sender)
self.state = 'normal'
......@@ -105,7 +126,7 @@ class ResilientInstance(object):
self.comm.send('ping', higher)
message, sender = self.okCanal.get()
if message:
#print '%s is alive (%s)\n' % (higher, self.id)
log.info('{} is alive ({})'.format(higher, self.id))
self.inElection = False
return False
continue
......@@ -114,7 +135,7 @@ class ResilientInstance(object):
return False
#I should be the new coordinator, halt those below me
print 'Should be ME : %s \n' % self.id
log.info('Should be ME : {}'.format(self.id))
self.state = 'election'
self.halter = self.id
ups = []
......@@ -131,13 +152,13 @@ class ResilientInstance(object):
message, sender = self.okCanal.get()
if message:
continue
print 'Something is wrong... let\'s start over\n'
log.info('Something is wrong... let\'s start over')
return self.election()
self.state = 'normal'
self.active = True
print '%s Is THE LEADER \n' % self.id
log.info('{} Is THE LEADER'.format(self.id))
rename_broken_and_stop()
self.renamer.failover()
self.inElection = False
......@@ -164,27 +185,24 @@ class FilteredCanal(object):
self.lock.acquire()
if self.list:
self.lock.release()
val = self.list[0]
self.list = self.list[1:]
return val
return self.list.pop(0)
self.lock.release()
return [None, None]
class Wrapper(object):
def __init__(self, timeout=20):
self.read_pipes = [os.fdopen(x) for x in read_pipes]
self.write_pipes = write_pipes
def __init__(self, confpath, timeout=20):
self.canals = []
self.ips = []
self.id = 0
self.timeout = timeout
self.confpath = confpath
self.getConnectionInfos()
self.socket = None
def getConnectionInfos(self):
file = open('%(confpath)s', 'r')
file = open(self.confpath, 'r')
params = file.read().split('\n')
file.close()
self.ips = [x.strip("' ") for x in params[0].strip('[],').split(',')]
......@@ -193,17 +211,17 @@ class Wrapper(object):
def start(self):
self.getConnectionInfos()
self.socket = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
self.socket.bind((self.ips[self.id], port + self.id))
s.listen(5)
self.socket.bind((self.ips[self.id], BASE_PORT + self.id))
self.socket.listen(5)
def send(self, message, number):
self.getConnectionInfos()
try:
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
s.connect((self.ips[number], port + number))
s.send(message + (' %s\n' % self.id))
s.connect((self.ips[number], BASE_PORT + number))
s.send(message + (' {}\n'.format(self.id)))
except (socket.error, socket.herror, socket.gaierror, socket.timeout):
pass
pass
finally:
s.close()
......@@ -213,31 +231,48 @@ class Wrapper(object):
return created
def recv(self):
client, _ = s.accept()
client, _ = self.socket.accept()
client_message = client.recv(1024)
if client_message:
message, sender = client_message.split()
for canal in self.canals:
canal.append(message, sender)
canal.append(message, int(sender))
def main():
renamer = Renamer(server_url = '%(server_url)s',
key_file = '%(key_file)s',
cert_file = '%(cert_file)s',
computer_guid = '%(computer_id)s',
partition_id = '%(partition_id)s',
software_release = '%(software)s',
namebase = '%(namebase)s')
confpath = '%(confpath)s'
wrapper = Wrapper(confpath=confpath, timeout=20)
wrapper = createWrapper(20)
computer = ResilientInstance(wrapper, renamer=renamer, confpath=confpath)
computer = ResilientInstance(wrapper)
#idle waiting for connection infos
while computer.nbComp < 2 :
computer.loadConnectionInfos()
time.sleep(30)
#idle waiting for connection infos
while computer.nbComp < 2 :
computer.loadConnectionInfos()
time.sleep(30)
log.info('Starting')
print 'Starting\n'
computer.comm.start()
thread.start_new_thread(computer.listen, ())
thread.start_new_thread(computer.main, ())
thread.start_new_thread(computer.aliveManagement, ())
computer.comm.start()
thread.start_new_thread(computer.listen, ())
thread.start_new_thread(computer.main, ())
thread.start_new_thread(computer.aliveManagement, ())
while True:
# XXX tight loop
continue
while True:
continue
if __name__ == '__main__':
main()
......@@ -25,65 +25,42 @@
#
#############################################################################
import os
import sys
import zc.buildout
import slapos.slap
from slapos.recipe.librecipe import BaseSlapRecipe
from slapos.recipe.librecipe import GenericSlapRecipe
import json
import ConfigParser
from slapos.recipe.librecipe import GenericBaseRecipe
import sys
# XXX: BaseSlapRecipe and GenericSlapRecipe are deprecated, use
# GenericBaseRecipe and move partition parameter fetching to software release.
class Recipe(BaseSlapRecipe, GenericSlapRecipe):
class Recipe(GenericBaseRecipe):
def install(self):
self.path_list = []
crond = self.installCrond()
path_list = []
configuration_path = self.options["config"]
header = """[DEFAULT]
master_url = %s
key = %s
slap = slapos.slap.slap()
slap.initializeConnection(self.server_url, self.key_file, self.cert_file)
parameter_dict = slap.registerComputerPartition(
self.computer_id,
self.computer_partition_id,
).getInstanceParameterDict()
cert = %s
max_install_duration = %s
max_uninstall_duration = %s
max_request_duration = %s
max_destroy_duration = %s
""" % (self.options["master-url"],
"\n ".join(self.options["key"].split("\n")),
"\n ".join(self.options["cert"].split("\n")),
self.options["default_max_install_duration"],
self.options["default_max_uninstall_duration"],
self.options["default_max_request_duration"],
self.options["default_max_destroy_duration"])
# XXX: should probably expect one more (SR-originating) parameter instead
# of using self.work_directory .
configuration_path = os.path.join(self.work_directory, "agent.cfg")
with open(configuration_path, "w") as configuration:
configuration.write(parameter_dict["configuration"])
agent_crond_path = os.path.join(crond, "agent")
with open(agent_crond_path, "w") as agent_crond:
agent_crond.write("*/5 * * * * %s -S %s --pidfile=%s --log=%s "
"%s 2>&1 > /dev/null\n" % (
self.options["python_binary"],
self.options["agent_binary"],
self.options["pidfile"],
self.options["log"],
configuration_path,
))
configuration.write(header + self.options["configuration"])
path_list.append(self.createPythonScript(
self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute',
[self.options["agent_binary"], '--pidfile=%s' % self.options["pidfile"],
"--log=%s" % self.options["log"], configuration_path]))
return self.path_list + [configuration_path, agent_crond_path]
path_list.append(configuration_path)
def installCrond(self):
_, ws = self.egg.working_set()
timestamps = self.createDataDirectory('cronstamps')
cron_output = os.path.join(self.log_directory, 'cron-output')
self._createDirectory(cron_output)
catcher = zc.buildout.easy_install.scripts([('catchcron',
__name__ + '.catdatefile', 'catdatefile')], ws, sys.executable,
self.bin_directory, arguments=[cron_output])[0]
self.path_list.append(catcher)
cron_d = os.path.join(self.etc_directory, 'cron.d')
crontabs = os.path.join(self.etc_directory, 'crontabs')
self._createDirectory(cron_d)
self._createDirectory(crontabs)
wrapper = zc.buildout.easy_install.scripts([('crond',
'slapos.recipe.librecipe.execute', 'execute')], ws, sys.executable,
self.wrapper_directory, arguments=[
self.options['dcrond_binary'].strip(), '-s', cron_d, '-c', crontabs,
'-t', timestamps, '-f', '-l', '5', '-M', catcher]
)[0]
self.path_list.append(wrapper)
return cron_d
return path_list
......@@ -5,12 +5,13 @@
PidFile "%(pid_file)s"
ServerName %(server_name)s
DocumentRoot %(document_root)s
ServerRoot %(instance_home)s
%(listen)s
ServerAdmin %(server_admin)s
DefaultType text/plain
TypesConfig conf/mime.types
TypesConfig %(httpd_home)s/conf/mime.types
AddType application/x-compress .Z
AddType application/x-gzip .gz .tgz
......@@ -22,8 +23,8 @@ ServerTokens Prod
# Log configuration
ErrorLog "%(error_log)s"
LogLevel warn
LogFormat "%%h %%{REMOTE_USER}i %%l %%u %%t \"%%r\" %%>s %%b \"%%{Referer}i\" \"%%{User-Agent}i\"" combined
LogFormat "%%h %%{REMOTE_USER}i %%l %%u %%t \"%%r\" %%>s %%b" common
LogFormat "%%h %%{REMOTE_USER}i %%{Host}i %%l %%u %%t \"%%r\" %%>s %%b \"%%{Referer}i\" \"%%{User-Agent}i\"" combined
LogFormat "%%h %%{REMOTE_USER}i %%{Host}i %%l %%u %%t \"%%r\" %%>s %%b" common
CustomLog "%(access_log)s" common
%(path_enable)s
......@@ -32,23 +33,23 @@ CustomLog "%(access_log)s" common
#LoadModule unixd_module modules/mod_unixd.so
#LoadModule access_compat_module modules/mod_access_compat.so
#LoadModule authz_core_module modules/mod_authz_core.so
LoadModule authz_host_module modules/mod_authz_host.so
LoadModule log_config_module modules/mod_log_config.so
LoadModule deflate_module modules/mod_deflate.so
LoadModule setenvif_module modules/mod_setenvif.so
LoadModule version_module modules/mod_version.so
LoadModule proxy_module modules/mod_proxy.so
LoadModule proxy_http_module modules/mod_proxy_http.so
LoadModule ssl_module modules/mod_ssl.so
LoadModule mime_module modules/mod_mime.so
LoadModule dav_module modules/mod_dav.so
LoadModule dav_fs_module modules/mod_dav_fs.so
LoadModule negotiation_module modules/mod_negotiation.so
LoadModule rewrite_module modules/mod_rewrite.so
LoadModule headers_module modules/mod_headers.so
LoadModule cache_module modules/mod_cache.so
LoadModule mem_cache_module modules/mod_mem_cache.so
LoadModule antiloris_module modules/mod_antiloris.so
LoadModule authz_host_module %(httpd_home)s/modules/mod_authz_host.so
LoadModule log_config_module %(httpd_home)s/modules/mod_log_config.so
LoadModule deflate_module %(httpd_home)s/modules/mod_deflate.so
LoadModule setenvif_module %(httpd_home)s/modules/mod_setenvif.so
LoadModule version_module %(httpd_home)s/modules/mod_version.so
LoadModule proxy_module %(httpd_home)s/modules/mod_proxy.so
LoadModule proxy_http_module %(httpd_home)s/modules/mod_proxy_http.so
LoadModule ssl_module %(httpd_home)s/modules/mod_ssl.so
LoadModule mime_module %(httpd_home)s/modules/mod_mime.so
LoadModule dav_module %(httpd_home)s/modules/mod_dav.so
LoadModule dav_fs_module %(httpd_home)s/modules/mod_dav_fs.so
LoadModule negotiation_module %(httpd_home)s/modules/mod_negotiation.so
LoadModule rewrite_module %(httpd_home)s/modules/mod_rewrite.so
LoadModule headers_module %(httpd_home)s/modules/mod_headers.so
LoadModule cache_module %(httpd_home)s/modules/mod_cache.so
LoadModule mem_cache_module %(httpd_home)s/modules/mod_mem_cache.so
LoadModule antiloris_module %(httpd_home)s/modules/mod_antiloris.so
# The following directives modify normal HTTP response behavior to
# handle known problems with browser implementations.
......@@ -99,17 +100,28 @@ Header append Vary User-Agent
ProxyTimeout 600
RewriteEngine On
# Define the two rewritemaps : one for zope, one generic
# Include a configuration file not operated by slapos. This file won't be erased
# or changed when slapgrid is run. It can be freely customized by the node admin.
Include %(custom_apache_virtualhost_conf)s
# Define the two RewriteMaps (key -> value store): one for Zope, one generic
# containing: rewritten URL -> original URL (a.k.a VirtualHostBase in Zope)
RewriteMap apachemapzope txt:%(apachemapzope_path)s
RewriteMap apachemapgeneric txt:%(apachemap_path)s
# Define another RewriteMap for Zope, containing:
# rewritten URL -> VirtualHostRoot
RewriteMap apachemapzopepath txt:%(apachemapzopepath_path)s
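# Illustrative map entry (each txt: file holds one "key value" pair per line;
# values below are made up): apachemapzope/apachemapgeneric map the server name
# to the backend URL, apachemapzopepath maps it to the Zope VirtualHostRoot path.
#   www.example.com http://[2001:db8::1]:8080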
# First, we check if we have a zope backend server
# If so, let's use Virtual Host Daemon rewrite
RewriteCond ${apachemapzope:%%{SERVER_NAME}} >""
RewriteRule ^/(.*)$ ${apachemapzope:%%{SERVER_NAME}}/VirtualHostBase/https/%%{SERVER_NAME}:%%{SERVER_PORT}/VirtualHostRoot/$1 [L,P]
# We assume that Apache listens on port 443 (possibly indirectly, e.g. via iptables)
RewriteRule ^/(.*)$ ${apachemapzope:%%{SERVER_NAME}}/VirtualHostBase/https/%%{SERVER_NAME}:443/${apachemapzopepath:%%{SERVER_NAME}}/VirtualHostRoot/$1 [L,P]
# If we have generic backend server, let's rewrite without virtual host daemon
RewriteCond ${apachemapgeneric:%%{SERVER_NAME}} >""
# We assume that Apache listens on port 443 (possibly indirectly, e.g. via iptables)
RewriteRule ^/(.*)$ ${apachemapgeneric:%%{SERVER_NAME}}/$1 [L,P]
# If nothing exists: display a nice error
......@@ -120,6 +132,10 @@ Header append Vary User-Agent
RewriteEngine On
ProxyPreserveHost On
# Include a configuration file not operated by slapos. This file won't be erased
# or changed when slapgrid is run. It can be freely customized by the node admin.
Include %(custom_apache_virtualhost_conf)s
# We accept generic (i.e. not LAMP) backends on http
RewriteMap apachemapgeneric txt:%(apachemap_path)s
RewriteCond ${apachemapgeneric:%%{SERVER_NAME}} >""
......
# Path protected
<Location %(path)s>
<Directory %(path)s>
Order Deny,Allow
Allow from %(access_control_string)s
</Location>
</Directory>
<Directory %(document_root)s>
Order Allow,Deny
Allow from All
</Directory>
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import shutil
import os
import signal
from binascii import b2a_uu as uuencode
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def install(self):
path_list = []
# Copy application
# shutil.rmtree(self.options['htdocs'])
# shutil.copytree(self.options['source'],
# self.options['htdocs'])
# Install apache
apache_config = dict(
pid_file=self.options['pid-file'],
lock_file=self.options['lock-file'],
ip=self.options['ip'],
port=self.options['port'],
error_log=self.options['error-log'],
access_log=self.options['access-log'],
document_root=self.options['htdocs'],
)
httpd_conf = self.createFile(self.options['httpd-conf'],
self.substituteTemplate(self.getTemplateFilename('apache.in'),
apache_config)
)
path_list.append(httpd_conf)
wrapper = self.createPythonScript(self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute',
[self.options['httpd-binary'], '-f', self.options['httpd-conf'],
'-DFOREGROUND']
)
path_list.append(wrapper)
if os.path.exists(self.options['pid-file']):
# Reload apache configuration
with open(self.options['pid-file']) as pid_file:
pid = int(pid_file.read().strip(), 10)
try:
os.kill(pid, signal.SIGUSR1) # Graceful restart
except OSError:
pass
return path_list
# Apache static configuration
LoadModule log_config_module modules/mod_log_config.so
LoadModule setenvif_module modules/mod_setenvif.so
LoadModule version_module modules/mod_version.so
LoadModule headers_module modules/mod_headers.so
LoadModule dir_module modules/mod_dir.so
LoadModule mime_module modules/mod_mime.so
LoadModule perl_module modules/mod_perl.so
LoadModule apreq_module modules/mod_apreq2.so
# Basic server configuration
PidFile "%(pid_file)s"
Listen [%(ip)s]:%(port)s
ServerAdmin someone@email
DefaultType text/plain
TypesConfig conf/mime.types
AddType application/x-compress .Z
AddType application/x-gzip .gz .tgz
# Log configuration
ErrorLog "%(error_log)s"
LogLevel warn
LogFormat "%%h %%{REMOTE_USER}i %%l %%u %%t \"%%r\" %%>s %%b \"%%{Referer}i\" \"%%{User-Agent}i\"" combined
LogFormat "%%h %%{REMOTE_USER}i %%l %%u %%t \"%%r\" %%>s %%b" common
CustomLog "%(access_log)s" common
<Perl>
use strict;
use warnings;
package mytest;
use Apache2::Const -compile => qw(OK);
use Apache2::RequestIO ();
use Apache2::RequestRec ();
use Apache2::Request;
use Date::Calc ();
use Image::Magick;
sub test_handler {
my ($r) = @_;
$r->content_type('text/plain');
{
my $date = localtime();
$r->print("Hello world! Current time: $date\n");
}
{
my $year = (localtime())[5] + 1900;
$r->print("Testing Date::Calc: English ordinal for current year is "
. Date::Calc::English_Ordinal($year) . "\n");
}
{
my $apreq = new Apache2::Request($r);
my $hello = $apreq->param('hello');
$r->print("Testing Apache2::Request: The parameter 'hello' is '$hello'\n");
}
{
my $im = new Image::Magick;
$r->print("Testing Image::Magick: PerlMagick success: "
. Image::Magick->Success . "\n");
}
return Apache2::Const::OK;
}
</Perl>
<Location />
SetHandler perl-script
PerlHandler mytest::test_handler
</Location>
DocumentRoot %(document_root)s
DirectoryIndex index.html
......@@ -57,10 +57,14 @@ class Recipe(GenericBaseRecipe):
def install(self):
path_list = []
# Copy application
if not os.path.exists(self.options['htdocs']):
shutil.copytree(self.options['source'],
self.options['htdocs'])
# Copy application if not already existing
htdocs_location = self.options['htdocs']
if not (os.path.exists(htdocs_location) and os.listdir(htdocs_location)):
try:
os.rmdir(htdocs_location)
except:
pass
shutil.copytree(self.options['source'], htdocs_location)
# Install php.ini
php_ini = self.createFile(os.path.join(self.options['php-ini-dir'],
......@@ -88,12 +92,14 @@ class Recipe(GenericBaseRecipe):
)
path_list.append(httpd_conf)
apache_args = [self.options['httpd-binary'], '-f', self.options['httpd-conf'],
'-DFOREGROUND']
wrapper = self.createPythonScript(self.options['wrapper'],
'slapos.recipe.librecipe.execute.executee',
(apache_args, self.environ)
)
wrapper = self.createWrapper(name=self.options['wrapper'],
command=self.options['httpd-binary'],
parameters=[
'-f',
self.options['httpd-conf'],
'-DFOREGROUND'
],
env=self.environ)
path_list.append(wrapper)
secret_key_filename = os.path.join(self.buildout['buildout']['directory'],
......@@ -101,7 +107,7 @@ class Recipe(GenericBaseRecipe):
if not os.path.exists(secret_key_filename):
secret_key = uuencode(os.urandom(45)).strip()
# Remove unsafe characters
secret_key = secret_key.translate(None, '"\'')
secret_key = secret_key.translate(None, '"\'\\')
with open(secret_key_filename, 'w') as secret_key_file:
secret_key_file.write(secret_key)
else:
......@@ -115,6 +121,8 @@ class Recipe(GenericBaseRecipe):
mysql_password=self.options['mysql-password'],
mysql_host='%s:%s' % (self.options['mysql-host'],
self.options['mysql-port']),
mysql_ip=self.options['mysql-host'],
mysql_port=self.options['mysql-port'],
secret_key=secret_key,
)
......@@ -133,12 +141,12 @@ class Recipe(GenericBaseRecipe):
self.substituteTemplate(self.options['template'], application_conf))
path_list.append(config)
if os.path.exists(self.options['pid-file']):
# Reload apache configuration
with open(self.options['pid-file']) as pid_file:
pid = int(pid_file.read().strip(), 10)
try:
os.kill(pid, signal.SIGHUP) #restart now
except OSError:
pass
#if os.path.exists(self.options['pid-file']):
# # Reload apache configuration
# with open(self.options['pid-file']) as pid_file:
# pid = int(pid_file.read().strip(), 10)
# try:
# os.kill(pid, signal.SIGUSR1) # Graceful restart
# except OSError:
# pass
return path_list
......@@ -15,4 +15,4 @@ date.timezone = Europe/Paris
file_uploads = On
upload_max_filesize = 8M
post_max_size = 8M
magic_quotes_gpc=0ff
magic_quotes_gpc=Off
......@@ -24,8 +24,7 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import signal
import subprocess
from slapos.recipe.librecipe import GenericBaseRecipe
......@@ -50,17 +49,23 @@ class Recipe(GenericBaseRecipe):
)
path_list.append(httpd_conf)
wrapper = self.createPythonScript(self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute',
[self.options['httpd-binary'], '-f', self.options['httpd-conf'],
'-DFOREGROUND']
)
wrapper = self.createWrapper(name=self.options['wrapper'],
command=self.options['httpd-binary'],
parameters=[
'-f',
self.options['httpd-conf'],
'-DFOREGROUND',
])
path_list.append(wrapper)
if os.path.exists(self.options['pid-file']):
# Reload apache configuration
with open(self.options['pid-file']) as pid_file:
pid = int(pid_file.read().strip(), 10)
os.kill(pid, signal.SIGUSR1) # Graceful restart
subprocess.call([
self.options['httpd-binary'],
'-f',
self.options['httpd-conf'],
'-k',
'graceful',
])
return path_list
# vim: set et sts=2:
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
......
......@@ -7,15 +7,10 @@ import sys
hostname = "%(hostname)s"
port = %(port)s
connection_okay = False
try:
s = socket.create_connection((hostname, port))
connection_okay = True
s.close()
except (socket.error, socket.timeout):
connection_okay = False
if not connection_okay:
print >> sys.stderr, "%(port)s on %(hostname)s isn't listening"
sys.stderr.write("%(port)s on %(hostname)s isn't listening\n")
sys.exit(127)
......@@ -33,26 +33,22 @@ class Recipe(GenericBaseRecipe):
def install(self):
self.logger.info("Installing dcron...")
path_list = []
options = self.options
script = self.createWrapper(name=options['binary'],
command=options['dcrond-binary'].strip(),
parameters=[
'-s', options['cron-entries'],
'-c', options['crontabs'],
'-t', options['cronstamps'],
'-f', '-l', '5',
'-M', options['catcher']
])
cronstamps = self.options['cronstamps']
cron_d = self.options['cron-entries']
crontabs = self.options['crontabs']
catcher = self.options['catcher']
binary = self.options['binary']
script = self.createPythonScript(binary,
'slapos.recipe.librecipe.execute.execute',
[self.options['dcrond-binary'].strip(), '-s', cron_d, '-c', crontabs,
'-t', cronstamps, '-f', '-l', '5', '-M', catcher]
)
path_list.append(script)
self.logger.debug('Main cron executable created at : %r', script)
self.logger.info("dcron successfully installed.")
return path_list
return [script]
......
......@@ -158,6 +158,24 @@ class Client(GenericBaseRecipe):
return [wrapper]
def keysplit(s):
"""
Split a string like "ssh-rsa AKLFKJSL..... ssh-rsa AAAASAF...."
and return the individual key_type + key strings.
"""
si = iter(s.split(' '))
while True:
key_type = next(si)
try:
key_value = next(si)
except StopIteration:
# odd number of elements, should not happen, yield the last one by itself
yield key_type
break
yield '%s %s' % (key_type, key_value)
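# Behaviour sketch (key material is illustrative and truncated):
#   list(keysplit('ssh-rsa AAAAB3Nza ssh-rsa AAAAC3Nzb'))
#   == ['ssh-rsa AAAAB3Nza', 'ssh-rsa AAAAC3Nzb']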
class AddAuthorizedKey(GenericBaseRecipe):
def install(self):
......@@ -167,7 +185,9 @@ class AddAuthorizedKey(GenericBaseRecipe):
path_list.append(ssh)
authorized_keys = AuthorizedKeysFile(os.path.join(ssh, 'authorized_keys'))
for key in self.options['key'].split(' '):
for key in keysplit(self.options['key']):
# XXX key might actually be the string 'None' or 'null'
authorized_keys.append(key)
return path_list
......@@ -30,17 +30,19 @@ class Recipe(GenericBaseRecipe):
def install(self):
commandline = [self.options['equeue-binary']]
commandline.extend(['--database', self.options['database']])
commandline.extend(['-l', self.options['log']])
parameters = [
'--database', self.options['database'],
'-l', self.options['log'],
]
if 'loglevel' in self.options:
commandline.extend(['--loglevel', self.options['loglevel']])
parameters.extend(['--loglevel', self.options['loglevel']])
commandline.append(self.options['socket'])
parameters.append(self.options['socket'])
wrapper = self.createWrapper(name=self.options['wrapper'],
command=self.options['equeue-binary'],
parameters=parameters)
return [wrapper]
return [self.createPythonScript(
self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute',
commandline,
)]
# vim: set et sts=2:
##############################################################################
#
# Copyright (c) 2012 Vifib SARL and Contributors. All Rights Reserved.
......@@ -24,12 +25,13 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import random
import os
import binascii
import os
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def __init__(self, buildout, name, options):
......@@ -44,7 +46,7 @@ class Recipe(GenericBaseRecipe):
return GenericBaseRecipe.__init__(self, buildout, name, options)
def install(self):
open_file = open(self.options['storage-path'], 'w')
open_file.write(self.options['passwd'])
open_file.close()
with open(self.options['storage-path'], 'w') as fout:
fout.write(self.options['passwd'])
return [self.options['storage-path']]
......@@ -33,14 +33,25 @@ class Recipe(GenericBaseRecipe):
kvm instance configuration.
"""
def install(self):
# Sanitize drive type parameter
self.options.setdefault('disk-type', 'virtio')
if not self.options.get('disk-type') in ['ide', 'scsi', 'sd',
'mtd', 'floppy', 'pflash', 'virtio']:
print 'Warning: "disk-type" parameter is not one of the allowed values. ' \
'Falling back to "virtio".'
self.options['disk-type'] = 'virtio'
config = dict(
tap_interface=self.options['tap'],
vnc_ip=self.options['vnc-ip'],
vnc_port=self.options['vnc-port'],
nbd_ip=self.options['nbd-ip'],
nbd_ip=self.options['nbd-host'],
nbd_port=self.options['nbd-port'],
nbd2_ip=self.options.get('nbd2-host', ''),
nbd2_port=self.options.get('nbd2-port', 1024),
disk_path=self.options['disk-path'],
disk_size=self.options['disk-size'],
disk_type=self.options['disk-type'],
mac_address=self.options['mac-address'],
smp_count=self.options['smp-count'],
ram_size=self.options['ram-size'],
......@@ -50,7 +61,6 @@ class Recipe(GenericBaseRecipe):
shell_path=self.options['shell-path'],
qemu_path=self.options['qemu-path'],
qemu_img_path=self.options['qemu-img-path'],
# XXX Weak password
vnc_passwd=self.options['passwd']
)
......@@ -67,4 +77,3 @@ class Recipe(GenericBaseRecipe):
return [runner_path, controller_path]
......@@ -30,26 +30,32 @@ def getSocketStatus(host, port):
disk_path = '%(disk_path)s'
if not os.path.exists(disk_path):
subprocess.Popen(['%(qemu_img_path)s', 'create' ,'-f', 'qcow2',
'%(disk_path)s', '%(disk_size)sG'])
disk_path, '%(disk_size)sG'])
kvm_argument_list = ['%(qemu_path)s', '-enable-kvm', '-net', 'nic,macaddr=%(mac_address)s',
kvm_argument_list = ['%(qemu_path)s',
'-enable-kvm', '-net', 'nic,macaddr=%(mac_address)s',
'-net', 'tap,ifname=%(tap_interface)s,script=no,downscript=no',
'-smp', '%(smp_count)s',
'-m', '%(ram_size)s',
'-drive', 'file=%(disk_path)s,if=virtio,boot=on',
'-drive', 'file=%(disk_path)s,if=%(disk_type)s',
'-vnc', '%(vnc_ip)s:1,ipv4,password',
'-boot', 'menu=on',
'-qmp', 'unix:%(socket_path)s,server',
'-pidfile', '%(pid_file_path)s',
]
# Try to connect to NBD server
s = getSocketStatus('%(nbd_ip)s', %(nbd_port)s)
if s is None:
# NBD is not available : launch kvm without it
print 'Warning : Nbd is not available.'
os.execv('%(qemu_path)s', kvm_argument_list)
else:
# NBD is available
kvm_argument_list.extend(['-cdrom', 'nbd:[%(nbd_ip)s]:%(nbd_port)s'])
os.execv('%(qemu_path)s', kvm_argument_list)
# Try to connect to NBD server (and second nbd if defined)
for nbd_ip, nbd_port in (
('%(nbd_ip)s', %(nbd_port)s), ('%(nbd2_ip)s', %(nbd2_port)s)):
if nbd_ip and nbd_port:
s = getSocketStatus(nbd_ip, nbd_port)
if s is None:
# NBD is not available : launch kvm without it
print 'Warning : Nbd is not available.'
else:
# NBD is available
kvm_argument_list.extend([
'-drive',
'file=nbd:[%%s]:%%s,media=cdrom' %% (nbd_ip, nbd_port)])
os.execv('%(qemu_path)s', kvm_argument_list)
......@@ -90,13 +90,22 @@ class BaseSlapRecipe:
]
# SLAP related information
slap_connection = buildout['slap_connection']
self.computer_id = slap_connection['computer_id']
self.computer_partition_id = slap_connection['partition_id']
self.server_url = slap_connection['server_url']
self.software_release_url = slap_connection['software_release_url']
self.key_file = slap_connection.get('key_file')
self.cert_file = slap_connection.get('cert_file')
try:
slap_connection = buildout['slap_connection']
self.computer_id = slap_connection['computer_id']
self.computer_partition_id = slap_connection['partition_id']
self.server_url = slap_connection['server_url']
self.software_release_url = slap_connection['software_release_url']
self.key_file = slap_connection.get('key_file')
self.cert_file = slap_connection.get('cert_file')
except zc.buildout.buildout.MissingSection:
slap_connection = buildout['slap-connection']
self.computer_id = slap_connection['computer-id']
self.computer_partition_id = slap_connection['partition-id']
self.server_url = slap_connection['server-url']
self.software_release_url = slap_connection['software-release-url']
self.key_file = slap_connection.get('key-file')
self.cert_file = slap_connection.get('cert-file')
# setup egg to give possibility to generate scripts
self.egg = zc.recipe.egg.Egg(buildout, options['recipe'], options)
......@@ -145,13 +154,6 @@ class BaseSlapRecipe:
self._writeExecutable(wrapper_path, file_content)
return wrapper_path
def createReportRunningWrapper(self, file_content):
"""Creates report runnig wrapper and returns its path"""
report_wrapper_path = os.path.join(self.wrapper_report_directory,
'slapreport')
self._writeExecutable(report_wrapper_path, file_content)
return report_wrapper_path
def substituteTemplate(self, template_location, mapping_dict):
"""Returns template content after substitution"""
return open(template_location, 'r').read() % mapping_dict
......
# -*- coding: utf-8 -*-
# vim: set et sts=2:
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
......@@ -24,6 +26,7 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import io
import logging
import os
import sys
......@@ -35,6 +38,8 @@ import urlparse
import pkg_resources
import zc.buildout
from slapos.recipe.librecipe import shlex
class GenericBaseRecipe(object):
"""Boilerplate class for all Buildout recipes providing helpful methods like
creating configuration file, creating wrappers, generating passwords, etc.
......@@ -88,6 +93,21 @@ class GenericBaseRecipe(object):
def createExecutable(self, name, content, mode=0700):
return self.createFile(name, content, mode)
def addLineToFile(self, filepath, line, encoding='utf8'):
"""Append a single line to a text file, if the line does not exist yet.
line must be unicode."""
if os.path.exists(filepath):
lines = [l.rstrip('\n') for l in io.open(filepath, 'r', encoding=encoding)]
else:
lines = []
if not line in lines:
lines.append(line)
with io.open(filepath, 'w+', encoding=encoding) as f:
f.write(u'\n'.join(lines))
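# Usage sketch (path and key are illustrative): appends the line only if it
# is not present yet, keeping the existing lines untouched:
#   self.addLineToFile(os.path.join(ssh_dir, 'authorized_keys'),
#                      u'ssh-rsa AAAA... backup@pbs')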
def createPythonScript(self, name, absolute_function, arguments=''):
"""Create a python script using zc.buildout.easy_install.scripts
......@@ -107,6 +127,34 @@ class GenericBaseRecipe(object):
path, arguments=arguments)[0]
return script
def createWrapper(self, name, command, parameters, comments=[], parameters_extra=False):
"""
Creates a very simple (one command) shell script for process replacement.
Takes care of quoting.
"""
lines = [ '#!/bin/sh' ]
for comment in comments:
lines.append('# %s' % comment)
lines.append('exec %s' % shlex.quote(command))
for param in parameters:
if len(lines[-1]) < 40:
lines[-1] += ' ' + shlex.quote(param)
else:
lines[-1] += ' \\'
lines.append('\t' + shlex.quote(param))
if parameters_extra:
# pass-through further parameters
lines[-1] += ' \\'
lines.append('\t$@')
content = '\n'.join(lines) + '\n'
return self.createFile(name, content, 0700)
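# For illustration (paths are made up), createWrapper(name='.../bin/httpd-wrapper',
# command='/opt/apache/bin/httpd',
# parameters=['-f', '/srv/etc/httpd.conf', '-DFOREGROUND'])
# writes roughly:
#   #!/bin/sh
#   exec /opt/apache/bin/httpd -f /srv/etc/httpd.conf \
#       -DFOREGROUND
# (long argument lists are continued with backslashes, as implemented above).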
def createDirectory(self, parent, name, mode=0700):
path = os.path.join(parent, name)
if not os.path.exists(path):
......
# -*- coding: utf-8 -*-
"""
backported part of shlex.py from Python 3.3
"""
import re
_find_unsafe = re.compile(r'[^\w@%+=:,./-]', 256).search
def quote(s):
"""Return a shell-escaped version of the string *s*."""
if not s:
return "''"
if _find_unsafe(s) is None:
return s
# use single quotes, and put single quotes into double quotes
# the string $'b is then quoted as '$'"'"'b'
return "'" + s.replace("'", "'\"'\"'") + "'"
......@@ -28,85 +28,76 @@ import subprocess
from slapos.recipe.librecipe import GenericBaseRecipe
def dump(args):
mydumper_cmd = [args['mydumper']]
mydumper_cmd.extend(['-B', args['database']])
if args['socket'] is not None:
mydumper_cmd.extend(['-S', args['socket']])
else:
mydumper_cmd.extend(['-h', args['host']])
mydumper_cmd.etxned(['-P', args['port']])
mydumper_cmd.extend(['-u', args['user']])
if args['password'] is not None:
mydumper_cmd.extend(['-p', args['password']])
if args['compression']:
mydumper_cmd.append('--compress')
def _mydumper_base_cmd(mydumper, database, user, password,
socket=None, host=None, port=None, **kw):
cmd = [mydumper]
cmd.extend(['-B', database])
if args['rows'] is not None:
mydumper_cmd.extend(['-r', args['rows']])
mydumper_cmd.extend(['-o', args['directory']])
if socket:
cmd.extend(['-S', socket])
else:
cmd.extend(['-h', host])
cmd.extend(['-P', port])
subprocess.check_call(mydumper_cmd)
cmd.extend(['-u', user])
if password:
cmd.extend(['-p', password])
return cmd
def do_import(args):
mydumper_cmd = [args['mydumper']]
mydumper_cmd.extend(['-B', args['database']])
def do_export(args):
cmd = _mydumper_base_cmd(**args)
if args['socket'] is not None:
mydumper_cmd.extend(['-S', args['socket']])
else:
mydumper_cmd.extend(['-h', args['host']])
mydumper_cmd.etxned(['-P', args['port']])
if args['compression']:
cmd.append('--compress')
mydumper_cmd.extend(['-u', args['user']])
if args['password'] is not None:
mydumper_cmd.extend(['-p', args['password']])
if args['rows'] is not None:
cmd.extend(['-r', args['rows']])
mydumper_cmd.append('--overwrite-tables')
cmd.extend(['-o', args['directory']])
mydumper_cmd.extend(['-d', args['directory']])
subprocess.check_call(cmd)
subprocess.check_call(mydumper_cmd)
def do_import(args):
cmd = _mydumper_base_cmd(**args)
cmd.append('--overwrite-tables')
cmd.extend(['-d', args['directory']])
subprocess.check_call(cmd)
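# Resulting command line sketch for an export (all values illustrative):
#   mydumper -B mydb -S /srv/mariadb/mysql.sock -u backup -p secret \
#            --compress -r 10000 -o /srv/backup/mydumper
# do_import() builds the same base command (using the myloader binary) with
# --overwrite-tables and -d <directory> instead.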
class Recipe(GenericBaseRecipe):
def install(self):
# Host or socket should be defined
try:
self.options['host']
except:
self.options['socket']
config = dict(database=self.options['database'],
socket=self.options.get('socket'),
host=self.options.get('host'),
port=self.options.get('port', 3306),
directory=self.options['backup-directory'],
user=self.options['user'],
password=self.options.get('password'),
)
name = __name__
config = {
'database': self.options['database'],
'directory': self.options['backup-directory'],
'user': self.options['user'],
'password': self.options.get('password'),
}
if self.options.get('host'):
config['host'] = self.options['host']
config['port'] = self.options.get('port', 3306)
elif self.options.get('socket'):
config['socket'] = self.options['socket']
else:
raise ValueError("host or socket must be defined")
if self.optionIsTrue('import', False):
config.update(mydumper=self.options['myloader-binary'])
name += '.do_import'
function = do_import
config['mydumper'] = self.options['myloader-binary']
else:
config.update(mydumper=self.options['mydumper-binary'],
compression=self.optionIsTrue('compression', default=False),
rows=self.options.get('rows'),
)
name += '.dump'
function = do_export
config['mydumper'] = self.options['mydumper-binary']
config['compression'] = self.optionIsTrue('compression', default=False)
config['rows'] = self.options.get('rows')
wrapper = self.createPythonScript(self.options['wrapper'],
name,
config)
wrapper = self.createPythonScript(name=self.options['wrapper'],
absolute_function = '%s.%s' % (__name__, function.func_name),
arguments=config)
return [wrapper]
......@@ -31,59 +31,76 @@ from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def install(self):
commandline = [self.options['server-binary']]
commandline.extend(['--callbacks', self.options['callbacks']])
commandline.extend(['--feeds', self.options['feeds']])
commandline.extend(['--equeue-socket', self.options['equeue-socket']])
commandline.append(self.options['host'])
commandline.append(self.options['port'])
options = self.options
script = self.createWrapper(name=options['wrapper'],
command=options['server-binary'],
parameters=[
'--callbacks', options['callbacks'],
'--feeds', options['feeds'],
'--equeue-socket', options['equeue-socket'],
options['host'], options['port']
],
comments=[
'',
'Upon receiving a notification, execute the callback(s).',
''])
return [script]
return [self.createPythonScript(self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute',
commandline)]
class Callback(GenericBaseRecipe):
def createCallback(self, notification_id, callback):
callback_id = sha512(notification_id).hexdigest()
callback = self.createFile(os.path.join(self.options['callbacks'],
callback_id),
callback)
return callback
filepath = os.path.join(self.options['callbacks'], callback_id)
self.addLineToFile(filepath, callback)
return filepath
def install(self):
# XXX this path is returned multiple times, one for each callback that has been added.
return [self.createCallback(self.options['on-notification-id'],
self.options['callback'])]
class Notify(GenericBaseRecipe):
def createNotifier(self, notifier_binary, executable, wrapper, **kwargs):
if not os.path.exists(kwargs['log']):
def createNotifier(self, notifier_binary, wrapper, executable,
log, title, notification_url, feed_url):
if not os.path.exists(log):
# Just a touch
open(kwargs['log'], 'w').close()
commandline = [notifier_binary,
'-l', kwargs['log'],
'--title', kwargs['title'],
'--feed', kwargs['feed_url'],
'--notification-url', kwargs['notification_url'],
executable]
return self.createPythonScript(wrapper,
'slapos.recipe.librecipe.execute.execute',
[str(i) for i in commandline])
open(log, 'w').close()
parameters = [
'-l', log,
'--title', title,
'--feed', feed_url,
'--notification-url',
]
parameters.extend(notification_url.split(' '))
parameters.extend(['--executable', executable])
return self.createWrapper(name=wrapper,
command=notifier_binary,
parameters=parameters,
comments=[
'',
'Call an executable and send notification(s).',
''])
def install(self):
feedurl = self.unparseUrl(scheme='http', host=self.options['host'],
port=self.options['port'],
path='/get/%s' % self.options['name'])
script = self.createNotifier(
self.options['notifier-binary'],
wrapper=self.options['wrapper'],
executable=self.options['executable'],
log=os.path.join(self.options['feeds'], self.options['name']),
title=self.options['title'],
notification_url=self.options['notify'],
feed_url=feedurl,
)
feed_url = self.unparseUrl(scheme='http', host=self.options['host'],
port=self.options['port'],
path='/get/%s' % self.options['name'])
log = os.path.join(self.options['feeds'], self.options['name'])
options = self.options
script = self.createNotifier(notifier_binary=options['notifier-binary'],
wrapper=options['wrapper'],
executable=options['executable'],
log=log,
title=options['title'],
notification_url=options['notify'],
feed_url=feed_url)
return [script]
......@@ -24,14 +24,15 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from json import loads as unjson
from hashlib import sha512
from urlparse import urlparse
import inspect
import json
import os
import signal
import subprocess
import sys
import signal
import inspect
import urlparse
from slapos.recipe.librecipe import GenericSlapRecipe
from slapos.recipe.dropbear import KnownHostsFile
......@@ -43,8 +44,7 @@ from slapos import slap as slapmodule
def promise(args):
def failed_ssh(partition, ssh):
# Bad python 2 syntax, looking forward python 3 to have print(file=)
print >> sys.stderr, "SSH Connection failed"
sys.stderr.write("SSH Connection failed\n")
try:
ssh.terminate()
except:
......@@ -75,16 +75,20 @@ def promise(args):
slap = slapmodule.slap()
slap.initializeConnection(args['server_url'],
key_file=args.get('key_file'), cert_file=args.get('cert_file'))
key_file=args.get('key_file'),
cert_file=args.get('cert_file'))
partition = slap.registerComputerPartition(args['computer_id'],
args['partition_id'])
ssh = subprocess.Popen([args['ssh_client'], '%(user)s@%(host)s/%(port)s' % args],
stdin=subprocess.PIPE,
stdout=open(os.devnull, 'w'),
stderr=open(os.devnull, 'w'))
# Rdiff Backup protocol quit command
quitcommand = 'q' + chr(255) + chr(0) * 7
ssh_cmdline = [args['ssh_client'], '%(user)s@%(host)s/%(port)s' % args]
ssh = subprocess.Popen(ssh_cmdline, stdin=subprocess.PIPE,
stdout=open(os.devnull), stderr=open(os.devnull))
ssh.stdin.write(quitcommand)
ssh.stdin.flush()
ssh.stdin.close()
......@@ -113,7 +117,7 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
promise_path = os.path.join(self.options['promises-directory'],
url_hash)
parsed_url = urlparse(url)
parsed_url = urlparse.urlparse(url)
promise_dict = self.promise_base_dict.copy()
promise_dict.update(user=parsed_url.username,
host=parsed_url.hostname,
......@@ -127,15 +131,17 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
host = parsed_url.hostname
known_hosts_file[host] = entry['server-key']
remote_schema = '%(ssh)s -p %%s %(user)s@%(host)s' % \
# XXX use -y because the host might not yet be in the
# trusted hosts file until the next time slapgrid is run.
remote_schema = '%(ssh)s -y -p %%s %(user)s@%(host)s' % \
{
'ssh': self.options['sshclient-binary'],
'user': parsed_url.username,
'host': parsed_url.hostname,
}
command = [self.options['rdiffbackup-binary']]
command.extend(['--remote-schema', remote_schema])
parameters = ['--remote-schema', remote_schema]
remote_directory = '%(port)s::%(path)s' % {'port': parsed_url.port,
'path': parsed_url.path}
......@@ -144,38 +150,39 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
name_hash)
if entry['type'] == 'push':
command.extend(['--restore-as-of', 'now'])
command.append('--force')
command.extend([local_directory, remote_directory])
parameters.extend(['--restore-as-of', 'now'])
parameters.append('--force')
parameters.extend([local_directory, remote_directory])
comments = ['','Push data to a PBS *-import instance.','']
else:
command.extend([remote_directory, local_directory])
parameters.extend([remote_directory, local_directory])
comments = ['','Pull data from a PBS *-export instance.','']
wrapper_basepath = os.path.join(self.options['wrappers-directory'],
url_hash)
wrapper_path = wrapper_basepath
if 'notify' in entry:
wrapper_path = '%s_raw' % wrapper_basepath
wrapper_path = wrapper_basepath + '_raw'
else:
wrapper_path = wrapper_basepath
wrapper = self.createPythonScript(
wrapper_path,
'slapos.recipe.librecipe.execute.execute',
[str(i) for i in command]
)
wrapper = self.createWrapper(name=wrapper_path,
command=self.options['rdiffbackup-binary'],
parameters=parameters,
comments = comments)
path_list.append(wrapper)
if 'notify' in entry:
feed_url = '%s/get/%s' % (self.options['notifier-url'],
entry['notification-id'])
wrapper = self.createNotifier(
self.options['notifier-binary'],
wrapper=wrapper_basepath,
executable=wrapper_path,
log=os.path.join(self.options['feeds'], entry['notification-id']),
title=entry.get('title', 'Untitled'),
notification_url=entry['notify'],
feed_url=feed_url,
)
wrapper = self.createNotifier(notifier_binary=self.options['notifier-binary'],
wrapper=wrapper_basepath,
executable=wrapper_path,
log=os.path.join(self.options['feeds'], entry['notification-id']),
title=entry.get('title', 'Untitled'),
notification_url=entry['notify'],
feed_url=feed_url,
)
path_list.append(wrapper)
#self.setConnectionDict(dict(feed_url=feed_url), entry['slave_reference'])
......@@ -190,40 +197,39 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
return path_list
def _install(self):
path_list = []
if self.optionIsTrue('client', True):
self.logger.info("Client mode")
slap_connection = self.buildout['slap-connection']
self.promise_base_dict = dict(
server_url=slap_connection['server-url'],
computer_id=slap_connection['computer-id'],
cert_file=slap_connection.get('cert-file'),
key_file=slap_connection.get('key-file'),
partition_id=slap_connection['partition-id'],
ssh_client=self.options['sshclient-binary'],
)
slaves = unjson(self.options['slave-instance-list'])
self.promise_base_dict = {
'server_url': slap_connection['server-url'],
'computer_id': slap_connection['computer-id'],
'cert_file': slap_connection.get('cert-file'),
'key_file': slap_connection.get('key-file'),
'partition_id': slap_connection['partition-id'],
'ssh_client': self.options['sshclient-binary'],
}
slaves = json.loads(self.options['slave-instance-list'])
known_hosts = KnownHostsFile(self.options['known-hosts'])
with known_hosts:
# XXX this API could be cleaner
for slave in slaves:
path_list.extend(self.add_slave(slave, known_hosts))
else:
command = [self.options['rdiffbackup-binary']]
self.logger.info("Server mode")
command.extend(['--restrict', self.options['path']])
command.append('--server')
wrapper = self.createPythonScript(
self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute',
command)
wrapper = self.createWrapper(name=self.options['wrapper'],
command=self.options['rdiffbackup-binary'],
parameters=[
'--restrict', self.options['path'],
'--server'
])
path_list.append(wrapper)
return path_list
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import md5
import os
import subprocess
import textwrap
from zc.buildout import UserError
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
"""\
This recipe creates:
- a Postgres cluster
- configuration to allow connections from IPv4, IPv6 or unix socket.
- a superuser with provided name and generated password
- a database with provided name
- a start script in the services directory
Required options:
bin
path to the 'initdb' and 'postgres' binaries.
dbname
name of the database to be used by the application.
ipv4
set of ipv4 to listen on.
ipv6
set of ipv6 to listen on.
pgdata-directory
path to postgres configuration and data.
services
must be ${buildout:directory}/etc/service.
superuser
name of the superuser to create.
Exposed options:
password
generated password for the superuser.
url
generated DBAPI connection string.
It can be used as-is (e.g. in SQLAlchemy) or by the _urlparse.py recipe.
"""
def _options(self, options):
options['url'] = 'postgresql://%(superuser)s:%(password)s@[%(ipv6-random)s]:%(port)s/%(dbname)s' % options
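# Filled-in example of the generated connection string (values illustrative):
#   postgresql://postgres:s3cr3t@[2001:db8::1]:5432/mydb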
def install(self):
pgdata = self.options['pgdata-directory']
# if the pgdata already exists, skip all steps, we don't need to do anything.
if not os.path.exists(pgdata):
self.createCluster()
self.createConfig()
self.createDatabase()
self.updateSuperuser()
self.createRunScript()
# install() methods usually return the pathnames of managed files.
# If they are missing, they will be rebuilt.
# In this case, we already check for the existence of pgdata,
# so we don't need to return anything here.
return []
def check_exists(self, path):
if not os.path.isfile(path):
raise IOError('File not found: %s' % path)
def createCluster(self):
"""\
A Postgres cluster is "a collection of databases that is managed
by a single instance of a running database server".
Here we create an empty cluster.
"""
initdb_binary = os.path.join(self.options['bin'], 'initdb')
self.check_exists(initdb_binary)
pgdata = self.options['pgdata-directory']
try:
subprocess.check_call([initdb_binary,
'-D', pgdata,
'-A', 'ident',
'-E', 'UTF8',
'-U', self.options['superuser'],
])
except subprocess.CalledProcessError:
raise UserError('Could not create cluster directory in %s' % pgdata)
def createConfig(self):
pgdata = self.options['pgdata-directory']
ipv4 = self.options['ipv4']
ipv6 = self.options['ipv6']
with open(os.path.join(pgdata, 'postgresql.conf'), 'wb') as cfg:
cfg.write(textwrap.dedent("""\
listen_addresses = '%s'
logging_collector = on
log_rotation_size = 50MB
max_connections = 100
datestyle = 'iso, mdy'
lc_messages = 'en_US.UTF-8'
lc_monetary = 'en_US.UTF-8'
lc_numeric = 'en_US.UTF-8'
lc_time = 'en_US.UTF-8'
default_text_search_config = 'pg_catalog.english'
unix_socket_directory = '%s'
unix_socket_permissions = 0700
""" % (
','.join(ipv4.union(ipv6)),
pgdata,
)))
with open(os.path.join(pgdata, 'pg_hba.conf'), 'wb') as cfg:
# see http://www.postgresql.org/docs/9.2/static/auth-pg-hba-conf.html
cfg_lines = [
'# TYPE DATABASE USER ADDRESS METHOD',
'',
'# "local" is for Unix domain socket connections only (check unix_socket_permissions!)',
'local all all ident',
'host all all 127.0.0.1/32 md5',
'host all all ::1/128 md5',
]
ipv4_netmask_bits = self.options.get('ipv4-netmask-bits', '32')
for ip in ipv4:
cfg_lines.append('host all all %s/%s md5' % (ip, ipv4_netmask_bits))
ipv6_netmask_bits = self.options.get('ipv6-netmask-bits', '128')
for ip in ipv6:
cfg_lines.append('host all all %s/%s md5' % (ip, ipv6_netmask_bits))
cfg.write('\n'.join(cfg_lines))
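# Illustrative example (addresses are hypothetical): with ipv4 = {'10.0.0.2'},
# ipv6 = {'fc00::2'} and the default netmask bits, the loops above append:
#   host all all 10.0.0.2/32 md5
#   host all all fc00::2/128 md5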
def createDatabase(self):
self.runPostgresCommand(cmd='CREATE DATABASE "%s"' % self.options['dbname'])
def updateSuperuser(self):
"""\
Set a password for the cluster administrator.
The application will also use it for its connections.
"""
# http://postgresql.1045698.n5.nabble.com/Algorithm-for-generating-md5-encrypted-password-not-found-in-documentation-td4919082.html
user = self.options['superuser']
password = self.options['password']
# encrypt the password to avoid storing in the logs
enc_password = 'md5' + md5.md5(password+user).hexdigest()
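# The resulting value is the string 'md5' followed by the 32-character hex
# digest of password+username, which is the exact format PostgreSQL uses to
# store MD5-authenticated passwords.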
self.runPostgresCommand(cmd="""ALTER USER "%s" ENCRYPTED PASSWORD '%s'""" % (user, enc_password))
def runPostgresCommand(self, cmd):
"""\
Executes a command in single-user mode, with no daemon running.
Multiple commands can be executed by providing newlines,
preceded by backslash, between them.
See http://www.postgresql.org/docs/9.1/static/app-postgres.html
"""
pgdata = self.options['pgdata-directory']
postgres_binary = os.path.join(self.options['bin'], 'postgres')
p = subprocess.Popen([postgres_binary,
'--single',
'-D', pgdata,
'postgres',
], stdin=subprocess.PIPE)
p.communicate(cmd + '\n')
if p.returncode:
raise UserError('Could not run command %r in cluster %s' % (cmd, pgdata))
def createRunScript(self):
"""\
Creates a script that runs postgres in the foreground.
'exec' is used to allow easy control by supervisor.
"""
content = textwrap.dedent("""\
#!/bin/sh
exec %(bin)s/postgres \\
-D %(pgdata-directory)s
""" % self.options)
name = os.path.join(self.options['services'], 'postgres-start')
self.createExecutable(name, content=content)
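# Illustrative sketch, not part of the recipe: the 'url' option computed in
# _options() above is a plain DBAPI connection string, so an application can
# hand it to SQLAlchemy unchanged. All concrete values below are made-up
# examples, and a psycopg2 driver is assumed to be available.
from sqlalchemy import create_engine

engine = create_engine(
    'postgresql://lapp_user:s3cr3tgenerated@[fc00::2]:5432/lapp_db')
connection = engine.connect()
print connection.execute('SELECT 1').scalar()  # prints 1 when the cluster is reachable
connection.close()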
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import textwrap
from slapos.recipe.librecipe import GenericBaseRecipe
class ExportRecipe(GenericBaseRecipe):
"""\
This recipe creates an exporter script for use with the resilient stack.
Required options:
backup-directory
folder that will contain the dump file.
bin
path to the 'pg_dump' binary.
dbname
name of the database to dump.
pgdata-directory
path to postgres configuration and data.
wrapper
full path of the exporter script to create.
"""
def install(self):
wrapper = self.options['wrapper']
self.createBackupScript(wrapper)
return [wrapper]
def createBackupScript(self, wrapper):
"""\
Create a script to back up the database in 'custom' format.
"""
content = textwrap.dedent("""\
#!/bin/sh
umask 077
%(bin)s/pg_dump \\
--host=%(pgdata-directory)s \\
--format=custom \\
--file=%(backup-directory)s/database.dump \\
%(dbname)s
""" % self.options)
self.createExecutable(wrapper, content=content)
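# Note on the template above: --host is given the pgdata directory rather than
# a network address, so pg_dump connects through the Unix socket that the
# postgres recipe configures there (unix_socket_directory). With hypothetical
# option values this renders, for example, as:
#   /opt/postgres/bin/pg_dump --host=/srv/instance/pgdata --format=custom \
#     --file=/srv/instance/backup/database.dump lapp_db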
class ImportRecipe(GenericBaseRecipe):
"""\
This recipe creates an importer script for use with the resilient stack.
Required options:
backup-directory
folder that contains the dump file.
bin
path to the 'pg_restore' binary.
dbname
name of the database to restore.
pgdata-directory
path to postgres configuration and data.
wrapper
full path of the importer script to create.
"""
def install(self):
wrapper = self.options['wrapper']
self.createRestoreScript(wrapper)
return [wrapper]
def createRestoreScript(self, wrapper):
"""\
Create a script to restore the database from 'custom' format.
"""
content = textwrap.dedent("""\
#!/bin/sh
%(bin)s/pg_restore \\
--host=%(pgdata-directory)s \\
--dbname=%(dbname)s \\
--clean \\
--no-owner \\
--no-acl \\
%(backup-directory)s/database.dump
""" % self.options)
self.createExecutable(wrapper, content=content)
......@@ -25,9 +25,9 @@
#
##############################################################################
import logging
from slapos import slap as slapmodule
import slapos.recipe.librecipe.generic as librecipe
import traceback
DEFAULT_SOFTWARE_TYPE = 'RootSoftwareInstance'
......@@ -123,28 +123,43 @@ class Recipe(object):
isSlave = options.get('slave', '').lower() in \
librecipe.GenericBaseRecipe.TRUE_VALUES
self.instance = instance = request(software_url, software_type,
name, partition_parameter_kw=partition_parameter_kw,
filter_kw=filter_kw, shared=isSlave)
self._raise_request_exception = None
self._raise_request_exception_formatted = None
self.instance = None
try:
self.instance = request(software_url, software_type,
name, partition_parameter_kw=partition_parameter_kw,
filter_kw=filter_kw, shared=isSlave)
# XXX what is the right way to get a global id?
options['instance_guid'] = self.instance.getId()
except (slapmodule.NotFoundError, slapmodule.ServerError, slapmodule.ResourceNotReady) as exc:
self._raise_request_exception = exc
self._raise_request_exception_formatted = traceback.format_exc()
for param in return_parameters:
options['connection-%s' % param] = ''
if not self.instance:
continue
try:
options['connection-%s' % param] = str(
instance.getConnectionParameter(param))
except (slapmodule.NotFoundError, slapmodule.ServerError):
options['connection-%s' % param] = ''
self.instance.getConnectionParameter(param))
except (slapmodule.NotFoundError, slapmodule.ServerError, slapmodule.ResourceNotReady):
if self.failed is None:
self.failed = param
def install(self):
if self._raise_request_exception:
raise self._raise_request_exception
if self.failed is not None:
# Check instance status to know if instance has been deployed
try:
if self.instance.getComputerId() is not None:
if self.instance._computer_id is not None:
status = self.instance.getState()
else:
status = 'not ready yet'
except (slapmodule.NotFoundError, slapmodule.ServerError):
except (slapmodule.NotFoundError, slapmodule.ServerError, slapmodule.ResourceNotReady):
status = 'not ready yet'
except AttributeError:
status = 'unknown'
......@@ -159,14 +174,19 @@ class Recipe(object):
class RequestOptional(Recipe):
"""
Request a SlapOS instance. Won't fail if instance is not ready.
Request a SlapOS instance. Won't fail if request failed or is not ready.
Same as slapos.cookbook:request, but won't raise in case of problem.
"""
def install(self):
if self.failed is not None:
if self._raise_request_exception_formatted:
self.logger.warning('Optional request failed.')
if not isinstance(self._raise_request_exception, slapmodule.NotFoundError):
# full traceback for optional 'not found' is too verbose and confusing
self.logger.warning(self._raise_request_exception_formatted)
elif self.failed is not None:
# Check instance status to know if instance has been deployed
try:
if self.instance.getComputerId() is not None:
if self.instance._computer_id is not None:
status = self.instance.getState()
else:
status = 'not ready yet'
......
......@@ -64,6 +64,10 @@ class Recipe(object):
Set of IPv4 addresses.
ipv6
Set of IPv6 addresses.
ipv4-random
One of the IPv4 addresses.
ipv6-random
One of the IPv6 addresses.
tap
Set of TAP interfaces.
configuration
......@@ -109,6 +113,13 @@ class Recipe(object):
# XXX: emit warning on unknown address type ?
options['ipv4'] = ipv4_set
options['ipv6'] = ipv6_set
# also export single ip values for those recipes that don't support sets.
if ipv4_set:
options['ipv4-random'] = list(ipv4_set)[0]
if ipv6_set:
options['ipv6-random'] = list(ipv6_set)[0]
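# Note: Python sets are unordered, so "-random" means "an arbitrary element of
# the set"; the chosen value is only stable as long as the address set itself
# does not change.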
options['tap'] = tap_set
options['configuration'] = parameter_dict
match = self.OPTCRE_match
......
......@@ -24,31 +24,31 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
"""
Slapmonitor instance configuration.
"""
from slapos.recipe.librecipe import GenericBaseRecipe
def __init__(self, buildout, name, options):
return GenericBaseRecipe.__init__(self, buildout, name, options)
class MonitorRecipe(GenericBaseRecipe):
def install(self):
config = dict(
pid_file_path=self.options['pid-file'],
database_path=self.options['database-path'],
slapmonitor_path = self.options['slapmonitor-path'],
shell_path=self.options['shell-path'],
)
options = self.options
script = self.createWrapper(name=options['path'],
command=options['slapmonitor-path'],
parameters=[
options['pid-file'],
options['database-path'],
])
return [script]
# Runners
runner_path = self.createExecutable(
self.options['path'],
self.substituteTemplate(self.getTemplateFilename('slapmonitor_run.in'),
config))
return [runner_path]
class MonitorXMLRecipe(GenericBaseRecipe):
def install(self):
options = self.options
script = self.createWrapper(name=options['path'],
command=options['slapmonitor-xml-path'],
parameters=[
options['database-path'],
],
parameters_extra=True)
return [script]
def update(self):
pass
#!%(shell_path)s
# BEWARE: This file is operated by slapgrid
# BEWARE: It will be overwritten automatically
#
exec %(slapmonitor_path)s %(pid_file_path)s %(database_path)s
......@@ -24,36 +24,23 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
"""
Slapmonitor instance configuration.
"""
def __init__(self, buildout, name, options):
return GenericBaseRecipe.__init__(self, buildout, name, options)
def install(self):
config = dict(
pid_file_path=self.options['pid-file'],
consumption_log_path=self.options['consumption-log-path'],
database_path=self.options['database-path'],
slapreport_path = self.options['slapreport-path'],
logbox_ip = self.options['logbox-ip'],
logbox_port = self.options['logbox-port'],
logbox_user = self.options['logbox-user'],
logbox_passwd = self.options['logbox-passwd'],
shell_path=self.options['shell-path'],
)
# Runners
runner_path = self.createExecutable(
self.options['path'],
self.substituteTemplate(self.getTemplateFilename('slapreport_run.in'),
config))
return [runner_path]
options = self.options
script = self.createWrapper(name=options['path'],
command=options['slapreport-path'],
parameters=[
options['pid-file'],
options['consumption-log-path'],
options['database-path'],
options['logbox-ip'],
options['logbox-port'],
options['logbox-user'],
options['logbox-passwd'],
])
return [script]
def update(self):
pass
#!%(shell_path)s
# BEWARE: This file is operated by slapgrid
# BEWARE: It will be overwritten automatically
#
exec %(slapreport_path)s %(pid_file_path)s %(consumption_log_path)s %(database_path)s %(logbox_ip)s %(logbox_port)s %(logbox_user)s %(logbox_passwd)s
......@@ -60,6 +60,8 @@ class Recipe(GenericBaseRecipe):
'supervisord.conf'),
runner_workdir=self.workdir,
etc_dir=self.options['etc_dir'],
run_dir=self.options['run_dir'],
log_dir=self.options['log_dir'],
runner_host=self.ipv6,
runner_port=self.runner_port,
ipv4_address=self.ipv4,
......@@ -128,6 +130,8 @@ class Test(GenericBaseRecipe):
'supervisord.conf'),
runner_workdir=self.workdir,
etc_dir=self.options['etc_dir'],
run_dir=self.options['etc_dir'],
log_dir=self.workdir,
runner_host=self.ipv6,
runner_port=self.runner_port,
ipv4_address=self.ipv4,
......
......@@ -20,6 +20,8 @@ runner_port = %(runner_port)s
ipv4_address = %(ipv4_address)s
ipv6_address = %(ipv6_address)s
etc_dir = %(etc_dir)s
run_dir = %(run_dir)s
log_dir = %(log_dir)s
[slapproxy]
host = %(proxy_host)s
......
......@@ -90,12 +90,14 @@ class Recipe:
self.logger.info('Deploying instance with software type %s' % \
software_type)
# Raise if request software_type does not exist ...
if software_type not in self.options:
if 'default' in self.options:
# ... Except for backward compatibility. Then use "default".
if software_type in ['RootSoftwareInstance']:
software_type = 'default'
else:
raise zc.buildout.UserError("This software type isn't mapped. And "
"there's no default software type.")
raise zc.buildout.UserError("This software type (%s) isn't mapped." % \
software_type)
instance_file_path = self.options[software_type]
......
......@@ -27,6 +27,9 @@
import os
import logging
import zc.buildout
class Recipe:
def __init__(self, buildout, name, options):
self.buildout = buildout
......
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import BaseSlapRecipe
import os
import pkg_resources
import zc.buildout
import zc.recipe.egg
import sys
CONFIG = dict(
proxy_port='5000',
computer_id='COMPUTER',
partition_reference='test0',
)
class Recipe(BaseSlapRecipe):
def __init__(self, buildout, name, options):
self.egg = zc.recipe.egg.Egg(buildout, options['recipe'], options)
BaseSlapRecipe.__init__(self, buildout, name, options)
def installSlapOs(self):
CONFIG['slapos_directory'] = self.createDataDirectory('slapos')
CONFIG['working_directory'] = self.createDataDirectory('testnode')
CONFIG['software_root'] = os.path.join(CONFIG['slapos_directory'],
'software')
CONFIG['instance_root'] = os.path.join(CONFIG['slapos_directory'],
'instance')
CONFIG['proxy_database'] = os.path.join(CONFIG['slapos_directory'],
'proxy.db')
CONFIG['proxy_host'] = self.getLocalIPv4Address()
CONFIG['master_url'] = 'http://%s:%s' % (CONFIG['proxy_host'],
CONFIG['proxy_port'])
self._createDirectory(CONFIG['software_root'])
self._createDirectory(CONFIG['instance_root'])
CONFIG['slapos_config'] = self.createConfigurationFile('slapos.cfg',
self.substituteTemplate(pkg_resources.resource_filename(__name__,
'template/slapos.cfg.in'), CONFIG))
self.path_list.append(CONFIG['slapos_config'])
def setupRunningWrapper(self):
self.path_list.extend(zc.buildout.easy_install.scripts([(
'testnode',
__name__+'.testnode', 'run')], self.ws,
sys.executable, self.wrapper_directory, arguments=[
dict(
environment=self.getRuntimeEnvironment(),
computer_id=CONFIG['computer_id'],
instance_dict=eval(self.parameter_dict.get('instance_dict', '{}')),
instance_root=CONFIG['instance_root'],
ipv4_address=self.getLocalIPv4Address(),
ipv6_address=self.getGlobalIPv6Address(),
master_url=CONFIG['master_url'],
profile_url=self.parameter_dict['profile_url'],
proxy_database=CONFIG['proxy_database'],
slapgrid_partition_binary=self.options['slapgrid_partition_binary'],
slapgrid_software_binary=self.options['slapgrid_software_binary'],
slapos_config=CONFIG['slapos_config'],
slapproxy_binary=self.options['slapproxy_binary'],
software_root=CONFIG['software_root'],
buildbot_binary=self.options['buildbot_binary'],
working_directory=CONFIG['working_directory'],
buildbot_host=self.parameter_dict['buildbot_host'],
slave_name=self.parameter_dict['slave_name'],
slave_password=self.parameter_dict['slave_password'],
bin_directory=self.bin_directory,
# bot_environment is a splittable string of key=value pairs used to extend
# the environment of the running bot
bot_environment=self.parameter_dict.get('bot_environment', ''),
partition_reference=CONFIG['partition_reference'],
)
]))
def installLocalSvn(self):
svn_dict = dict(svn_binary = self.options['svn_binary'])
svn_dict.update(self.parameter_dict)
svn_path = os.path.join(self.bin_directory, 'svn')
self._writeExecutable(svn_path, """\
#!/bin/sh
%(svn_binary)s --username %(svn_username)s --password %(svn_password)s \
--non-interactive --trust-server-cert --no-auth-cache "$@" """% svn_dict)
self.path_list.append(svn_path)
svnversion = os.path.join(self.bin_directory, 'svnversion')
if os.path.lexists(svnversion):
os.unlink(svnversion)
os.symlink(self.options['svnversion_binary'], svnversion)
self.path_list.append(svnversion)
def installLocalGit(self):
git = os.path.join(self.bin_directory, 'git')
if os.path.lexists(git):
os.unlink(git)
os.symlink(self.options['git_binary'], git)
self.path_list.append(git)
def installLocalZip(self):
zip = os.path.join(self.bin_directory, 'zip')
if os.path.lexists(zip):
os.unlink(zip)
os.symlink(self.options['zip_binary'], zip)
self.path_list.append(zip)
def installLocalPython(self):
"""Installs local python fully featured with eggs"""
self.path_list.extend(zc.buildout.easy_install.scripts([], self.ws,
sys.executable, self.bin_directory, scripts=None,
interpreter='python'))
def installLocalRunUnitTest(self):
link = os.path.join(self.bin_directory, 'runUnitTest')
destination = os.path.join(CONFIG['instance_root'],
CONFIG['partition_reference'], 'bin', 'runUnitTest')
if os.path.lexists(link):
if os.readlink(link) != destination:
os.unlink(link)
if not os.path.lexists(link):
os.symlink(destination, link)
self.path_list.append(link)
def _installBuildbot(self):
self.setupRunningWrapper()
self.installLocalPython()
self.installLocalGit()
self.installLocalSvn()
self.installLocalRunUnitTest()
return self.path_list
def getRuntimeEnvironment(self):
env = {}
env['PATH'] = ':'.join([self.bin_directory] +
os.environ['PATH'].split(':'))
return env
def _installProfileTesting(self):
self.path_list.extend(zc.buildout.easy_install.scripts([(
'testnode',
__name__+'.profile_testnode', 'run')], self.ws,
sys.executable, self.wrapper_directory, arguments=[
dict(
environment=self.getRuntimeEnvironment(),
slapgrid_environment=eval(self.parameter_dict.get(
'slapgrid_environment', '{}')),
profile_path=self.parameter_dict.get('profile_path',
'slapos/software.cfg'),
repository=self.parameter_dict['repository'],
# Optional URL of the test aggregation system
test_suite_master_url=self.parameter_dict['test_suite_master_url'],
suite_name=self.parameter_dict['suite_name'],
branch=self.parameter_dict.get('branch', 'master'),
# internal parameters
software_root=CONFIG['software_root'],
computer_id=CONFIG['computer_id'],
git_binary=self.options['git_binary'],
master_url=CONFIG['master_url'],
proxy_database=CONFIG['proxy_database'],
slapgrid_software_binary=self.options['slapgrid_software_binary'],
slapos_config=CONFIG['slapos_config'],
slapproxy_binary=self.options['slapproxy_binary'],
working_directory=CONFIG['working_directory'],
bin_directory=self.bin_directory,
partition_reference=CONFIG['partition_reference'],
)
]))
return self.path_list
def _install(self):
self.requirements, self.ws = self.egg.working_set()
self.path_list = []
self.installSlapOs()
self.installLocalZip()
flavour = self.parameter_dict.get('flavour', 'buildbot')
if flavour == 'buildbot':
return self._installBuildbot()
elif flavour == 'profile-testing':
return self._installProfileTesting()
raise NotImplementedError('Flavour %r is unknown' % flavour)
import urlparse
import urllib
import httplib
import mimetools
from random import randint
import tempfile
import os
import stat
import zipfile
import mimetypes
import datetime
TB_SEP = "============================================================="\
"========="
def get_content_type(f):
return mimetypes.guess_type(f.name)[0] or 'application/octet-stream'
class ConnectionHelper:
def __init__(self, url):
self.conn = urlparse.urlparse(url)
if self.conn.scheme == 'http':
connection_type = httplib.HTTPConnection
if self.conn.port is None:
self.port = 80
else:
connection_type = httplib.HTTPSConnection
if self.conn.port is None:
self.port = 443
self.connection_type = connection_type
def _connect(self):
self.connection = self.connection_type(self.conn.hostname + ':' +
str(self.conn.port or self.port))
def POST(self, path, parameter_dict, file_list=None):
self._connect()
parameter_dict.update(__ac_name=self.conn.username,
__ac_password=self.conn.password)
header_dict = {'Content-type': "application/x-www-form-urlencoded"}
if file_list is None:
body = urllib.urlencode(parameter_dict)
else:
boundary = mimetools.choose_boundary()
header_dict['Content-type'] = 'multipart/form-data; boundary=%s' % (
boundary,)
body = ''
for k, v in parameter_dict.iteritems():
body += '--%s\r\n' % boundary
body += 'Content-Disposition: form-data; name="%s"\r\n' % k
body += '\r\n'
body += '%s\r\n' % v
for name, filename in file_list:
f = open(filename, 'r')
body += '--%s\r\n' % boundary
body += 'Content-Disposition: form-data; name="%s"; filename="%s"\r\n'\
% (name, name)
body += 'Content-Type: %s\r\n' % get_content_type(f)
body += 'Content-Length: %d\r\n' % os.fstat(f.fileno())[stat.ST_SIZE]
body += '\r\n'
body += f.read()
f.close()
body += '\r\n'
self.connection.request("POST", self.conn.path + '/' + path,
body, header_dict)
self.response = self.connection.getresponse()
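# Illustrative sketch, not part of the code: the multipart body assembled in
# the branch above looks roughly like this (boundary and values are made up;
# the temporary zip file has no extension, so get_content_type() falls back to
# application/octet-stream):
#   --<boundary>
#   Content-Disposition: form-data; name="test_report_id"
#
#   20130211-1A2B3C
#   --<boundary>
#   Content-Disposition: form-data; name="filepath"; filename="filepath"
#   Content-Type: application/octet-stream
#   Content-Length: 4096
#
#   <raw zip bytes>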
class ERP5TestReportHandler:
def __init__(self, url, suite_name):
# random test id
self.test_id = "%s-%X" % (
("%s" % datetime.date.today()).replace("-", ""),
randint(1, 10000000000000000),
)
self.connection_helper = ConnectionHelper(url)
self.suite_name = suite_name
def reportStart(self):
# report that test is running
print 'Starting test with id %s' % self.test_id
self.connection_helper.POST('TestResultModule_reportRunning', dict(
test_suite=self.suite_name,
test_report_id=self.test_id,
))
def reportFinished(self, out_file, revision, success, duration, text):
# make file parsable by erp5_test_results
tempcmd = tempfile.mkstemp()[1]
tempcmd2 = tempfile.mkstemp()[1]
tempout = tempfile.mkstemp()[1]
templog = tempfile.mkstemp()[1]
log_lines = open(out_file, 'r').readlines()
tl = open(templog, 'w')
tl.write(TB_SEP + '\n')
if len(log_lines) > 900:
tl.write('...[truncated]... \n\n')
for log_line in log_lines[-900:]:
starts = log_line.startswith
if starts('Ran') or starts('FAILED') or starts('OK') or starts(TB_SEP):
continue
if starts('ERROR: ') or starts('FAIL: '):
tl.write('internal-test: ' + log_line)
continue
tl.write(log_line)
tl.write("----------------------------------------------------------------------\n")
tl.write('Ran 1 test in %.2fs\n' % duration)
if success:
tl.write('OK\n')
else:
tl.write('FAILED (failures=1)\n')
tl.write(TB_SEP + '\n')
tl.close()
open(tempcmd, 'w').write("""svn info dummy""")
open(tempcmd2, 'w').write(self.suite_name)
open(tempout, 'w').write("Revision: %s\n%s" % (revision, text))
# create nice zip archive
tempzip = tempfile.mkstemp()[1]
zip = zipfile.ZipFile(tempzip, 'w')
zip.write(tempcmd, 'dummy/001/cmdline')
zip.write(tempout, 'dummy/001/stdout')
zip.write(templog, 'dummy/001/stderr')
zip.write(tempout, '%s/002/stdout' % self.suite_name)
zip.write(templog, '%s/002/stderr' % self.suite_name)
zip.write(tempcmd2, '%s/002/cmdline' % self.suite_name)
zip.close()
os.unlink(templog)
os.unlink(tempcmd)
os.unlink(tempout)
os.unlink(tempcmd2)
# post it to ERP5
self.connection_helper.POST('TestResultModule_reportCompleted', dict(
test_report_id=self.test_id),
file_list=[('filepath', tempzip)]
)
os.unlink(tempzip)
import os
import socket
import signal
import shutil
import slapos.slap
import subprocess
import time
import atexit
from erp5testreporthandler import ERP5TestReportHandler
process_group_pid_list = []
def clean():
for pgpid in process_group_pid_list:
try:
os.killpg(pgpid, signal.SIGTERM)
except:
pass
def sigterm_handler(signal, frame):
clean()
def sigint_handler(signal, frame):
clean()
raise KeyboardInterrupt
signal.signal(signal.SIGINT, sigint_handler)
signal.signal(signal.SIGTERM, sigterm_handler)
atexit.register(clean)
def getCurrentBranchName(config, p):
r = subprocess.Popen([config['git_binary'], 'branch'], stdout=subprocess.PIPE, cwd=p).communicate()[0]
for f in r.splitlines():
if f.startswith('*'):
return f.split()[1]
return ''
def getRevision(config, p):
return subprocess.Popen([config['git_binary'], 'rev-parse', 'HEAD'], stdout=subprocess.PIPE, cwd=p).communicate()[0].strip()
def getCurrentFetchRemote(config, p):
r = subprocess.Popen([config['git_binary'], 'remote', '-v'], stdout=subprocess.PIPE, cwd=p).communicate()[0]
remote = ''
for f in r.splitlines():
if f.startswith('origin') and f.endswith('(fetch)'):
if remote != '':
raise ValueError('Too many remotes: %s' % r)
remote = f.split()[1]
return remote
def getMachineIdString():
"""Returns machine identification string"""
kw = dict(stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
idstr = subprocess.Popen(["uname", "-m"], **kw).communicate()[0].strip()
# try to detect gcc version
try:
gcc_list = subprocess.Popen(["gcc", "-v"], **kw).communicate()[0].split(
'\n')
for gcc in gcc_list:
if gcc.startswith('gcc version'):
idstr += ' gcc:' + gcc.split()[2]
break
except IndexError:
pass
# try to detect libc version
try:
libdir = os.path.sep + 'lib'
for libso in os.listdir(libdir):
if libso.startswith('libc.') and os.path.islink(os.path.join(libdir,
libso)):
libc = os.readlink(os.path.join(libdir, libso))
if libc.endswith('.so'):
idstr += ' libc:' + libc.split('-')[1][:-3]
else:
idstr += ' ' + libc
break
except IndexError:
pass
return idstr
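# Example return value (hypothetical machine): 'x86_64 gcc:4.7.2 libc:2.13'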
def run(args):
config = args[0]
for k,v in config['environment'].iteritems():
os.environ[k] = v
proxy = None
slapgrid = None
last_revision_file = os.path.join(config['working_directory'],
'revision.txt')
if os.path.exists(last_revision_file):
os.unlink(last_revision_file)
# fetch repository from git
repository_clone = os.path.join(config['working_directory'], 'repository')
profile_path = os.path.join(repository_clone, config['profile_path'])
if os.path.exists(config['proxy_database']):
os.unlink(config['proxy_database'])
proxy = subprocess.Popen([config['slapproxy_binary'],
config['slapos_config']], close_fds=True, preexec_fn=os.setsid)
process_group_pid_list.append(proxy.pid)
slap = slapos.slap.slap()
slap.initializeConnection(config['master_url'])
while True:
try:
slap.registerSupply().supply(profile_path,
computer_guid=config['computer_id'])
except socket.error:
time.sleep(1)
pass
else:
break
while True:
info_list = []
a = info_list.append
while True:
try:
if os.path.exists(repository_clone):
if getCurrentFetchRemote(config, repository_clone) != config['repository']:
shutil.rmtree(repository_clone)
if not os.path.exists(repository_clone):
subprocess.check_call([config['git_binary'], 'clone',
config['repository'], repository_clone])
# switch to branch
branch = getCurrentBranchName(config, repository_clone)
if branch != config['branch']:
subprocess.check_call([config['git_binary'], 'checkout', '--force',
'--track', '-b', config['branch'], 'origin/'+config['branch']],
cwd=repository_clone)
subprocess.check_call([config['git_binary'], 'reset', '--hard',
'@{upstream}'], cwd=repository_clone)
except Exception:
print 'Retrying git in 60s'
time.sleep(60)
else:
break
a('Tested repository: %s' % config['repository'])
a('Machine identification: %s' % getMachineIdString())
erp5_report = ERP5TestReportHandler(config['test_suite_master_url'],
'@'.join([config['suite_name'], branch]))
last_revision = ''
if os.path.exists(last_revision_file):
last_revision = open(last_revision_file).read().strip()
revision = getRevision(config, repository_clone)
open(last_revision_file, 'w').write(revision)
if revision != last_revision:
print 'Running for revision %r' % revision
while True:
try:
erp5_report.reportStart()
except Exception:
print 'Retrying in 5s'
time.sleep(5)
else:
break
if os.path.exists(config['software_root']):
shutil.rmtree(config['software_root'])
os.mkdir(config['software_root'])
out_file = os.path.join(config['working_directory'], 'slapgrid.out')
if os.path.exists(out_file):
os.unlink(out_file)
out = open(out_file, 'w')
begin = time.time()
slapgrid_environment = os.environ.copy()
for k, v in config['slapgrid_environment'].iteritems():
slapgrid_environment[k] = v
a('Slapgrid environment: %r'% config['slapgrid_environment'])
slapgrid = subprocess.Popen([config['slapgrid_software_binary'], '-vc',
config['slapos_config']], close_fds=True, preexec_fn=os.setsid,
stdout=out, stderr=subprocess.STDOUT, env=slapgrid_environment)
process_group_pid_list.append(slapgrid.pid)
slapgrid.communicate()
out.close()
while True:
try:
erp5_report.reportFinished(out_file,revision,
slapgrid.returncode == 0, time.time() - begin,
'\n'.join(info_list))
except Exception:
print 'Retrying in 5s'
time.sleep(5)
else:
break
print 'Sleeping for 600s'
time.sleep(600)
[slapos]
software_root = %(software_root)s
instance_root = %(instance_root)s
master_url = %(master_url)s
computer_id = %(computer_id)s
[slapproxy]
host = %(proxy_host)s
port = %(proxy_port)s
database_uri = %(proxy_database)s
[buildout]
parts =
instance
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[instance]
recipe = ${instance-recipe:egg}:${instance-recipe:module}
agent_binary = ${buildout:directory}/bin/agent
pidfile = $${directory:srv}/agent.pid
log = $${directory:agentlog}/agent.log
wrapper = $${directory:run}/agent
config = $${directory:etc}/agent.cfg
master-url = $${slap-parameter:master-url}
key = $${slap-parameter:userkey}
cert = $${slap-parameter:usercertificate}
configuration = $${slap-parameter:configuration}
default_max_install_duration = $${slap-parameter:default_max_install_duration}
default_max_uninstall_duration = $${slap-parameter:default_max_uninstall_duration}
default_max_request_duration = $${slap-parameter:default_max_request_duration}
default_max_destroy_duration = $${slap-parameter:default_max_destroy_duration}
[directory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
run = $${:etc}/run
agentlog = $${buildout:directory}/var/log/agent
srv = $${buildout:directory}/srv
bin = $${buildout:directory}/bin
[buildout]
parts =
instance
switch_softwaretype
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[instance]
recipe = ${instance-recipe:egg}:${instance-recipe:module}
agent_binary = ${buildout:directory}/bin/agent
report_start = ${buildout:directory}/bin/report_start
report_stop = ${buildout:directory}/bin/report_stop
dcrond_binary = ${dcron:location}/sbin/crond
python_binary = ${python2.7:location}/bin/python
pidfile = $${rootdirectory:run}/agent.pid
log = $${rootdirectory:agentlog}/agent.log
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
run = $${buildout:directory}/etc/run
agentlog = $${buildout:directory}/var/log/agent
srv = $${buildout:directory}/srv
bin = $${buildout:directory}/bin
[switch_softwaretype]
recipe = slapos.cookbook:softwaretype
default = ${template-agent:output}