Commit 0526e075 authored by Łukasz Nowak

SlapOS related code moved out.

SlapOS code has been moved to: http://git.erp5.org/gitweb/slapos.git
parent 0eba9830
It is disallowed to:
* use the hooks directory directly in buildout (e.g. assuming that it will
  exist while running buildout)
  Rationale: it prevents using buildout in a networked environment and
  extending profiles over the network.
* access hooks by SVN URI
  Rationale: it forces an SVN repository layout and makes mirroring and high
  availability (HA) hard to obtain.
Hooks are instead downloaded over the network by the profiles that use them,
as in the sketch below.
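For illustration only, a minimal sketch of the expected pattern, modelled on
the bzip2 part defined further down in this repository; the section names,
tarball URL and hook file name here are made up:

[example-hooks-download]
# fetch the hook over the network, relative to the profile location,
# instead of relying on a local hooks directory or an SVN URI
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../hook/${:filename}
filename = example-hooks.py
download-only = true
# the md5sum of the hook file is normally pinned here as well

[example]
recipe = hexagonit.recipe.cmmi
url = http://example.org/example-1.0.tar.gz
# reference the downloaded copy of the hook script, not the hooks directory
post-make-hook = ${example-hooks-download:location}/${example-hooks-download:filename}:post_make_hook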
import os
import shutil

def post_make_hook(options, buildout):
    make_options_list = [q for q in options.get('make-options', '').split('\n') if q]
    if os.system('make %s -f Makefile-libbz2_so' % ' '.join(make_options_list)) != 0:
        raise ValueError('Generation of dynamic library failed')
    original = 'libbz2.so.1.0.6'
    link_list = ['libbz2.so.1.0', 'libbz2.so.1', 'libbz2.so']
    destination = os.path.join(options['location'], 'lib')
    for filename in [original] + link_list:
        f = os.path.join(destination, filename)
        if os.path.exists(f) or os.path.islink(f):
            os.unlink(f)
    shutil.copyfile(os.path.join(os.curdir, original),
                    os.path.join(destination, original))
    for link in link_list:
        os.symlink(original, os.path.join(destination, link))
import os

def pre_configure_hook(options, buildout):
    # work around a Python bug: symlinks contained in tarfiles are not created
    for missing in 'configure.ac', 'Makefile.in':
        if not os.path.exists(os.path.join(os.path.curdir, missing)):
            os.symlink(os.path.join(os.path.curdir, 'base', missing),
                       os.path.join(os.path.curdir, missing))
import os
import shutil

def post_make_hook(options, buildout):
    destination = options['location']
    directory_list = ['include', 'lib']
    for d in directory_list:
        dd = os.path.join(destination, d)
        if not os.path.isdir(dd):
            os.mkdir(dd)
    for include in ['jbig.h', 'jbig85.h', 'jbig_ar.h']:
        shutil.copyfile(os.path.join(os.curdir, 'libjbig', include),
                        os.path.join(destination, 'include', include))
    for so in ['libjbig85.so', 'libjbig.so']:
        shutil.copyfile(os.path.join(os.curdir, 'libjbig', so),
                        os.path.join(destination, 'lib', so))
import os
import sys
import traceback
from shutil import copy
from subprocess import Popen, PIPE

CONFIGURE_PATH = os.path.join('configure')
CONFIGURE_BACKUP_PATH = CONFIGURE_PATH + '_disabled'

# Fake configure, generating a fake Makefile which will create a marker file
# instead of actually installing anything.
# This is needed (?) to fetch --prefix from configure parameters, so we know
# where to tell Makefile to put the dummy file.
FAKE_CONFIGURE = '''#!%(python)s -S
import os
import sys
print 'Configuration is disabled on this host because %%s'
print 'Original configure file available at %(backup)s'
prefix = None
next = False
for arg in sys.argv:
    if next:
        prefix = arg
        break
    if arg.startswith('--prefix'):
        if arg.startswith('--prefix='):
            _, prefix = arg.split('=', 1)
            break
        next = True
if prefix is None:
    raise ValueError('--prefix parameter not found')
# Generate Makefile with proper prefix
open('Makefile', 'w').write("""all:
\techo 'make disabled, see configure'
install:
\ttouch %%%%s""" %%%% (
    os.path.join(prefix, 'BUILD_DISABLED_BY_BUILDOUT'),
))
sys.exit(0)
''' % {
    'backup': CONFIGURE_BACKUP_PATH,
    'python': sys.executable,
}

def pre_configure_hook(options, buildout):
    gcc_executable = os.getenv('CC', 'gcc')
    try:
        gcc = Popen([gcc_executable, '-v'], stdout=PIPE, stderr=PIPE,
                    close_fds=True)
    except OSError, (errno, _):
        if errno == 2:
            # No gcc installed, nothing to check
            pass
        else:
            print 'Unexpected failure trying to detect gcc version'
            traceback.print_exc()
    else:
        gcc.wait()
        # Considered innocent until proven guilty.
        error = None
        for line in '\n'.join((gcc.stdout.read(), gcc.stderr.read())).splitlines():
            if line.startswith('gcc version'):
                if '4.1.1' in line and 'prerelease' in line:
                    # The 4.1.1 prerelease g++ (e.g. as shipped with Mandriva
                    # 2007.0) has a bug preventing kumo compilation from
                    # succeeding.
                    error = 'broken GCC version: %s' % (line, )
                break
        else:
            print >>sys.stderr, 'GCC version could not be detected, ' \
                'building anyway'
        if error is not None:
            print 'Disabling build, with reason:', error
            # Copy to preserve permissions
            copy(CONFIGURE_PATH, CONFIGURE_BACKUP_PATH)
            open(CONFIGURE_PATH, 'w').write(FAKE_CONFIGURE % (error, ))
import os

def pre_configure_hook(options, buildout):
    os.chdir('build_unix')
import os

def pre_make_hook(options, buildout):
    # change to the pdftk directory, where the build process takes place
    os.chdir('pdftk')
import os

def pre_configure_hook(options, buildout):
    # remove certificate generation
    # based on Gentoo: http://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/net-misc/stunnel/stunnel-4.33.ebuild
    name = os.path.join('tools', 'Makefile.in')
    f = file(name, 'r')
    d = f.read().replace('install-data-local:', 'do-not-run-this:')
    f.close()
    file(name, 'w').write(d)
import os
import shutil

def pre_configure_hook(options, buildout):
    os.chdir('unix')

def post_make_hook(options, buildout):
    bindir = os.path.join(options['location'], 'bin')
    destination = os.path.join(bindir, 'tclsh')
    if not os.path.exists(destination):
        original = os.path.join(bindir, [q for q in os.listdir(bindir) if q.startswith('tclsh')][0])
        shutil.copy(original, destination)
[buildout]
parts =
  instance
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
[instance]
recipe = ${instance-recipe:name}
dcrond_binary = ${dcron:location}/sbin/crond
haproxy_binary = ${haproxy:location}/sbin/haproxy
httpd_binary = ${apache:location}/bin/httpd
innobackupex_binary = ${xtrabackup:location}/bin/innobackupex
kumo_gateway_binary = ${kumo:location}/bin/kumo-gateway
kumo_manager_binary = ${kumo:location}/bin/kumo-manager
kumo_server_binary = ${kumo:location}/bin/kumo-server
logrotate_binary = ${logrotate:location}/usr/sbin/logrotate
memcached_binary = ${memcached:location}/bin/memcached
mysql_binary = ${mariadb:location}/bin/mysql
mysql_install_binary = ${mariadb:location}/bin/mysql_install_db
mysql_upgrade_binary = ${mariadb:location}/bin/mysql_upgrade
mysqld_binary = ${mariadb:location}/libexec/mysqld
openssl_binary = ${openssl:location}/bin/openssl
perl_binary = ${perl:location}/bin/perl
rdiff_backup_binary = ${buildout:bin-directory}/rdiff-backup
repozo_binary = ${buildout:bin-directory}/repozo
runTestSuite_binary = ${buildout:bin-directory}/runTestSuite
runUnitTest_binary = ${buildout:bin-directory}/runUnitTest
runzeo_binary = ${buildout:bin-directory}/runzeo
runzope_binary = ${buildout:bin-directory}/runzope
tidstorage_repozo_binary = ${buildout:bin-directory}/tidstorage_repozo
tidstoraged_binary = ${buildout:bin-directory}/tidstoraged
zabbix_agent_binary = ${zabbix-agent:location}/sbin/zabbix_agent
# cloudooo specific configuration
ooo_binary_path = ${libreoffice-bin:location}/program
ooo_paster = ${buildout:bin-directory}/cloudooo_paster
ooo_uno_path = ${libreoffice-bin:location}/basis-link/program
link_binary_list =
  ${coreutils:location}/bin/basename
  ${coreutils:location}/bin/cat
  ${coreutils:location}/bin/cp
  ${coreutils:location}/bin/ls
  ${coreutils:location}/bin/tr
  ${coreutils:location}/bin/uname
  ${git:location}/bin/git
  ${graphviz:location}/bin/dot
  ${grep:location}/bin/grep
  ${imagemagick:location}/bin/convert
  ${mariadb:location}/bin/mysql
  ${mariadb:location}/bin/mysqldump
  ${pdftk:location}/bin/pdftk
  ${sed:location}/bin/sed
  ${tesseract:location}/bin/tesseract
  ${w3m:location}/bin/w3m
  ${xpdf:location}/bin/pdfinfo
  ${xpdf:location}/bin/pdftotext
  ${xtrabackup:location}/bin/xtrabackup_51
# XXX: products will no longer be needed once all ERP5 (and products-deps)
# products are eggified; they will then be usable through the software's
# eggs
products = ${products:list}
environment =
  LD_LIBRARY_PATH = ${file:location}/lib:${zlib:location}/lib:${freetype:location}/lib:${libXext:location}/lib:${libXau:location}/lib:${libX11:location}/lib:${libXdmcp:location}/lib:${libxcb:location}/lib
--- a/configure.ac
+++ b/configure.ac
@@ -28,7 +28,7 @@
MYSQL_INC="$MYSQL_INC -I$ac_mysql_source_dir/regex"
MYSQL_INC="$MYSQL_INC -I$ac_mysql_source_dir"
AC_SUBST(MYSQL_INC)
- MYSQL_SOURCE_VERSION=`cat $ac_mysql_source_dir/configure.in | grep "\[[MySQL Server\]]" | sed -e "s|.*\([[0-9]]\+\.[[0-9]]\+\.[[0-9]]\+[[0-9a-zA-Z\_\-]]*\).*|\1|"`
+ MYSQL_SOURCE_VERSION=`cat $ac_mysql_source_dir/configure.in | grep "\[[MariaDB Server\]]" | sed -e "s|.*\([[0-9]]\+\.[[0-9]]\+\.[[0-9]]\+[[0-9a-zA-Z\_\-]]*\).*|\1|"`
AC_MSG_RESULT([yes: Using $ac_mysql_source_dir, version $MYSQL_SOURCE_VERSION])
else
AC_MSG_ERROR([invalid MySQL source directory: $ac_mysql_source_dir])
--- a/handlersocket/database.cpp
+++ b/handlersocket/database.cpp
@@ -686,19 +686,19 @@
for (uint32_t i = 0; i < limit + skip; ++i) {
if (i == 0) {
const key_part_map kpm = (1U << args.kvalslen) - 1;
- r = hnd->index_read_map(table->record[0], key_buf, kpm, find_flag);
+ r = hnd->ha_index_read_map(table->record[0], key_buf, kpm, find_flag);
} else {
switch (find_flag) {
case HA_READ_BEFORE_KEY:
case HA_READ_KEY_OR_PREV:
- r = hnd->index_prev(table->record[0]);
+ r = hnd->ha_index_prev(table->record[0]);
break;
case HA_READ_AFTER_KEY:
case HA_READ_KEY_OR_NEXT:
- r = hnd->index_next(table->record[0]);
+ r = hnd->ha_index_next(table->record[0]);
break;
case HA_READ_KEY_EXACT:
- r = hnd->index_next_same(table->record[0], key_buf, kplen_sum);
+ r = hnd->ha_index_next_same(table->record[0], key_buf, kplen_sum);
break;
default:
r = HA_ERR_END_OF_FILE; /* to finish the loop */
fix warnings with gcc-4.4
http://bugs.gentoo.org/248738
--- cyrus-sasl-2.1.22/plugins/digestmd5.c
+++ cyrus-sasl-2.1.22/plugins/digestmd5.c
@@ -2715,7 +2715,7 @@ static sasl_server_plug_t digestmd5_serv
"DIGEST-MD5", /* mech_name */
#ifdef WITH_RC4
128, /* max_ssf */
-#elif WITH_DES
+#elif defined(WITH_DES)
112,
#else
1,
@@ -4034,7 +4034,7 @@ static sasl_client_plug_t digestmd5_clie
"DIGEST-MD5",
#ifdef WITH_RC4 /* mech_name */
128, /* max ssf */
-#elif WITH_DES
+#elif defined(WITH_DES)
112,
#else
1,
--- cyrus-sasl-2.1.23/include/sasl.h 2010-11-25 18:15:05.000000000 +0100
+++ cyrus-sasl-2.1.23/include/sasl.h 2010-11-25 18:15:34.000000000 +0100
@@ -346,7 +346,7 @@
* Mechanisms must ignore callbacks with id's they don't recognize.
*/
unsigned long id;
- int (*proc)(); /* Callback function. Types of arguments vary by 'id' */
+ int (*proc); /* Callback function. Types of arguments vary by 'id' */
void *context;
} sasl_callback_t;
# Patch for making dcron usable without root user, as a local service
diff -ru dcron-4.4.org/chuser.c dcron-4.4/chuser.c
--- dcron-4.4.org/chuser.c 2010-01-18 16:27:31.000000000 +0100
+++ dcron-4.4/chuser.c 2011-04-01 11:19:19.000000000 +0200
@@ -14,47 +14,6 @@
int
ChangeUser(const char *user, char *dochdir)
{
- struct passwd *pas;
-
- /*
- * Obtain password entry and change privilages
- */
-
- if ((pas = getpwnam(user)) == 0) {
- printlogf(LOG_ERR, "failed to get uid for %s\n", user);
- return(-1);
- }
- setenv("USER", pas->pw_name, 1);
- setenv("HOME", pas->pw_dir, 1);
- setenv("SHELL", "/bin/sh", 1);
-
- /*
- * Change running state to the user in question
- */
-
- if (initgroups(user, pas->pw_gid) < 0) {
- printlogf(LOG_ERR, "initgroups failed: %s %s\n", user, strerror(errno));
- return(-1);
- }
- if (setregid(pas->pw_gid, pas->pw_gid) < 0) {
- printlogf(LOG_ERR, "setregid failed: %s %d\n", user, pas->pw_gid);
- return(-1);
- }
- if (setreuid(pas->pw_uid, pas->pw_uid) < 0) {
- printlogf(LOG_ERR, "setreuid failed: %s %d\n", user, pas->pw_uid);
- return(-1);
- }
- if (dochdir) {
- /* try to change to $HOME */
- if (chdir(pas->pw_dir) < 0) {
- printlogf(LOG_ERR, "chdir failed: %s %s\n", user, pas->pw_dir);
- /* dochdir is a backup directory, usually /tmp */
- if (chdir(dochdir) < 0) {
- printlogf(LOG_ERR, "chdir failed: %s %s\n", user, dochdir);
- return(-1);
- }
- }
- }
- return(pas->pw_uid);
+ return getpwnam(user);
}
diff -ru dcron-4.4.org/crontab.c dcron-4.4/crontab.c
--- dcron-4.4.org/crontab.c 2010-01-18 16:27:31.000000000 +0100
+++ dcron-4.4/crontab.c 2011-04-01 11:19:19.000000000 +0200
@@ -316,9 +316,6 @@
close(filedes[0]);
- if (ChangeUser(user, NULL) < 0)
- exit(0);
-
fd = open(file, O_RDONLY);
if (fd < 0) {
printlogf(0, "unable to open %s: %s", file, strerror(errno));
@@ -344,8 +341,6 @@
const char *ptr;
char visual[SMALL_BUFFER];
- if (ChangeUser(user, TMPDIR) < 0)
- exit(0);
if ((ptr = getenv("EDITOR")) == NULL || strlen(ptr) >= sizeof(visual))
if ((ptr = getenv("VISUAL")) == NULL || strlen(ptr) >= sizeof(visual))
ptr = PATH_VI;
diff -ru dcron-4.4.org/job.c dcron-4.4/job.c
--- dcron-4.4.org/job.c 2010-01-18 16:27:31.000000000 +0100
+++ dcron-4.4/job.c 2011-04-01 11:19:19.000000000 +0200
@@ -62,14 +62,6 @@
* Change running state to the user in question
*/
- if (ChangeUser(file->cf_UserName, TempDir) < 0) {
- printlogf(LOG_ERR, "unable to ChangeUser (user %s %s)\n",
- file->cf_UserName,
- line->cl_Description
- );
- exit(0);
- }
-
/* from this point we are unpriviledged */
if (DebugOpt)
@@ -295,14 +287,6 @@
* by the mailing and we already verified the mail file.
*/
- if (ChangeUser(file->cf_UserName, TempDir) < 0) {
- printlogf(LOG_ERR, "unable to ChangeUser to send mail (user %s %s)\n",
- file->cf_UserName,
- line->cl_Description
- );
- exit(0);
- }
-
/* from this point we are unpriviledged */
/*
diff -ru dcron-4.4.org/Makefile dcron-4.4/Makefile
--- dcron-4.4.org/Makefile 2010-01-18 16:27:31.000000000 +0100
+++ dcron-4.4/Makefile 2011-04-01 11:19:35.000000000 +0200
@@ -3,7 +3,6 @@
# these variables can be configured by e.g. `make SCRONTABS=/different/path`
PREFIX = /usr/local
-CRONTAB_GROUP = wheel
SCRONTABS = /etc/cron.d
CRONTABS = /var/spool/cron/crontabs
CRONSTAMPS = /var/spool/cron/cronstamps
@@ -20,10 +19,10 @@
SHELL = /bin/sh
-INSTALL = install -o root
+INSTALL = install
INSTALL_PROGRAM = $(INSTALL) -D
-INSTALL_DATA = $(INSTALL) -D -m0644 -g root
-INSTALL_DIR = $(INSTALL) -d -m0755 -g root
+INSTALL_DATA = $(INSTALL) -D -m0644
+INSTALL_DIR = $(INSTALL) -d -m0755
CFLAGS ?= -O2
CFLAGS += -Wall -Wstrict-prototypes
SRCS = main.c subs.c database.c job.c concat.c chuser.c
@@ -44,7 +43,6 @@
echo "SBINDIR = $(SBINDIR)" >> config
echo "BINDIR = $(BINDIR)" >> config
echo "MANDIR = $(MANDIR)" >> config
- echo "CRONTAB_GROUP = $(CRONTAB_GROUP)" >> config
echo "SCRONTABS = $(SCRONTABS)" >> config
echo "CRONTABS = $(CRONTABS)" >> config
echo "CRONSTAMPS = $(CRONSTAMPS)" >> config
@@ -62,13 +60,10 @@
$(CC) -c $(CPPFLAGS) $(CFLAGS) $(DEFS) $< -o $@
install:
- $(INSTALL_PROGRAM) -m0700 -g root crond $(DESTDIR)$(SBINDIR)/crond
- $(INSTALL_PROGRAM) -m4750 -g $(CRONTAB_GROUP) crontab $(DESTDIR)$(BINDIR)/crontab
+ $(INSTALL_PROGRAM) -m0755 crond $(DESTDIR)$(SBINDIR)/crond
+ $(INSTALL_PROGRAM) -m4750 crontab $(DESTDIR)$(BINDIR)/crontab
$(INSTALL_DATA) crontab.1 $(DESTDIR)$(MANDIR)/man1/crontab.1
$(INSTALL_DATA) crond.8 $(DESTDIR)$(MANDIR)/man8/crond.8
- $(INSTALL_DIR) $(DESTDIR)$(SCRONTABS)
- $(INSTALL_DIR) $(DESTDIR)$(CRONTABS)
- $(INSTALL_DIR) $(DESTDIR)$(CRONSTAMPS)
clean: force
rm -f *.o $(PROTOS)
--- Makefile.in 2002-10-08 16:09:12.000000000 +0000
+++ Makefile.in.nochange 2010-11-03 21:17:38.579435530 +0000
@@ -14,10 +14,6 @@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_DATA = @INSTALL_DATA@
-# File ownership and group
-BINOWN = bin
-BINGRP = bin
-
MAKEINFO = makeinfo
TEXI2DVI = texi2dvi
@@ -131,11 +127,11 @@
$(INSTALL_ROOT)$(includedir) $(INSTALL_ROOT)$(man3dir) \
$(INSTALL_ROOT)$(infodir)
$(LIBTOOL) $(INSTALL) -c libgdbm.la $(INSTALL_ROOT)$(libdir)/libgdbm.la
- $(INSTALL_DATA) -o $(BINOWN) -g $(BINGRP) gdbm.h \
+ $(INSTALL_DATA) gdbm.h \
$(INSTALL_ROOT)$(includedir)/gdbm.h
- $(INSTALL_DATA) -o $(BINOWN) -g $(BINGRP) $(srcdir)/gdbm.3 \
+ $(INSTALL_DATA) $(srcdir)/gdbm.3 \
$(INSTALL_ROOT)$(man3dir)/gdbm.3
- $(INSTALL_DATA) -o $(BINOWN) -g $(BINGRP) $(srcdir)/gdbm.info \
+ $(INSTALL_DATA) $(srcdir)/gdbm.info \
$(INSTALL_ROOT)$(infodir)/gdbm.info
install-compat:
@@ -143,9 +139,9 @@
$(INSTALL_ROOT)$(includedir)
$(LIBTOOL) $(INSTALL) -c libgdbm_compat.la \
$(INSTALL_ROOT)$(libdir)/libgdbm_compat.la
- $(INSTALL_DATA) -o $(BINOWN) -g $(BINGRP) $(srcdir)/dbm.h \
+ $(INSTALL_DATA) $(srcdir)/dbm.h \
$(INSTALL_ROOT)$(includedir)/dbm.h
- $(INSTALL_DATA) -o $(BINOWN) -g $(BINGRP) $(srcdir)/ndbm.h \
+ $(INSTALL_DATA) $(srcdir)/ndbm.h \
$(INSTALL_ROOT)$(includedir)/ndbm.h
#libgdbm.a: $(OBJS) gdbm.h
diff -ur groonga-storage-engine-0.4.orig/configure groonga-storage-engine-0.4/configure
--- groonga-storage-engine-0.4.orig/configure 2010-11-24 06:23:50.000000000 +0100
+++ groonga-storage-engine-0.4/configure 2011-01-01 16:01:07.000000000 +0100
@@ -13925,8 +13925,8 @@
as_fn_error "failed to run \"$ac_mysql_config\": $plugindir" "$LINENO" 5
fi
MYSQL_INC="$MYSQL_INC $($ac_mysql_config --include)"
- ac_mysql_major_version="`$ac_mysql_config --version | cut -b 1-3`"
- if test "$ac_mysql_major_version" = "5.1"; then
+ ac_mysql_major_version="`$ac_mysql_config --version | cut -b 1,3`"
+ if test $ac_mysql_major_version -lt 55; then
MYSQL51="-DMYSQL51"
fi
--- ImageMagick-6.6.6-1/configure.ac~ 2010-11-28 21:51:05.000000000 +0100
+++ ImageMagick-6.6.6-1/configure.ac 2010-12-03 14:13:14.000000000 +0100
@@ -2791,7 +2791,7 @@
AC_PATH_PROG(PCLDelegate, "$PCLDelegateDefault", "$PCLDelegateDefault")
AC_PATH_PROG(PGPDecodeDelegate, "$PGPDecodeDelegateDefault", "$PGPDecodeDelegateDefault")
AC_PATH_PROG(POVDelegate, "$POVDelegateDefault", "$POVDelegateDefault")
-AC_PATH_PROGS(PSDelegate, gsx gsc "$PSDelegateDefault", "$PSDelegateDefault")
+AC_PATH_PROGS(PSDelegate, "$PSDelegateDefault", "$PSDelegateDefault")
AC_PATH_PROG(RLEEncodeDelegate, "$RLEEncodeDelegateDefault", "$RLEEncodeDelegateDefault")
AC_PATH_PROG(RMDelegate, "$RMDelegateDefault", "$RMDelegateDefault")
AC_PATH_PROG(RSVGDecodeDelegate, "$RSVGDecodeDelegateDefault", "$RSVGDecodeDelegateDefault")
--- ImageMagick-6.6.6-1/configure~ 2010-11-28 23:27:16.000000000 +0100
+++ ImageMagick-6.6.6-1/configure 2010-12-03 14:13:57.000000000 +0100
@@ -30931,7 +30931,7 @@
fi
-for ac_prog in gsx gsc "$PSDelegateDefault"
+for ac_prog in "$PSDelegateDefault"
do
# Extract the first word of "$ac_prog", so it can be a program name with args.
set dummy $ac_prog; ac_word=$2
--- ImageMagick-6.6.7-4/configure~ 2011-02-10 11:50:21.704561096 +0100
+++ ImageMagick-6.6.7-4/configure 2011-02-10 12:23:45.612561097 +0100
@@ -28251,7 +28251,7 @@
#
have_lzma='no'
LZMA_LIBS=''
-if test "$with_lzma" != 'no' || test "$with_tiff" != 'no'; then
+if test "$with_lzma" != 'no'; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: -------------------------------------------------------------" >&5
$as_echo "-------------------------------------------------------------" >&6; }
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for LZMA" >&5
--- ImageMagick-6.6.7-4/configure.ac~ 2011-02-10 11:50:21.693561096 +0100
+++ ImageMagick-6.6.7-4/configure.ac 2011-02-10 12:23:40.013561098 +0100
@@ -2290,7 +2290,7 @@
#
have_lzma='no'
LZMA_LIBS=''
-if test "$with_lzma" != 'no' || test "$with_tiff" != 'no'; then
+if test "$with_lzma" != 'no'; then
AC_MSG_RESULT([-------------------------------------------------------------])
AC_MSG_CHECKING(for LZMA)
AC_MSG_RESULT()
diff -ur jbigkit.orig/libjbig/Makefile jbigkit/libjbig/Makefile
--- jbigkit.orig/libjbig/Makefile 2008-08-30 20:20:52.000000000 +0300
+++ jbigkit/libjbig/Makefile 2010-02-28 13:12:41.000000000 +0200
@@ -1,29 +1,54 @@
# Unix makefile for the JBIG-KIT library
# $Id: jbigkit-2.0-build.patch,v 1.1 2010/02/28 11:21:39 ssuominen Exp $
-# Select an ANSI/ISO C compiler here, GNU gcc is recommended
-CC = gcc
+AR ?= ar
+CC ?= gcc
+RANLIB ?= ranlib
+
+CFLAGS += -Wall -ansi -pedantic
+
+ifeq ($(USERLAND),Darwin)
+ SONAME = dylib
+else
+ SONAME = so
+endif
-# Options for the compiler: A high optimization level is suggested
-CFLAGS = -g -O -Wall -ansi -pedantic # --coverage
-
-all: libjbig.a tstcodec tstcodec85
+all: libjbig.a libjbig.$(SONAME) libjbig85.a libjbig85.$(SONAME) tstcodec tstcodec85
tstcodec: tstcodec.o jbig.o jbig_ar.o
- $(CC) $(CFLAGS) -o tstcodec $+
+ $(CC) $(LDFLAGS) $(CFLAGS) -o tstcodec $+
tstcodec85: tstcodec85.o jbig85.o jbig_ar.o
- $(CC) $(CFLAGS) -o tstcodec85 $+
+ $(CC) $(LDFLAGS) $(CFLAGS) -o tstcodec85 $+
libjbig.a: jbig.o jbig_ar.o
rm -f libjbig.a
- ar rc libjbig.a jbig.o jbig_ar.o
- -ranlib libjbig.a
+ $(AR) rc libjbig.a jbig.o jbig_ar.o
+ -$(RANLIB) libjbig.a
+
+SOBJS = jbig.lo jbig_ar.lo
+
+libjbig.so: $(SOBJS)
+ $(CC) -shared $(LDFLAGS) -o $@ -Wl,-soname -Wl,$@ $(SOBJS)
+
+libjbig.dylib: $(SOBJS)
+ $(CC) -dynamic $(LDFLAGS) -o $@ -dynamiclib -install_name $@ $(SOBJS)
+
+SOBJS85 = jbig85.lo jbig_ar.lo
+
+libjbig85.so: $(SOBJS85)
+ $(CC) -shared $(LDFLAGS) -o $@ -Wl,-soname -Wl,$@ $(SOBJS85)
+
+libjbig85.dylib: $(SOBJS85)
+ $(CC) -dynamic $(LDFLAGS) -o $@ -dynamiclib -install_name $@ $(SOBJS85)
+
+%.lo: %.c jbig.h jbig85.h jbig_ar.h
+ $(CC) $(CFLAGS) -fPIC -c $< -o $@
libjbig85.a: jbig85.o jbig_ar.o
rm -f libjbig85.a
- ar rc libjbig85.a jbig85.o jbig_ar.o
- -ranlib libjbig85.a
+ $(AR) rc libjbig85.a jbig85.o jbig_ar.o
+ -$(RANLIB) libjbig85.a
jbig.o: jbig.c jbig.h jbig_ar.h
jbig85.o: jbig85.c jbig85.h jbig_ar.h
diff -ur jbigkit.orig/Makefile jbigkit/Makefile
--- jbigkit.orig/Makefile 2008-08-30 23:40:22.000000000 +0300
+++ jbigkit/Makefile 2010-02-28 12:59:07.000000000 +0200
@@ -1,34 +1,25 @@
# Unix makefile for JBIG-KIT
# $Id: jbigkit-2.0-build.patch,v 1.1 2010/02/28 11:21:39 ssuominen Exp $
-# Select an ANSI/ISO C compiler here, GNU gcc is recommended
-CC = gcc
-
-# Options for the compiler: A high optimization level is suggested
-CCFLAGS = -O2 -W
-#CCFLAGS = -O -g -W -Wall -ansi -pedantic #-DDEBUG # developer only
-
-CFLAGS = $(CCFLAGS) -I../libjbig
-
VERSION=2.0
all: lib pbm
- @echo "Enter 'make test' in order to start some automatic tests."
+ @echo "Enter '$(MAKE) test' in order to start some automatic tests."
lib:
- (cd libjbig; make "CC=$(CC)" "CFLAGS=$(CFLAGS)")
+ (cd libjbig; $(MAKE))
pbm: lib
- (cd pbmtools; make "CC=$(CC)" "CFLAGS=$(CFLAGS)")
+ (cd pbmtools; $(MAKE))
test: lib pbm
- (cd libjbig; make "CC=$(CC)" "CFLAGS=$(CFLAGS)" test)
- (cd pbmtools; make "CC=$(CC)" "CFLAGS=$(CFLAGS)" test)
+ (cd libjbig; $(MAKE) test)
+ (cd pbmtools; $(MAKE) test)
clean:
rm -f *~ core
- (cd libjbig; make clean)
- (cd pbmtools; make clean)
+ (cd libjbig; $(MAKE) clean)
+ (cd pbmtools; $(MAKE) clean)
distribution: clean
rm -f libjbig/libjbig*.a
diff -ur jbigkit.orig/pbmtools/Makefile jbigkit/pbmtools/Makefile
--- jbigkit.orig/pbmtools/Makefile 2008-08-26 01:26:39.000000000 +0300
+++ jbigkit/pbmtools/Makefile 2010-02-28 13:01:19.000000000 +0200
@@ -2,10 +2,10 @@
# $Id: jbigkit-2.0-build.patch,v 1.1 2010/02/28 11:21:39 ssuominen Exp $
# Select an ANSI/ISO C compiler here, e.g. GNU gcc is recommended
-CC = gcc
+CC ?= gcc
# Options for the compiler
-CFLAGS = -g -Wall -ansi -pedantic -I../libjbig # --coverage
+CFLAGS += -Wall -ansi -pedantic -I../libjbig
.SUFFIXES: .1 .5 .txt $(SUFFIXES)
@@ -13,16 +13,16 @@
pbmtojbg.txt jbgtopbm.txt pbm.txt pgm.txt
pbmtojbg: pbmtojbg.o ../libjbig/libjbig.a
- $(CC) $(CFLAGS) -o pbmtojbg pbmtojbg.o -L../libjbig -ljbig
+ $(CC) $(LDFLAGS) $(CFLAGS) -o pbmtojbg pbmtojbg.o -L../libjbig -ljbig
jbgtopbm: jbgtopbm.o ../libjbig/libjbig.a
- $(CC) $(CFLAGS) -o jbgtopbm jbgtopbm.o -L../libjbig -ljbig
+ $(CC) $(LDFLAGS) $(CFLAGS) -o jbgtopbm jbgtopbm.o -L../libjbig -ljbig
pbmtojbg85: pbmtojbg85.o ../libjbig/libjbig85.a
- $(CC) $(CFLAGS) -o pbmtojbg85 pbmtojbg85.o -L../libjbig -ljbig85
+ $(CC) $(LDFLAGS) $(CFLAGS) -o pbmtojbg85 pbmtojbg85.o -L../libjbig -ljbig85
jbgtopbm85: jbgtopbm85.o ../libjbig/libjbig85.a
- $(CC) $(CFLAGS) -o jbgtopbm85 jbgtopbm85.o -L../libjbig -ljbig85
+ $(CC) $(LDFLAGS) $(CFLAGS) -o jbgtopbm85 jbgtopbm85.o -L../libjbig -ljbig85
jbgtopbm.o: jbgtopbm.c ../libjbig/jbig.h
pbmtojbg.o: pbmtojbg.c ../libjbig/jbig.h
@@ -31,31 +31,31 @@
../libjbig/libjbig.a: ../libjbig/jbig.c ../libjbig/jbig.h \
../libjbig/jbig_ar.c ../libjbig/jbig_ar.h
- make -C ../libjbig libjbig.a
+ $(MAKE) -C ../libjbig libjbig.a
../libjbig/libjbig85.a: ../libjbig/jbig85.c ../libjbig/jbig85.h \
../libjbig/jbig_ar.c ../libjbig/jbig_ar.h
- make -C ../libjbig libjbig85.a
+ $(MAKE) -C ../libjbig libjbig85.a
test: test82 test85
test82: pbmtojbg jbgtopbm
- make IMG=ccitt1 OPTIONSP= dotest1
- make IMG=ccitt2 OPTIONSP= dotest1
- make IMG=ccitt3 OPTIONSP= dotest1
- make IMG=xvlogo "OPTIONSP=-d 3" dotest1
- make IMG=sandra OPTIONSP= OPTIONSJ= dotest2g
- make IMG=sandra OPTIONSP=-b OPTIONSJ=-b dotest2g
- make IMG=sandra OPTIONSP=-q OPTIONSJ= dotest2g
- make IMG=sandra "OPTIONSP=-o 0" OPTIONSJ= dotest2g
- make IMG=sandra "OPTIONSP=-o 2" OPTIONSJ= dotest2g
- make IMG=multi OPTIONSP= OPTIONSJ= dotest2g
- make IMG=multi OPTIONSP=-b OPTIONSJ=-b dotest2g
- make IMG=mx "OPTIONSP=-q -s 3 -m 128" dotest1
- make IMG=mx "OPTIONSP=-q -s 3 -m 128" dotest2b
- make IMG=mx "OPTIONSP=-q -s 3 -m 128 -p 92" dotest2b
- make IMG=mx "OPTIONSP=-q -Y -1" dotest2b
- make IMG=mx "OPTIONSP=-Y -1" dotest2b
+ $(MAKE) IMG=ccitt1 OPTIONSP= dotest1
+ $(MAKE) IMG=ccitt2 OPTIONSP= dotest1
+ $(MAKE) IMG=ccitt3 OPTIONSP= dotest1
+ $(MAKE) IMG=xvlogo "OPTIONSP=-d 3" dotest1
+ $(MAKE) IMG=sandra OPTIONSP= OPTIONSJ= dotest2g
+ $(MAKE) IMG=sandra OPTIONSP=-b OPTIONSJ=-b dotest2g
+ $(MAKE) IMG=sandra OPTIONSP=-q OPTIONSJ= dotest2g
+ $(MAKE) IMG=sandra "OPTIONSP=-o 0" OPTIONSJ= dotest2g
+ $(MAKE) IMG=sandra "OPTIONSP=-o 2" OPTIONSJ= dotest2g
+ $(MAKE) IMG=multi OPTIONSP= OPTIONSJ= dotest2g
+ $(MAKE) IMG=multi OPTIONSP=-b OPTIONSJ=-b dotest2g
+ $(MAKE) IMG=mx "OPTIONSP=-q -s 3 -m 128" dotest1
+ $(MAKE) IMG=mx "OPTIONSP=-q -s 3 -m 128" dotest2b
+ $(MAKE) IMG=mx "OPTIONSP=-q -s 3 -m 128 -p 92" dotest2b
+ $(MAKE) IMG=mx "OPTIONSP=-q -Y -1" dotest2b
+ $(MAKE) IMG=mx "OPTIONSP=-Y -1" dotest2b
rm -f test-*.jbg test-*.pbm test-*.pgm
./jbgtopbm ../examples/ccitt1.jbg | ./pbmtojbg > test-ccitt1.jbg
cmp ../examples/ccitt1.jbg test-ccitt1.jbg
@@ -91,24 +91,24 @@
cmp test-$(IMG).pgm ../examples/$(IMG).pgm
test85: pbmtojbg jbgtopbm pbmtojbg85 jbgtopbm85 test-t82.pbm
- make IMG=t82 "OPTIONSP=-p 0" dotest85
- make IMG=t82 "OPTIONSP=-p 8" dotest85
- make IMG=t82 "OPTIONSP=-p 8 -r" dotest85b
- make IMG=t82 "OPTIONSP=-p 64" dotest85
- make IMG=t82 "OPTIONSP=-p 72" dotest85
- make IMG=t82 "OPTIONSP=-s 2 -C c" dotest85
- make IMG=t82 "OPTIONSP=-s 99999" dotest85
- make IMG=t82 "OPTIONSP=-Y 9999 0" dotest85
- make IMG=t82 "OPTIONSP=-Y 1951 0" dotest85
- make IMG=t82 "OPTIONSP=-Y -1 127" dotest85
- make IMG=t82 "OPTIONSP=-Y -1 128" dotest85
- make IMG=t82 "OPTIONSP=-Y -1 1919" dotest85
- make IMG=t82 "OPTIONSP=-Y -1 1920" dotest85
- make IMG=t82 "OPTIONSP=-Y -1 1949" dotest85
- make IMG=t82 "OPTIONSP=-Y -1 1950" dotest85
- make IMG=ccitt1 dotest85
- make IMG=ccitt2 dotest85
- make IMG=ccitt3 dotest85
+ $(MAKE) IMG=t82 "OPTIONSP=-p 0" dotest85
+ $(MAKE) IMG=t82 "OPTIONSP=-p 8" dotest85
+ $(MAKE) IMG=t82 "OPTIONSP=-p 8 -r" dotest85b
+ $(MAKE) IMG=t82 "OPTIONSP=-p 64" dotest85
+ $(MAKE) IMG=t82 "OPTIONSP=-p 72" dotest85
+ $(MAKE) IMG=t82 "OPTIONSP=-s 2 -C c" dotest85
+ $(MAKE) IMG=t82 "OPTIONSP=-s 99999" dotest85
+ $(MAKE) IMG=t82 "OPTIONSP=-Y 9999 0" dotest85
+ $(MAKE) IMG=t82 "OPTIONSP=-Y 1951 0" dotest85
+ $(MAKE) IMG=t82 "OPTIONSP=-Y -1 127" dotest85
+ $(MAKE) IMG=t82 "OPTIONSP=-Y -1 128" dotest85
+ $(MAKE) IMG=t82 "OPTIONSP=-Y -1 1919" dotest85
+ $(MAKE) IMG=t82 "OPTIONSP=-Y -1 1920" dotest85
+ $(MAKE) IMG=t82 "OPTIONSP=-Y -1 1949" dotest85
+ $(MAKE) IMG=t82 "OPTIONSP=-Y -1 1950" dotest85
+ $(MAKE) IMG=ccitt1 dotest85
+ $(MAKE) IMG=ccitt2 dotest85
+ $(MAKE) IMG=ccitt3 dotest85
rm -f test-*.jbg test-*.jbg85 test-*.pbm
@echo
@echo "The T.85 pbmtools have PASSED the functional tests. Good!"
@@ -142,7 +142,7 @@
./jbgtopbm $< $@
test-t82.pbm:
- make -C ../libjbig tstcodec
+ $(MAKE) -C ../libjbig tstcodec
../libjbig/tstcodec $@
.1.txt .5.txt:
# Author: Guillaume BOTTEX
# Feature added: support for IPv6
# Bug fixed: https://github.com/etolabo/kumofs/issues#issue/8
diff --git a/src/logic/boot.cc b/src/logic/boot.cc
index d0a4c32..c197a30 100644
--- a/src/logic/boot.cc
+++ b/src/logic/boot.cc
@@ -29,6 +29,12 @@ scoped_listen_tcp::scoped_listen_tcp(struct sockaddr_in addr) :
m_addr(addr),
m_sock(listen(m_addr)) { }
+#ifdef KUMO_IPV6
+scoped_listen_tcp::scoped_listen_tcp(struct sockaddr_in6 addr) :
+ m_addr(addr),
+ m_sock(listen(m_addr)) { }
+#endif
+
scoped_listen_tcp::~scoped_listen_tcp()
{
::close(m_sock);
@@ -37,7 +43,11 @@ scoped_listen_tcp::~scoped_listen_tcp()
int scoped_listen_tcp::listen(const rpc::address& addr)
{
+#ifdef KUMO_IPV6
+ int lsock = socket(PF_INET6, SOCK_STREAM, 0);
+#else
int lsock = socket(PF_INET, SOCK_STREAM, 0);
+#endif
if(lsock < 0) {
throw std::runtime_error("socket failed");
}
@@ -152,7 +162,11 @@ void rpc_args::convert()
void cluster_args::convert()
{
cluster_addr = rpc::address(cluster_addr_in);
- cluster_addr_in.sin_addr.s_addr = INADDR_ANY; // listen any
+#ifdef KUMO_IPV6
+ // cluster_addr_in.sin6_addr = in6addr_any; // listen any
+#else
+ // cluster_addr_in.sin_addr.s_addr = INADDR_ANY; // listen any
+#endif
cluster_lsock = scoped_listen_tcp::listen(
rpc::address(cluster_addr_in));
rpc_args::convert();
diff --git a/src/logic/boot.h b/src/logic/boot.h
index 5d09549..e0e6ac1 100644
--- a/src/logic/boot.h
+++ b/src/logic/boot.h
@@ -31,6 +31,9 @@ namespace kumo {
class scoped_listen_tcp {
public:
scoped_listen_tcp(struct sockaddr_in addr);
+#ifdef KUMO_IPV6
+ scoped_listen_tcp(struct sockaddr_in6 addr);
+#endif
~scoped_listen_tcp();
public:
@@ -110,7 +113,11 @@ struct cluster_args : rpc_args {
virtual void set_basic_args();
virtual void show_usage();
+#ifdef KUMO_IPV6
+ struct sockaddr_in6 cluster_addr_in;
+#else
struct sockaddr_in cluster_addr_in;
+#endif
rpc::address cluster_addr; // convert
int cluster_lsock; // convert
diff --git a/src/logic/gateway/main.cc b/src/logic/gateway/main.cc
index 8829d6b..7faf81a 100644
--- a/src/logic/gateway/main.cc
+++ b/src/logic/gateway/main.cc
@@ -30,8 +30,13 @@ using namespace kumo;
struct arg_t : rpc_args {
+#ifdef KUMO_IPV6
+ sockaddr_in6 manager1_in;
+ sockaddr_in6 manager2_in;
+#else
sockaddr_in manager1_in;
sockaddr_in manager2_in;
+#endif
bool manager2_set;
rpc::address manager1; // convert
rpc::address manager2; // convert
@@ -48,15 +53,27 @@ struct arg_t : rpc_args {
std::string local_cache;
bool mctext_set;
+#ifdef KUMO_IPV6
+ sockaddr_in6 mctext_addr_in;
+#else
sockaddr_in mctext_addr_in;
+#endif
int mctext_lsock; // convert
bool mcbin_set;
+#ifdef KUMO_IPV6
+ sockaddr_in6 mcbin_addr_in;
+#else
sockaddr_in mcbin_addr_in;
+#endif
int mcbin_lsock; // convert
bool cloudy_set;
+#ifdef KUMO_IPV6
+ sockaddr_in6 cloudy_addr_in;
+#else
sockaddr_in cloudy_addr_in;
+#endif
int cloudy_lsock; // convert
bool mc_save_flag;
diff --git a/src/logic/manager/main.cc b/src/logic/manager/main.cc
index 5847ae0..63f948a 100644
--- a/src/logic/manager/main.cc
+++ b/src/logic/manager/main.cc
@@ -27,7 +27,11 @@ struct arg_t : cluster_args {
bool auto_replace;
bool partner_set;
+#ifdef KUMO_IPV6
+ struct sockaddr_in6 partner_in;
+#else
struct sockaddr_in partner_in;
+#endif
rpc::address partner; // convert
virtual void convert()
diff --git a/src/logic/server/main.cc b/src/logic/server/main.cc
index cd92e46..5d87120 100644
--- a/src/logic/server/main.cc
+++ b/src/logic/server/main.cc
@@ -25,8 +25,13 @@ struct arg_t : cluster_args {
std::string dbpath;
+#ifdef KUMO_IPV6
+ sockaddr_in6 manager1_in;
+ sockaddr_in6 manager2_in;
+#else
sockaddr_in manager1_in;
sockaddr_in manager2_in;
+#endif
bool manager2_set;
rpc::address manager1; // convert
rpc::address manager2; // convert
diff --git a/src/logic/server/mod_replace_stream.cc b/src/logic/server/mod_replace_stream.cc
index 091ebc9..25ee736 100644
--- a/src/logic/server/mod_replace_stream.cc
+++ b/src/logic/server/mod_replace_stream.cc
@@ -104,7 +104,11 @@ RPC_IMPL(mod_replace_stream_t, ReplaceOffer, req, z, response)
using namespace mp::placeholders;
m_stream_core->connect(
+#ifdef KUMO_IPV6
+ PF_INET6, SOCK_STREAM, 0,
+#else
PF_INET, SOCK_STREAM, 0,
+#endif
(sockaddr*)addrbuf, sizeof(addrbuf),
net->connect_timeout_msec(),
mp::bind(&mod_replace_stream_t::stream_connected, this, _1, _2));
diff --git a/src/rpc/client_tmpl.h b/src/rpc/client_tmpl.h
index 8424677..173a308 100644
--- a/src/rpc/client_tmpl.h
+++ b/src/rpc/client_tmpl.h
@@ -159,7 +159,11 @@ bool client_tmpl<Transport, Session>::async_connect(
addr.getaddr((sockaddr*)&addrbuf);
using namespace mp::placeholders;
+#ifdef KUMO_IPV6
+ wavy::connect(PF_INET6, SOCK_STREAM, 0,
+#else
wavy::connect(PF_INET, SOCK_STREAM, 0,
+#endif
(sockaddr*)addrbuf, sizeof(addrbuf),
m_connect_timeout_msec,
mp::bind(
Taken originally from the openSUSE spec. Needed on openSUSE to avoid "error: array subscript is above array bounds".
--- memcached-orig/memcached.c
+++ memcached-new/memcached.c 2010/05/06 11:40:56
@@ -2335,15 +2335,18 @@
inline static void process_stats_detail(conn *c, const char *command) {
assert(c != NULL);
- if (strcmp(command, "on") == 0) {
+ char on[] = "on";
+ char off[] = "off";
+ char dump[] = "dump";
+ if (strcmp(command, on) == 0) {
settings.detail_enabled = 1;
out_string(c, "OK");
}
- else if (strcmp(command, "off") == 0) {
+ else if (strcmp(command, off) == 0) {
settings.detail_enabled = 0;
out_string(c, "OK");
}
- else if (strcmp(command, "dump") == 0) {
+ else if (strcmp(command, dump) == 0) {
int len;
char *stats = stats_prefix_dump(&len);
write_and_free(c, stats, len);
diff -rdBu memcached-1.4.0-rc1/memcached.h memcached-1.4.0-my/memcached.h
--- memcached-1.4.0-rc1/memcached.h 2009-05-29 00:51:56.000000000 +0400
+++ memcached-1.4.0-my/memcached.h 2009-06-07 22:32:52.000000000 +0400
@@ -75,21 +75,21 @@
/* warning: don't use these macros with a function, as it evals its arg twice */
#define ITEM_get_cas(i) ((uint64_t)(((i)->it_flags & ITEM_CAS) ? \
- *(uint64_t*)&((i)->end[0]) : 0x0))
+ *(uint64_t*)((char*)(i) + sizeof(*i)) : 0x0))
#define ITEM_set_cas(i,v) { if ((i)->it_flags & ITEM_CAS) { \
- *(uint64_t*)&((i)->end[0]) = v; } }
+ *(uint64_t*)((char*)(i) + sizeof(*i)) = v; } }
-#define ITEM_key(item) (((char*)&((item)->end[0])) \
+#define ITEM_key(item) ((char*)(item) + sizeof(*item) \
+ (((item)->it_flags & ITEM_CAS) ? sizeof(uint64_t) : 0))
-#define ITEM_suffix(item) ((char*) &((item)->end[0]) + (item)->nkey + 1 \
+#define ITEM_suffix(item) ((char*)(item) + sizeof(*item) + (item)->nkey + 1 \
+ (((item)->it_flags & ITEM_CAS) ? sizeof(uint64_t) : 0))
-#define ITEM_data(item) ((char*) &((item)->end[0]) + (item)->nkey + 1 \
+#define ITEM_data(item) ((char*)(item) + sizeof(*item) + (item)->nkey + 1 \
+ (item)->nsuffix \
+ (((item)->it_flags & ITEM_CAS) ? sizeof(uint64_t) : 0))
-#define ITEM_ntotal(item) (sizeof(struct _stritem) + (item)->nkey + 1 \
+#define ITEM_ntotal(item) (sizeof(*item) + (item)->nkey + 1 \
+ (item)->nsuffix + (item)->nbytes \
+ (((item)->it_flags & ITEM_CAS) ? sizeof(uint64_t) : 0))
@@ -285,7 +285,6 @@
uint8_t it_flags; /* ITEM_* above */
uint8_t slabs_clsid;/* which slab class we're in */
uint8_t nkey; /* key length, w/terminating null and padding */
- void * end[];
/* if it_flags & ITEM_CAS we have 8 bytes CAS */
/* then null-terminated key */
/* then " flags length\r\n" (no terminating null) */
diff --git a/items.c b/items.c
index e7f01ea..9fc6704 100644
--- a/items.c
+++ b/items.c
@@ -450,9 +450,7 @@ void do_item_stats_sizes(ADD_STAT add_stats, void *c) {
for (i = 0; i < num_buckets; i++) {
if (histogram[i] != 0) {
char key[8];
- int klen = 0;
- klen = snprintf(key, sizeof(key), "%d", i * 32);
- assert(klen < sizeof(key));
+ assert(snprintf(key, sizeof(key), "%d", i * 32) < sizeof(key));
APPEND_STAT(key, "%u", histogram[i]);
}
}
diff --git a/memcached.c b/memcached.c
index 750c8b3..0913b77 100644
--- a/memcached.c
+++ b/memcached.c
@@ -4627,8 +4627,6 @@ int main (int argc, char **argv) {
/* create the listening socket, bind it, and init */
if (settings.socketpath == NULL) {
- int udp_port;
-
const char *portnumber_filename = getenv("MEMCACHED_PORT_FILENAME");
char temp_portnumber_filename[PATH_MAX];
FILE *portnumber_file = NULL;
@@ -4658,7 +4656,6 @@ int main (int argc, char **argv) {
* then daemonise if needed, then init libevent (in some cases
* descriptors created by libevent wouldn't survive forking).
*/
- udp_port = settings.udpport ? settings.udpport : settings.port;
/* create the UDP listening socket and bind it */
errno = 0;
diff --git a/pdftk/Makefile.Base b/pdftk/Makefile.Base
index 0bedd04..36a439e 100644
--- a/pdftk/Makefile.Base
+++ b/pdftk/Makefile.Base
@@ -35,22 +35,24 @@ javalib :
$(MAKE) -f Makefile -iC $(JAVALIBPATH) all
attachments.o : attachments.cc attachments.h pdftk.h $(JAVALIB)
- $(CXX) $(CPPFLAGS) $(CXXFLAGS) -I$(JAVALIBPATH) attachments.cc -c
+ $(CXX) $(CPPFLAGS) $(LDFLAGS) $(CXXFLAGS) -I$(JAVALIBPATH) attachments.cc -c
report.o : report.cc report.h pdftk.h $(JAVALIB)
- $(CXX) $(CPPFLAGS) $(CXXFLAGS) -I$(JAVALIBPATH) report.cc -c
+ $(CXX) $(CPPFLAGS) $(LDFLAGS) $(CXXFLAGS) -I$(JAVALIBPATH) report.cc -c
pdftk.o : pdftk.cc pdftk.h attachments.h report.h $(JAVALIB) $(GCJ_LOCAL_LIB_FULL)
- $(CXX) $(CPPFLAGS) $(CXXFLAGS) -I$(JAVALIBPATH) pdftk.cc -c
+ $(CXX) $(CPPFLAGS) $(LDFLAGS) $(CXXFLAGS) -I$(JAVALIBPATH) pdftk.cc -c
pdftk : pdftk.o attachments.o report.o $(JAVALIB) $(GCJ_LOCAL_LIB_FULL)
- $(CXX) $(CXXFLAGS) attachments.o report.o pdftk.o $(JAVALIB) $(GCJ_LOCAL_LIB_FULL) $(LDLIBS) -o pdftk
+ $(CXX) $(CXXFLAGS) $(LDFLAGS) attachments.o report.o pdftk.o $(JAVALIB) $(GCJ_LOCAL_LIB_FULL) $(LDLIBS) -o pdftk
install:
- /usr/bin/install pdftk /usr/local/bin
+ mkdir -p ${DEST}/bin
+ install pdftk ${DEST}/bin
uninstall:
- $(RM) $(RMFLAGS) /usr/local/bin/pdftk
+ $(RM) $(RMFLAGS) ${DEST}/pdftk
+ rmdir ${DEST}
clean:
$(RM) $(RMFLAGS) *.o
--- perl-5.12.3/Configure.orig 2011-01-09 21:20:52.000000000 +0100
+++ perl-5.12.3/Configure 2011-01-26 19:33:33.106561007 +0100
@@ -8092,16 +8092,7 @@
case "$dflt" in
none) dflt='' ;;
esac
- for thisflag in $ldflags; do
- case "$thisflag" in
- -L*|-R*|-Wl,-R*)
- case " $dflt " in
- *" $thisflag "*) ;;
- *) dflt="$dflt $thisflag" ;;
- esac
- ;;
- esac
- done
+ dflt="$dflt $ldflags"
case "$dflt" in
''|' ') dflt='none' ;;
--- Python-2.6.6/setup.py~ 2010-12-11 12:05:13.000000000 +0100
+++ Python-2.6.6/setup.py 2011-03-05 13:33:04.556662701 +0100
@@ -411,7 +411,7 @@
'/lib64', '/usr/lib64',
'/lib', '/usr/lib',
]
- inc_dirs = self.compiler.include_dirs + ['/usr/include']
+ inc_dirs = self.compiler.include_dirs
exts = []
missing = []
@@ -818,15 +818,6 @@
# construct a list of paths to look for the header file in on
# top of the normal inc_dirs.
db_inc_paths = [
- '/usr/include/db4',
- '/usr/local/include/db4',
- '/opt/sfw/include/db4',
- '/usr/include/db3',
- '/usr/local/include/db3',
- '/opt/sfw/include/db3',
- # Fink defaults (http://fink.sourceforge.net/)
- '/sw/include/db4',
- '/sw/include/db3',
]
# 4.x minor number specific paths
for x in gen_db_minor_ver_nums(4):
From: Arnaud Fontaine <arnaud.fontaine@nexedi.com>
Date: Mon, 7 Mar 2011 13:02:05 +0900
Subject: [PATCH] Pass CPPFLAGS when building modules in Python 2.4
Contrary to Python 2.6, CPPFLAGS specified to the configure script are not
passed at all when building Python 2.4 modules. This patch backports the fix
from Python 2.6.
--- Makefile.pre.in 2006-10-09 02:41:25.000000000 +0900
+++ Makefile.pre.in 2011-03-07 14:58:34.368000777 +0900
@@ -56,7 +56,10 @@
OPT= @OPT@
BASECFLAGS= @BASECFLAGS@
CFLAGS= $(BASECFLAGS) $(OPT)
-CPPFLAGS= -I. -I$(srcdir)/Include
+# Both CPPFLAGS and LDFLAGS need to contain the shell's value for setup.py to
+# be able to build extension modules using the directories specified in the
+# environment variables
+CPPFLAGS= -I. -I$(srcdir)/Include @CPPFLAGS@
LDFLAGS= @LDFLAGS@
LDLAST= @LDLAST@
SGI_ABI= @SGI_ABI@
--- setup.py 2006-10-09 02:41:25.000000000 +0900
+++ setup.py 2011-03-07 14:53:36.208000779 +0900
@@ -3,7 +3,7 @@
__version__ = "$Revision: 52231 $"
-import sys, os, getopt, imp, re
+import sys, os, getopt, imp, re, optparse
from distutils import log
from distutils import sysconfig
@@ -243,6 +243,39 @@
add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
+ # Add paths specified in the environment variables LDFLAGS and
+ # CPPFLAGS for header and library files.
+ # We must get the values from the Makefile and not the environment
+ # directly since an inconsistently reproducible issue comes up where
+ # the environment variable is not set even though the value were passed
+ # into configure and stored in the Makefile (issue found on OS X 10.3).
+ for env_var, arg_name, dir_list in (
+ ('LDFLAGS', '-L', self.compiler.library_dirs),
+ ('CPPFLAGS', '-I', self.compiler.include_dirs)):
+ env_val = sysconfig.get_config_var(env_var)
+ if env_val:
+ # To prevent optparse from raising an exception about any
+ # options in env_val that is doesn't know about we strip out
+ # all double dashes and any dashes followed by a character
+ # that is not for the option we are dealing with.
+ #
+ # Please note that order of the regex is important! We must
+ # strip out double-dashes first so that we don't end up with
+ # substituting "--Long" to "-Long" and thus lead to "ong" being
+ # used for a library directory.
+ env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
+ ' ', env_val)
+ parser = optparse.OptionParser()
+ # Make sure that allowing args interspersed with options is
+ # allowed
+ parser.allow_interspersed_args = True
+ parser.error = lambda msg: None
+ parser.add_option(arg_name, dest="dirs", action="append")
+ options = parser.parse_args(env_val.split())[0]
+ if options.dirs:
+ for directory in reversed(options.dirs):
+ add_dir_to_list(dir_list, directory)
+
# Add paths to popular package managers on OS X/darwin
if sys.platform == "darwin":
# Fink installs into /sw by default
--- Lib/optparse.py 2006-05-29 03:15:43.000000000 +0900
+++ Lib/optparse.py 2011-03-07 21:20:17.192000789 +0900
@@ -69,7 +69,13 @@
import sys, os
import types
import textwrap
-from gettext import gettext as _
+
+try:
+ from gettext import gettext
+except ImportError:
+ def gettext(message):
+ return message
+_ = gettext
def _repr(self):
return "<%s at 0x%x: %s>" % (self.__class__.__name__, id(self), self)
From: Arnaud Fontaine <arnaud.fontaine@nexedi.com>
Date: Mon, 7 Mar 2011 14:10:31 +0900
Subject: [PATCH] Do not use system include directory
Backport python-2.6.6-no_system_inc_dirs.patch to Python 2.4.
--- setup.py 2006-10-09 02:41:25.000000000 +0900
+++ setup.py 2011-03-07 15:15:00.724000778 +0900
@@ -270,7 +270,7 @@
'/lib64', '/usr/lib64',
'/lib', '/usr/lib',
]
- inc_dirs = self.compiler.include_dirs + ['/usr/include']
+ inc_dirs = self.compiler.include_dirs
exts = []
platform = self.get_platform()
@@ -528,31 +528,7 @@
# construct a list of paths to look for the header file in on
# top of the normal inc_dirs.
- db_inc_paths = [
- '/usr/include/db4',
- '/usr/local/include/db4',
- '/opt/sfw/include/db4',
- '/sw/include/db4',
- '/usr/include/db3',
- '/usr/local/include/db3',
- '/opt/sfw/include/db3',
- '/sw/include/db3',
- ]
- # 4.x minor number specific paths
- for x in (0,1,2,3,4):
- db_inc_paths.append('/usr/include/db4%d' % x)
- db_inc_paths.append('/usr/include/db4.%d' % x)
- db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
- db_inc_paths.append('/usr/local/include/db4%d' % x)
- db_inc_paths.append('/pkg/db-4.%d/include' % x)
- db_inc_paths.append('/opt/db-4.%d/include' % x)
- # 3.x minor number specific paths
- for x in (2,3):
- db_inc_paths.append('/usr/include/db3%d' % x)
- db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
- db_inc_paths.append('/usr/local/include/db3%d' % x)
- db_inc_paths.append('/pkg/db-3.%d/include' % x)
- db_inc_paths.append('/opt/db-3.%d/include' % x)
+ db_inc_paths = []
# Add some common subdirectories for Sleepycat DB to the list,
# based on the standard include directories. This way DB3/4 gets
# http://www.sphinxsearch.com/bugs/view.php?id=550
# 0000550: Can not make libsphinxclient
--- sphinx-1.10-beta/api/libsphinxclient/sphinxclient.c 2010-07-15 13:05:40.000000000 +0200
+++ sphinx-1.10-beta/api/libsphinxclient/sphinxclient.c 2010-07-21 20:43:26.760024489 +0200
@@ -1355,11 +1355,13 @@
optval = 1;
#ifndef _WIN32
+ #ifdef SO_NOSIGPIPE
if ( setsockopt ( sock, SOL_SOCKET, SO_NOSIGPIPE, (void *)&optval, (socklen_t)sizeof(optval) ) < 0 )
{
set_error ( client, "setsockopt() failed: %s", sock_error() );
return -1;
}
+ #endif
#endif
res = connect ( sock, (struct sockaddr*)&sa, sizeof(sa) );
--- Makefile.in
+++ Makefile.in
@@ -47,6 +47,7 @@
SVN_SASL_LIBS = @SVN_SASL_LIBS@
SVN_SERF_LIBS = @SVN_SERF_LIBS@
SVN_SQLITE_LIBS = @SVN_SQLITE_LIBS@
+SVN_XML_LIBS = -lexpat
SVN_ZLIB_LIBS = @SVN_ZLIB_LIBS@
LIBS = @LIBS@
--- build/ac-macros/aprutil.m4
+++ build/ac-macros/aprutil.m4
@@ -77,16 +77,14 @@
AC_MSG_ERROR([apu-config --prefix failed])
fi
- dnl When APR stores the dependent libs in the .la file, we don't need
- dnl --libs.
- SVN_APRUTIL_LIBS="`$apu_config --link-libtool --libs`"
+ SVN_APRUTIL_LIBS="`$apu_config --link-libtool`"
if test $? -ne 0; then
- AC_MSG_ERROR([apu-config --link-libtool --libs failed])
+ AC_MSG_ERROR([apu-config --link-libtool failed])
fi
- SVN_APRUTIL_EXPORT_LIBS="`$apu_config --link-ld --libs`"
+ SVN_APRUTIL_EXPORT_LIBS="`$apu_config --link-ld`"
if test $? -ne 0; then
- AC_MSG_ERROR([apu-config --link-ld --libs failed])
+ AC_MSG_ERROR([apu-config --link-ld failed])
fi
AC_SUBST(SVN_APRUTIL_INCLUDES)
--- build/ac-macros/apr.m4
+++ build/ac-macros/apr.m4
@@ -74,16 +74,14 @@
AC_MSG_ERROR([apr-config --prefix failed])
fi
- dnl When APR stores the dependent libs in the .la file, we don't need
- dnl --libs.
- SVN_APR_LIBS="`$apr_config --link-libtool --libs`"
+ SVN_APR_LIBS="`$apr_config --link-libtool`"
if test $? -ne 0; then
- AC_MSG_ERROR([apr-config --link-libtool --libs failed])
+ AC_MSG_ERROR([apr-config --link-libtool failed])
fi
- SVN_APR_EXPORT_LIBS="`$apr_config --link-ld --libs`"
+ SVN_APR_EXPORT_LIBS="`$apr_config --link-ld`"
if test $? -ne 0; then
- AC_MSG_ERROR([apr-config --link-ld --libs failed])
+ AC_MSG_ERROR([apr-config --link-ld failed])
fi
SVN_APR_SHLIB_PATH_VAR="`$apr_config --shlib-path-var`"
diff --git a/Makefile b/Makefile
index dbb15d5..852a627 100644
--- a/Makefile
+++ b/Makefile
@@ -137,5 +137,5 @@ $(TARGET): xtrabackup.o $(INNODBOBJS) $(MYSQLOBJS)
clean:
rm -f *.o xtrabackup_*
install:
- install -m 755 innobackupex-1.5.1 $(BIN_DIR)
+ install -m 755 innobackupex-1.5.1 $(BIN_DIR)/innobackupex
install -m 755 xtrabackup_* $(BIN_DIR)
diff --git a/utils/build.sh b/utils/build.sh
index 3f0cee7..c41dc89 100755
--- a/utils/build.sh
+++ b/utils/build.sh
@@ -21,12 +21,14 @@ function usage()
{
echo "Build an xtrabackup binary against the specified InnoDB flavor."
echo
- echo "Usage: `basename $0` CODEBASE"
+ echo "Usage: `basename $0` CODEBASE PREFIX LIBTOOL_LOCATION"
echo "where CODEBASE can be one of the following values or aliases:"
echo " innodb51_builtin | 5.1 build against built-in InnoDB in MySQL 5.1"
echo " innodb55 | 5.5 build against InnoDB in MySQL 5.5"
echo " xtradb51 | xtradb build against Percona Server with XtraDB 5.1"
echo " xtradb55 | xtradb55 build against Percona Server with XtraDB 5.5"
+ echo "where PREFIX is abolute path for install location"
+ echo "where LIBTOOL_LOCATION is abolute path of libtool"
exit -1
}
@@ -79,7 +81,12 @@ function build_server()
{
echo "Configuring the server"
cd $server_dir
- BUILD/autorun.sh
+ libtoolize -c -f
+ aclocal -I $libtool_location/share/aclocal -I config/ac-macros
+ autoheader
+ automake -c -a -f
+ autoconf
+ touch sql/sql_yacc.yy
eval $configure_cmd
echo "Building the server"
@@ -92,9 +99,10 @@ function build_xtrabackup()
echo "Building XtraBackup"
mkdir $build_dir
cp $top_dir/Makefile $top_dir/xtrabackup.c $build_dir
+ cp $top_dir/innobackupex $build_dir/innobackupex-1.5.1
cd $build_dir
- $MAKE_CMD $xtrabackup_target
+ $MAKE_CMD PREFIX=$1 $xtrabackup_target
cd $top_dir
}
@@ -103,11 +111,36 @@ function build_tar4ibd()
echo "Building tar4ibd"
unpack_and_patch libtar-1.2.11.tar.gz tar4ibd_libtar-1.2.11.patch
cd libtar-1.2.11
- ./configure
+ ./configure --prefix=$1
$MAKE_CMD
cd $topdir
}
+function install_server()
+{
+ echo "Installing the server"
+ cd $server_dir
+ $MAKE_CMD install
+ cd $top_dir
+}
+
+function install_xtrabackup()
+{
+ echo "Installing XtraBackup"
+ echo $build_dir
+ cd $build_dir
+ $MAKE_CMD PREFIX=$1 install
+ cd $top_dir
+}
+
+function install_tar4ibd()
+{
+ echo "Installing tar4ibd"
+ cd libtar-1.2.11
+ $MAKE_CMD install
+ cd $topdir
+}
+
################################################################################
# Do all steps to build the server, xtrabackup and tar4ibd
# Expects the following variables to be set before calling:
@@ -136,9 +169,15 @@ function build_all()
build_server
- build_xtrabackup
+ build_xtrabackup $1
+
+ build_tar4ibd $1
+
+ install_server
+
+ install_xtrabackup $1
- build_tar4ibd
+ install_tar4ibd
}
if ! test -f xtrabackup.c
@@ -148,6 +187,15 @@ then
fi
type=$1
+prefix=$2
+if [ "x$prefix" == "x" ] ; then
+ usage
+fi
+libtool_location=$3
+if [ "x$libtool_location" == "x" ] ; then
+ usage
+fi
+
top_dir=`pwd`
case "$type" in
@@ -161,9 +209,10 @@ case "$type" in
--with-plugins=innobase \
--with-zlib-dir=bundled \
--enable-shared \
- --with-extra-charsets=complex"
+ --with-extra-charsets=complex \
+ --prefix=$2"
- build_all
+ build_all $2
;;
"innodb55" | "5.5")
@@ -225,7 +274,7 @@ case "$type" in
build_server
- build_xtrabackup
+ build_xtrabackup
build_tar4ibd
;;
[buildout]
parts =
  apache
  apache-antiloris
extends =
  libexpat.cfg
  libuuid.cfg
  gdbm.cfg
  openssl.cfg
  pcre.cfg
  pkgconfig.cfg
  sqlite3.cfg
  zlib.cfg
[apache]
# inspired by http://old.aclark.net/team/aclark/blog/a-lamp-buildout-for-wordpress-and-other-php-apps/
recipe = hexagonit.recipe.cmmi
url = http://mir2.ovh.net/ftp.apache.org/dist//httpd/httpd-2.2.19.tar.bz2
md5sum = 832f96a6ec4b8fc7cf49b9efd4e89060
configure-options = --disable-static
  --enable-authn-alias
  --enable-bucketeer
  --enable-cache
  --enable-case-filter
  --enable-case-filter-in
  --enable-cgid
  --enable-charset-lite
  --enable-disk-cache
  --enable-echo
  --enable-exception-hook
  --enable-mods-shared=all
  --enable-optional-fn-export
  --enable-optional-fn-import
  --enable-optional-hook-export
  --enable-optional-hook-import
  --enable-proxy
  --enable-proxy-ajp
  --enable-proxy-balancer
  --enable-proxy-connect
  --enable-proxy-ftp
  --enable-proxy-http
  --enable-proxy-scgi
  --enable-so
  --enable-ssl
  --with-included-apr
  --with-ssl=${openssl:location}
  --with-z=${zlib:location}
  --with-expat=${libexpat:location}
  --with-pcre=${pcre:location}
  --with-sqlite3=${sqlite3:location}
  --with-dbm=gdbm
  --with-gdm=${gdbm:location}
  --without-lber
  --without-ldap
  --without-ndbm
  --without-berkeley-db
  --without-pgsql
  --without-mysql
  --without-sqlite2
  --without-oracle
  --without-freedts
  --without-odbc
  --without-iconv
environment =
  PATH=${pkgconfig:location}/bin:%(PATH)s
  PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig
  CPPFLAGS =-I${libuuid:location}/include
  LDFLAGS =-Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath -Wl,${openssl:location}/lib -L${libuuid:location}/lib -Wl,-rpath -Wl,${libuuid:location}/lib -Wl,-rpath -Wl,${libexpat:location}/lib -Wl,-rpath -Wl,${pcre:location}/lib -Wl,-rpath -Wl,${sqlite3:location}/lib -Wl,-rpath -Wl,${gdbm:location}/lib
[apache-antiloris]
# Note: shall react to each rebuild of apache and reinstall itself
recipe = hexagonit.recipe.cmmi
url = http://sourceforge.net/projects/mod-antiloris/files/mod_antiloris-0.4.tar.bz2/download
md5sum = 66862bf10e9be3a023e475604a28a0b4
configure-command = ${apache:location}/bin/apxs
configure-options = -c mod_antiloris.c
make-binary = ${:configure-command}
make-options = -i -a -n antiloris mod_antiloris.la
make-targets =
[buildout]
extends =
  m4.cfg
  perl.cfg
parts =
  autoconf
[autoconf]
recipe = hexagonit.recipe.cmmi
url = http://ftp.gnu.org/gnu/autoconf/autoconf-2.68.tar.gz
md5sum = c3b5247592ce694f7097873aa07d66fe
environment =
  M4=${m4:location}/bin/m4
  PATH=${perl:location}/bin:%(PATH)s
[buildout]
extends =
  autoconf.cfg
  perl.cfg
parts =
  automake-1.11
[automake-1.11]
recipe = hexagonit.recipe.cmmi
md5sum = c2972c4d9b3e29c03d5f2af86249876f
url = http://ftp.gnu.org/gnu/automake/automake-1.11.1.tar.bz2
environment =
  PATH =${autoconf:location}/bin:${perl:location}/bin:%(PATH)s
[buildout]
extends =
  m4.cfg
parts =
  bison
[bison]
recipe = hexagonit.recipe.cmmi
url = http://ftp.gnu.org/gnu/bison/bison-2.4.3.tar.gz
md5sum = ea45c778b36bdc7a720096819e292a73
environment =
  M4=${m4:location}/bin/m4
[buildout]
parts =
  boost-lib
[boost-lib-download]
recipe = hexagonit.recipe.download
url = http://downloads.sourceforge.net/sourceforge/boost/boost_1_43_0.tar.gz
md5sum = 734565ca4819bf04bd8e903e116c3fb1
strip-top-level-dir = true
[boost-lib]
recipe = plone.recipe.command
source = ${boost-lib-download:location}
destination = ${buildout:parts-directory}/${:_buildout_section_name_}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
stop-on-error = true
command =
  rm -fr ${:destination} &&
  mkdir -p ${:destination} &&
  cd ${:source} &&
  ./bootstrap.sh --prefix=${:location} &&
  ./bjam install
[buildout]
parts =
  bzip2
[bzip2-hooks-download]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../hook/${:filename}
md5sum = 066c8355b7d726f30176ea5b6a35e1a2
download-only = true
filename = bzip2-hooks.py
[bzip2]
recipe = hexagonit.recipe.cmmi
url = http://www.bzip.org/1.0.6/bzip2-1.0.6.tar.gz
md5sum = 00b516f4704d4a7cb50a1d97e6e8e15b
configure-command = true
make-options =
  PREFIX=${buildout:parts-directory}/${:_buildout_section_name_}
  CFLAGS="-fpic -fPIC -Wall -Winline -O2 -g -D_FILE_OFFSET_BITS=64"
post-make-hook = ${bzip2-hooks-download:location}/${bzip2-hooks-download:filename}:post_make_hook
[buildout]
parts = ccache
[ccache-common]
recipe = hexagonit.recipe.cmmi
[ccache]
<= ccache-3.1
[ccache-3.1]
<= ccache-common
url = http://samba.org/ftp/ccache/ccache-3.1.tar.bz2
md5sum = 7961852e1e36f11559039c32142f58df
[buildout]
extends =
  cloudooo.cfg
extensions = mr.developer
sources = sources
auto-checkout = ${buildout:cloudooo-packages}
[sources]
cloudooo = svn https://svn.erp5.org/repos/public/erp5/trunk/utils/cloudooo
cloudooo.handler.pdf = svn https://svn.erp5.org/repos/public/erp5/trunk/utils/cloudooo.handler.pdf/
cloudooo.handler.ffmpeg = svn https://svn.erp5.org/repos/public/erp5/trunk/utils/cloudooo.handler.ffmpeg/
cloudooo.handler.imagemagick = svn https://svn.erp5.org/repos/public/erp5/trunk/utils/cloudooo.handler.imagemagick/
cloudooo.handler.ooo = svn https://svn.erp5.org/repos/public/erp5/trunk/utils/cloudooo.handler.ooo/
[buildout]
software_home = ${:directory}
extends =
  ../profiles/versions-common.cfg
  ../profiles/software-definition.cfg
  ../profiles/common.cfg
  xpdf.cfg
  imagemagick.cfg
  file.cfg
  pdftk.cfg
  ffmpeg.cfg
  python-2.6.cfg
  libreoffice-bin.cfg
  lxml-python.cfg
python = software_definition
versions = versions
parts =
  instance_template
  cloudooo
  imagemagick
  instance-egg
  libreoffice-bin
  file
  xpdf
  pdftk
  ffmpeg
  python2.6
  bootstrap2.6
cloudooo-packages =
  cloudooo.handler.ooo
  cloudooo.handler.pdf
  cloudooo.handler.ffmpeg
  cloudooo.handler.imagemagick
  cloudooo
[instance-egg]
recipe = zc.recipe.egg
python = python2.6
eggs =
  erp5.recipe.cloudoooinstance
  z3c.recipe.mkdir
  ${buildout:cloudooo-packages}
[cloudooo]
recipe = zc.recipe.egg
python = python2.6
interpreter = pycloudoo
scripts =
eggs =
  ${lxml-python:egg}
  collective.recipe.supervisor
  plone.recipe.command
  erp5.extension.sectionextender
  supervisor
  ${buildout:cloudooo-packages}
[software_definition]
software_home = ${buildout:directory}
executable = ${python2.6:executable}
[buildout]
parts =
  cmake
[cmake]
recipe = hexagonit.recipe.cmmi
url = http://www.cmake.org/files/v2.8/cmake-2.8.3.tar.gz
md5sum = a76a44b93acf5e3badda9de111385921
[buildout]
parts =
  coreutils
[coreutils]
recipe = hexagonit.recipe.cmmi
url = http://ftp.gnu.org/gnu/coreutils/coreutils-8.9.tar.gz
md5sum = 36909ae68840d73a800120cf74af794a
configure-options =
  --prefix=${buildout:parts-directory}/${:_buildout_section_name_} --enable-install-program=tr,basename,uname,cat,cp,ls
environment =
  LDFLAGS =-Wl,--as-needed
# GNU cpio copies files into or out of a cpio or tar archive. The archive can be another file on the disk, a magnetic tape, or a pipe.
[buildout]
parts = cpio
[cpio]
recipe = hexagonit.recipe.cmmi
url = http://ftp.gnu.org/gnu/cpio/cpio-2.11.tar.bz2
md5sum = 20fc912915c629e809f80b96b2e75d7d
[buildout]
extends =
  cmake.cfg
  imagemagick.cfg
parts = cuneiform
[cuneiform]
recipe = hexagonit.recipe.cmmi
url = http://launchpad.net/cuneiform-linux/1.0/1.0/+download/cuneiform-linux-1.0.0.tar.bz2
md5sum = 785232ffffad7d82446fbac08a1c3ef9
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-command =
mkdir build && cd build && \
${cmake:location}/bin/cmake \
-DCMAKE_INSTALL_RPATH=${:location}/lib64:${:location}/lib \
-DCMAKE_INSTALL_PREFIX=${:location} \
-DNO_SYSTEM_ENVIRONMENT_PATH=ON \
-DCMAKE_INCLUDE_PATH=${imagemagick:location}/include \
-DCMAKE_LIBRARY_PATH=${imagemagick:location}/lib \
-DCMAKE_INSTALL_RPATH=${:location}/lib64:${:location}/lib:${imagemagick:location}/lib \
-DCMAKE_BUILD_TYPE=release \
..
make-binary =
cd build && make
# libcurl - the multiprotocol file transfer library
# http://curl.haxx.se/
[buildout]
extends =
openssl.cfg
pkgconfig.cfg
zlib.cfg
parts =
curl
[curl]
recipe = hexagonit.recipe.cmmi
url = http://curl.haxx.se/download/curl-7.21.3.tar.bz2
md5sum = 5b57fee22090b5c43a6886fdd35af2ce
configure-options =
--disable-static
--disable-ldap
--disable-ldaps
--disable-rtsp
--disable-proxy
--disable-dict
--disable-telnet
--disable-tftp
--disable-pop3
--disable-imap
--disable-smtp
--disable-gopher
--enable-ipv6
--disable-sspi
--with-ssl=${openssl:location}
--with-zlib=${zlib:location}
--without-nss
--without-libssh2
--without-libidn
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig
LDFLAGS=-Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath -Wl,${openssl:location}/lib
[buildout]
parts =
cyrus-sasl
extends =
zlib.cfg
[cyrus-sasl-gcc4.4-patch]
# patch available thanks to Gentoo packagers
# http://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/dev-libs/cyrus-sasl/files/cyrus-sasl-2.1.22-gcc44.patch?revision=1.2
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = 5deb4d67b53ecba20c7887fc8fdebee1
filename = cyrus-sasl-2.1.22-gcc44.patch
download-only = true
[cyrus-sasl]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.cyrusimap.org/cyrus-sasl/cyrus-sasl-2.1.23.tar.gz
md5sum = 2eb0e48106f0e9cd8001e654f267ecbc
patches =
${cyrus-sasl-gcc4.4-patch:location}/${cyrus-sasl-gcc4.4-patch:filename}
patch-options =
-p1
configure-options =
--disable-digest
--disable-gssapi
--disable-otp
--with-dblib=none
--without-des
--without-openssl
--without-pam
--without-saslauthd
# it seems that parallel build sometimes fails.
make-options =
-j1
environment =
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib
[buildout]
parts = dcron
[dcron-patch-nonroot]
recipe = hexagonit.recipe.download
md5sum = 2f5b22dc1cbe81060a9c28e6f5c06e8b
url = ${:_profile_base_location_}/../patch/${:filename}
filename = dcron-4.4.noroot.no.globals.patch
download-only = true
[dcron]
recipe = hexagonit.recipe.cmmi
url = http://www.jimpryor.net/linux/releases/dcron-4.4.tar.gz
md5sum = 02d848ba043a9df5bf2102a9f4bc04bd
configure-command = true
patches =
${dcron-patch-nonroot:location}/${dcron-patch-nonroot:filename}
patch-options = -p1
make-options =
PREFIX=${buildout:parts-directory}/${:_buildout_section_name_}
[buildout]
extends =
mysql-tritonn-5.0.cfg
python-2.4.cfg
lxml-python.cfg
mysql-python.cfg
subversion.cfg
pysvn-python.cfg
python-ldap-python.cfg
glib.cfg
parts =
cmf15
itools
mysql-python
products-other
products-deps
products-erp5
[cmf15]
recipe = plone.recipe.distros
urls =
http://www.zope.org/Products/CMF/CMF-1.5.4/CMF-1.5.4.tar.gz
nested-packages =
CMF-1.5.4.tar.gz
version-suffix-packages =
CMF-1.5.4.tar.gz
[itools]
# use a custom build for itools, to add lib64 to the include path
recipe = zc.recipe.egg:custom
python = python2.4
egg = itools
find-links =
http://download.hforge.org/itools/0.20/
include-dirs =
${glib:location}/include/glib-2.0
${glib:location}/lib/glib-2.0/include
library-dirs =
${glib:location}/lib
rpath =
${glib:location}/lib
[products-deps]
recipe = plone.recipe.distros
urls =
http://www.zope.org/Members/shh/ExtFile/1.4.4/ExtFile-1.4.4.tar.gz
http://www.zope.org/Members/NIP/ZMailIn/1.0.1/ZMailIn-1-0-1.tgz
http://www.zope.org/Members/NIP/ZMailIn/1.0.0/CMFMailIn-1.0.0
http://www.zope.org/Products/PluggableAuthService/PluggableAuthService-1.1b2/PluggableAuthService-1.1b2.tar.gz
http://download.hforge.org/localizer/Localizer-1.2.3.tar.gz
version-suffix-packages =
Localizer-1.2.3.tar.gz
[products-ldap]
recipe = plone.recipe.distros
urls =
http://www.dataflake.org/software/ldapmultiplugins/ldapmultiplugins_1.1/LDAPMultiPlugins-1_1.tgz
http://www.dataflake.org/software/ldapuserfolder/ldapuserfolder_2.6/LDAPUserFolder-2_6.tgz
[products-other]
# The infrae.subversion recipe uses the svn command under the hood, but there is
# no way to pass --trust-server-cert --non-interactive --no-auth-cache, so for the
# 2.12 flavour it is better to evaluate using the subversion command provided here.
recipe = plone.recipe.command
svn_param =--trust-server-cert --non-interactive --no-auth-cache --quiet
# dirty hack to support the PluginRegistry/utils.py:17 assumption that products
# are located in a Products folder
# XXX: Zelenium was eggified for recent Zope versions (2.12) and is available on Bazaar;
# a better alternative should be used in the future.
location = ${buildout:parts-directory}/${:_buildout_section_name_}
destination = ${:location}/Products
stop-on-error = true
update-command = ${:command}
command = ${subversion:location}/bin/svn checkout ${:svn_param} http://svn.plone.org/svn/collective/DCWorkflowGraph/tags/release-0_3/ ${:destination}/DCWorkflowGraph &&
${subversion:location}/bin/svn checkout ${:svn_param} svn://svn.zope.org/repos/main/Zelenium/trunk/@110603 ${:destination}/Zelenium &&
${subversion:location}/bin/svn checkout ${:svn_param} svn://svn.zope.org/repos/main/PluginRegistry/tags/1.0 ${:destination}/PluginRegistry &&
${subversion:location}/bin/svn checkout ${:svn_param} http://svn.plone.org/svn/archetypes/MimetypesRegistry/tags/Archetypes-1.4.0-final ${:destination}/MimetypesRegistry
[eggs]
recipe = zc.recipe.egg
python = python2.4
eggs =
${itools:egg}
${mysql-python:egg}
${lxml-python:egg}
${pysvn-python:egg}
${python-ldap-python:egg}
PyXML
ClientForm
SOAPpy
cElementTree
chardet
ctypes
elementtree
erp5.recipe.mysqldatabase
erp5diff
ipdb
mechanize
numpy
ordereddict
pycrypto
paramiko
ply
python-ldap
python-magic
python-memcached
pytz
simplejson
threadframe
timerserver
urlnorm
uuid
xml_marshaller
xupdate_processor
feedparser
extra-paths =
${zope-2.8:location}/lib/python
# shut down script generation; other parts can generate the scripts they need
# by reusing ${eggs:eggs} (see the illustrative sketch at the end of this section).
# parameterizing the name of the generated python interpreter with the python
# section's version would create a dependency between this egg section and the
# installation of python, which we don't want on an instance
interpreter = python2.4
scripts =
python=${:interpreter}
ipython=i${:interpreter}
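# Illustrative sketch only -- the part name below is hypothetical and not part
# of this profile; it shows how another part could reuse ${eggs:eggs} to
# generate just the scripts it needs:
#   [extra-scripts]
#   recipe = zc.recipe.egg
#   python = python2.4
#   eggs = ${eggs:eggs}
#   scripts = ipython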
[mysql-python]
python = python2.4
[lxml-python]
python = python2.4
[python-ldap-python]
python = python2.4
[pysvn-python]
python = python2.4
[omelette]
# XXX don't use this part until this omelette bug is fixed:
# https://bugs.launchpad.net/collective.buildout/+bug/553005
recipe = collective.recipe.omelette
eggs = ${eggs:eggs}
packages =
${itools:lib} .
[precache-eggs]
python = python2.4
eggs +=
plone.recipe.zope2zeoserver
[buildout]
parts = fastjar
extends =
zlib.cfg
[fastjar]
recipe = hexagonit.recipe.cmmi
url = http://sourceforge.net/projects/fastjar/files/fastjar/0.94/fastjar-0.94.tar.gz/download
md5sum = 14d4bdfac236e347d806c6743dba48c6
environment =
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib
[buildout]
extends =
bzip2.cfg
libpng.cfg
pkgconfig.cfg
zlib.cfg
parts =
ffmpeg
[ffmpeg]
<= ffmpeg-0.6
[libogg]
recipe = hexagonit.recipe.cmmi
url = http://downloads.xiph.org/releases/ogg/libogg-1.2.2.tar.gz
md5sum = 5a9fcabc9a1b7c6f1cd75ddc78f36c56
configure-options =
--disable-static
[libvorbis]
recipe = hexagonit.recipe.cmmi
url = http://downloads.xiph.org/releases/vorbis/libvorbis-1.3.2.tar.bz2
md5sum = 798a4211221073c1409f26eac4567e8b
configure-options =
--disable-static
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${libogg:location}/lib/pkgconfig
[libtheora]
recipe = hexagonit.recipe.cmmi
url = http://downloads.xiph.org/releases/theora/libtheora-1.1.1.tar.bz2
md5sum = 292ab65cedd5021d6b7ddd117e07cd8e
configure-options =
--disable-static
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${libogg:location}/lib/pkgconfig:${libpng:location}/lib/pkgconfig:${libvorbis:location}/lib/pkgconfig
[yasm]
recipe = hexagonit.recipe.cmmi
url = http://www.tortall.net/projects/yasm/releases/yasm-1.1.0.tar.gz
[libvpx]
recipe = hexagonit.recipe.cmmi
url = http://webm.googlecode.com/files/libvpx-v0.9.6.tar.bz2
md5sum = 383f3f07a76099682abb43f79b692b72
configure-options =
--enable-shared
environment =
PATH=${yasm:location}/bin:%(PATH)s
[libx264]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.videolan.org/pub/videolan/x264/snapshots/x264-snapshot-20110412-2245.tar.bz2
md5sum = 423c402214544d0e2f21455175a0d01f
configure-options =
--enable-shared
--enable-pic
environment =
PATH=${yasm:location}/bin:%(PATH)s
[lame]
recipe = hexagonit.recipe.cmmi
url = http://prdownloads.sourceforge.net/lame/lame-3.98.4.tar.gz
md5sum = 8e9866ad6b570c6c95c8cba48060473f
configure-options =
--disable-static
--disable-gtktest
[opencore-amr]
recipe = hexagonit.recipe.cmmi
url = http://prdownloads.sourceforge.net/opencore-amr/opencore-amr/opencore-amr-0.1.2.tar.gz
md5sum = 8e8b8b253eb046340ff7b6bf7a6ccd3e
configure-options =
--disable-static
[ffmpeg-0.6]
recipe = hexagonit.recipe.cmmi
url = http://www.ffmpeg.org/releases/ffmpeg-0.6.3.tar.bz2
md5sum = cdf4ad9b2a4d195b5ca874494bc7b0b0
configure-options =
--enable-gpl
--enable-version3
--enable-postproc
--enable-bzlib
--enable-libtheora
--enable-libvorbis
--enable-libvpx
--enable-libx264
--enable-libmp3lame
--enable-libopencore-amrnb
--enable-libopencore-amrwb
--enable-shared
--enable-zlib
--disable-static
--extra-ldflags="-Wl,-rpath -Wl,${buildout:parts-directory}/${:_buildout_section_name_}/lib"
environment =
CPPFLAGS=-I${bzip2:location}/include -I${libogg:location}/include -I${libvorbis:location}/include -I${libtheora:location}/include -I${libvpx:location}/include -I${libx264:location}/include -I${lame:location}/include -I${opencore-amr:location}/include -I${zlib:location}/include
LDFLAGS=-L${bzip2:location}/lib -Wl,-rpath -Wl,${bzip2:location}/lib -L${libogg:location}/lib -Wl,-rpath -Wl,${libogg:location}/lib -L${libvorbis:location}/lib -Wl,-rpath -Wl,${libvorbis:location}/lib -L${libtheora:location}/lib -Wl,-rpath -Wl,${libtheora:location}/lib -L${libvpx:location}/lib -Wl,-rpath -Wl,${libvpx:location}/lib -L${libx264:location}/lib -Wl,-rpath -Wl,${libx264:location}/lib -L${lame:location}/lib -Wl,-rpath -Wl,${lame:location}/lib -L${opencore-amr:location}/lib -Wl,-rpath -Wl,${opencore-amr:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib
PATH=${yasm:location}/bin:%(PATH)s
[buildout]
parts = file
extends =
zlib.cfg
[file]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.astron.com/pub/file/file-5.07.tar.gz
md5sum = b8d1f9a8a644067bd0a703cebf3f4858
configure-options =
--disable-static
environment =
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib
[buildout]
extends =
boost-lib.cfg
tokyocabinet.cfg
parts =
flare
[flare]
recipe = hexagonit.recipe.cmmi
url = http://labs.gree.jp/data/source/flare-1.0.9.tgz
md5sum = e59ccce1ba29e7edf6f665561678d5c8
configure-options =
--with-tokyocabinet=${tokyocabinet:location}
--with-boost=${boost-lib:location}
environment =
LDFLAGS =-Wl,-rpath -Wl,${tokyocabinet:location}/lib -Wl,-rpath -Wl,${boost-lib:location}/lib
[buildout]
extends =
m4.cfg
parts =
flex
[flex]
recipe = hexagonit.recipe.cmmi
url = http://downloads.sourceforge.net/project/flex/flex/flex-2.5.35/flex-2.5.35.tar.gz
md5sum = 201d3f38758d95436cbc64903386de0b
environment =
M4=${m4:location}/bin/m4
[buildout]
extends =
fonts.cfg
freetype.cfg
libxml2.cfg
pkgconfig.cfg
zlib.cfg
parts =
fontconfig
[fontconfig]
recipe = hexagonit.recipe.cmmi
url = http://fontconfig.org/release/fontconfig-2.8.0.tar.gz
md5sum = 77e15a92006ddc2adbb06f840d591c0e
configure-options =
--disable-static
--disable-docs
--enable-libxml2
--with-default-fonts=${fonts:location}
--with-freetype-config=${freetype:location}/bin/freetype-config
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${libxml2:location}/lib/pkgconfig
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib
[buildout]
parts =
liberation-fonts
ipaex-fonts
[fonts]
location = ${buildout:parts-directory}/${:_buildout_section_name_}
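# Font parts below install themselves under ${fonts:location}/<part-name>;
# fontconfig points at this directory via --with-default-fonts.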
# Liberation(tm) Fonts - a font family which aims at metric
# compatibility with Arial, Times New Roman, and Courier New.
# https://fedorahosted.org/liberation-fonts/
[liberation-fonts]
recipe = hexagonit.recipe.download
strip-top-level-dir = true
url = https://fedorahosted.org/releases/l/i/liberation-fonts/liberation-fonts-ttf-1.06.0.20100721.tar.gz
md5sum = ca4870d899fd7e943ffc310a5421ad4d
destination = ${fonts:location}/${:_buildout_section_name_}
# IPAex Font - Japanese fonts provided by IPA
# http://ossipedia.ipa.go.jp/ipafont/index.html
[ipaex-fonts]
recipe = hexagonit.recipe.download
strip-top-level-dir = true
url = http://info.openlab.ipa.go.jp/ipafont/fontdata/IPAexfont00103.zip
md5sum = ac67b2fc3aab7f683d89f0070df284e7
destination = ${fonts:location}/${:_buildout_section_name_}
# FreeType - a Free, High-Quality, and Portable Font Engine
# http://freetype.org/
[buildout]
extends =
zlib.cfg
parts =
freetype
[freetype]
recipe = hexagonit.recipe.cmmi
url = http://download.savannah.gnu.org/releases/freetype/freetype-2.4.4.tar.gz
md5sum = 9273efacffb683483e58a9e113efae9f
configure-options =
--disable-static
environment =
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib
[buildout]
parts = garbage-collector
[garbage-collector]
recipe = hexagonit.recipe.cmmi
md5sum = 2ff9924c7249ef7f736ecfe6f08f3f9b
url = http://www.hpl.hp.com/personal/Hans_Boehm/gc/gc_source/gc-7.1.tar.gz
configure-options =
--disable-static
# GNU C Compiler
# Mostly required to support languages other than C or C++
[buildout]
extends =
m4.cfg
zip.cfg
parts =
gcc-java
[gmp]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.gmplib.org/pub/gmp-4.3.2/gmp-4.3.2.tar.bz2
md5sum = dd60683d7057917e34630b4a787932e8
# GMP does not correctly detect the architecture, so it has to be given explicitly.
# As hexagonit.recipe.cmmi uses shell expansion in subprocesses, backticks work
# here (see the expanded example below).
configure-options =
--build=`uname -m`-linux
environment =
PATH=${m4:location}/bin:%(PATH)s
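# For example (illustration only), on an x86_64 host the backticks above expand
# at configure time to:
#   --build=x86_64-linux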
[mpfr]
recipe = hexagonit.recipe.cmmi
url = http://www.mpfr.org/mpfr-3.0.0/mpfr-3.0.0.tar.bz2
md5sum = f45bac3584922c8004a10060ab1a8f9f
configure-options =
--with-gmp=${gmp:location}
environment =
CPPFLAGS =-I${gmp:location}/include
LDFLAGS =-L${gmp:location}/lib -Wl,-rpath -Wl,${gmp:location}/lib
[mpc]
recipe = hexagonit.recipe.cmmi
url = http://www.multiprecision.org/mpc/download/mpc-0.9.tar.gz
md5sum = 0d6acab8d214bd7d1fbbc593e83dd00d
configure-options =
--with-gmp=${gmp:location}
--with-mpfr=${mpfr:location}
environment =
CPPFLAGS =-I${mpfr:location}/include -I${gmp:location}/include
LDFLAGS =-L${mpfr:location}/lib -Wl,-rpath -Wl,${mpfr:location}/lib -L${gmp:location}/lib -Wl,-rpath -Wl,${gmp:location}/lib
[ecj]
recipe = hexagonit.recipe.download
download-only = true
url = ftp://sourceware.org/pub/java/ecj-4.5.jar
md5sum = d7cd6a27c8801e66cbaa964a039ecfdb
filename = ecj.jar
[gcc-download]
recipe = hexagonit.recipe.download
url = http://www.mirrorservice.org/sites/sourceware.org/pub/gcc/releases/gcc-4.5.2/gcc-4.5.2.tar.bz2
md5sum = d6559145853fbaaa0fd7556ed93bce9a
strip-top-level-dir = True
destination = ${gcc-java-source:location}
[gcc-java-download]
recipe = hexagonit.recipe.download
url = http://www.mirrorservice.org/sites/sourceware.org/pub/gcc/releases/gcc-4.5.2/gcc-java-4.5.2.tar.bz2
md5sum = fe2b647bace18dc7867a4192def46e2c
strip-top-level-dir = True
destination = ${gcc-java-source:location}
ignore-existing = true
[gcc-java-source]
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[gcc-java]
depends =
${gcc-download:location}
${gcc-java-download:location}
recipe = hexagonit.recipe.cmmi
path = ${gcc-java-source:location}
md5sum = bb3265edf0fa7543e50cedb93e04e427
configure-command = make clean \\; make distclean \\; ./configure
# The architecture is not correctly detected, so --build has to be given explicitly.
# As hexagonit.recipe.cmmi uses shell expansion in subprocesses, backticks work here.
configure-options =
--disable-bootstrap
--build=`uname -m`-linux
--enable-languages=java
--disable-multilib
--with-gmp=${gmp:location}
--with-mpfr=${mpfr:location}
--with-mpc=${mpc:location}
--with-ecj-jar=${ecj:location}/${ecj:filename}
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
environment =
CPPFLAGS =-I${mpfr:location}/include -I${gmp:location}/include -I${mpc:location}/include
LDFLAGS =-L${mpfr:location}/lib -Wl,-rpath -Wl,${mpfr:location}/lib -L${gmp:location}/lib -Wl,-rpath -Wl,${gmp:location}/lib -Wl,-rpath -Wl,${mpc:location}/lib
PATH=${zip:location}/bin:%(PATH)s
# make install does not work when several cores are used
make-targets = install -j1
[buildout]
parts =
gdbm
[gdbm-nochange-patch-download]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = fafa6cae0afbf2b5afb9ef3b8e3035a4
download-only = true
filename = gdbm-Makefile.in-nochange.patch
[gdbm]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.gnu.org/gnu/gdbm/gdbm-1.8.3.tar.gz
md5sum = 1d1b1d5c0245b1c00aff92da751e9aa1
patches = ${gdbm-nochange-patch-download:location}/${gdbm-nochange-patch-download:filename}
configure-options =
--disable-static
# install as parts/gdbm/include/gdbm/*.h etc. because some software
# (e.g. Python's dbmmodule.c extension) assumes this location
# (see the layout illustration at the end of this section).
includedir = ${buildout:parts-directory}/${:_buildout_section_name_}/include
make-targets =
install install-compat includedir=${:includedir}/gdbm && rm -f ${:includedir}/*.h && ln -sf gdbm/gdbm.h ${:includedir}/gdbm.h
# it seems that parallel build sometimes fails for gdbm.
make-options =
-j1
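# Resulting layout (illustration of the make-targets above): headers end up as
#   parts/gdbm/include/gdbm/gdbm.h
# with a compatibility symlink
#   parts/gdbm/include/gdbm.h -> gdbm/gdbm.h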
[buildout]
parts =
gettext
extends =
ncurses.cfg
libxml2.cfg
zlib.cfg
[gettext]
recipe = hexagonit.recipe.cmmi
url = http://ftp.gnu.org/pub/gnu/gettext/gettext-0.18.1.1.tar.gz
md5sum = 3dd55b952826d2b32f51308f2f91aa89
configure-options =
--disable-static
--disable-java
--disable-csharp
--with-libncurses-prefix=${ncurses:location}
--with-libxml2-prefix=${libxml2:location}
--with-included-gettext
--without-emacs
--disable-acl
--disable-openmp
environment =
CPPFLAGS=-I${libxml2:location}/include -I${zlib:location}/include -I${ncurses:location}/include
LDFLAGS=-L${libxml2:location}/lib -Wl,-rpath -Wl,${libxml2:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -L${ncurses:location}/lib -Wl,-rpath -Wl,${ncurses:location}/lib
[buildout]
extends =
fontconfig.cfg
libjpeg.cfg
libtiff.cfg
pkgconfig.cfg
parts = ghostscript
[ghostscript-hooks-download]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../hook/${:filename}
filename = ghostscript-hooks.py
md5sum = 731475648c91507bd1dfe2a61ee84552
download-only = true
[ghostscript-common]
recipe = hexagonit.recipe.cmmi
pre-configure-hook = ${ghostscript-hooks-download:location}/${ghostscript-hooks-download:filename}:pre_configure_hook
configure-options =
--disable-cups
--disable-cairo
--without-x
--with-drivers=FILES
# it seems that parallel build sometimes fails for ghostscript.
make-options = -j1
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${fontconfig:location}/lib/pkgconfig
CPPFLAGS=-I${libtiff:location}/include
LDFLAGS=-Wl,-rpath -Wl,${fontconfig:location}/lib -L${libjpeg:location}/lib -Wl,-rpath -Wl,${libjpeg:location}/lib -L${libtiff:location}/lib -Wl,-rpath -Wl,${libtiff:location}/lib
LD_LIBRARY_PATH=${fontconfig:location}/lib
[ghostscript]
# we prefer ghostscript-8 for now, because ghostscript-9.00 seems to have a
# problem with Japanese fonts if -dTextAlphaBits=4 is specified by
# imagemagick.
<= ghostscript-8
[ghostscript-9]
<= ghostscript-common
url = http://ghostscript.com/releases/ghostscript-9.00.tar.gz
md5sum = a402462478b4cdda3e1816899227b845
[ghostscript-8]
<= ghostscript-common
url = http://www.nexedi.org/static/tarballs/ghostscript/ghostscript-8.71-no-looping-symlink.tar.bz2
md5sum = 34639af3ffe8594f2c5ea944dfbe1d78
# git - a distributed version control system with speed and efficiency
# http://git-scm.com/
[buildout]
extends =
curl.cfg
libexpat.cfg
openssl.cfg
perl.cfg
zlib.cfg
parts =
git
[git]
recipe = hexagonit.recipe.cmmi
url = http://kernel.org/pub/software/scm/git/git-1.7.3.4.tar.bz2
md5sum = 3a2602016f98c529cda7b9fad1a6e216
configure-options =
--with-curl=${curl:location}
--with-openssl=${openssl:location}
--with-zlib=${zlib:location}
--with-expat=${libexpat:location}
--with-perl=${perl:location}/bin/perl
--without-python
--without-tcltk
environment =
PATH=${curl:location}/bin:%(PATH)s
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-L${zlib:location}/lib -L${openssl:location}/lib -Wl,-rpath -Wl,${openssl:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib
[buildout]
extends =
gettext.cfg
zlib.cfg
[glib]
recipe = hexagonit.recipe.cmmi
url = http://ftp.gnome.org/pub/gnome/sources/glib/2.28/glib-2.28.6.tar.bz2
md5sum = 7d8fc15ae70d5111c0cf2a79d50ef717
configure-options =
--disable-static
--disable-selinux
--disable-fam
--disable-xattr
environment =
CPPFLAGS=-I${zlib:location}/include -I${gettext:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -L${gettext:location}/lib -Wl,-rpath -Wl,${gettext:location}/lib
PATH=${gettext:location}/bin:%(PATH)s
# Graphviz - Graph Visualization Software
# http://www.graphviz.org/
[buildout]
parts =
graphviz
extends =
fontconfig.cfg
freetype.cfg
libpng.cfg
libtool.cfg
pkgconfig.cfg
zlib.cfg
[graphviz]
recipe = hexagonit.recipe.cmmi
url = http://www.graphviz.org/pub/graphviz/stable/SOURCES/graphviz-2.26.3.tar.gz
md5sum = 6f45946fa622770c45609778c0a982ee
configure-options =
--with-ltdl-include=${libtool:location}/include
--with-ltdl-lib=${libtool:location}/lib
--with-pngincludedir=${libpng:location}/include
--with-pnglibdir=${libpng:location}/lib
--with-zincludedir=${zlib:location}/include
--with-zlibdir=${zlib:location}/lib
--with-freetype2
--with-fontconfig
--disable-swig
--disable-sharp
--disable-guile
--disable-io
--disable-java
--disable-lua
--disable-ocaml
--disable-perl
--disable-php
--disable-python
--disable-r
--disable-ruby
--disable-tcl
--without-x
--without-expat
--without-devil
--without-rsvg
--without-ghostscript
--without-visio
--without-pangocairo
--without-lasi
--without-glitz
--without-jpeg
--without-glut
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${fontconfig:location}/lib/pkgconfig:${freetype:location}/lib/pkgconfig
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib
[buildout]
extends =
pcre.cfg
parts =
grep
[grep]
recipe = hexagonit.recipe.cmmi
url = http://ftp.gnu.org/gnu/grep/grep-2.8.tar.gz
md5sum = cb2dfc502c5afc7a4a6e5f6cefd6850e
environment =
PKG_CONFIG_PATH=${pcre:location}/lib/pkgconfig
LDFLAGS =-Wl,--as-needed -Wl,-rpath -Wl,${pcre:location}/lib
# Handlersocket - a NoSQL plugin for MySQL.
# http://github.com/ahiguti/HandlerSocket-Plugin-for-MySQL
[buildout]
extends =
autoconf.cfg
automake.cfg
libtool.cfg
mariadb.cfg
parts =
handlersocket
[handlersocket-mariadb-patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = 2654feea2e867c898b741ac0f0aa8e14
filename = HandlerSocket-Plugin-for-MySQL-1.0.6-mariadb.patch
download-only = true
[handlersocket]
recipe = hexagonit.recipe.cmmi
url = http://github.com/ahiguti/HandlerSocket-Plugin-for-MySQL/tarball/1.0.6
md5sum = 57f5c131e3d29701b01dd92c35ed25fd
patch-options = -p1
patches =
${handlersocket-mariadb-patch:location}/${handlersocket-mariadb-patch:filename}
configure-command =
ACLOCAL_ARGS=-I${libtool:location}/share/aclocal ./autogen.sh && ./configure
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--with-mysql-source=${mariadb:location}__compile__/mariadb-${mariadb:version}
--with-mysql-bindir=${mariadb:location}/bin
--with-mysql-plugindir=${mariadb:location}/lib/mysql/plugin
environment =
PATH =${autoconf:location}/bin:${automake-1.11:location}/bin:${libtool:location}/bin:%(PATH)s
[buildout]
parts = haproxy
[haproxy]
recipe = hexagonit.recipe.cmmi
url = http://haproxy.1wt.eu/download/1.4/src/haproxy-1.4.15.tar.gz
md5sum = c1b4fc6028c6d8e23dde8c91ff47eabe
configure-command = true
# If the system is running Linux 2.6, "linux26" is used as the TARGET,
# otherwise "generic" is used.
# For the ARCH value, x86_64 and i[3456]86 are supported (see the resolved example below).
make-options =
TARGET="$(uname -sr 2>/dev/null|grep -q '^Linux 2\.6' && echo linux26 || echo generic)"
ARCH="$(uname -m 2>/dev/null|grep -E '^(x86_64|i[3456]86)$')"
PREFIX=${buildout:parts-directory}/${:_buildout_section_name_}
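# For example (illustration only), on a Linux 2.6 x86_64 host the detection
# above resolves to:
#   make TARGET=linux26 ARCH=x86_64 PREFIX=${buildout:parts-directory}/haproxy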
[buildout]
parts =
hookbox
find-links =
http://www.nexedi.org/static/packages/source/
versions = versions
[versions]
# special version of hookbox supporting history_duration option
# https://github.com/fdiary/hookbox
hookbox = 0.3.4nxd001
[hookbox]
recipe = zc.recipe.egg
eggs =
hookbox
scripts =
hookbox
# ImageMagick - a software suite to create, edit, and compose bitmap images
# http://www.imagemagick.org/
[buildout]
parts = imagemagick
extends =
bzip2.cfg
fontconfig.cfg
freetype.cfg
ghostscript.cfg
libjpeg.cfg
libpng.cfg
libtiff.cfg
libtool.cfg
jasper.cfg
jbigkit.cfg
pkgconfig.cfg
zlib.cfg
[imagemagick-6.6.6-1-no-gsx-gsc-probe.patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
path = ${:filename}
md5sum = 3f28ecd9f6722cf2c3238ce6ec3d7a68
download-only = true
filename = imagemagick-6.6.6-1-no-gsx-gsc-probe.patch
[imagemagick-6.6.7-4-without-lzma.patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
path = ${:filename}
md5sum = 04e1b934a58f4b3a83e4df148795b338
download-only = true
filename = imagemagick-6.6.7-4-without-lzma.patch
[imagemagick]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.imagemagick.org/pub/ImageMagick/ImageMagick-6.6.9-10.tar.bz2
md5sum = 985bd453c3e502f2771af5329c1cc384
configure-options =
--disable-static
--without-x
--with-magick-plus-plus
--disable-openmp
--disable-opencl
--without-dps
--without-djvu
--without-fftw
--without-fpx
--with-fontconfig
--without-gslib
--without-gvc
--without-lcms
--without-lcms2
--without-lqr
--without-lzma
--without-openexr
--without-rsvg
--without-wmf
--without-xml
--with-bzlib=${bzip2:location}
--with-zlib=${zlib:location}
--with-ltdl-include=${libtool:location}/include
--with-ltdl-lib=${libtool:location}/lib
--with-frozenpaths
patch-options = -p1
patches =
${imagemagick-6.6.6-1-no-gsx-gsc-probe.patch:location}/${imagemagick-6.6.6-1-no-gsx-gsc-probe.patch:filename}
${imagemagick-6.6.7-4-without-lzma.patch:location}/${imagemagick-6.6.7-4-without-lzma.patch:filename}
environment =
PATH=${freetype:location}/bin:${ghostscript:location}/bin:${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${fontconfig:location}/lib/pkgconfig
CPPFLAGS=-I${bzip2:location}/include -I${zlib:location}/include -I${jbigkit:location}/include -I${libjpeg:location}/include -I${libtiff:location}/include -I${libpng:location}/include -I${jasper:location}/include -I${freetype:location}/include
LDFLAGS =-L${bzip2:location}/lib -Wl,-rpath -Wl,${bzip2:location}/lib -Wl,-rpath -L${zlib:location}/lib -Wl,${zlib:location}/lib -L${jbigkit:location}/lib -Wl,-rpath -Wl,${jbigkit:location}/lib -L${libjpeg:location}/lib -Wl,-rpath -Wl,${libjpeg:location}/lib -L${libtiff:location}/lib -Wl,-rpath -Wl,${libtiff:location}/lib -L${libpng:location}/lib -Wl,-rpath -Wl,${libpng:location}/lib -L${jasper:location}/lib -Wl,-rpath -Wl,${jasper:location}/lib -L${freetype:location}/lib -Wl,-rpath -Wl,${freetype:location}/lib
[buildout]
extends =
libjpeg.cfg
parts =
jasper
[jasper]
recipe = hexagonit.recipe.cmmi
url = http://www.ece.uvic.ca/~mdadams/jasper/software/jasper-1.900.1.zip
md5sum = a342b2b4495b3e1394e161eb5d85d754
# jasper configure script is not executable by default
configure-command =
/bin/sh ./configure --prefix=${buildout:parts-directory}/${:_buildout_section_name_} --disable-static --enable-shared --disable-opengl
environment =
CPPFLAGS=-I${libjpeg:location}/include
LDFLAGS=-L${libjpeg:location}/lib -Wl,-rpath -Wl,${libjpeg:location}/lib
[buildout]
parts =
jbigkit
[jbigkit-hooks]
recipe = hexagonit.recipe.download
download-only = true
url = ${:_profile_base_location_}/../hook/${:filename}
md5sum = f1edb4ddd212d2d100d7ea8b2e42d21f
filename = jbigkit-hooks.py
[jbigkit-build-patch]
recipe = hexagonit.recipe.download
# Patched thanks to Gentoo developers:
# http://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/media-libs/jbigkit/files/jbigkit-2.0-build.patch?revision=1.1
url = ${:_profile_base_location_}/../patch/${:filename}
download-only = true
filename = jbigkit-2.0-build.patch
md5sum = e974958e9331735c07478e9c2dde8795
[jbigkit]
recipe = hexagonit.recipe.cmmi
url = http://www.cl.cam.ac.uk/~mgk25/download/jbigkit-2.0.tar.gz
md5sum = 3dd87f605abb1a97a22dc79d8b3e8f6c
patch-options =
-p1
patches =
${jbigkit-build-patch:location}/${jbigkit-build-patch:filename}
configure-command = true
make-targets = lib pbm
post-make-hook = ${jbigkit-hooks:location}/${jbigkit-hooks:filename}:post_make_hook
[buildout]
extends =
tokyocabinet.cfg
messagepack.cfg
openssl.cfg
zlib.cfg
parts = kumo
find-links = http://www.nexedi.org/static/packages/source/
[kumo-hooks-download]
url = ${:_profile_base_location_}/../hook/${:filename}
md5sum = 958a595a02de75624728f8d65e39d800
recipe = hexagonit.recipe.download
download-only=true
filename = kumo-hooks.py
[kumo-ipv6-multiip-patch-download]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
filename = kumofs-0.4.13_ipv6support_multiiplistenfix.patch
md5sum = 53af9f1f1375940841c589a6cbe11425
download-only = true
[kumo]
pre-configure-hook = ${kumo-hooks-download:location}/${kumo-hooks-download:filename}:pre_configure_hook
recipe = erp5.recipe.cmmiforcei686
url = https://github.com/downloads/etolabo/kumofs/kumofs-0.4.13.tar.gz
md5sum = 46148e9536222d0ad2ef36777c55714d
patches = ${kumo-ipv6-multiip-patch-download:location}/${kumo-ipv6-multiip-patch-download:filename}
patch-options = -p1
configure-options =
--with-tokyocabinet=${tokyocabinet:location}
--with-msgpack=${messagepack:location}
environment =
CPPFLAGS=-I${zlib:location}/include -I${openssl:location}/include
LDFLAGS=-L${zlib:location}/lib -L${openssl:location}/lib -Wl,-rpath -Wl,${tokyocabinet:location}/lib -Wl,-rpath -Wl,${messagepack:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath -Wl,${openssl:location}/lib
[buildout]
parts = libdb
[libdb-hooks-download]
url = ${:_profile_base_location_}/../hook/${:filename}
md5sum = acb3bfb990a48381176c1e8f74130e30
recipe = hexagonit.recipe.download
download-only=true
filename = libdb-hooks.py
[libdb]
recipe = hexagonit.recipe.cmmi
url = http://download.oracle.com/berkeley-db/db-4.5.20.tar.gz
md5sum = b0f1c777708cb8e9d37fb47e7ed3312d
pre-configure-hook = ${libdb-hooks-download:location}/${libdb-hooks-download:filename}:pre_configure_hook
configure-command = ../dist/configure
configure-options =
--disable-static
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
[buildout]
parts = libevent
[libevent]
recipe = hexagonit.recipe.cmmi
url = http://www.monkey.org/~provos/libevent-1.4.13-stable.tar.gz
md5sum = 0b3ea18c634072d12b3c1ee734263664
configure-options =
--disable-static
[buildout]
parts =
libexpat
[libexpat]
recipe = hexagonit.recipe.cmmi
url = http://sourceforge.net/projects/expat/files/expat/2.0.1/expat-2.0.1.tar.gz/download
md5sum = ee8b492592568805593f81f8cdf2a04c
configure-options =
--disable-static
[buildout]
parts =
libjpeg
[libjpeg]
recipe = hexagonit.recipe.cmmi
url = http://www.ijg.org/files/jpegsrc.v8b.tar.gz
md5sum = e022acbc5b36cd2cb70785f5b575661e
configure-options =
--disable-static
[buildout]
extends =
zlib.cfg
parts =
libpng12
libpng
[libpng-common]
recipe = hexagonit.recipe.cmmi
configure-options =
--disable-static
environment =
CPPFLAGS =-I${zlib:location}/include
LDFLAGS =-L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib
[libpng12]
<= libpng-common
url = http://download.sourceforge.net/libpng/libpng-1.2.44.tar.bz2
md5sum = e3ac7879d62ad166a6f0c7441390d12b
[libpng]
<= libpng-common
url = http://download.sourceforge.net/libpng/libpng-1.5.1.tar.bz2
md5sum = 8fdcb7c0e78d54ff0362a800cbe3bc31
[buildout]
extends = cpio.cfg
parts =
libreoffice-bin
find-links =
http://www.nexedi.org/static/packages/source/
versions = versions
[versions]
# special version of z3c.recipe.openoffice with architecture autodetection
z3c.recipe.openoffice = 0.3.1dev7
[libreoffice-bin]
recipe = z3c.recipe.openoffice
install-javafilter = no
install-pyuno-egg = no
hack-openoffice-python = no
flavour = libreoffice
# Two %s placeholders are used here: the first one for the directory name (e.g. x86_64),
# the second one for the file name (e.g. x86-64); see the expanded example below.
# base-url = http://download.documentfoundation.org/libreoffice/stable/3.3.2/rpm/%s/LibO_3.3.2_Linux_%s_install-rpm_en-US.tar.gz
cpio = ${cpio:location}/bin/cpio
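# For example (illustration only), with x86_64 / x86-64 substituted the
# commented base-url above expands to:
#   http://download.documentfoundation.org/libreoffice/stable/3.3.2/rpm/x86_64/LibO_3.3.2_Linux_x86-64_install-rpm_en-US.tar.gz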
[buildout]
extends =
bzip2.cfg
popt.cfg
zlib.cfg
parts =
librsync
[librsync]
recipe = hexagonit.recipe.cmmi
url = http://downloads.sourceforge.net/sourceforge/librsync/librsync-0.9.7.tar.gz
md5sum = 24cdb6b78f45e0e83766903fd4f6bc84
configure-options =
--disable-static
--enable-shared
environment =
CPPFLAGS=-I${zlib:location}/include -I${bzip2:location}/include -I${popt:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -L${bzip2:location}/lib -Wl,-rpath -Wl,${bzip2:location}/lib -L${popt:location}/lib -Wl,-rpath -Wl,${popt:location}/lib
[buildout]
extends =
libjpeg.cfg
jbigkit.cfg
zlib.cfg
parts =
libtiff
[libtiff]
recipe = hexagonit.recipe.cmmi
url = http://download.osgeo.org/libtiff/tiff-3.9.5.tar.gz
md5sum = 8fc7ce3b4e1d0cc8a319336967815084
configure-options =
--disable-static
--without-x
environment =
CPPFLAGS=-I${libjpeg:location}/include -I${jbigkit:location}/include -I${zlib:location}/include
LDFLAGS=-L${libjpeg:location}/lib -Wl,-rpath -Wl,${libjpeg:location}/lib -L${jbigkit:location}/lib -Wl,-rpath -Wl,${jbigkit:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib
[buildout]
parts = libtool
[libtool]
recipe = hexagonit.recipe.cmmi
md5sum = b32b04148ecdd7344abc6fe8bd1bb021
url = http://ftp.gnu.org/gnu/libtool/libtool-2.4.tar.gz
configure-options =
--disable-static
[buildout]
parts =
libuuid
[libuuid]
recipe = hexagonit.recipe.cmmi
url = http://ftp.kernel.org/pub/linux/utils/util-linux/v2.18/util-linux-ng-2.18.tar.bz2
md5sum = 2f5f71e6af969d041d73ab778c141a77
configure-options =
--disable-static
--enable-libuuid
--disable-agetty
--disable-cramfs
--disable-fallocate
--disable-fsck
--disable-libblkid
--disable-libmount
--disable-makeinstall-chown
--disable-makeinstall-setuid
--disable-mount
--disable-nls
--disable-pivot_root
--disable-rename
--disable-require-password
--disable-schedutils
--disable-switch_root
--disable-tls
--disable-unshare
--disable-uuidd
--disable-wall
--without-libiconv-prefix
--without-libintl-prefix
--without-ncurses
--without-slang
--without-pam
--without-selinux
--without-audit
make-options =
-C shlibs/uuid
# libxml2 - the XML C parser and toolkit
# http://xmlsoft.org/
[buildout]
extends = zlib.cfg
parts =
libxml2
[libxml2]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.xmlsoft.org/libxml2/libxml2-2.7.8.tar.gz
md5sum = 8127a65e8c3b08856093099b52599c86
configure-options =
--disable-static
--without-python
--with-zlib=${zlib:location}
environment =
LDFLAGS = -Wl,-rpath -Wl,${zlib:location}/lib
[buildout]
extends =
libxml2.cfg
pkgconfig.cfg
zlib.cfg
parts =
libxslt
[libxslt]
url = ftp://xmlsoft.org/libxslt/libxslt-1.1.26.tar.gz
md5sum = e61d0364a30146aaa3001296f853b2b9
recipe = hexagonit.recipe.cmmi
configure-options =
--disable-static
--with-libxml-prefix=${libxml2:location}
--without-crypto
--without-python
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-Wl,-rpath -Wl,${zlib:location}/lib
PKG_CONFIG_PATH=${libxml2:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig
[buildout]
extends =
popt.cfg
parts = logrotate
[logrotate]
recipe = hexagonit.recipe.cmmi
url = https://fedorahosted.org/releases/l/o/logrotate/logrotate-3.7.9.tar.gz
md5sum = eeba9dbca62a9210236f4b83195e4ea5
configure-command = true
make-options = PREFIX=${buildout:parts-directory}/${:_buildout_section_name_}
environment =
POPT_DIR=${popt:location}/include -L${popt:location}/lib -Wl,-rpath -Wl,${popt:location}/lib
[buildout]
extends =
libxml2.cfg
libxslt.cfg
zlib.cfg
parts =
lxml-python
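# Entries of the section below are exported as environment variables while the
# lxml egg is built (referenced by the environment option of [lxml-python]).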
[lxml-python-env]
PATH = ${libxslt:location}/bin:%(PATH)s
[lxml-python]
recipe = zc.recipe.egg:custom
egg = lxml
rpath =
${libxml2:location}/lib/
${libxslt:location}/lib/
${zlib:location}/lib/
environment = lxml-python-env
[buildout]
parts = lynx
[lynx]
recipe = hexagonit.recipe.cmmi
md5sum = 124253e635f7c76bdffc47b9d036c812
url = http://lynx.isc.org/lynx2.8.7/lynx2.8.7.tar.gz
# M2Crypto - A Python crypto and SSL toolkit
# http://chandlerproject.org/Projects/MeTooCrypto
[buildout]
extends =
openssl.cfg
swig.cfg
parts =
M2Crypto
[M2Crypto]
recipe = zc.recipe.egg:custom
egg = M2Crypto
environment = M2Crypto-env
rpath =
${openssl:location}/lib/
include-dirs =
${openssl:location}/include
library-dirs =
${openssl:location}/lib
[M2Crypto-env]
PATH = ${swig:location}/bin:%(PATH)s
[buildout]
parts =
m4
[m4]
recipe = hexagonit.recipe.cmmi
url = http://ftp.gnu.org/gnu/m4/m4-1.4.15.tar.gz
md5sum = 5649a2e593b6c639deae9e72ede777dd
# MariaDB - a database server that offers drop-in replacement functionality for MySQL.
# http://mariadb.org/
[buildout]
extends =
zlib.cfg
ncurses.cfg
readline.cfg
parts =
mariadb
[mariadb]
recipe = hexagonit.recipe.cmmi
version = 5.2.6
url = http://www.percona.com/downloads/MariaDB/mariadb-${:version}/kvm-tarbake-jaunty-x86/mariadb-${:version}.tar.gz
md5sum = e562aca71ae16b490196f99aa7e64b55
# compile directory is required to build mysql plugins (see the note at the end of this section).
keep-compile-dir = true
# configure: how to avoid searching for my.cnf?
# - like in mysql part in http://svn.zope.org/zodbshootout/trunk/buildout.cfg?view=markup
# we use the embedded yassl instead of openssl to avoid compilation errors in the sphinx search engine.
configure-options =
--disable-static
--enable-thread-safe-client
--enable-local-infile
--enable-assembler
--with-pic
--with-fast-mutexes
--with-charset=utf8
--with-collation=utf8_unicode_ci
--without-readline
--with-ssl
--with-zlib-dir=${zlib:location}
environment =
CPPFLAGS =-I${ncurses:location}/include -I${readline:location}/include
LDFLAGS =-L${readline:location}/lib -L${ncurses:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath -Wl,${ncurses:location}/lib -Wl,-rpath -Wl,${readline:location}/lib
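# The kept compile directory is consumed by plugin parts such as
# [handlersocket] and [mroonga] through
#   --with-mysql-source=${mariadb:location}__compile__/mariadb-${mariadb:version}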
[buildout]
parts = memcached
extends = libevent.cfg
[memcached-strict-aliasing-patch]
# on some platforms original memcached refuses to build:
# * http://code.google.com/p/memcached/issues/detail?id=60
# * http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=565033
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = c03b3bfc237b77667b0e90442b0980e8
download-only = true
filename = memcached-fix-strict-aliasing.patch
[memcached-fix-array-subscript-is-above-array-bounds]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
filename = memcached-1.4-fix-array-subscript-is-above-array-bounds.patch
download-only = true
md5sum = 472508b9a4b6c0b9f5d6f2abce3444e3
[memcached-gcc4.6.patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
filename = memcached-gcc4.6.patch
download-only = true
md5sum = 3418477f64500cd2a8dce046f5d72fec
[memcached]
<= memcached-1.4.5
[memcached-1.4.5]
<= memcached-common
url = http://memcached.googlecode.com/files/memcached-1.4.5.tar.gz
md5sum = 583441a25f937360624024f2881e5ea8
[memcached-1.4.4]
<= memcached-common
url = http://memcached.googlecode.com/files/memcached-1.4.4.tar.gz
md5sum = 5ca5b24de347e97ac1f48f3785b4178a
[memcached-common]
recipe = hexagonit.recipe.cmmi
configure-options =
--with-libevent=${libevent:location}
patches =
${memcached-strict-aliasing-patch:location}/${memcached-strict-aliasing-patch:filename}
${memcached-fix-array-subscript-is-above-array-bounds:location}/${memcached-fix-array-subscript-is-above-array-bounds:filename}
${memcached-gcc4.6.patch:location}/${memcached-gcc4.6.patch:filename}
patch-options = -p1
environment =
LDFLAGS =-Wl,-rpath ${libevent:location}/lib
[buildout]
parts = messagepack
find-links = http://www.nexedi.org/static/packages/source/
[messagepack]
recipe = erp5.recipe.cmmiforcei686
url = http://downloads.sourceforge.net/project/msgpack/msgpack/cpp/msgpack-0.5.4.tar.gz
md5sum = 18d96a3178f7cad73c0ca44f6284ae7d
configure-options =
--disable-static
# mroonga - a MySQL storage engine using full-text search engine groonga
# http://mroonga.github.com/
# http://groonga.org/
[buildout]
parts =
mroonga
extends =
mariadb.cfg
pkgconfig.cfg
[groonga]
recipe = hexagonit.recipe.cmmi
url = http://groonga.org/files/groonga/groonga-1.1.0.tar.gz
md5sum = a5a381db446e017f172bf6b003a4fed9
configure-options =
--without-mecab
[mroonga-mariadb-patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = da2457b9f8006cddb279a165128479e5
filename = groonga-storage-engine-0.4.mariadb.patch
download-only = true
[mroonga]
recipe = hexagonit.recipe.cmmi
url = http://github.com/downloads/mroonga/mroonga/groonga-storage-engine-0.4.tar.gz
md5sum = eb39ddaebb5a295f103bb1f66d5b33da
patch-options = -p1
patches =
${mroonga-mariadb-patch:location}/${mroonga-mariadb-patch:filename}
configure-options =
--with-mysql-source=${mariadb:location}__compile__/mariadb-${mariadb:version}
--with-mysql-config=${mariadb:location}/bin/mysql_config
environment =
PATH=${groonga:location}/bin:${pkgconfig:location}/bin:%(PATH)s
CPPFLAGS=-I${groonga:location}/include/groonga
LDFLAGS=-L${groonga:location}/lib
PKG_CONFIG_PATH=${groonga:location}/lib/pkgconfig
[buildout]
extends =
autoconf.cfg
automake.cfg
zlib.cfg
bison.cfg
flex.cfg
libtool.cfg
ncurses.cfg
readline.cfg
parts =
mysql-5.1
[mysql-5.1-sphinx-patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = 6580393ca93ecf564cad0552b91a563e
filename = mysql-5.1.49-sphinx-1.10.diff
download-only = true
[mysql-5.1]
recipe = hexagonit.recipe.cmmi
version = 5.1.54
url = http://mysql.he.net/Downloads/MySQL-5.1/mysql-${:version}.tar.gz
md5sum = 2a0f45a2f8b5a043b95ce7575796a30b
# compile directory is required to build mysql plugins.
keep-compile-dir = true
# configure: how to avoid searching for my.cnf?
# - like in mysql part in http://svn.zope.org/zodbshootout/trunk/buildout.cfg?view=markup
configure-command =
libtoolize -c -f
aclocal -I ${libtool:location}/share/aclocal -I config/ac-macros
autoheader
automake -c -a -f
autoconf
touch sql/sql_yacc.yy
./configure
# we use the embedded yassl instead of openssl to avoid compilation errors in the sphinx search engine.
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--enable-thread-safe-client
--enable-local-infile
--enable-assembler
--with-pic
--with-fast-mutexes
--with-charset=utf8
--with-collation=utf8_unicode_ci
--with-server-suffix=mysql-5.1
--without-readline
--with-ssl
--with-zlib-dir=${zlib:location}
make-options =
LIBTOOL=libtool
patch-options = -p1
patches =
${mysql-5.1-sphinx-patch:location}/${mysql-5.1-sphinx-patch:filename}
environment =
PATH =${autoconf:location}/bin:${automake-1.11:location}/bin:${libtool:location}/bin:${bison:location}/bin:${flex:location}/bin:%(PATH)s
CPPFLAGS =-I${ncurses:location}/include -I${readline:location}/include
LDFLAGS =-L${readline:location}/lib -L${ncurses:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath -Wl,${ncurses:location}/lib -Wl,-rpath -Wl,${readline:location}/lib
[buildout]
extends =
mariadb.cfg
openssl.cfg
zlib.cfg
parts =
mysql-python
[mysql-python-env]
PATH =${mariadb:location}/bin:%(PATH)s
[mysql-python]
recipe = zc.recipe.egg:custom
egg = MySQL-python
environment = mysql-python-env
rpath =
${mariadb:location}/lib/mysql/
${zlib:location}/lib/
${openssl:location}/lib/
# This is software part of buildout for mysql with senna
# Originally made by Leonardo Rochael Almeida <leorochael@gmail.com> (thanks!)
# Original place: https://svn.erp5.org/repos/public/experimental/mysqlsenna.buildout/
[buildout]
extends =
autoconf.cfg
automake.cfg
zlib.cfg
bison.cfg
flex.cfg
libtool.cfg
ncurses.cfg
openssl.cfg
readline.cfg
parts =
mysql-tritonn-5.0
[senna]
recipe = hexagonit.recipe.cmmi
url = http://www.nexedi.org/static/tarballs/senna/senna-r1311.tar.gz
md5sum = 1cc51554789f81a9e5208da04a5c2f4c
configure-command =
echo 'AC_CONFIG_MACRO_DIR([m4])' >> configure.ac
echo 'ACLOCAL_AMFLAGS = -I m4' >> Makefile.am
ACLOCAL_ARGS=-I${libtool:location}/share/aclocal ./autogen.sh
./configure
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--without-mecab
--disable-glibtest
make-options =
LIBTOOL=libtool
environment =
PATH=${autoconf:location}/bin:${automake-1.11:location}/bin:${libtool:location}/bin:%(PATH)s
[mysql-5.0-tritonn-patch]
recipe = hexagonit.recipe.download
url = http://dl.sourceforge.jp/tritonn/44472/${:filename}
md5sum = 257abe9c4afdc9b08033687fd486a595
filename = tritonn-1.0.12-mysql-5.0.87.diff
download-only = true
[mysql-5.0-sphinx-patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = 4ca071cde319213a213ab3605e326d1c
filename = mysql-5.0.87-sphinx-1.10.diff
download-only = true
[mysql-tritonn-5.0]
recipe = hexagonit.recipe.cmmi
url = http://www.nexedi.org/static/tarballs/mysql/mysql-5.0.87.tar.gz
md5sum = 65e6229cc98b6a8d4c5206d7fe16c7be
# configure: how to avoid searching for my.cnf?
# - like in mysql part in http://svn.zope.org/zodbshootout/trunk/buildout.cfg?view=markup
configure-command =
libtoolize -c -f
aclocal -I ${libtool:location}/share/aclocal -I config/ac-macros
autoheader
automake -c -a -f
autoconf
touch sql/sql_yacc.yy
./configure
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--with-senna
--with-openssl=${openssl:location}
--without-mecab
--enable-thread-safe-client
--with-charset=utf8
--with-collation=utf8_unicode_ci
--with-server-suffix=mysql-tritonn-5.0
--with-mysqlmanager
--enable-assembler
--without-readline
--with-sphinx-storage-engine
--with-named-curses-libs=${ncurses:location}/lib/libncurses.so
--with-zlib-dir=${zlib:location}
make-options =
LIBTOOL=libtool
LN_S='ln -s'
patch-options = -p1
patches =
${mysql-5.0-tritonn-patch:location}/${mysql-5.0-tritonn-patch:filename}
${mysql-5.0-sphinx-patch:location}/${mysql-5.0-sphinx-patch:filename}
environment =
PATH=${senna:location}/bin:${autoconf:location}/bin:${automake-1.11:location}/bin:${libtool:location}/bin:${bison:location}/bin:${flex:location}/bin:%(PATH)s
CPPFLAGS=-I${senna:location}/include/senna -I${ncurses:location}/include -I${readline:location}/include
LDFLAGS=-L${senna:location}/lib -L${readline:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath -Wl,${openssl:location}/lib -Wl,-rpath -Wl,${ncurses:location}/lib -Wl,-rpath -Wl,${readline:location}/lib
[buildout]
parts =
ncurses
[ncurses]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.gnu.org/pub/gnu/ncurses/ncurses-5.9.tar.gz
md5sum = 8cb9c412e5f2d96bc6f459aa8c6282a1
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--with-shared
--without-normal
--without-debug
--enable-rpath
# tricky way to rerun the build with --enable-widec (see the note below)
make-targets =
install && (for i in curses unctrl eti form menu panel term; do ln -sf ncurses/$i.h ${buildout:parts-directory}/${:_buildout_section_name_}/include/$i.h; done) && ./configure ${:configure-options} --enable-widec && make install
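# i.e. (illustration): run the normal install, symlink the ncurses headers into
# include/, then reconfigure with --enable-widec and run make install again to
# add the wide-character build on top of it.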
environment =
LDFLAGS =-Wl,--as-needed
make-options =
-j1
[buildout]
parts =
neon
extends =
libxml2.cfg
openssl.cfg
pkgconfig.cfg
zlib.cfg
[neon]
recipe = hexagonit.recipe.cmmi
url = http://www.webdav.org/neon/neon-0.29.5.tar.gz
md5sum = ff369e69ef0f0143beb5626164e87ae2
configure-options =
--disable-static
--enable-shared
--with-ssl=openssl
--without-expat
--without-gssapi
--with-libxml2
--enable-threadsafe-ssl=posix
--disable-nls
environment =
PATH=${libxml2:location}/bin:${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${libxml2:location}/lib/pkgconfig
CPPFLAGS=-I${openssl:location}/include -I${zlib:location}/include
LDFLAGS=-L${openssl:location}/lib -Wl,-rpath -Wl,${openssl:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath -Wl,${libxml2:location}/lib
[buildout]
parts = nullmailer
[nullmailer]
recipe = hexagonit.recipe.cmmi
url = http://untroubled.org/nullmailer/nullmailer-1.05.tar.gz
md5sum = 35124cc05f893efba1310e2ec7c876ff
[buildout]
parts = ocropus
[ocropus-patch-scons]
recipe = hexagonit.recipe.download
download-only = true
filename = ocropus.SConstruct-local-installation.patch
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = 08710ec022f3ce13e5c0b584dfee2c1c
[ocropus]
recipe = hexagonit.recipe.cmmi
url = http://www.nexedi.org/static/packages/source/ocropus/ocropus-0.4.4.tar.gz
md5sum = 1485dbe9aab27574bfe3c8b4395cf3ce
configure-options =
${iulib:location}
patches = ${ocropus-patch-scons:location}/${ocropus-patch-scons:filename}
patch-options = -p1
environment =
PATH =${scons-bin:destination_directory}:%(PATH)s
[scons-local]
recipe = hexagonit.recipe.download
url = http://prdownloads.sourceforge.net/scons/scons-local-2.0.1.tar.gz
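# The part below exposes the downloaded scons-local script as a plain `scons`
# executable, so that the iulib and ocropus builds can find it on PATH.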
[scons-bin]
recipe = plone.recipe.command
destination_directory = ${scons-local:location}
command =
ln -sf ${scons-local:location}/scons.py ${:destination_directory}/scons
[iulib-patch-scons]
recipe = hexagonit.recipe.download
download-only = true
filename = iulib.SConstruct-libtiff-detection.patch
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = dbbdf909814fb84ffe47e2ff59980db7
[iulib]
recipe = hexagonit.recipe.cmmi
url = http://www.nexedi.org/static/packages/source/ocropus/iulib-0.4.4.tar.gz
md5sum = 3d9754dbd64c56029ce1cd7c2f61894c
# workaround http://code.google.com/p/iulib/issues/detail?id=27
patches = ${iulib-patch-scons:location}/${iulib-patch-scons:filename}
patch-options = -p1
environment =
PATH =${scons-bin:destination_directory}:%(PATH)s
# OpenOffice.org daemon software buildout
[buildout]
extends = openoffice-bin.cfg
parts +=
oood
[oood]
recipe = hexagonit.recipe.download
url = http://www.nexedi.org/static/tarballs/oood/oood-r36294.tar.gz
md5sum = 9e71251eea4b310fd6bd4ebf8abf890e
strip-top-level-dir = true
[buildout]
parts =
openldap
extends =
openssl.cfg
cyrus-sasl.cfg
[openldap]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.openldap.org/pub/OpenLDAP/openldap-release/openldap-2.4.25.tgz
md5sum = ec63f9c2add59f323a0459128846905b
configure-options =
--disable-static
--disable-slapd
--enable-backends=no
--enable-overlays=no
--with-cyrus-sasl
--without-fetch
--disable-rewrite
--disable-odbc
--with-threads
--with-tls=openssl
environment =
CPPFLAGS=-I${openssl:location}/include -I${cyrus-sasl:location}/include
LDFLAGS=-L${openssl:location}/lib -Wl,-rpath -Wl,${openssl:location}/lib -L${cyrus-sasl:location}/lib -Wl,-rpath -Wl,${cyrus-sasl:location}/lib
[buildout]
parts =
openoffice-bin
find-links =
http://www.nexedi.org/static/packages/source/
[openoffice-bin]
# XXX: This section and recipe should be reviewed with respect to using the
# zc.buildout.download API and generally following the zc.buildout way
# of dealing with remote resources
#recipe = erp5.recipe.openoffice
recipe = z3c.recipe.openoffice
install-pyuno-egg = no
hack-openoffice-python = no
base-url = http://download.services.openoffice.org/files/stable/3.3.0/OOo_3.3.0_Linux_%s_install-rpm-wJRE_en-US.tar.gz
version = 3
# profile to build openoffice from source
[buildout]
[buildout]
[opensp]
recipe = hexagonit.recipe.cmmi
url = http://prdownloads.sourceforge.net/openjade/OpenSP-1.5.2.tar.gz
md5sum = 670b223c5d12cee40c9137be86b6c39b
configure-options =
--disable-static
--disable-doc-build
# OpenSSL - a toolkit implementing SSL v2/v3 and TLS v1 protocols as
# well as a full-strength general purpose cryptography
# library.
# http://www.openssl.org/
[buildout]
extends =
zlib.cfg
parts =
openssl
[openssl]
recipe = hexagonit.recipe.cmmi
url = https://www.openssl.org/source/openssl-1.0.0d.tar.gz
md5sum = 40b6ea380cc8a5bf9734c2f8bf7e701e
configure-command = ./config
configure-options =
-I${zlib:location}/include
-L${zlib:location}/lib
--openssldir=${buildout:parts-directory}/${:_buildout_section_name_}/etc/ssl
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--libdir=lib
shared
no-zlib
# it seems that parallel build sometimes fails for openssl.
make-options =
-j1
LDFLAGS="-Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath -Wl,${buildout:parts-directory}/${:_buildout_section_name_}/lib"
SHARED_LDFLAGS="-Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath -Wl,${buildout:parts-directory}/${:_buildout_section_name_}/lib"
[buildout]
parts =
patch
[patch]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.gnu.org/gnu/patch/patch-2.6.1.tar.gz
md5sum = d758eb96d3f75047efc004a720d33daf
[buildout]
parts =
pcre
[pcre]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre-8.12.tar.bz2
md5sum = f14a9fef3c92f3fc6c5ac92d7a2c7eb3
configure-options =
--disable-static
--enable-utf8
--enable-unicode-properties
[buildout]
# Note: A locally built gcc with gcj is used, because gcj availability and
# behaviour are highly unstable across operating systems.
# The only way to get a portable pdftk profile is therefore to provide a
# working gcj ourselves and have it used for the pdftk compilation.
extends =
fastjar.cfg
gcc.cfg
parts = pdftk
[pdftk-1.44-Makefile.Base.patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
download-only = true
md5sum = 04a3607213e4e638c1fd993321804499
filename = pdftk-1.44-Makefile.Base.patch
[pdftk-hooks-download]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../hook/${:filename}
filename = pdftk-hooks.py
md5sum = d2a067b98953ad30ba1230f87dfb8b92
download-only = true
[pdftk]
recipe = hexagonit.recipe.cmmi
url = http://www.pdflabs.com/tools/pdftk-the-pdf-toolkit/pdftk-1.44-src.zip
md5sum = 9eb50fffcd621a627d387750c60982b4
configure-command = true
patches =
${pdftk-1.44-Makefile.Base.patch:location}/${pdftk-1.44-Makefile.Base.patch:filename}
patch-options = -p1
# pdftk does not work correctly with parallel building
make-options =
-j1
-f Makefile.Redhat
DEST=${buildout:parts-directory}/${:_buildout_section_name_}
pre-make-hook = ${pdftk-hooks-download:location}/${pdftk-hooks-download:filename}:pre_make_hook
environment =
PATH=${gcc-java:location}/bin:${fastjar:location}/bin:%(PATH)s
LDFLAGS=-L${gcc-java:location}/lib -Wl,-rpath -Wl,${gcc-java:location}/lib -L${gcc-java:location}/lib64 -Wl,-rpath -Wl,${gcc-java:location}/lib64
LD_LIBRARY_PATH=${gcc-java:location}/lib:${gcc-java:location}/lib64
[buildout]
extends =
perl.cfg
parts =
perl-Class-Accessor
[perl-Class-Accessor]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
url = http://search.cpan.org/CPAN/authors/id/K/KA/KASEI/Class-Accessor-0.34.tar.gz
md5sum = 0d9640d237a13276145f7e44b4855b89
configure-command =
${perl:location}/bin/perl Makefile.PL
[buildout]
extends =
perl.cfg
parts =
perl-Config-General
[perl-Config-General]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
url = http://search.cpan.org/CPAN/authors/id/T/TL/TLINDEN/Config-General-2.50.tar.gz
md5sum = 18d9b00582e8943956b32f8a9ba23380
configure-command =
${perl:location}/bin/perl Makefile.PL
[buildout]
extends =
perl.cfg
parts =
perl-Encode-HanExtra
[perl-Encode-HanExtra]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
url = http://search.cpan.org/CPAN/authors/id/A/AU/AUDREYT/Encode-HanExtra-0.23.tar.gz
md5sum = e1d3bc32c1c8ee304235a06fbcd5d5a4
configure-command =
${perl:location}/bin/perl Makefile.PL
[buildout]
extends =
perl.cfg
parts =
perl-HTML-Encoding
[perl-HTML-Encoding]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
url = http://search.cpan.org/CPAN/authors/id/B/BJ/BJOERN/HTML-Encoding-0.61.tar.gz
md5sum = bce9f00f04ad055feaa4d6511b30e421
configure-command =
${perl:location}/bin/perl Makefile.PL
[buildout]
extends =
perl.cfg
perl-HTML-Tagset.cfg
parts =
perl-HTML-Parser
[perl-HTML-Parser]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
${perl-HTML-Tagset:location}
url = http://search.cpan.org/CPAN/authors/id/G/GA/GAAS/HTML-Parser-3.68.tar.gz
md5sum = 5550b2da7aa94341f1e8a17a4ac20c68
configure-command =
${perl:location}/bin/perl Makefile.PL
[buildout]
extends =
perl.cfg
parts =
perl-HTML-Tagset
[perl-HTML-Tagset]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
url = http://search.cpan.org/CPAN/authors/id/P/PE/PETDANCE/HTML-Tagset-3.20.tar.gz
md5sum = d2bfa18fe1904df7f683e96611e87437
configure-command =
${perl:location}/bin/perl Makefile.PL
[buildout]
extends =
perl.cfg
parts =
perl-HTML-Template
[perl-HTML-Template]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
url = http://search.cpan.org/CPAN/authors/id/S/SA/SAMTREGAR/HTML-Template-2.9.tar.gz
md5sum = cbf88a486b36284be55765ac7357c187
configure-command =
${perl:location}/bin/perl Makefile.PL
[buildout]
extends =
perl.cfg
parts =
perl-JSON
[perl-JSON]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
url = http://search.cpan.org/CPAN/authors/id/M/MA/MAKAMAKA/JSON-2.50.tar.gz
md5sum = 7aa1f17255ec8b074a1e8b18df23d6fb
configure-command =
${perl:location}/bin/perl Makefile.PL
[buildout]
extends =
perl.cfg
parts =
perl-Net-IP
[perl-Net-IP]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
url = http://search.cpan.org/CPAN/authors/id/M/MA/MANU/Net-IP-1.25.tar.gz
md5sum = a49c0b02a9b793ff60191cdafc0c202e
configure-command =
${perl:location}/bin/perl Makefile.PL
[buildout]
extends =
opensp.cfg
perl.cfg
perl-Class-Accessor.cfg
parts =
perl-SGML-Parser-OpenSP
[perl-SGML-Parser-OpenSP]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
${perl-Class-Accessor:location}
url = http://search.cpan.org/CPAN/authors/id/B/BJ/BJOERN/SGML-Parser-OpenSP-0.994.tar.gz
md5sum = b1ee0244e9daa4d37f28cf32c4753691
configure-command =
${perl:location}/bin/perl Makefile.PL
make-options =
INC=-I${opensp:location}/include
OTHERLDFLAGS="-L${opensp:location}/lib -Wl,-rpath -Wl,${opensp:location}/lib"
[buildout]
extends =
perl.cfg
parts =
perl-URI
[perl-URI]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
url = http://search.cpan.org/CPAN/authors/id/G/GA/GAAS/URI-1.58.tar.gz
md5sum = 540575aee18616ad9a21e0af7a1e7b18
configure-command =
${perl:location}/bin/perl Makefile.PL
[buildout]
extends =
libxml2.cfg
perl.cfg
perl-XML-NamespaceSupport.cfg
perl-XML-SAX.cfg
zlib.cfg
parts =
perl-XML-LibXML
[perl-XML-LibXML]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
${perl-XML-NamespaceSupport:location}
${perl-XML-SAX:location}
url = http://search.cpan.org/CPAN/authors/id/P/PA/PAJAS/XML-LibXML-1.70.tar.gz
md5sum = 33d4294f708e20c298cfe534d1166844
configure-command =
${perl:location}/bin/perl Makefile.PL DEBUG=1 \
INC=-I${libxml2:location}/include/libxml2 \
LIBS="-L${libxml2:location}/lib -L${zlib:location}/lib"
# Parallel make does not work for this package on fast machines
# with many cores
make-options =
OTHERLDFLAGS=" -Wl,-rpath -Wl,${libxml2:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib" -j1
environment =
LD_LIBRARY_PATH=${libxml2:location}/lib:${zlib:location}/lib
PERLLIB=blib/lib
[buildout]
extends =
perl.cfg
parts =
perl-XML-NamespaceSupport
[perl-XML-NamespaceSupport]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
url = http://search.cpan.org/CPAN/authors/id/P/PE/PERIGRIN/XML-NamespaceSupport-1.11.tar.gz
md5sum = 222cca76161cd956d724286d36b607da
configure-command =
${perl:location}/bin/perl Makefile.PL
[buildout]
extends =
perl.cfg
perl-XML-NamespaceSupport.cfg
parts =
perl-XML-SAX
[perl-XML-SAX]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
${perl-XML-NamespaceSupport:location}
url = http://search.cpan.org/CPAN/authors/id/G/GR/GRANTM/XML-SAX-0.96.tar.gz
md5sum = bdcd4119a62505184e211e9dfaef0ab1
configure-command =
${perl:location}/bin/perl Makefile.PL
environment =
PERLLIB=blib/lib
[buildout]
extends =
perl.cfg
perl-HTML-Parser.cfg
parts =
perl-libwww-perl
[perl-libwww-perl]
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
${perl-HTML-Parser:location}
url = http://search.cpan.org/CPAN/authors/id/G/GA/GAAS/libwww-perl-5.837.tar.gz
md5sum = 9bbf1bce482b0bac98bb4f04253c03d0
configure-command =
${perl:location}/bin/perl Makefile.PL
[buildout]
extends =
gdbm.cfg
patch.cfg
parts =
perl
[perl-keep-linker-flags-in-ldflags.patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = 9873a89c969bd5a478434c3b8b2d57d8
download-only = true
filename = ${:_buildout_section_name_}
[perl]
recipe = hexagonit.recipe.cmmi
version = 5.14.0
url = http://www.cpan.org/src/5.0/perl-${:version}.tar.bz2
md5sum = e7457deea78330c5f8eebb2fd2a45479
patch-options = -p1
patches =
${perl-keep-linker-flags-in-ldflags.patch:location}/${perl-keep-linker-flags-in-ldflags.patch:filename}
configure-command =
sh Configure -des \
-Dprefix=${buildout:parts-directory}/${:_buildout_section_name_} \
-Dsiteprefix=${buildout:parts-directory}/site_${:_buildout_section_name_} \
-Dcflags=-I${gdbm:location}/include \
-Dldflags="-L${gdbm:location}/lib -Wl,-rpath -Wl,${gdbm:location}/lib" \
-Ui_db \
-Dnoextensions=ODBM_File
environment =
PATH=${patch:location}/bin:%(PATH)s
# pkg-config - a helper tool used when compiling applications and libraries
# http://pkgconfig.freedesktop.org/
[buildout]
parts =
pkgconfig
extends =
gettext.cfg
glib.cfg
popt.cfg
[pkgconfig]
recipe = hexagonit.recipe.cmmi
url = http://pkgconfig.freedesktop.org/releases/pkg-config-0.25.tar.gz
md5sum = a3270bab3f4b69b7dc6dbdacbcae9745
location = ${buildout:parts-directory}/${:_buildout_section_name_}
# build pkg-config twice so that second configure can use pkg-config
# to compute GLIB_CFLAGS and GLIB_LIBS.
configure-command =
./configure --prefix=${:location} --with-installed-glib --with-installed-popt && make && make install && ./configure
configure-options =
--prefix=${:location}
--with-installed-glib
--with-installed-popt
environment =
PATH=${:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig
CPPFLAGS=-I${glib:location}/include -I${popt:location}/include
LDFLAGS=-L${gettext:location}/lib -Wl,-rpath -Wl,${gettext:location}/lib -L${glib:location}/lib -Wl,-rpath -Wl,${glib:location}/lib -L${popt:location}/lib -Wl,-rpath -Wl,${popt:location}/lib
[buildout]
parts =
popt
[popt]
recipe = hexagonit.recipe.cmmi
url = ftp://anduin.linuxfromscratch.org/BLFS/svn/p/popt-1.16.tar.gz
md5sum = 3743beefa3dd6247a73f8f7a32c14c33
configure-options =
--disable-static
# Postfix
# http://www.postfix.org/
[buildout]
extends =
libdb.cfg
openssl.cfg
pcre.cfg
parts = postfix
[postfix]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.porcupine.org/mirrors/postfix-release/official/postfix-2.8.3.tar.gz
md5sum = b3922ededd3fd6051f759e58a4ada3ae
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-command = make
configure-options = makefiles CCARGS='-DUSE_TLS -DHAS_PCRE -DHAS_DB -I${libdb:location}/include -I${pcre:location}/include -I${openssl:location}/include' AUXLIBS='-L${openssl:location}/lib -L${pcre:location}/lib -L${libdb:location}/lib -lssl -lpcre -ldb -lcrypto -Wl,-rpath -Wl,${openssl:location}/lib -Wl,-rpath -Wl,${pcre:location}/lib -Wl,-rpath -Wl,${libdb:location}/lib'
make-targets = non-interactive-package install_root=${:location}
[buildout]
extends =
apache.cfg
subversion.cfg
parts =
pysvn-python
[pysvn-python]
recipe = zc.recipe.egg:custom
egg =
pysvn
include-dirs =
${subversion:location}/include/subversion-1
${apache:location}/include
library-dirs =
${subversion:location}/lib
${apache:location}/lib
[buildout]
extends =
bzip2.cfg
gdbm.cfg
gettext.cfg
ncurses.cfg
openssl.cfg
readline.cfg
sqlite3.cfg
zlib.cfg
parts =
pythonbin2.4
[python2.4-dbm-patch]
recipe = hexagonit.recipe.download
# original patch from http://bugs.gentoo.org/attachment.cgi?id=109117
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = 8611020af1463b42f253ac73a91b09a1
download-only = true
filename = python2.4-dbm.patch
[python2.4-no_system_inc_dirs-patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = 65c1193ac42c15109be0e6f9b7a671b8
download-only = true
filename = python2.4-no_system_inc_dirs.patch
[python2.4-CPPFLAGS-patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = 912b6d4b107e3f382995a4d3afcd3eca
download-only = true
filename = python2.4-backport-CPPFLAGS-setup-from-python2.6.patch
[python2.4]
recipe = hexagonit.recipe.cmmi
# This is actually the default setting for prefix, but we can't use it in
# other settings in this part if we don't set it explicitly here.
prefix = ${buildout:parts-directory}/${:_buildout_section_name_}
version = 2.4
package_version = ${:version}.6
executable = ${:prefix}/bin/python${:version}
python_version_major = 2.4
python_version_minor = 6
python_version_full = ${:python_version_major}.${:python_version_minor}
url =
http://python.org/ftp/python/${:python_version_full}/Python-${:python_version_full}.tgz
patches =
${python2.4-dbm-patch:location}/${python2.4-dbm-patch:filename}
${python2.4-no_system_inc_dirs-patch:location}/${python2.4-no_system_inc_dirs-patch:filename}
${python2.4-CPPFLAGS-patch:location}/${python2.4-CPPFLAGS-patch:filename}
configure-options =
--enable-unicode=ucs4
--with-threads
environment =
CPPFLAGS=-I${zlib:location}/include -I${readline:location}/include -I${ncurses:location}/include/ -I${ncurses:location}/include/ncursesw/ -I${bzip2:location}/include -I${gdbm:location}/include -I${openssl:location}/include -I${sqlite3:location}/include -I${gettext:location}/include
LDFLAGS=-L${zlib:location}/lib -L${readline:location}/lib -L${ncurses:location}/lib -L${bzip2:location}/lib -L${gdbm:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath -Wl,${readline:location}/lib -Wl,-rpath -Wl,${ncurses:location}/lib -Wl,-rpath -Wl,${bzip2:location}/lib -Wl,-rpath -Wl,${gdbm:location}/lib -Wl,-rpath -Wl,${openssl:location}/lib -Wl,-rpath -Wl,${sqlite3:location}/lib -L${gettext:location}/lib -Wl,-rpath -Wl,${gettext:location}/lib
[pythonbin2.4]
# XXX/Note: This is a hackish way to have a fully featured python interpreter
recipe = zc.recipe.egg:scripts
python = python2.4
eggs = ${eggs:eggs}
invokepython>=0.4
extra-paths =
${zope-2.8:location}/lib/python
scripts =
invokepython=python${python2.4:python_version_major}
ipython=ipython${python2.4:python_version_major}
[bootstrap2.4]
recipe = zc.recipe.egg
eggs = zc.buildout
suffix =
scripts =
buildout=bootstrap2.4
arguments = sys.argv[1:] + ["bootstrap"]
suffix = 2.4
python = python2.4
[buildout]
# XXX: Extends shall not jump out of software
extends =
bzip2.cfg
gdbm.cfg
gettext.cfg
ncurses.cfg
openssl.cfg
readline.cfg
sqlite3.cfg
zlib.cfg
parts =
python2.6
[python-2.6.6-no_system_inc_dirs.patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
path = ${:filename}
md5sum = ed7f8e11a97e45e5e09649c8b242e917
download-only = true
filename = python-2.6.6-no_system_inc_dirs.patch
[python2.6]
recipe = hexagonit.recipe.cmmi
# This is actually the default setting for prefix, but we can't use it in
# other settings in this part if we don't set it explicitly here.
prefix = ${buildout:parts-directory}/${:_buildout_section_name_}
version = 2.6
package_version = ${:version}.6
executable = ${:prefix}/bin/python${:version}
url =
http://python.org/ftp/python/${:package_version}/Python-${:package_version}.tgz
md5sum = b2f209df270a33315e62c1ffac1937f0
patch-options = -p1
patches =
${python-2.6.6-no_system_inc_dirs.patch:location}/${python-2.6.6-no_system_inc_dirs.patch:filename}
configure-options =
--enable-unicode=ucs4
--with-threads
environment =
CPPFLAGS=-I${zlib:location}/include -I${readline:location}/include -I${ncurses:location}/include/ -I${ncurses:location}/include/ncursesw/ -I${bzip2:location}/include -I${gdbm:location}/include -I${openssl:location}/include -I${sqlite3:location}/include -I${gettext:location}/include
LDFLAGS=-L${zlib:location}/lib -L${readline:location}/lib -L${ncurses:location}/lib -L${bzip2:location}/lib -L${gdbm:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath -Wl,${readline:location}/lib -Wl,-rpath -Wl,${ncurses:location}/lib -Wl,-rpath -Wl,${bzip2:location}/lib -Wl,-rpath -Wl,${gdbm:location}/lib -Wl,-rpath -Wl,${openssl:location}/lib -Wl,-rpath -Wl,${sqlite3:location}/lib -L${gettext:location}/lib -Wl,-rpath -Wl,${gettext:location}/lib
[bootstrap2.6]
recipe = zc.recipe.egg
eggs = zc.buildout
suffix =
scripts =
buildout=bootstrap2.6
arguments = sys.argv[1:] + ["bootstrap"]
python = python2.6
[buildout]
extends =
bzip2.cfg
gdbm.cfg
gettext.cfg
ncurses.cfg
openssl.cfg
readline.cfg
sqlite3.cfg
zlib.cfg
parts =
python2.7
[python2.7]
<= python2.7.1
[python2.7.0]
<= python2.7common
package_version = 2.7
md5sum = 35f56b092ecf39a6bd59d64f142aae0f
package_version_suffix =
[python2.7.1]
<= python2.7common
package_version = 2.7.1
md5sum = 15ed56733655e3fab785e49a7278d2fb
package_version_suffix =
[bootstrap2.7]
recipe = zc.recipe.egg
eggs = zc.buildout
suffix =
scripts =
buildout=bootstrap2.7
arguments = sys.argv[1:] + ["bootstrap"]
python = python2.7
[python2.7common]
recipe = hexagonit.recipe.cmmi
# This is actually the default setting for prefix, but we can't use it in
# other settings in this part if we don't set it explicitly here.
prefix = ${buildout:parts-directory}/${:_buildout_section_name_}
version = 2.7
executable = ${:prefix}/bin/python${:version}
url =
http://python.org/ftp/python/${:package_version}/Python-${:package_version}${:package_version_suffix}.tgz
configure-options =
--enable-unicode=ucs4
--with-threads
environment =
CPPFLAGS=-I${zlib:location}/include -I${readline:location}/include -I${ncurses:location}/include/ -I${ncurses:location}/include/ncursesw/ -I${bzip2:location}/include -I${gdbm:location}/include -I${openssl:location}/include -I${sqlite3:location}/include -I${gettext:location}/include
LDFLAGS=-L${zlib:location}/lib -L${readline:location}/lib -L${ncurses:location}/lib -L${bzip2:location}/lib -L${gdbm:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath -Wl,${readline:location}/lib -Wl,-rpath -Wl,${ncurses:location}/lib -Wl,-rpath -Wl,${bzip2:location}/lib -Wl,-rpath -Wl,${gdbm:location}/lib -Wl,-rpath -Wl,${openssl:location}/lib -Wl,-rpath -Wl,${sqlite3:location}/lib -L${gettext:location}/lib -Wl,-rpath -Wl,${gettext:location}/lib
[buildout]
parts =
python-ldap
extends =
cyrus-sasl.cfg
openldap.cfg
openssl.cfg
[python-ldap-python]
recipe = zc.recipe.egg:custom
egg = python-ldap
rpath =
${openldap:location}/lib
${cyrus-sasl:location}/lib
${openssl:location}/lib
include-dirs =
${openldap:location}/include
${cyrus-sasl:location}/include/sasl
${openssl:location}/include
library-dirs =
${openldap:location}/lib
${cyrus-sasl:location}/lib
${openssl:location}/lib
[buildout]
extends =
librsync.cfg
parts =
rdiff-backup
[rdiff-backup-build]
recipe = zc.recipe.egg:custom
egg = rdiff-backup
include-dirs =
${librsync:location}/include/
library-dirs =
${librsync:location}/lib/
rpath =
${librsync:location}/lib/
find-links = http://download.savannah.nongnu.org/releases/rdiff-backup/rdiff-backup-1.0.5.tar.gz
[rdiff-backup]
# Scripts-only generation part for rdiff-backup
unzip = true
recipe = zc.recipe.egg
eggs =
${rdiff-backup-build:egg}
entry-points =
rdiff-backup=rdiff_backup.Main:Main
arguments = sys.argv[1:]
[buildout]
parts =
readline
extends =
ncurses.cfg
[readline]
recipe = hexagonit.recipe.cmmi
url = http://ftp.gnu.org/gnu/readline/readline-6.1.tar.gz
md5sum = fc2f7e714fe792db1ce6ddc4c9fb4ef3
configure-options =
--disable-static
--with-ncurses=${ncurses:location}
environment =
LDFLAGS =-Wl,-rpath ${ncurses:location}/lib
[buildout]
parts =
sed
[sed]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.gnu.org/gnu/sed/sed-4.2.1.tar.gz
md5sum = f0fd4d7da574d4707e442285fd2d3b86
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
environment =
LDFLAGS =-Wl,--as-needed
[buildout]
parts =
serf
extends =
apache.cfg
libuuid.cfg
openssl.cfg
zlib.cfg
[serf]
recipe = hexagonit.recipe.cmmi
url = http://serf.googlecode.com/files/serf-0.7.0.tar.gz
md5sum = 3233f22a5875320da7bdc854d0873080
configure-options =
--with-apr=${apache:location}/bin/apr-1-config
--with-apr-util=${apache:location}/bin/apu-1-config
--with-openssl=${openssl:location}
environment =
CFLAGS=-I${zlib:location}/include -I${libuuid:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -L${libuuid:location}/lib -Wl,-rpath -Wl,${libuuid:location}/lib
[buildout]
parts = sphinx
extends =
libexpat.cfg
mariadb.cfg
zlib.cfg
[sphinx-1.10-fix_nosigpipe.patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = b606b2a267ebfbd009bb1e72e7a29462
download-only = true
filename = sphinx-1.10-fix_nosigpipe.patch
[sphinx-1.10-beta-snowball.patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
path = ${:filename}
md5sum = 65a5ee78bf27c030734674c018006287
download-only = true
filename = sphinx-1.10-beta-snowball.patch
[sphinx]
recipe = hexagonit.recipe.cmmi
url = http://www.sphinxsearch.com/files/sphinx-1.10-beta.tar.gz
md5sum = 5b52ce9e93a73c66d37bc3a2402f14fa
configure-options =
--with-mysql
--with-mysql-includes=${mariadb:location}/include/mysql
--with-mysql-libs=${mariadb:location}/lib/mysql
--with-libstemmer
--with-iconv
--without-pgsql
--without-unixodbc
patch-options = -p1
patches =
${sphinx-1.10-fix_nosigpipe.patch:location}/${sphinx-1.10-fix_nosigpipe.patch:filename}
${sphinx-1.10-beta-snowball.patch:location}/${sphinx-1.10-beta-snowball.patch:filename}
environment =
CPPFLAGS=-I${zlib:location}/include -I${libexpat:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath ${mariadb:location}/lib/mysql -L${libexpat:location}/lib -Wl,-rpath ${libexpat:location}/lib
[buildout]
extends = readline.cfg
parts =
sqlite3
[sqlite3]
recipe = hexagonit.recipe.cmmi
url = http://www.sqlite.org/sqlite-autoconf-3070602.tar.gz
md5sum = f16c08617968b4087b3d591fd575f59f
configure-options =
--disable-static
--enable-readline
environment =
CPPFLAGS=-I${readline:location}/include -I${ncurses:location}/include
LDFLAGS=-L${buildout:parts-directory}/${:_buildout_section_name_} -Wl,-rpath -Wl,${readline:location}/lib -Wl,-rpath -Wl,${ncurses:location}/lib -L${readline:location}/lib -L${ncurses:location}/lib
[buildout]
extends = openssl.cfg
parts =
stunnel
[stunnel]
<= stunnel-4
recipe = hexagonit.recipe.cmmi
[stunnel-4-hook-download]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../hook/${:filename}
md5sum = 5b099b386c34c5b8d0664c0292ba322a
download-only=true
filename = stunnel-4-hooks.py
[stunnel-4]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.stunnel.org/stunnel/stunnel-4.36.tar.gz
md5sum = 600a09b03798424842b24548ca1e4235
pre-configure-hook = ${stunnel-4-hook-download:location}/${stunnel-4-hook-download:filename}:pre_configure_hook
configure-options =
--enable-ipv6
--disable-libwrap
--with-ssl=${openssl:location}
environment =
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-Wl,-rpath -Wl,${openssl:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib
# Subversion - Enterprise-class centralized version control for the masses
# http://subversion.apache.org/
[buildout]
extends =
apache.cfg
libexpat.cfg
libuuid.cfg
neon.cfg
pkgconfig.cfg
sqlite3.cfg
zlib.cfg
parts =
subversion
[subversion-1.6.0-disable_linking_against_unneeded_libraries]
recipe = hexagonit.recipe.download
download-only = true
filename = ${:_buildout_section_name_}.patch
# Patch available thanks to Gentoo developers
# http://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/dev-vcs/subversion/files/subversion-1.6.0-disable_linking_against_unneeded_libraries.patch?revision=1.1
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = 8d911ec2422dc4c08a00693ac915a07a
[subversion]
recipe = hexagonit.recipe.cmmi
url = http://subversion.tigris.org/downloads/subversion-1.6.16.tar.bz2
md5sum = 32f25a6724559fe8691d1f57a63f636e
patches =
${subversion-1.6.0-disable_linking_against_unneeded_libraries:location}/${subversion-1.6.0-disable_linking_against_unneeded_libraries:filename}
configure-options =
--disable-static
--with-apr=${apache:location}/bin/apr-1-config
--with-apr-util=${apache:location}/bin/apu-1-config
--without-apxs
--with-zlib=${zlib:location}
--with-sqlite=${sqlite3:location}
--with-neon=${neon:location}
--without-berkeley-db
--without-sasl
--without-apr_memcache
--without-gnome-keyring
--without-kwallet
--without-jdk
--without-jikes
--without-swig
--without-junit
--without-ctypesgen
--without-ruby-sitedir
--without-ruby-test-verbose
--disable-nls
# it seems that parallel build sometimes fails.
make-options =
-j1
environment =
PATH=${pkgconfig:location}/bin:${neon:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${apache:location}/lib/pkgconfig:${sqlite3:location}/lib/pkgconfig:${openssl:location}/lib/pkgconfig:${neon:location}/lib/pkgconfig
CPPFLAGS=-I${libexpat:location}/include -I${libuuid:location}/include
LDFLAGS=-L${libexpat:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath -Wl,${sqlite3:location}/lib -Wl,-rpath -Wl,${neon:location}/lib -Wl,-rpath -Wl,${apache:location}/lib -L${libuuid:location}/lib -Wl,-rpath -Wl,${libuuid:location}/lib
# swig - Generate scripting interfaces to C/C++ code
# http://www.swig.org/
[buildout]
extends =
bison.cfg
parts =
swig
[swig]
recipe = hexagonit.recipe.cmmi
url = http://downloads.sourceforge.net/project/swig/swig/swig-1.3.40/swig-1.3.40.tar.gz
md5sum = 2df766c9e03e02811b1ab4bba1c7b9cc
configure-options =
--disable-ccache
--with-python=${buildout:executable}
--without-allegrocl
--without-chicken
--without-clisp
--without-csharp
--without-gcj
--without-guile
--without-java
--without-lua
--without-mzscheme
--without-ocaml
--without-octave
--without-perl5
--without-php
--without-pike
--without-python3
--without-r
--without-ruby
--without-tcl
environment =
PATH = ${bison:location}/bin:%(PATH)s
[buildout]
extends =
jbigkit.cfg
libjpeg.cfg
libpng.cfg
libtiff.cfg
zlib.cfg
parts =
tesseract-eng-traineddata-unzip
tesseract
[tesseract-share]
# XXX: tesseract does not seem to be easily configurable at runtime as to where
# it finds its trained data, so move its datadir out to a location we control
recipe = plone.recipe.command
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = mkdir -p ${:location} && mkdir -p ${:location}/tessdata
update-command = ${:command}
stop-on-error = yes
[tesseract]
recipe = hexagonit.recipe.cmmi
url = http://tesseract-ocr.googlecode.com/files/tesseract-3.00.tar.gz
md5sum = cc812a261088ea0c3d2da735be35d09f
configure-options =
--disable-static
--datarootdir=${tesseract-share:location}
environment =
CPPFLAGS=-I${zlib:location}/include -I${jbigkit:location}/include -I${libjpeg:location}/include -I${libtiff:location}/include -I${libpng:location}/include
LDFLAGS =-Wl,-rpath -L${zlib:location}/lib -Wl,${zlib:location}/lib -L${jbigkit:location}/lib -Wl,-rpath -Wl,${jbigkit:location}/lib -L${libjpeg:location}/lib -Wl,-rpath -Wl,${libjpeg:location}/lib -L${libtiff:location}/lib -Wl,-rpath -Wl,${libtiff:location}/lib -L${libpng:location}/lib -Wl,-rpath -Wl,${libpng:location}/lib
[tesseract-eng-traineddata]
recipe = hexagonit.recipe.download
download-only = true
url = http://tesseract-ocr.googlecode.com/files/eng.traineddata.gz
md5sum = d91041ad156cf2db36664e91ef799451
[tesseract-eng-traineddata-unzip]
location = ${buildout:parts-directory}/${:_buildout_section_name_}
recipe = plone.recipe.command
command = gunzip ${tesseract-eng-traineddata:location}/eng.traineddata.gz -c > ${tesseract-share:location}/tessdata/eng.traineddata
update-command = ${:command}
stop-on-error = yes
[buildout]
extends =
bzip2.cfg
zlib.cfg
parts = tokyocabinet
[tokyocabinet]
recipe = hexagonit.recipe.cmmi
url = http://fallabs.com/tokyocabinet/tokyocabinet-1.4.46.tar.gz
md5sum = 341dadd1f3d68760e350f7e731111786
configure-options =
--with-zlib=${zlib:location}
--with-bzip=${bzip2:location}
environment =
LDFLAGS =-Wl,-rpath -Wl,${zlib:location}/lib -Wl,-rpath -Wl,${bzip2:location}/lib -Wl,-rpath=${buildout:parts-directory}/${:_buildout_section_name_}/lib
# varnish - a web accelerator written with performance and flexibility in mind.
# http://www.varnish-cache.org/
[buildout]
parts =
varnish
varnish-2.1
extends =
ncurses.cfg
pcre.cfg
pkgconfig.cfg
[varnish]
<= varnish-2.0
[varnish-2.0]
recipe = hexagonit.recipe.cmmi
url = http://sourceforge.net/projects/varnish/files/varnish/2.0.6/varnish-2.0.6.tar.gz/download
md5sum = d91dc21c636db61c69b5e8f061c5bb95
configure-options =
--disable-static
environment =
CPPFLAGS=-I${ncurses:location}/include
LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath -Wl,${ncurses:location}/lib
[varnish-2.1]
recipe = hexagonit.recipe.cmmi
url = http://repo.varnish-cache.org/source/varnish-2.1.5.tar.gz
md5sum = 2d2f227da36a2a240c475304c717b8e3
configure-options =
--disable-static
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${pcre:location}/lib/pkgconfig
CPPFLAGS=-I${ncurses:location}/include
LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath -Wl,${ncurses:location}/lib
[buildout]
extends =
perl.cfg
perl-Config-General.cfg
perl-Encode-HanExtra.cfg
perl-HTML-Encoding.cfg
perl-HTML-Parser.cfg
perl-HTML-Template.cfg
perl-JSON.cfg
perl-Net-IP.cfg
perl-SGML-Parser-OpenSP.cfg
perl-URI.cfg
perl-XML-LibXML.cfg
perl-libwww-perl.cfg
parts = w3-validator
find-links =
http://www.nexedi.org/static/packages/source/
versions = versions
[versions]
erp5.recipe.w3validator = 1.0.2
[w3-validator]
recipe = erp5.recipe.w3validator
depends =
${perl-Config-General:location}
${perl-Encode-HanExtra:location}
${perl-HTML-Encoding:location}
${perl-HTML-Parser:location}
${perl-HTML-Template:location}
${perl-JSON:location}
${perl-Net-IP:location}
${perl-SGML-Parser-OpenSP:location}
${perl-URI:location}
${perl-XML-LibXML:location}
${perl-libwww-perl:location}
perl-path = ${perl:location}/bin/perl
# validator-1_2-release in http://dvcs.w3.org/hg/markup-validator/tags
validator-url = http://dvcs.w3.org/hg/markup-validator/archive/754b7a64cdb4.tar.bz2
dtd-url =
[buildout]
extends =
garbage-collector.cfg
ncurses.cfg
openssl.cfg
pkgconfig.cfg
zlib.cfg
parts =
w3m
[w3m]
recipe = hexagonit.recipe.cmmi
md5sum = 1b845a983a50b8dec0169ac48479eacc
url = http://downloads.sourceforge.net/project/w3m/w3m/w3m-0.5.3/w3m-0.5.3.tar.gz
configure-options =
--with-gc=${garbage-collector:location}
--with-ssl=${openssl:location}
--with-termlib=ncurses
--disable-nls
--disable-image
--disable-dict
--disable-xface
--disable-mouse
--disable-nntp
--disable-help-cgi
--disable-external-uri-loader
--disable-w3mmailer
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig
CPPFLAGS=-I${ncurses:location}/include/ -I${zlib:location}/include/
LDFLAGS=-Wl,--as-needed -L${garbage-collector:location}/lib -Wl,-rpath -Wl,${garbage-collector:location}/lib -L${ncurses:location}/lib -Wl,-rpath -Wl,${ncurses:location}/lib -L${openssl:location}/lib -Wl,-rpath -Wl,${openssl:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib
# Minimalistic xorg
[buildout]
extends =
freetype.cfg
libxml2.cfg
libxslt.cfg
pkgconfig.cfg
zlib.cfg
parts =
libXdmcp
libXext
libXau
libX11
[xorg-aclocal]
ACLOCAL=${xorg-util-macros:location}/share/aclocal
[xorg-util-macros]
recipe = hexagonit.recipe.cmmi
url = http://www.x.org/releases/X11R7.6/src/util/util-macros-1.11.0.tar.bz2
md5sum = 22d5cdff672450cb6902e0d68c200dcb
[xproto]
recipe = hexagonit.recipe.cmmi
url = http://www.x.org/releases/X11R7.6/src/proto/xproto-7.0.20.tar.bz2
md5sum = 65633168e5315c19defb4652cd3d83c1
configure-options =
--disable-specs
--without-xmlto
--without-fop
[xextproto]
recipe = hexagonit.recipe.cmmi
url = http://www.x.org/releases/X11R7.6/src/proto/xextproto-7.1.2.tar.bz2
md5sum = 263ae968b223c23b2986603d84e5c30e
configure-options =
--disable-specs
--without-xmlto
--without-fop
[xtrans]
recipe = hexagonit.recipe.cmmi
url = http://www.x.org/releases/X11R7.6/src/lib/xtrans-1.2.6.tar.bz2
md5sum = c66f9ffd2da4fb012220c6c40ebc7609
configure-options =
--disable-docs
--without-xmlto
--without-fop
[libXau]
recipe = hexagonit.recipe.cmmi
url = http://www.x.org/releases/X11R7.6/src/lib/libXau-1.0.6.tar.bz2
md5sum = 4a2cbd83727682f9ee1c1e719bac6adb
configure-options =
--disable-static
environment =
PKG_CONFIG_PATH=${xproto:location}/lib/pkgconfig
LD_LIBRARY_PATH=${xproto:location}/lib
LD_RUN_PATH=${xproto:location}/lib
PATH=${pkgconfig:location}/bin:%(PATH)s
[xcbproto]
recipe = hexagonit.recipe.cmmi
url = http://www.x.org/releases/X11R7.6/src/xcb/xcb-proto-1.6.tar.bz2
md5sum = 04313e1d914b44d0e457f6c494fc178b
environment =
PATH=${libxml2:location}/bin:%(PATH)s
PYTHON=${buildout:executable}
[xorg-libpthread-stubs]
recipe = hexagonit.recipe.cmmi
url = http://www.x.org/releases/X11R7.6/src/xcb/libpthread-stubs-0.3.tar.bz2
md5sum = e8fa31b42e13f87e8f5a7a2b731db7ee
[libxcb]
recipe = hexagonit.recipe.cmmi
url = http://www.x.org/releases/X11R7.6/src/xcb/libxcb-1.7.tar.bz2
md5sum = 925699df361b99491165ebc12068056b
configure-options =
--disable-static
--disable-build-docs
environment =
PKG_CONFIG_PATH=${xcbproto:location}/lib/pkgconfig:${libXau:location}/lib/pkgconfig:${xproto:location}/lib/pkgconfig:${xorg-libpthread-stubs:location}/lib/pkgconfig:${libxslt:location}/lib/pkgconfig
LD_LIBRARY_PATH=${xcbproto:location}/lib:${libXau:location}/lib:${xorg-libpthread-stubs:location}/lib:${libxslt:location}/lib
LD_RUN_PATH=${xcbproto:location}/lib:${libXau:location}/lib:${xorg-libpthread-stubs:location}/lib:${libxslt:location}/lib
PATH=${pkgconfig:location}/bin:${libxslt:location}/bin:%(PATH)s
PYTHON=${buildout:executable}
# Python note: libxcb requires a python with ElementTree. When this profile is
# used in the appliance, a non-system python is used, and that binary contains
# ElementTree. When using this profile outside of the appliance, a python
# providing a suitable library must be supplied.
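# A quick sanity check (illustrative command, adjust the interpreter path to
# whatever ${buildout:executable} points at) before building libxcb:
#   python -c "import xml.etree.ElementTree"
# This must succeed without ImportError.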
[libXext]
recipe = hexagonit.recipe.cmmi
url = http://www.x.org/releases/X11R7.6/src/lib/libXext-1.2.0.tar.bz2
md5sum = 9bb236ff0193e9fc1c1fb504dd840331
environment =
PKG_CONFIG_PATH=${xcbproto:location}/lib/pkgconfig:${libXau:location}/lib/pkgconfig:${xproto:location}/lib/pkgconfig:${xorg-libpthread-stubs:location}/lib/pkgconfig:${xextproto:location}/lib/pkgconfig:${libX11:location}/lib/pkgconfig:${libxcb:location}/lib/pkgconfig
LD_LIBRARY_PATH=${xcbproto:location}/lib:${libXau:location}/lib:${xorg-libpthread-stubs:location}/lib:${xextproto:location}/lib:${libX11:location}/lib:${libxcb:location}/lib
LD_RUN_PATH=${xcbproto:location}/lib:${libXau:location}/lib:${xorg-libpthread-stubs:location}/lib:${xextproto:location}/lib:${libX11:location}/lib:${libxcb:location}/lib
PATH=${pkgconfig:location}/bin:%(PATH)s
# Warning: do *not* add -fPIC to CFLAGS for this library, even if the link fails and ld asks you to enable it. That would not solve the problem, and would create a setup the build chain does not expect (all .o files would be position-independent code).
# CFLAGS=-fPIC
configure-options =
--disable-static
--disable-specs
--without-xmlto
--without-fop
patches =
${libXext-patch-link-error:location}/${libXext-patch-link-error:filename}
patch-options = -p1
[libXext-patch-link-error]
# Fixes libXext.la link error
recipe = hexagonit.recipe.download
url = http://cgit.freedesktop.org/xorg/lib/libXext/patch/?id=700c7896b832d6e4fb0185f0d5382b01f94e7141
download-only = true
filename = 700c7896b832d6e4fb0185f0d5382b01f94e7141.patch
md5sum = 52635ef694ee6f1acb642a77ee8eb010
[libX11]
recipe = hexagonit.recipe.cmmi
url = http://www.x.org/releases/X11R7.6/src/lib/libX11-1.4.0.tar.bz2
md5sum = b63d9f7493a61df51d0c0be04ac435e4
configure-options =
--disable-static
--enable-loadable-i18n
--disable-composecache
--disable-xcms
--disable-xf86bigfont
--disable-xkb
--disable-specs
--without-xmlto
--without-fop
environment =
PKG_CONFIG_PATH=${xproto:location}/lib/pkgconfig:${xextproto:location}/lib/pkgconfig:${xtrans:location}/share/pkgconfig:${libxcb:location}/lib/pkgconfig:${xorg-libpthread-stubs:location}/lib/pkgconfig:${libXau:location}/lib/pkgconfig
LD_LIBRARY_PATH=${xproto:location}/lib:${xextproto:location}/lib:${libxcb:location}/lib
LD_RUN_PATH=${xproto:location}/lib:${xextproto:location}/lib:${libxcb:location}/lib
PATH=${pkgconfig:location}/bin:%(PATH)s
CFLAGS=-I${xproto:location}/include
CPPFLAGS=-I${xproto:location}/include
[libXdmcp]
recipe = hexagonit.recipe.cmmi
url = http://www.x.org/releases/X11R7.6/src/lib/libXdmcp-1.1.0.tar.bz2
md5sum = 762b6bbaff7b7d0831ddb4f072f939a5
environment =
PKG_CONFIG_PATH=${xorg-util-macros:location}/share/pkgconfig:${xproto:location}/lib/pkgconfig
LD_LIBRARY_PATH=${xproto:location}/lib
LD_RUN_PATH=${xproto:location}/lib
PATH=${pkgconfig:location}/bin:%(PATH)s
configure-options =
--disable-static
--without-xmlto
--without-fop
[buildout]
parts = xpdf
[xpdf]
<= xpdf-3.02
[xpdf-patch-download]
recipe = hexagonit.recipe.download
url = ftp://ftp.foolabs.com/pub/xpdf/${:filename}
download-only = true
[xpdf-3.02pl1.patch]
<= xpdf-patch-download
filename = xpdf-3.02pl1.patch
md5sum = 877118786dfe27d1b7aa5a6759cc6e45
[xpdf-3.02pl2.patch]
<= xpdf-patch-download
filename = xpdf-3.02pl2.patch
md5sum = 3a5cb165ae66781e0b21e6219ae06795
[xpdf-3.02pl3.patch]
<= xpdf-patch-download
filename = xpdf-3.02pl3.patch
md5sum = 581963ede0fb5715e1a69f01b5b8ce63
[xpdf-3.02pl4.patch]
<= xpdf-patch-download
filename = xpdf-3.02pl4.patch
md5sum = 70b752716798dd341a4bf890df5f6fdc
[xpdf-3.02pl5.patch]
<= xpdf-patch-download
filename = xpdf-3.02pl5.patch
md5sum = 504902ca5e9d66c67eed03636ec6b163
[xpdf-3.02]
recipe = hexagonit.recipe.cmmi
md5sum = 599dc4cc65a07ee868cf92a667a913d2
url = ftp://ftp.foolabs.com/pub/xpdf/xpdf-3.02.tar.gz
configure-options =
--without-x
patch-options = -p1
patches =
${xpdf-3.02pl1.patch:location}/${xpdf-3.02pl1.patch:filename}
${xpdf-3.02pl2.patch:location}/${xpdf-3.02pl2.patch:filename}
${xpdf-3.02pl3.patch:location}/${xpdf-3.02pl3.patch:filename}
${xpdf-3.02pl4.patch:location}/${xpdf-3.02pl4.patch:filename}
${xpdf-3.02pl5.patch:location}/${xpdf-3.02pl5.patch:filename}
# xtrabackup: hot backup utility for MySQL
# http://www.percona.com/
[buildout]
extends =
autoconf.cfg
automake.cfg
bison.cfg
flex.cfg
libtool.cfg
ncurses.cfg
readline.cfg
zlib.cfg
parts =
xtrabackup
[xtrabackup-build-patch-download]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/../patch/${:filename}
md5sum = e018df8bb3ed672891388556b8e91e35
download-only = true
filename = xtrabackup_build.patch
[xtrabackup]
recipe = hexagonit.recipe.cmmi
url = http://www.percona.com/redir/downloads/XtraBackup/XtraBackup-1.6/source/xtrabackup-1.6.tar.gz
md5sum = 7c263723312cba36539df4cd7a119744
make-binary = true
patches = ${xtrabackup-build-patch-download:location}/${xtrabackup-build-patch-download:filename}
patch-options = -p1
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-command = utils/build.sh innodb51_builtin ${:location} ${libtool:location}
environment =
CPPFLAGS =-I${zlib:location}/include -I${ncurses:location}/include -I${readline:location}/include
LDFLAGS =-L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -L${ncurses:location}/lib -Wl,-rpath -Wl,${ncurses:location}/lib -L${readline:location}/lib -Wl,-rpath -Wl,${readline:location}/lib
PATH=${autoconf:location}/bin:${automake-1.11:location}/bin:${libtool:location}/bin:${flex:location}/bin:%(PATH)s:${bison:location}/bin
make-options =
-j1
[buildout]
parts =
zabbix-agent
[zabbix-agent]
recipe = hexagonit.recipe.cmmi
url = http://prdownloads.sourceforge.net/zabbix/zabbix-1.8.4.tar.gz?download
md5sum = 969ce09317c98b205bc96157e16f5c8c
configure-options =
--enable-agent
--enable-ipv6
[buildout]
parts = zip
[zip]
recipe = hexagonit.recipe.cmmi
url = http://sourceforge.net/projects/infozip/files/Zip%203.x%20%28latest%29/3.0/zip30.tar.gz/download
md5sum = 7b74551e63f8ee6aab6fbc86676c0d37
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-command = true
make-options = prefix=${:location} NO_BZIP2_SUPPORT=1 -f unix/Makefile generic
[buildout]
parts =
zlib
[zlib]
recipe = hexagonit.recipe.cmmi
url = http://prdownloads.sourceforge.net/libpng/zlib-1.2.5.tar.gz?download
md5sum = c735eab2d659a96e5a594c9e8541ad63
[buildout]
# adapt https://svn.erp5.org/repos/public/experimental/erp5.buildout-zope-2.12/
# based on https://svn.erp5.org/repos/public/experimental/erp5.buildout
[buildout]
extends=
python-2.4.cfg
[zope-2.8]
recipe = erp5.recipe.zope2install
url = http://www.zope.org/Products/Zope/2.8.11/Zope-2.8.11-final.tgz
md5sum = eff3e52d6ecde2d3669ea81e445b5a3a
python = python2.4
skip-fake-eggs =
ClientForm
mechanize
pytz
*/build/
*/dist/
*/src/*.egg-info/
Changelog
=========
1.0 (unreleased)
----------------
include CHANGES.txt
recursive-include src/erp5/recipe/testnode *.in
The erp5.recipe.testnode recipe aims to install a generic ERP5 testnode.
from setuptools import setup, find_packages
name = "erp5.recipe.testnode"
version = '1.0.24'
def read(name):
return open(name).read()
long_description=( read('README.txt')
+ '\n' +
read('CHANGES.txt')
)
setup(
name = name,
version = version,
description = "ZC Buildout recipe for create an testnode instance",
long_description=long_description,
license = "GPLv3",
keywords = "buildout erp5 test",
classifiers=[
"Framework :: Buildout :: Recipe",
"Programming Language :: Python",
],
packages = find_packages('src'),
package_dir = {'': 'src'},
include_package_data=True,
install_requires = [
'setuptools',
'slapos.lib.recipe',
'xml_marshaller',
'zc.buildout',
'zc.recipe.egg',
# below are requirements to provide full blown python interpreter
'lxml',
'PyXML',
],
namespace_packages = ['erp5', 'erp5.recipe'],
entry_points = {'zc.buildout': ['default = %s:Recipe' % name]},
)
# See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
# See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
import slapos.slap, subprocess, os, time
from xml_marshaller import xml_marshaller
class SlapOSControler(object):
def __init__(self, config, process_group_pid_set=None):
self.config = config
# By erasing everything, we make sure that we are able to "update"
# existing profiles. This is quite a dirty way to do updates...
if os.path.exists(config['proxy_database']):
os.unlink(config['proxy_database'])
proxy = subprocess.Popen([config['slapproxy_binary'],
config['slapos_config']], close_fds=True, preexec_fn=os.setsid)
process_group_pid_set.add(proxy.pid)
# XXX: dirty, give the proxy some time to become able to accept
# connections
time.sleep(10)
slap = slapos.slap.slap()
slap.initializeConnection(config['master_url'])
# register software profile
self.software_profile = config['custom_profile_path']
slap.registerSupply().supply(
self.software_profile,
computer_guid=config['computer_id'])
computer = slap.registerComputer(config['computer_id'])
# create partition and configure computer
partition_reference = config['partition_reference']
partition_path = os.path.join(config['instance_root'], partition_reference)
if not os.path.exists(partition_path):
os.mkdir(partition_path)
os.chmod(partition_path, 0750)
computer.updateConfiguration(xml_marshaller.dumps({
'address': config['ipv4_address'],
'instance_root': config['instance_root'],
'netmask': '255.255.255.255',
'partition_list': [{'address_list': [{'addr': config['ipv4_address'],
'netmask': '255.255.255.255'},
{'addr': config['ipv6_address'],
'netmask': 'ffff:ffff:ffff::'},
],
'path': partition_path,
'reference': partition_reference,
'tap': {'name': partition_reference},
}
],
'reference': config['computer_id'],
'software_root': config['software_root']}))
def runSoftwareRelease(self, config, environment, process_group_pid_set=None):
print "SlapOSControler.runSoftwareRelease"
while True:
cpu_count = os.sysconf("SC_NPROCESSORS_ONLN")
os.putenv('MAKEFLAGS', '-j%s' % cpu_count)
os.environ['PATH'] = environment['PATH']
stdout = open(os.path.join(
config['instance_root'],'.runSoftwareRelease_out'),
'w+')
stderr = open(os.path.join(
config['instance_root'],'.runSoftwareRelease_err'),
'w+')
slapgrid = subprocess.Popen([config['slapgrid_software_binary'], '-v', '-c',
#'--buildout-parameter',"'-U -N' -o",
config['slapos_config']],
stdout=stdout, stderr=stderr,
close_fds=True, preexec_fn=os.setsid)
process_group_pid_set.add(slapgrid.pid)
slapgrid.wait()
stdout.seek(0)
stderr.seek(0)
process_group_pid_set.remove(slapgrid.pid)
status_dict = {'status_code':slapgrid.returncode,
'stdout':stdout.read(),
'stderr':stderr.read()}
stdout.close()
stderr.close()
return status_dict
def runComputerPartition(self, config, process_group_pid_set=None):
print "SlapOSControler.runSoftwareRelease"
slap = slapos.slap.slap()
slap.registerOpenOrder().request(self.software_profile,
partition_reference='testing partition',
partition_parameter_kw=config['instance_dict'])
slapgrid = subprocess.Popen([config['slapgrid_partition_binary'],
config['slapos_config'], '-c', '-v'], close_fds=True, preexec_fn=os.setsid)
process_group_pid_set.add(slapgrid.pid)
slapgrid.wait()
process_group_pid_set.remove(slapgrid.pid)
if slapgrid.returncode != 0:
raise ValueError('Slapgrid instance failed')
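# Usage sketch (illustrative only; variable names here are not part of the
# original module): the testnode run() function further below builds a config
# dict with keys such as 'slapproxy_binary', 'slapos_config', 'master_url',
# 'computer_id', 'proxy_database', 'custom_profile_path', 'instance_root',
# 'software_root', 'partition_reference', 'ipv4_address' and 'ipv6_address',
# then drives this class roughly as follows:
#
#   controler = SlapOSControler(config, process_group_pid_set=pid_set)
#   status_dict = controler.runSoftwareRelease(config,
#       environment=config['environment'], process_group_pid_set=pid_set)
#   if status_dict['status_code'] == 0:
#       controler.runComputerPartition(config, process_group_pid_set=pid_set)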
import os, sys, subprocess, re, threading, errno
from testnode import SubprocessError
_format_command_search = re.compile("[[\\s $({?*\\`#~';<>&|]").search
_format_command_escape = lambda s: "'%s'" % r"'\''".join(s.split("'"))
def format_command(*args, **kw):
cmdline = []
for k, v in sorted(kw.items()):
if _format_command_search(v):
v = _format_command_escape(v)
cmdline.append('%s=%s' % (k, v))
for v in args:
if _format_command_search(v):
v = _format_command_escape(v)
cmdline.append(v)
return ' '.join(cmdline)
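# Illustration (hypothetical call): format_command('svn', 'up', '--force',
# LANG='C') returns "LANG=C svn up --force"; any argument containing shell
# metacharacters or whitespace is single-quoted by _format_command_escape.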
def subprocess_capture(p, quiet=False):
def readerthread(input, output, buffer):
while True:
data = input.readline()
if not data:
break
output(data)
buffer.append(data)
if p.stdout:
stdout = []
output = quiet and (lambda data: None) or sys.stdout.write
stdout_thread = threading.Thread(target=readerthread,
args=(p.stdout, output, stdout))
stdout_thread.setDaemon(True)
stdout_thread.start()
if p.stderr:
stderr = []
stderr_thread = threading.Thread(target=readerthread,
args=(p.stderr, sys.stderr.write, stderr))
stderr_thread.setDaemon(True)
stderr_thread.start()
if p.stdout:
stdout_thread.join()
if p.stderr:
stderr_thread.join()
p.wait()
return (p.stdout and ''.join(stdout),
p.stderr and ''.join(stderr))
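# Usage sketch (as done by Updater.spawn below): start the child with
# stdout=subprocess.PIPE and stderr=subprocess.PIPE, then call
#   stdout, stderr = subprocess_capture(p)
# to stream both outputs live while also collecting them as strings.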
# Patterns for the svn output parsed below: the final "At/Updated to revision N."
# line printed by "svn up", and the "Last Changed Rev: N" lines from "svn info".
SVN_UP_REV = re.compile(r'^(?:At|Updated to) revision (\d+).$')
SVN_CHANGED_REV = re.compile(r'^Last Changed Rev.*:\s*(\d+)', re.MULTILINE)
GIT_TYPE = 'git'
SVN_TYPE = 'svn'
class Updater(object):
_git_cache = {}
realtime_output = True
stdin = file(os.devnull)
def __init__(self, repository_path, revision=None, git_binary=None):
self.revision = revision
self._path_list = []
self.repository_path = repository_path
self.git_binary = git_binary
def getRepositoryPath(self):
return self.repository_path
def getRepositoryType(self):
try:
return self.repository_type
except AttributeError:
# guess the type of repository we have
if os.path.isdir(os.path.join(
self.getRepositoryPath(), '.git')):
repository_type = GIT_TYPE
elif os.path.isdir(os.path.join(
self.getRepositoryPath(), '.svn')):
repository_type = SVN_TYPE
else:
raise NotImplementedError
self.repository_type = repository_type
return repository_type
def deletePycFiles(self, path):
"""Delete *.pyc files so that deleted/moved files can not be imported"""
for path, dir_list, file_list in os.walk(path):
for file in file_list:
if file[-4:] in ('.pyc', '.pyo'):
# allow several processes to clean the same folder at the same time
try:
os.remove(os.path.join(path, file))
except OSError, e:
if e.errno != errno.ENOENT:
raise
def spawn(self, *args, **kw):
quiet = kw.pop('quiet', False)
env = kw and dict(os.environ, **kw) or None
command = format_command(*args, **kw)
print '\n$ ' + command
sys.stdout.flush()
p = subprocess.Popen(args, stdin=self.stdin, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, env=env,
cwd=self.getRepositoryPath())
if self.realtime_output:
stdout, stderr = subprocess_capture(p, quiet)
else:
stdout, stderr = p.communicate()
if not quiet:
sys.stdout.write(stdout)
sys.stderr.write(stderr)
result = dict(status_code=p.returncode, command=command,
stdout=stdout, stderr=stderr)
if p.returncode:
raise SubprocessError(result)
return result
def _git(self, *args, **kw):
return self.spawn(self.git_binary, *args, **kw)['stdout'].strip()
def _git_find_rev(self, ref):
try:
return self._git_cache[ref]
except KeyError:
if os.path.exists('.git/svn'):
r = self._git('svn', 'find-rev', ref)
assert r
self._git_cache[ref[0] != 'r' and 'r%u' % int(r) or r] = ref
else:
r = self._git('rev-list', '--topo-order', '--count', ref), ref
self._git_cache[ref] = r
return r
def getRevision(self, *path_list):
if not path_list:
path_list = self._path_list
if self.getRepositoryType() == GIT_TYPE:
h = self._git('log', '-1', '--format=%H', '--', *path_list)
return self._git_find_rev(h)
elif self.getRepositoryType() == SVN_TYPE:
stdout = self.spawn('svn', 'info', *path_list)['stdout']
return str(max(map(int, SVN_CHANGED_REV.findall(stdout))))
raise NotImplementedError
def checkout(self, *path_list):
if not path_list:
path_list = '.',
revision = self.revision
if self.getRepositoryType() == GIT_TYPE:
# edit .git/info/sparse-checkout if you want sparse checkout
if revision:
if type(revision) is str:
h = self._git_find_rev('r' + revision)
else:
h = revision[1]
if h != self._git('rev-parse', 'HEAD'):
self.deletePycFiles('.')
self._git('reset', '--merge', h)
else:
self.deletePycFiles('.')
if os.path.exists('.git/svn'):
self._git('svn', 'rebase')
else:
self._git('pull', '--ff-only')
self.revision = self._git_find_rev(self._git('rev-parse', 'HEAD'))
elif self.getRepositoryType() == SVN_TYPE:
# following code allows sparse checkout
def svn_mkdirs(path):
path = os.path.dirname(path)
if path and not os.path.isdir(path):
svn_mkdirs(path)
self.spawn(*(args + ['--depth=empty', path]))
for path in path_list:
args = ['svn', 'up', '--force', '--non-interactive']
if revision:
args.append('-r%s' % revision)
svn_mkdirs(path)
args += '--set-depth=infinity', path
self.deletePycFiles(path)
try:
status_dict = self.spawn(*args)
except SubprocessError, e:
if 'cleanup' not in e.stderr:
raise
self.spawn('svn', 'cleanup', path)
status_dict = self.spawn(*args)
if not revision:
self.revision = revision = SVN_UP_REV.findall(
status_dict['stdout'].splitlines()[-1])[0]
else:
raise NotImplementedError
self._path_list += path_list
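# Usage sketch (illustrative, mirroring how run() below drives this class):
#
#   updater = Updater('/path/to/working/copy', git_binary='git')
#   updater.checkout()            # git pull/rebase or svn up, depending on the checkout type
#   revision = updater.getRevision()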
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.lib.recipe.BaseSlapRecipe import BaseSlapRecipe
import os
import pkg_resources
import zc.buildout
import zc.recipe.egg
import sys
CONFIG = dict(
proxy_port='5000',
computer_id='COMPUTER',
partition_reference='test0',
)
class Recipe(BaseSlapRecipe):
def __init__(self, buildout, name, options):
self.egg = zc.recipe.egg.Egg(buildout, options['recipe'], options)
BaseSlapRecipe.__init__(self, buildout, name, options)
def installSlapOs(self):
CONFIG['slapos_directory'] = self.createDataDirectory('slapos')
CONFIG['working_directory'] = self.createDataDirectory('testnode')
CONFIG['software_root'] = os.path.join(CONFIG['slapos_directory'],
'software')
CONFIG['instance_root'] = os.path.join(CONFIG['slapos_directory'],
'instance')
CONFIG['proxy_database'] = os.path.join(CONFIG['slapos_directory'],
'proxy.db')
CONFIG['proxy_host'] = self.getLocalIPv4Address()
CONFIG['master_url'] = 'http://%s:%s' % (CONFIG['proxy_host'],
CONFIG['proxy_port'])
self._createDirectory(CONFIG['software_root'])
self._createDirectory(CONFIG['instance_root'])
CONFIG['slapos_config'] = self.createConfigurationFile('slapos.cfg',
self.substituteTemplate(pkg_resources.resource_filename(__name__,
'template/slapos.cfg.in'), CONFIG))
self.path_list.append(CONFIG['slapos_config'])
def setupRunningWrapper(self):
self.path_list.extend(zc.buildout.easy_install.scripts([(
'testnode',
__name__+'.testnode', 'run')], self.ws,
sys.executable, self.wrapper_directory, arguments=[
dict(
computer_id=CONFIG['computer_id'],
instance_dict=eval(self.parameter_dict.get('instance_dict', '{}')),
instance_root=CONFIG['instance_root'],
ipv4_address=self.getLocalIPv4Address(),
ipv6_address=self.getGlobalIPv6Address(),
master_url=CONFIG['master_url'],
profile_path=self.parameter_dict['profile_path'],
proxy_database=CONFIG['proxy_database'],
proxy_port=CONFIG['proxy_port'],
slapgrid_partition_binary=self.options['slapgrid_partition_binary'],
slapgrid_software_binary=self.options['slapgrid_software_binary'],
slapos_config=CONFIG['slapos_config'],
slapproxy_binary=self.options['slapproxy_binary'],
git_binary=self.options['git_binary'],
software_root=CONFIG['software_root'],
working_directory=CONFIG['working_directory'],
vcs_repository=self.parameter_dict.get('vcs_repository'),
node_quantity=self.parameter_dict.get('node_quantity', '1'),
branch=self.parameter_dict.get('branch', None),
test_suite_master_url=self.parameter_dict.get(
'test_suite_master_url', None),
test_suite_name=self.parameter_dict.get('test_suite_name'),
test_suite_title=self.parameter_dict.get('test_suite_title'),
bin_directory=self.bin_directory,
foo='bar',
# bot_environment is a splittable string of key=value pairs used to set
# the environment of the running bot
bot_environment=self.parameter_dict.get('bot_environment', ''),
partition_reference=CONFIG['partition_reference'],
environment=dict(PATH=os.environ['PATH']),
)
]))
def installLocalSvn(self):
svn_dict = dict(svn_binary = self.options['svn_binary'])
svn_dict.update(self.parameter_dict)
self._writeExecutable(os.path.join(self.bin_directory, 'svn'), """\
#!/bin/sh
%(svn_binary)s --username %(svn_username)s --password %(svn_password)s \
--non-interactive --trust-server-cert --no-auth-cache "$@" """% svn_dict)
svnversion = os.path.join(self.bin_directory, 'svnversion')
if os.path.lexists(svnversion):
os.unlink(svnversion)
os.symlink(self.options['svnversion_binary'], svnversion)
def installLocalGit(self):
git_dict = dict(git_binary = self.options['git_binary'])
git_dict.update(self.parameter_dict)
double_slash_end_position = 1
# XXX, this should be provided by slapos
print "bin_directory : %r" % self.bin_directory
home_directory = os.path.join(*os.path.split(self.bin_directory)[0:-1])
print "home_directory : %r" % home_directory
git_dict.setdefault("git_server_name", "git.erp5.org")
if git_dict.get('vcs_username', None) is not None:
netrc_file = open(os.path.join(home_directory, '.netrc'), 'w')
netrc_file.write("""
machine %(git_server_name)s
login %(vcs_username)s
password %(vcs_password)s""" % git_dict)
netrc_file.close()
def installLocalRepository(self):
if self.parameter_dict.get('vcs_repository').endswith('git'):
self.installLocalGit()
else:
self.installLocalSvn()
def installLocalZip(self):
zip = os.path.join(self.bin_directory, 'zip')
if os.path.lexists(zip):
os.unlink(zip)
os.symlink(self.options['zip_binary'], zip)
def installLocalPython(self):
"""Installs local python fully featured with eggs"""
self.path_list.extend(zc.buildout.easy_install.scripts([], self.ws,
sys.executable, self.bin_directory, scripts=None,
interpreter='python'))
def installLocalRunUnitTest(self):
link = os.path.join(self.bin_directory, 'runUnitTest')
destination = os.path.join(CONFIG['instance_root'],
CONFIG['partition_reference'], 'bin', 'runUnitTest')
if os.path.lexists(link):
if os.readlink(link) != destination:
os.unlink(link)
if not os.path.lexists(link):
os.symlink(destination, link)
def _install(self):
self.requirements, self.ws = self.egg.working_set([__name__])
self.path_list = []
self.installSlapOs()
self.setupRunningWrapper()
self.installLocalRepository()
self.installLocalZip()
self.installLocalPython()
self.installLocalRunUnitTest()
return self.path_list
[slapos]
software_root = %(software_root)s
instance_root = %(instance_root)s
master_url = %(master_url)s
computer_id = %(computer_id)s
[slapproxy]
host = %(proxy_host)s
port = %(proxy_port)s
database_uri = %(proxy_database)s
from xml_marshaller import xml_marshaller
import os, xmlrpclib, time, imp
from glob import glob
import signal
import slapos.slap
import subprocess
import sys
import socket
import pprint
from SlapOSControler import SlapOSControler
class SubprocessError(EnvironmentError):
def __init__(self, status_dict):
self.status_dict = status_dict
def __getattr__(self, name):
return self.status_dict[name]
def __str__(self):
return 'Error %i' % self.status_code
from Updater import Updater
process_group_pid_set = set()
process_pid_file_list = []
process_command_list = []
def sigterm_handler(signum, frame):
for pgpid in process_group_pid_set:
try:
os.killpg(pgpid, signal.SIGTERM)
except:
pass
for pid_file in process_pid_file_list:
try:
os.kill(int(open(pid_file).read().strip()), signal.SIGTERM)
except:
pass
for p in process_command_list:
try:
subprocess.call(p)
except:
pass
sys.exit(1)
signal.signal(signal.SIGTERM, sigterm_handler)
def safeRpcCall(function, *args):
retry = 64
while True:
try:
return function(*args)
except (socket.error, xmlrpclib.ProtocolError), e:
print >>sys.stderr, e
pprint.pprint(args, file(function._Method__name, 'w'))
time.sleep(retry)
retry += retry >> 1
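# Illustration: the retry delay grows geometrically by a factor of 1.5, giving
# sleeps of 64, 96, 144, 216, 324, ... seconds between successive failures.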
slapos_controler = None
def run(args):
config = args[0]
slapgrid = None
branch = config.get('branch', None)
supervisord_pid_file = os.path.join(config['instance_root'], 'var', 'run',
'supervisord.pid')
subprocess.check_call([config['git_binary'],
"config", "--global", "http.sslVerify", "false"])
previous_revision = None
run_software = True
# find what will be the path of the repository
repository_name = config['vcs_repository'].split('/')[-1].split('.')[0]
repository_path = os.path.join(config['working_directory'],repository_name)
config['repository_path'] = repository_path
sys.path.append(repository_path)
# Write our own software.cfg to use the local repository
custom_profile_path = os.path.join(config['working_directory'], 'software.cfg')
config['custom_profile_path'] = custom_profile_path
# create a profile in order to use the repository we already have
custom_profile = open(custom_profile_path, 'w')
profile_content = """
[buildout]
extends = %(software_config_path)s
[%(repository_name)s]
repository = %(repository_path)s
""" % {'software_config_path': os.path.join(repository_path,
config['profile_path']),
'repository_name': repository_name,
'repository_path' : repository_path}
if branch is not None:
profile_content += "\nbranch = %s" % branch
custom_profile.write(profile_content)
custom_profile.close()
retry_software = False
try:
while True:
# kill processes from previous loop if any
try:
for pgpid in process_group_pid_set:
try:
os.killpg(pgpid, signal.SIGTERM)
except:
pass
process_group_pid_set.clear()
# Make sure we have a local repository
if not os.path.exists(repository_path):
parameter_list = [config['git_binary'], 'clone',
config['vcs_repository']]
if branch is not None:
parameter_list.extend(['-b',branch])
parameter_list.append(repository_path)
subprocess.check_call(parameter_list)
# XXX this does not seem to wait for the command to finish
# Update the local repository
updater = Updater(repository_path, git_binary=config['git_binary'])
updater.checkout()
revision = updater.getRevision()
if previous_revision == revision:
time.sleep(120)
if not retry_software:
continue
retry_software = False
previous_revision = revision
print config
portal_url = config['test_suite_master_url']
test_result_path = None
test_result = (test_result_path, revision)
if portal_url:
if portal_url[-1] != '/':
portal_url += '/'
portal = xmlrpclib.ServerProxy("%s%s" %
(portal_url, 'portal_task_distribution'),
allow_none=1)
master = portal.portal_task_distribution
assert master.getProtocolRevision() == 1
test_result = safeRpcCall(master.createTestResult,
config['test_suite_name'], revision, [],
False, config['test_suite_title'])
print "testnode, test_result : %r" % (test_result,)
if test_result:
test_result_path, test_revision = test_result
if revision != test_revision:
# other testnodes on other boxes are already ready to test another
# revision
updater = Updater(repository_path, git_binary=config['git_binary'],
revision=test_revision)
updater.checkout()
# Now prepare the installation of SlapOS
slapos_controler = SlapOSControler(config,
process_group_pid_set=process_group_pid_set)
        # this should always be true later, but it is too slow for now
status_dict = slapos_controler.runSoftwareRelease(config,
environment=config['environment'],
process_group_pid_set=process_group_pid_set,
)
if status_dict['status_code'] != 0:
safeRpcCall(master.reportTaskFailure,
test_result_path, status_dict, config['test_suite_title'])
retry_software = True
continue
# create instances, it should take some seconds only
slapos_controler.runComputerPartition(config,
process_group_pid_set=process_group_pid_set)
# update repositories downloaded by buildout. Later we should get
# from master a list of repositories
repository_path_list = glob(os.path.join(config['software_root'],
'*', 'parts', 'git_repository', '*'))
        assert len(repository_path_list) > 0
for repository_path in repository_path_list:
updater = Updater(repository_path, git_binary=config['git_binary'])
updater.checkout()
if os.path.split(repository_path)[-1] == repository_name:
            # redo the checkout with the correct revision; the previous one
            # was only used to pull the latest code
updater = Updater(repository_path, git_binary=config['git_binary'],
revision=revision)
updater.checkout()
# calling dist/externals is only there for backward compatibility,
# the code will be removed soon
if os.path.exists(os.path.join(repository_path, 'dist/externals.py')):
process = subprocess.Popen(['dist/externals.py'],
cwd=repository_path)
process.wait()
partition_path = os.path.join(config['instance_root'],
config['partition_reference'])
run_test_suite_path = os.path.join(partition_path, 'bin',
'runTestSuite')
if not os.path.exists(run_test_suite_path):
raise ValueError('No %r provided' % run_test_suite_path)
run_test_suite_revision = revision
if isinstance(revision, tuple):
revision = ','.join(revision)
# Deal with Shebang size limitation
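        # If the script starts with a '#!' line, read its interpreter and
        # invoke it explicitly below, so that a shebang longer than the
        # kernel limit does not prevent execution.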
file_object = open(run_test_suite_path, 'r')
line = file_object.readline()
file_object.close()
invocation_list = []
if line[:2] == '#!':
invocation_list = line[2:].split()
invocation_list.extend([run_test_suite_path,
'--test_suite', config['test_suite_name'],
'--revision', revision,
'--node_quantity', config['node_quantity'],
'--master_url', config['test_suite_master_url']])
run_test_suite = subprocess.Popen(invocation_list)
process_group_pid_set.add(run_test_suite.pid)
run_test_suite.wait()
process_group_pid_set.remove(run_test_suite.pid)
except SubprocessError:
time.sleep(120)
continue
finally:
    # Nice way to kill *everything* generated by the run process -- process
    # groups only work on POSIX compliant systems
    # Exceptions are swallowed during the cleanup phase
print "going to kill %r" % (process_group_pid_set,)
for pgpid in process_group_pid_set:
try:
os.killpg(pgpid, signal.SIGTERM)
except:
pass
try:
if os.path.exists(supervisord_pid_file):
os.kill(int(open(supervisord_pid_file).read().strip()), signal.SIGTERM)
except:
pass
Changelog
=========
1.1 (unreleased)
----------------
* backup enabled for Certificate Authority [Łukasz Nowak]
include CHANGES.txt
recursive-include src/slapos/recipe/erp5 *.in
The slapos.recipe.erp5 aims to instantiate an ERP5 environment
==============================================================
from setuptools import setup, find_packages
#def getGitIteration():
# import subprocess
# return str(int(subprocess.Popen(["git", "rev-list", "--count", "HEAD", "--",
# "."], stdout=subprocess.PIPE).communicate()[0]))
name = "slapos.recipe.erp5"
version = '1.1-dev-184'
def read(name):
return open(name).read()
long_description=( read('README.txt')
+ '\n' +
read('CHANGES.txt')
)
setup(
name = name,
version = version,
description = "ZC Buildout recipe for create an erp5 instance",
long_description=long_description,
license = "GPLv3",
keywords = "buildout slapos erp5",
classifiers=[
"Framework :: Buildout :: Recipe",
"Programming Language :: Python",
],
packages = find_packages('src'),
package_dir = {'': 'src'},
include_package_data=True,
install_requires = [
'zc.recipe.egg',
'setuptools',
'slapos.lib.recipe',
'Zope2',
],
namespace_packages = ['slapos', 'slapos.recipe'],
entry_points = {'zc.buildout': ['default = %s:Recipe' % name]},
)
# See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
# See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.lib.recipe.BaseSlapRecipe import BaseSlapRecipe
import binascii
import os
import pkg_resources
import pprint
import hashlib
import sys
import zc.buildout
import zc.recipe.egg
import ConfigParser
from Zope2.utilities.mkzopeinstance import write_inituser
class Recipe(BaseSlapRecipe):
def getTemplateFilename(self, template_name):
return pkg_resources.resource_filename(__name__,
'template/%s' % template_name)
site_id = 'erp5'
def _install(self):
self.path_list = []
self.requirements, self.ws = self.egg.working_set([__name__])
# self.cron_d is a directory, where cron jobs can be registered
self.cron_d = self.installCrond()
self.logrotate_d, self.logrotate_backup = self.installLogrotate()
self.killpidfromfile = zc.buildout.easy_install.scripts(
[('killpidfromfile', __name__ + '.killpidfromfile',
'killpidfromfile')], self.ws, sys.executable, self.bin_directory)[0]
self.path_list.append(self.killpidfromfile)
ca_conf = self.installCertificateAuthority()
memcached_conf = self.installMemcached(ip=self.getLocalIPv4Address(),
port=11000)
kumo_conf = self.installKumo(self.getLocalIPv4Address())
conversion_server_conf = self.installConversionServer(
self.getLocalIPv4Address(), 23000, 23060)
mysql_conf = self.installMysqlServer(self.getLocalIPv4Address(), 45678)
user, password = self.installERP5()
zodb_dir = os.path.join(self.data_root_directory, 'zodb')
self._createDirectory(zodb_dir)
zodb_root_path = os.path.join(zodb_dir, 'root.fs')
zope_access = self.installZope(ip=self.getLocalIPv4Address(),
port=12000 + 1, name='zope_%s' % 1,
zodb_configuration_string=self.substituteTemplate(
self.getTemplateFilename('zope-zodb-snippet.conf.in'),
dict(zodb_root_path=zodb_root_path)), with_timerservice=True)
key, certificate = self.requestCertificate('Login Based Access')
apache_conf = dict(
apache_login=self.installBackendApache(ip=self.getGlobalIPv6Address(),
port=13000, backend=zope_access, key=key, certificate=certificate))
self.installERP5Site(user, password, zope_access, mysql_conf,
conversion_server_conf, memcached_conf, kumo_conf, self.site_id)
self.installTestRunner(ca_conf, mysql_conf, conversion_server_conf,
memcached_conf, kumo_conf)
self.installTestSuiteRunner(ca_conf, mysql_conf, conversion_server_conf,
memcached_conf, kumo_conf)
self.linkBinary()
self.setConnectionDict(dict(
site_url=apache_conf['apache_login'],
site_user=user,
site_password=password,
memcached_url=memcached_conf['memcached_url'],
kumo_url=kumo_conf['kumo_address']
))
return self.path_list
def _requestZeoFileStorage(self, server_name, storage_name):
"""Local, slap.request compatible, call to ask for filestorage on Zeo
filter_kw can be used to select specific Zeo server
Someday in future it will be possible to invoke:
self.request(
software_release=self.computer_partition.getSoftwareRelease().getURI(),
software_type='Zeo Server',
partition_reference=storage_name,
filter_kw={'server_name': server_name},
shared=True
)
    Thanks to this it will be possible to select precisely on which server
    each storage will be placed.
"""
base_port = 35001
if getattr(self, '_zeo_storage_dict', None) is None:
self._zeo_storage_dict = {}
if getattr(self, '_zeo_storage_port_dict', None) is None:
self._zeo_storage_port_dict = {}
self._zeo_storage_port_dict.setdefault(server_name,
base_port+len(self._zeo_storage_port_dict))
self._zeo_storage_dict[server_name] = self._zeo_storage_dict.get(
server_name, []) + [storage_name]
return dict(
ip=self.getLocalIPv4Address(),
port=self._zeo_storage_port_dict[server_name],
storage_name=storage_name
)
def installLogrotate(self):
"""Installs logortate main configuration file and registers its to cron"""
logrotate_d = os.path.abspath(os.path.join(self.etc_directory,
'logrotate.d'))
self._createDirectory(logrotate_d)
logrotate_backup = self.createBackupDirectory('logrotate')
logrotate_conf = self.createConfigurationFile("logrotate.conf",
"include %s" % logrotate_d)
logrotate_cron = os.path.join(self.cron_d, 'logrotate')
state_file = os.path.join(self.data_root_directory, 'logrotate.status')
open(logrotate_cron, 'w').write('0 0 * * * %s -s %s %s' %
(self.options['logrotate_binary'], state_file, logrotate_conf))
self.path_list.extend([logrotate_d, logrotate_conf, logrotate_cron])
return logrotate_d, logrotate_backup
def registerLogRotation(self, name, log_file_list, postrotate_script):
"""Register new log rotation requirement"""
open(os.path.join(self.logrotate_d, name), 'w').write(
self.substituteTemplate(self.getTemplateFilename(
'logrotate_entry.in'),
dict(file_list=' '.join(['"'+q+'"' for q in log_file_list]),
postrotate=postrotate_script, olddir=self.logrotate_backup)))
def linkBinary(self):
"""Links binaries to instance's bin directory for easier exposal"""
for linkline in self.options.get('link_binary_list', '').splitlines():
if not linkline:
continue
target = linkline.split()
if len(target) == 1:
target = target[0]
path, linkname = os.path.split(target)
else:
linkname = target[1]
target = target[0]
link = os.path.join(self.bin_directory, linkname)
if os.path.lexists(link):
if not os.path.islink(link):
raise zc.buildout.UserError(
            'Target link %r already exists but is not a link' % link)
os.unlink(link)
os.symlink(target, link)
self.logger.debug('Created link %r -> %r' % (link, target))
self.path_list.append(link)
def installKumo(self, ip, kumo_manager_port=13101, kumo_server_port=13201,
kumo_server_listen_port=13202, kumo_gateway_port=13301):
    # XXX: kumo does not store its pid in a file unless it runs as a daemon,
    # but running daemons is incompatible with SlapOS, so there is currently
    # no way to get Kumo pid files in order to rotate logs and send signals
    # to the processes
config = dict(
kumo_gateway_binary=self.options['kumo_gateway_binary'],
kumo_gateway_ip=ip,
kumo_gateway_log=os.path.join(self.log_directory, "kumo-gateway.log"),
kumo_manager_binary=self.options['kumo_manager_binary'],
kumo_manager_ip=ip,
kumo_manager_log=os.path.join(self.log_directory, "kumo-manager.log"),
kumo_server_binary=self.options['kumo_server_binary'],
kumo_server_ip=ip,
kumo_server_log=os.path.join(self.log_directory, "kumo-server.log"),
kumo_server_storage=os.path.join(self.data_root_directory, "kumodb.tch"),
kumo_manager_port=kumo_manager_port,
kumo_server_port=kumo_server_port,
kumo_server_listen_port=kumo_server_listen_port,
kumo_gateway_port=kumo_gateway_port
)
self.path_list.append(self.createRunningWrapper('kumo_gateway',
self.substituteTemplate(self.getTemplateFilename('kumo_gateway.in'),
config)))
self.path_list.append(self.createRunningWrapper('kumo_manager',
self.substituteTemplate(self.getTemplateFilename('kumo_manager.in'),
config)))
self.path_list.append(self.createRunningWrapper('kumo_server',
self.substituteTemplate(self.getTemplateFilename('kumo_server.in'),
config)))
return dict(
kumo_address = '%s:%s' % (config['kumo_gateway_ip'],
config['kumo_gateway_port']),
kumo_gateway_ip=config['kumo_gateway_ip'],
kumo_gateway_port=config['kumo_gateway_port'],
)
def installMemcached(self, ip, port):
config = dict(
memcached_binary=self.options['memcached_binary'],
memcached_ip=ip,
memcached_port=port,
)
self.path_list.append(self.createRunningWrapper('memcached',
self.substituteTemplate(self.getTemplateFilename('memcached.in'),
config)))
return dict(memcached_url='%s:%s' %
(config['memcached_ip'], config['memcached_port']),
memcached_ip=config['memcached_ip'],
memcached_port=config['memcached_port'])
def installTestRunner(self, ca_conf, mysql_conf, conversion_server_conf,
memcached_conf, kumo_conf):
"""Installs bin/runUnitTest executable to run all tests using
bin/runUnitTest"""
testinstance = self.createDataDirectory('testinstance')
# workaround wrong assumptions of ERP5Type.tests.runUnitTest about
# directory existence
unit_test = os.path.join(testinstance, 'unit_test')
if not os.path.isdir(unit_test):
os.mkdir(unit_test)
runUnitTest = zc.buildout.easy_install.scripts([
('runUnitTest', __name__ + '.testrunner', 'runUnitTest')],
self.ws, sys.executable, self.bin_directory, arguments=[dict(
instance_home=testinstance,
prepend_path=self.bin_directory,
openssl_binary=self.options['openssl_binary'],
test_ca_path=ca_conf['certificate_authority_path'],
call_list=[self.options['runUnitTest_binary'],
'--erp5_sql_connection_string', '%(mysql_test_database)s@%'
'(ip)s:%(tcp_port)s %(mysql_test_user)s '
'%(mysql_test_password)s' % mysql_conf,
'--conversion_server_hostname=%(conversion_server_ip)s' % \
conversion_server_conf,
'--conversion_server_port=%(conversion_server_port)s' % \
conversion_server_conf,
'--volatile_memcached_server_hostname=%(memcached_ip)s' % memcached_conf,
'--volatile_memcached_server_port=%(memcached_port)s' % memcached_conf,
'--persistent_memcached_server_hostname=%(kumo_gateway_ip)s' % kumo_conf,
'--persistent_memcached_server_port=%(kumo_gateway_port)s' % kumo_conf,
]
)])[0]
self.path_list.append(runUnitTest)
def installTestSuiteRunner(self, ca_conf, mysql_conf, conversion_server_conf,
memcached_conf, kumo_conf):
"""Installs bin/runTestSuite executable to run all tests using
bin/runUnitTest"""
testinstance = self.createDataDirectory('test_suite_instance')
# workaround wrong assumptions of ERP5Type.tests.runUnitTest about
# directory existence
unit_test = os.path.join(testinstance, 'unit_test')
if not os.path.isdir(unit_test):
os.mkdir(unit_test)
connection_string_list = []
for test_database, test_user, test_password in \
mysql_conf['mysql_parallel_test_dict']:
connection_string_list.append(
'%s@%s:%s %s %s' % (test_database, mysql_conf['ip'],
mysql_conf['tcp_port'], test_user, test_password))
command = zc.buildout.easy_install.scripts([
('runTestSuite', __name__ + '.test_suite_runner', 'runTestSuite')],
self.ws, sys.executable, self.bin_directory, arguments=[dict(
instance_home=testinstance,
prepend_path=self.bin_directory,
openssl_binary=self.options['openssl_binary'],
test_ca_path=ca_conf['certificate_authority_path'],
call_list=[self.options['runTestSuite_binary'],
'--db_list', ','.join(connection_string_list),
'--conversion_server_hostname=%(conversion_server_ip)s' % \
conversion_server_conf,
'--conversion_server_port=%(conversion_server_port)s' % \
conversion_server_conf,
'--volatile_memcached_server_hostname=%(memcached_ip)s' % memcached_conf,
'--volatile_memcached_server_port=%(memcached_port)s' % memcached_conf,
'--persistent_memcached_server_hostname=%(kumo_gateway_ip)s' % kumo_conf,
'--persistent_memcached_server_port=%(kumo_gateway_port)s' % kumo_conf,
]
)])[0]
self.path_list.append(command)
def installCrond(self):
timestamps = self.createDataDirectory('cronstamps')
cron_output = os.path.join(self.log_directory, 'cron-output')
self._createDirectory(cron_output)
catcher = zc.buildout.easy_install.scripts([('catchcron',
__name__ + '.catdatefile', 'catdatefile')], self.ws, sys.executable,
self.bin_directory, arguments=[cron_output])[0]
self.path_list.append(catcher)
cron_d = os.path.join(self.etc_directory, 'cron.d')
crontabs = os.path.join(self.etc_directory, 'crontabs')
self._createDirectory(cron_d)
self._createDirectory(crontabs)
wrapper = zc.buildout.easy_install.scripts([('crond',
__name__ + '.execute', 'execute')], self.ws, sys.executable,
self.wrapper_directory, arguments=[
self.options['dcrond_binary'].strip(), '-s', cron_d, '-c', crontabs,
'-t', timestamps, '-f', '-l', '5', '-M', catcher]
)[0]
self.path_list.append(wrapper)
return cron_d
def requestCertificate(self, name):
hash = hashlib.sha512(name).hexdigest()
key = os.path.join(self.ca_private, hash + self.ca_key_ext)
certificate = os.path.join(self.ca_certs, hash + self.ca_crt_ext)
parser = ConfigParser.RawConfigParser()
parser.add_section('certificate')
parser.set('certificate', 'name', name)
parser.set('certificate', 'key_file', key)
parser.set('certificate', 'certificate_file', certificate)
parser.write(open(os.path.join(self.ca_request_dir, hash), 'w'))
return key, certificate
def installCertificateAuthority(self, ca_country_code='XX',
ca_email='xx@example.com', ca_state='State', ca_city='City',
ca_company='Company'):
backup_path = self.createBackupDirectory('ca')
self.ca_dir = os.path.join(self.data_root_directory, 'ca')
self._createDirectory(self.ca_dir)
self.ca_request_dir = os.path.join(self.ca_dir, 'requests')
self._createDirectory(self.ca_request_dir)
config = dict(ca_dir=self.ca_dir, request_dir=self.ca_request_dir)
self.ca_private = os.path.join(self.ca_dir, 'private')
self.ca_certs = os.path.join(self.ca_dir, 'certs')
self.ca_crl = os.path.join(self.ca_dir, 'crl')
self.ca_newcerts = os.path.join(self.ca_dir, 'newcerts')
self.ca_key_ext = '.key'
self.ca_crt_ext = '.crt'
for d in [self.ca_private, self.ca_crl, self.ca_newcerts, self.ca_certs]:
self._createDirectory(d)
for f in ['crlnumber', 'serial']:
if not os.path.exists(os.path.join(self.ca_dir, f)):
open(os.path.join(self.ca_dir, f), 'w').write('01')
if not os.path.exists(os.path.join(self.ca_dir, 'index.txt')):
open(os.path.join(self.ca_dir, 'index.txt'), 'w').write('')
openssl_configuration = os.path.join(self.ca_dir, 'openssl.cnf')
config.update(
working_directory=self.ca_dir,
country_code=ca_country_code,
state=ca_state,
city=ca_city,
company=ca_company,
email_address=ca_email,
)
self._writeFile(openssl_configuration, pkg_resources.resource_string(
__name__, 'template/openssl.cnf.ca.in') % config)
self.path_list.extend(zc.buildout.easy_install.scripts([
('certificate_authority',
__name__ + '.certificate_authority', 'runCertificateAuthority')],
self.ws, sys.executable, self.wrapper_directory, arguments=[dict(
openssl_configuration=openssl_configuration,
openssl_binary=self.options['openssl_binary'],
certificate=os.path.join(self.ca_dir, 'cacert.pem'),
key=os.path.join(self.ca_private, 'cakey.pem'),
crl=os.path.join(self.ca_crl),
request_dir=self.ca_request_dir
)]))
# configure backup
backup_cron = os.path.join(self.cron_d, 'ca_rdiff_backup')
open(backup_cron, 'w').write(
'''0 0 * * * %(rdiff_backup)s %(source)s %(destination)s'''%dict(
rdiff_backup=self.options['rdiff_backup_binary'],
source=self.ca_dir,
destination=backup_path))
self.path_list.append(backup_cron)
return dict(
ca_certificate=os.path.join(config['ca_dir'], 'cacert.pem'),
ca_crl=os.path.join(config['ca_dir'], 'crl'),
certificate_authority_path=config['ca_dir']
)
def installConversionServer(self, ip, port, openoffice_port):
name = 'conversion_server'
working_directory = self.createDataDirectory(name)
conversion_server_dict = dict(
working_path=working_directory,
uno_path=self.options['ooo_uno_path'],
office_binary_path=self.options['ooo_binary_path'],
ip=ip,
port=port,
openoffice_port=openoffice_port,
)
for env_line in self.options['environment'].splitlines():
env_line = env_line.strip()
if not env_line:
continue
if '=' in env_line:
        env_key, env_value = env_line.split('=', 1)
conversion_server_dict[env_key.strip()] = env_value.strip()
else:
raise zc.buildout.UserError('Line %r in environment parameter is '
'incorrect' % env_line)
config_file = self.createConfigurationFile(name + '.cfg',
self.substituteTemplate(self.getTemplateFilename('cloudooo.cfg.in'),
conversion_server_dict))
self.path_list.append(config_file)
self.path_list.extend(zc.buildout.easy_install.scripts([(name,
__name__ + '.execute', 'execute_with_signal_translation')], self.ws,
sys.executable, self.wrapper_directory,
arguments=[self.options['ooo_paster'].strip(), 'serve', config_file]))
return {
name + '_port': conversion_server_dict['port'],
name + '_ip': conversion_server_dict['ip']
}
def installHaproxy(self, ip, port, name, server_check_path, url_list):
server_template = """ server %(name)s %(address)s cookie %(name)s check inter 20s rise 2 fall 4"""
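    # Each backend entry enables a health check every 20s (2 consecutive
    # successes mark a server up, 4 consecutive failures mark it down) and
    # pins clients to a backend through the SERVERID cookie declared in the
    # haproxy configuration template.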
config = dict(name=name, ip=ip, port=port,
server_check_path=server_check_path,)
i = 1
server_list = []
for url in url_list:
server_list.append(server_template % dict(name='%s_%s' % (name, i),
address=url))
i += 1
config['server_text'] = '\n'.join(server_list)
haproxy_conf_path = self.createConfigurationFile('haproxy_%s.cfg' % name,
self.substituteTemplate(self.getTemplateFilename('haproxy.cfg.in'),
config))
self.path_list.append(haproxy_conf_path)
wrapper = zc.buildout.easy_install.scripts([('haproxy_%s' % name,
__name__ + '.execute', 'execute')], self.ws, sys.executable,
self.wrapper_directory, arguments=[
self.options['haproxy_binary'].strip(), '-f', haproxy_conf_path]
)[0]
self.path_list.append(wrapper)
return '%s:%s' % (ip, port)
def installERP5(self):
"""
    All Zope processes have to share the files created by portal_classes
    (until everything is integrated into the ZODB).
    So do not request separate Zope instances; create multiple Zope processes
    in the same partition instead.
"""
# Create instance directories
self.erp5_directory = self.createDataDirectory('erp5shared')
# Create init user
password = self.generatePassword()
    # XXX Unhardcode me please
user = 'zope'
write_inituser(
os.path.join(self.erp5_directory, "inituser"), user, password)
self._createDirectory(self.erp5_directory)
for directory in (
'Constraint',
'Document',
'Extensions',
'PropertySheet',
'import',
'lib',
'tests',
'Products',
'etc',
):
self._createDirectory(os.path.join(self.erp5_directory, directory))
self._createDirectory(os.path.join(self.erp5_directory, 'etc',
'package-includes'))
return user, password
def installERP5Site(self, user, password, zope_access, mysql_conf,
conversion_server_conf=None, memcached_conf=None, kumo_conf=None, erp5_site_id='erp5'):
""" Create a script controlled by supervisor, which creates a erp5
site on current available zope and mysql environment"""
conversion_server = None
if conversion_server_conf is not None:
conversion_server = "%s:%s" % (conversion_server_conf['conversion_server_ip'],
conversion_server_conf['conversion_server_port'])
if memcached_conf is None:
memcached_conf = {}
if kumo_conf is None:
kumo_conf = {}
# XXX Conversion server and memcache server coordinates are not relevant
# for pure site creation.
https_connection_url = "http://%s:%s@%s/" % (user, password, zope_access)
mysql_connection_string = "%(mysql_database)s@%(ip)s:%(tcp_port)s %(mysql_user)s %(mysql_password)s" % mysql_conf
# XXX URL list vs. repository + list of bt5 names?
bt5_list = self.parameter_dict.get("bt5_list", "").split()
bt5_repository_list = self.parameter_dict.get("bt5_repository_list", "").split()
self.path_list.extend(zc.buildout.easy_install.scripts([('erp5_update',
__name__ + '.erp5', 'updateERP5')], self.ws,
sys.executable, self.wrapper_directory,
arguments=[erp5_site_id,
mysql_connection_string,
https_connection_url,
memcached_conf.get('memcached_url'),
conversion_server,
kumo_conf.get("kumo_address"),
bt5_list,
bt5_repository_list]))
return []
def installZeo(self, ip):
zodb_dir = os.path.join(self.data_root_directory, 'zodb')
self._createDirectory(zodb_dir)
zeo_configuration_dict = {}
zeo_number = 0
for zeo_server in sorted(self._zeo_storage_dict.iterkeys()):
zeo_number += 1
zeo_event_log = os.path.join(self.log_directory, 'zeo-%s.log'% zeo_number)
zeo_pid = os.path.join(self.run_directory, 'zeo-%s.pid'% zeo_number)
self.registerLogRotation('zeo-%s' % zeo_number, [zeo_event_log],
self.killpidfromfile + ' ' + zeo_pid + ' SIGUSR2')
config = dict(
zeo_ip=ip,
zeo_port=self._zeo_storage_port_dict[zeo_server],
zeo_event_log=zeo_event_log,
zeo_pid=zeo_pid,
)
storage_definition_list = []
for storage_name in sorted(self._zeo_storage_dict[zeo_server]):
path = os.path.join(zodb_dir, '%s.fs' % storage_name)
storage_definition_list.append("""<filestorage %(storage_name)s>
path %(path)s
</filestorage>"""% dict(storage_name=storage_name, path=path))
zeo_configuration_dict[storage_name] = dict(
ip=ip,
port=config['zeo_port'],
path=path
)
config['zeo_filestorage_snippet'] = '\n'.join(storage_definition_list)
zeo_conf_path = self.createConfigurationFile('zeo-%s.conf' % zeo_number,
self.substituteTemplate(self.getTemplateFilename('zeo.conf.in'), config))
self.path_list.append(zeo_conf_path)
wrapper = zc.buildout.easy_install.scripts([('zeo_%s' % zeo_number,
__name__ + '.execute', 'execute')], self.ws, sys.executable,
self.wrapper_directory, arguments=[
self.options['runzeo_binary'].strip(), '-C', zeo_conf_path]
)[0]
self.path_list.append(wrapper)
return zeo_configuration_dict
def installTidStorage(self, ip, port, known_tid_storage_identifier_dict,
access_url):
"""Install TidStorage with all required backup tools
known_tid_storage_identifier_dict is a dictionary of:
(((ip, port),), storagename): (filestorage path, url for serialize)
url for serialize will be merged with access_url by internal tidstorage
"""
backup_base_path = self.createBackupDirectory('zodb')
# it is time to fill known_tid_storage_identifier_dict with backup
# destination
formatted_storage_dict = dict()
for key, v in known_tid_storage_identifier_dict.copy().iteritems():
# generate unique name for each backup
storage_name = key[1]
destination = os.path.join(backup_base_path, storage_name)
self._createDirectory(destination)
formatted_storage_dict[str(key)] = (v[0], destination, v[1])
logfile = os.path.join(self.log_directory, 'tidstorage.log')
pidfile = os.path.join(self.run_directory, 'tidstorage.pid')
statusfile = os.path.join(self.log_directory, 'tidstorage.tid')
timestamp_file_path = os.path.join(self.log_directory,
'repozo_tidstorage_timestamp.log')
# shared configuration file
tidstorage_config = self.createConfigurationFile('tidstorage.py',
self.substituteTemplate(self.getTemplateFilename('tidstorage.py.in'),
dict(
known_tid_storage_identifier_dict=pprint.pformat(formatted_storage_dict),
base_url='%s/%%s/serialize' % access_url,
host=ip,
port=port,
timestamp_file_path=timestamp_file_path,
logfile=logfile,
pidfile=pidfile,
statusfile=statusfile
)))
# TID server
tidstorage_server = zc.buildout.easy_install.scripts([('tidstoraged',
__name__ + '.execute', 'execute')], self.ws, sys.executable,
self.wrapper_directory, arguments=[
self.options['tidstoraged_binary'], '--nofork', '--config',
tidstorage_config])[0]
    self.registerLogRotation('tidstorage', [logfile, timestamp_file_path],
self.killpidfromfile + ' ' + pidfile + ' SIGHUP')
self.path_list.append(tidstorage_config)
self.path_list.append(tidstorage_server)
# repozo wrapper
tidstorage_repozo = zc.buildout.easy_install.scripts([('tidstorage_repozo',
__name__ + '.execute', 'execute')], self.ws, sys.executable,
self.bin_directory, arguments=[
self.options['tidstorage_repozo_binary'], '--config', tidstorage_config,
'--repozo', self.options['repozo_binary'], '-z'])[0]
self.path_list.append(tidstorage_repozo)
# and backup configuration
tidstorage_repozo_cron = os.path.join(self.cron_d, 'tidstorage_repozo')
open(tidstorage_repozo_cron, 'w').write('''0 0 * * * %(tidstorage_repozo)s
0 0 * * * cp -f %(tidstorage_tid)s %(tidstorage_tid_backup)s'''%dict(
tidstorage_repozo=tidstorage_repozo,
tidstorage_tid=statusfile,
tidstorage_tid_backup=os.path.join(backup_base_path, 'tidstorage.tid')))
self.path_list.append(tidstorage_repozo_cron)
return dict(host=ip, port=port)
def installZope(self, ip, port, name, zodb_configuration_string,
with_timerservice=False, tidstorage_config=None, thread_amount=1,
with_deadlockdebugger=True):
# Create zope configuration file
zope_config = dict(
products=self.options['products'],
thread_amount=thread_amount
)
# configure default Zope2 zcml
open(os.path.join(self.erp5_directory, 'etc', 'site.zcml'), 'w').write(
pkg_resources.resource_string('Zope2', 'utilities/skel/etc/site.zcml'))
zope_config['zodb_configuration_string'] = zodb_configuration_string
zope_config['instance'] = self.erp5_directory
zope_config['event_log'] = os.path.join(self.log_directory,
'%s-event.log' % name)
zope_config['z2_log'] = os.path.join(self.log_directory,
'%s-Z2.log' % name)
zope_config['pid-filename'] = os.path.join(self.run_directory,
'%s.pid' % name)
zope_config['lock-filename'] = os.path.join(self.run_directory,
'%s.lock' % name)
self.registerLogRotation(name, [zope_config['event_log'],
zope_config['z2_log']], self.killpidfromfile + ' ' +
zope_config['pid-filename'] + ' SIGUSR2')
prefixed_products = []
for product in reversed(zope_config['products'].split()):
product = product.strip()
if product:
prefixed_products.append('products %s' % product)
prefixed_products.insert(0, 'products %s' % os.path.join(
self.erp5_directory, 'Products'))
zope_config['products'] = '\n'.join(prefixed_products)
zope_config['address'] = '%s:%s' % (ip, port)
zope_config['tmp_directory'] = self.tmp_directory
zope_config['path'] = self.bin_directory
zope_wrapper_template_location = self.getTemplateFilename('zope.conf.in')
zope_conf_content = self.substituteTemplate(
zope_wrapper_template_location, zope_config)
if with_timerservice:
zope_conf_content += self.substituteTemplate(
self.getTemplateFilename('zope.conf.timerservice.in'), zope_config)
if tidstorage_config is not None:
zope_conf_content += self.substituteTemplate(
self.getTemplateFilename('zope-tidstorage-snippet.conf.in'),
tidstorage_config)
if with_deadlockdebugger:
zope_conf_content += self.substituteTemplate(
self.getTemplateFilename('zope-deadlockdebugger-snippet.conf.in'),
dict(dump_url='/manage_debug_threads',
secret=self.generatePassword()))
zope_conf_path = self.createConfigurationFile("%s.conf" % name,
zope_conf_content)
self.path_list.append(zope_conf_path)
# Create init script
wrapper = zc.buildout.easy_install.scripts([(name,
__name__ + '.execute', 'execute')], self.ws, sys.executable,
self.wrapper_directory, arguments=[
self.options['runzope_binary'].strip(), '-C', zope_conf_path]
)[0]
self.path_list.append(wrapper)
return zope_config['address']
def _getApacheConfigurationDict(self, prefix, ip, port):
apache_conf = dict()
apache_conf['pid_file'] = os.path.join(self.run_directory,
prefix + '.pid')
apache_conf['lock_file'] = os.path.join(self.run_directory,
prefix + '.lock')
apache_conf['ip'] = ip
apache_conf['port'] = port
apache_conf['server_admin'] = 'admin@'
apache_conf['error_log'] = os.path.join(self.log_directory,
prefix + '-error.log')
apache_conf['access_log'] = os.path.join(self.log_directory,
prefix + '-access.log')
self.registerLogRotation(prefix, [apache_conf['error_log'],
apache_conf['access_log']], self.killpidfromfile + ' ' +
apache_conf['pid_file'] + ' SIGUSR1')
return apache_conf
def _writeApacheConfiguration(self, prefix, apache_conf, backend,
access_control_string=None):
rewrite_rule_template = \
"RewriteRule (.*) http://%(backend)s$1 [L,P]"
if access_control_string is None:
path_template = pkg_resources.resource_string(__name__,
'template/apache.zope.conf.path.in')
path = path_template % dict(path='/')
else:
path_template = pkg_resources.resource_string(__name__,
'template/apache.zope.conf.path-protected.in')
path = path_template % dict(path='/',
access_control_string=access_control_string)
d = dict(
path=path,
backend=backend,
backend_path='/',
port=apache_conf['port'],
vhname=path.replace('/', ''),
)
rewrite_rule = rewrite_rule_template % d
apache_conf.update(**dict(
path_enable=path,
rewrite_rule=rewrite_rule
))
apache_conf_string = pkg_resources.resource_string(__name__,
'template/apache.zope.conf.in') % apache_conf
return self.createConfigurationFile(prefix + '.conf', apache_conf_string)
def installFrontendZopeApache(self, ip, port, name, frontend_path, backend_url,
backend_path, key, certificate, access_control_string=None):
ident = 'frontend_' + name
apache_conf = self._getApacheConfigurationDict(ident, ip, port)
apache_conf['server_name'] = name
apache_conf['ssl_snippet'] = pkg_resources.resource_string(__name__,
'template/apache.ssl-snippet.conf.in') % dict(
login_certificate=certificate, login_key=key)
rewrite_rule_template = \
"RewriteRule ^%(path)s($|/.*) %(backend_url)s/VirtualHostBase/https/%(server_name)s:%(port)s%(backend_path)s/VirtualHostRoot/_vh_%(vhname)s$1 [L,P]\n"
path = pkg_resources.resource_string(__name__, 'template/apache.zope.conf.path-protected.in') % dict(path='/', access_control_string='none')
if access_control_string is None:
path_template = pkg_resources.resource_string(__name__,
'template/apache.zope.conf.path.in')
path += path_template % dict(path=frontend_path)
else:
path_template = pkg_resources.resource_string(__name__,
'template/apache.zope.conf.path-protected.in')
path += path_template % dict(path=frontend_path,
access_control_string=access_control_string)
d = dict(
path=frontend_path,
backend_url=backend_url,
backend_path=backend_path,
port=apache_conf['port'],
vhname=frontend_path.replace('/', ''),
server_name=name
)
rewrite_rule = rewrite_rule_template % d
apache_conf.update(**dict(
path_enable=path,
rewrite_rule=rewrite_rule
))
apache_conf_string = pkg_resources.resource_string(__name__,
'template/apache.zope.conf.in') % apache_conf
apache_config_file = self.createConfigurationFile(ident + '.conf',
apache_conf_string)
self.path_list.append(apache_config_file)
self.path_list.extend(zc.buildout.easy_install.scripts([(
ident, __name__ + '.apache', 'runApache')], self.ws,
sys.executable, self.wrapper_directory, arguments=[
dict(
required_path_list=[key, certificate],
binary=self.options['httpd_binary'],
config=apache_config_file
)
]))
def installBackendApache(self, ip, port, backend, key, certificate,
suffix='', access_control_string=None):
apache_conf = self._getApacheConfigurationDict('backend_apache'+suffix, ip,
port)
apache_conf['server_name'] = '%s' % apache_conf['ip']
apache_conf['ssl_snippet'] = pkg_resources.resource_string(__name__,
'template/apache.ssl-snippet.conf.in') % dict(
login_certificate=certificate, login_key=key)
apache_config_file = self._writeApacheConfiguration('backend_apache'+suffix,
apache_conf, backend, access_control_string)
self.path_list.append(apache_config_file)
self.path_list.extend(zc.buildout.easy_install.scripts([(
'backend_apache'+suffix,
__name__ + '.apache', 'runApache')], self.ws,
sys.executable, self.wrapper_directory, arguments=[
dict(
required_path_list=[key, certificate],
binary=self.options['httpd_binary'],
config=apache_config_file
)
]))
    # Note: IPv6 is always assumed
return 'https://[%(ip)s]:%(port)s' % apache_conf
def installMysqlServer(self, ip, port, database='erp5', user='user',
test_database='test_erp5', test_user='test_user', template_filename=None,
parallel_test_database_amount=100, mysql_conf=None):
if mysql_conf is None:
mysql_conf = {}
backup_directory = self.createBackupDirectory('mysql')
if template_filename is None:
template_filename = self.getTemplateFilename('my.cnf.in')
error_log = os.path.join(self.log_directory, 'mysqld.log')
slow_query_log = os.path.join(self.log_directory, 'mysql-slow.log')
mysql_conf.update(
ip=ip,
data_directory=os.path.join(self.data_root_directory,
'mysql'),
tcp_port=port,
pid_file=os.path.join(self.run_directory, 'mysqld.pid'),
socket=os.path.join(self.run_directory, 'mysqld.sock'),
error_log=error_log,
slow_query_log=slow_query_log,
mysql_database=database,
mysql_user=user,
mysql_password=self.generatePassword(),
mysql_test_password=self.generatePassword(),
mysql_test_database=test_database,
mysql_test_user=test_user,
mysql_parallel_test_dict=[
('test_%i' % x,)*2 + (self.generatePassword(),) \
for x in xrange(0,parallel_test_database_amount)],
)
self.registerLogRotation('mysql', [error_log, slow_query_log],
'%(mysql_binary)s --no-defaults -B --user=root '
'--socket=%(mysql_socket)s -e "FLUSH LOGS"' % dict(
mysql_binary=self.options['mysql_binary'],
mysql_socket=mysql_conf['socket']))
self._createDirectory(mysql_conf['data_directory'])
mysql_conf_path = self.createConfigurationFile("my.cnf",
self.substituteTemplate(template_filename,
mysql_conf))
mysql_script_list = []
for x_database, x_user, x_password in \
[(mysql_conf['mysql_database'],
mysql_conf['mysql_user'],
mysql_conf['mysql_password']),
(mysql_conf['mysql_test_database'],
mysql_conf['mysql_test_user'],
mysql_conf['mysql_test_password']),
] + mysql_conf['mysql_parallel_test_dict']:
mysql_script_list.append(pkg_resources.resource_string(__name__,
'template/initmysql.sql.in') % {
'mysql_database': x_database,
'mysql_user': x_user,
'mysql_password': x_password})
mysql_script_list.append('EXIT')
mysql_script = '\n'.join(mysql_script_list)
self.path_list.extend(zc.buildout.easy_install.scripts([('mysql_update',
__name__ + '.mysql', 'updateMysql')], self.ws,
sys.executable, self.wrapper_directory, arguments=[dict(
mysql_script=mysql_script,
mysql_binary=self.options['mysql_binary'].strip(),
mysql_upgrade_binary=self.options['mysql_upgrade_binary'].strip(),
socket=mysql_conf['socket'],
)]))
self.path_list.extend(zc.buildout.easy_install.scripts([('mysqld',
__name__ + '.mysql', 'runMysql')], self.ws,
sys.executable, self.wrapper_directory, arguments=[dict(
mysql_install_binary=self.options['mysql_install_binary'].strip(),
mysqld_binary=self.options['mysqld_binary'].strip(),
data_directory=mysql_conf['data_directory'].strip(),
mysql_binary=self.options['mysql_binary'].strip(),
socket=mysql_conf['socket'].strip(),
configuration_file=mysql_conf_path,
)]))
self.path_list.extend([mysql_conf_path])
# backup configuration
backup_directory = self.createBackupDirectory('mysql')
full_backup = os.path.join(backup_directory, 'full')
incremental_backup = os.path.join(backup_directory, 'incremental')
self._createDirectory(full_backup)
self._createDirectory(incremental_backup)
innobackupex_argument_list = [self.options['perl_binary'],
self.options['innobackupex_binary'],
'--defaults-file=%s' % mysql_conf_path,
'--socket=%s' %mysql_conf['socket'].strip(), '--user=root']
environment = dict(PATH='%s' % self.bin_directory)
innobackupex_incremental = zc.buildout.easy_install.scripts([(
'innobackupex_incremental', __name__ + '.execute', 'executee')],
self.ws, sys.executable, self.bin_directory, arguments=[
innobackupex_argument_list + ['--incremental'],
environment])[0]
self.path_list.append(innobackupex_incremental)
innobackupex_full = zc.buildout.easy_install.scripts([('innobackupex_full',
__name__ + '.execute', 'executee')], self.ws,
sys.executable, self.bin_directory, arguments=[
innobackupex_argument_list,
environment])[0]
self.path_list.append(innobackupex_full)
backup_controller = zc.buildout.easy_install.scripts([
('innobackupex_controller', __name__ + '.innobackupex', 'controller')],
self.ws, sys.executable, self.bin_directory,
arguments=[innobackupex_incremental, innobackupex_full, full_backup,
incremental_backup])[0]
self.path_list.append(backup_controller)
mysql_backup_cron = os.path.join(self.cron_d, 'mysql_backup')
open(mysql_backup_cron, 'w').write('0 0 * * * ' + backup_controller)
self.path_list.append(mysql_backup_cron)
    # The return value could be more explicit (database, user, ...)
return mysql_conf
import os
import sys
import time
def runApache(args):
sleep = 60
conf = args[0]
while True:
ready = True
for f in conf.get('required_path_list', []):
if not os.path.exists(f):
        print 'File %r does not exist, sleeping for %ss' % (f, sleep)
ready = False
if ready:
break
time.sleep(sleep)
apache_wrapper_list = [conf['binary'], '-f', conf['config'], '-DFOREGROUND']
apache_wrapper_list.extend(sys.argv[1:])
sys.stdout.flush()
sys.stderr.flush()
os.execl(apache_wrapper_list[0], *apache_wrapper_list)
import os
import sys
import time
def catdatefile(args):
directory = args[0]
try:
suffix = args[1]
except IndexError:
suffix = '.log'
f = open(os.path.join(directory,
time.strftime('%Y-%m-%d.%H:%M.%s') + suffix), 'aw')
  for line in sys.stdin:
f.write(line)
f.close()
import os
import subprocess
import time
import ConfigParser
def popenCommunicate(command_list, input=None):
subprocess_kw = dict(stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
if input is not None:
subprocess_kw.update(stdin=subprocess.PIPE)
popen = subprocess.Popen(command_list, **subprocess_kw)
result = popen.communicate(input)[0]
if popen.returncode is None:
popen.kill()
if popen.returncode != 0:
raise ValueError('Issue during calling %r, result was:\n%s' % (
command_list, result))
return result
class CertificateAuthority:
def __init__(self, key, certificate, openssl_binary,
openssl_configuration, request_dir):
self.key = key
self.certificate = certificate
self.openssl_binary = openssl_binary
self.openssl_configuration = openssl_configuration
self.request_dir = request_dir
def checkAuthority(self):
file_list = [ self.key, self.certificate ]
ca_ready = True
for f in file_list:
if not os.path.exists(f):
ca_ready = False
break
if ca_ready:
return
for f in file_list:
if os.path.exists(f):
os.unlink(f)
try:
# no CA, let us create new one
popenCommunicate([self.openssl_binary, 'req', '-nodes', '-config',
self.openssl_configuration, '-new', '-x509', '-extensions',
'v3_ca', '-keyout', self.key, '-out', self.certificate,
'-days', '10950'], 'Automatic Certificate Authority\n')
except:
try:
for f in file_list:
if os.path.exists(f):
os.unlink(f)
except:
# do not raise during cleanup
pass
raise
def _checkCertificate(self, common_name, key, certificate):
file_list = [key, certificate]
ready = True
for f in file_list:
if not os.path.exists(f):
ready = False
break
if ready:
return False
for f in file_list:
if os.path.exists(f):
os.unlink(f)
csr = certificate + '.csr'
try:
popenCommunicate([self.openssl_binary, 'req', '-config',
self.openssl_configuration, '-nodes', '-new', '-keyout',
key, '-out', csr, '-days', '3650'],
common_name + '\n')
try:
popenCommunicate([self.openssl_binary, 'ca', '-batch', '-config',
self.openssl_configuration, '-out', certificate,
'-infiles', csr])
finally:
if os.path.exists(csr):
os.unlink(csr)
except:
try:
for f in file_list:
if os.path.exists(f):
os.unlink(f)
except:
# do not raise during cleanup
pass
raise
else:
return True
def checkRequestDir(self):
for request_file in os.listdir(self.request_dir):
parser = ConfigParser.RawConfigParser()
parser.readfp(open(os.path.join(self.request_dir, request_file), 'r'))
if self._checkCertificate(parser.get('certificate', 'name'),
parser.get('certificate', 'key_file'), parser.get('certificate',
'certificate_file')):
print 'Created certificate %r' % parser.get('certificate', 'name')
def runCertificateAuthority(args):
ca_conf = args[0]
ca = CertificateAuthority(ca_conf['key'], ca_conf['certificate'],
ca_conf['openssl_binary'], ca_conf['openssl_configuration'],
ca_conf['request_dir'])
while True:
ca.checkAuthority()
ca.checkRequestDir()
time.sleep(60)
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import time
import urllib
import xmlrpclib
import socket
def updateERP5(args):
# FIXME Use a dict
site_id = args[0]
mysql_string = args[1]
base_url = args[2]
memcached_provider = args[3]
conversion_server = args[4]
kumo_provider = args[5]
bt5_list = args[6]
bt5_repository_list = []
if len(args) > 7:
bt5_repository_list = args[7]
if len(bt5_list) > 0 and len(bt5_repository_list) == 0:
bt5_repository_list = ["http://www.erp5.org/dists/snapshot/bt5"]
erp5_catalog_storage = "erp5_mysql_innodb_catalog"
business_template_setup_finished = 0
external_service_assertion = 1
update_script_id = "ERP5Site_assertExternalServiceList"
sleep = 120
while True:
try:
proxy = xmlrpclib.ServerProxy(base_url)
print "Adding site at %r" % base_url
if proxy.isERP5SitePresent() == False:
url = '%s/manage_addProduct/ERP5/manage_addERP5Site' % base_url
result = urllib.urlopen(url, urllib.urlencode({
"id": site_id,
"erp5_catalog_storage": erp5_catalog_storage,
"erp5_sql_connection_string": mysql_string,
"cmf_activity_sql_connection_string": mysql_string, }))
print "ERP5 Site creation output: %s" % result.read()
if not business_template_setup_finished:
if proxy.isERP5SitePresent() == True:
print "Start to set initial business template setup."
# Update URL to ERP5 Site
erp5 = xmlrpclib.ServerProxy("%s/%s" % (base_url, site_id),
allow_none=1)
repository_list = erp5.portal_templates.getRepositoryList()
if len(bt5_repository_list) > 0 and \
set(bt5_repository_list) != set(repository_list):
erp5.portal_templates.\
updateRepositoryBusinessTemplateList(bt5_repository_list, None)
installed_bt5_list =\
erp5.portal_templates.getInstalledBusinessTemplateTitleList()
for bt5 in bt5_list:
if bt5 not in installed_bt5_list:
erp5.portal_templates.\
installBusinessTemplatesFromRepositories([bt5])
repository_set = set(erp5.portal_templates.getRepositoryList())
installed_bt5_list = erp5.portal_templates.\
getInstalledBusinessTemplateTitleList()
if (set(repository_set) == set(bt5_repository_list)) and \
len([i for i in bt5_list if i not in installed_bt5_list]) == 0:
print "Repositories updated and business templates installed."
business_template_setup_finished = 1
if external_service_assertion:
url = "%s/%s/%s" % (base_url, site_id, update_script_id)
result = urllib.urlopen(url, urllib.urlencode({
"memcached" : memcached_provider,
"kumo" : kumo_provider,
"conversion_server" : conversion_server,})).read()
external_service_assertion = not (result == "True")
except IOError:
print "Unable to create the ERP5 Site!"
except socket.error, e:
print "Unable to connect to ZOPE! %s" % e
except xmlrpclib.Fault, e:
print "XMLRPC Fault: %s" % e
time.sleep(sleep)
import sys
import os
import signal
import subprocess
import time
def execute(args):
"""Portable execution with process replacement"""
# Note: Candidate for slapos.lib.recipe
os.execv(args[0], args + sys.argv[1:])
child_pg = None
def executee(args):
"""Portable execution with process replacement and environment manipulation"""
exec_list = list(args[0])
environment = args[1]
env = os.environ.copy()
for k,v in environment.iteritems():
env[k] = v
os.execve(exec_list[0], exec_list + sys.argv[1:], env)
def sig_handler(signal_number, frame):
  print 'Received signal %r, killing children and exiting' % signal_number
if child_pg is not None:
os.killpg(child_pg, signal.SIGHUP)
os.killpg(child_pg, signal.SIGTERM)
sys.exit(0)
signal.signal(signal.SIGINT, sig_handler)
signal.signal(signal.SIGQUIT, sig_handler)
signal.signal(signal.SIGTERM, sig_handler)
def execute_with_signal_translation(args):
"""Run process as children and translate from SIGTERM to another signal"""
  global child_pg
  child = subprocess.Popen(args, close_fds=True, preexec_fn=os.setsid)
  child_pg = child.pid
try:
print 'Process %r started' % args
while True:
time.sleep(10)
finally:
os.killpg(child_pg, signal.SIGHUP)
os.killpg(child_pg, signal.SIGTERM)
import os
import glob
def controller(args):
"""Creates full or incremental backup
If no full backup is done, it is created
If full backup exists incremental backup is done starting with base
base is the newest (according to date) full or incremental backup
"""
innobackupex_incremental, innobackupex_full, full_backup, incremental_backup \
= args
if len(os.listdir(full_backup)) == 0:
print 'Doing full backup in %r' % full_backup
os.execv(innobackupex_full, [innobackupex_full, full_backup])
else:
backup_list = filter(os.path.isdir, glob.glob(full_backup + "/*") +
glob.glob(incremental_backup + "/*"))
backup_list.sort(key=lambda x: os.path.getmtime(x), reverse=True)
base = backup_list[0]
print 'Doing incremental backup in %r using %r as a base' % (
incremental_backup, base)
os.execv(innobackupex_incremental, [innobackupex_incremental,
'--incremental-basedir=%s'%base, incremental_backup])
import sys
import os
import signal
def killpidfromfile():
file = sys.argv[1]
sig = getattr(signal, sys.argv[2], None)
if sig is None:
    raise ValueError('Unknown signal name %s' % sys.argv[2])
if os.path.exists(file):
pid = int(open(file).read())
print 'Killing pid %s with signal %s' % (pid, sys.argv[2])
os.kill(pid, sig)
import os
import subprocess
import time
import sys
def runMysql(args):
sleep = 60
conf = args[0]
mysqld_wrapper_list = [conf['mysqld_binary'], '--defaults-file=%s' %
conf['configuration_file']]
  # we trust mysql_install: if the mysql directory is available, MySQL was
  # correctly initialised
if not os.path.isdir(os.path.join(conf['data_directory'], 'mysql')):
while True:
# XXX: Protect with proper root password
# XXX: Follow http://dev.mysql.com/doc/refman/5.0/en/default-privileges.html
popen = subprocess.Popen([conf['mysql_install_binary'],
'--skip-name-resolve', '--no-defaults', '--datadir=%s' %
conf['data_directory']],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
result = popen.communicate()[0]
if popen.returncode is None or popen.returncode != 0:
print "Failed to initialise server.\nThe error was: %s" % result
print "Waiting for %ss and retrying" % sleep
time.sleep(sleep)
else:
print "Mysql properly initialised"
break
else:
print "MySQL already initialised"
print "Starting %r" % mysqld_wrapper_list[0]
sys.stdout.flush()
sys.stderr.flush()
os.execl(mysqld_wrapper_list[0], *mysqld_wrapper_list)
def updateMysql(args):
conf = args[0]
sleep = 30
is_succeed = False
while True:
if not is_succeed:
mysql_upgrade_list = [conf['mysql_upgrade_binary'], '--no-defaults', '--user=root', '--socket=%s' % conf['socket']]
mysql_upgrade = subprocess.Popen(mysql_upgrade_list, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
result = mysql_upgrade.communicate()[0]
if mysql_upgrade.returncode is None:
mysql_upgrade.kill()
if mysql_upgrade.returncode != 0 and not 'is already upgraded' in result:
print "Command %r failed with result:\n%s" % (mysql_upgrade_list, result)
print 'Sleeping for %ss and retrying' % sleep
else:
if mysql_upgrade.returncode == 0:
print "MySQL database upgraded with result:\n%s" % result
else:
print "No need to upgrade MySQL database"
mysql_list = [conf['mysql_binary'].strip(), '--no-defaults', '-B', '--user=root', '--socket=%s' % conf['socket']]
mysql = subprocess.Popen(mysql_list, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
result = mysql.communicate(conf['mysql_script'])[0]
if mysql.returncode is None:
mysql.kill()
if mysql.returncode != 0:
print 'Command %r failed with:\n%s' % (mysql_list, result)
print 'Sleeping for %ss and retrying' % sleep
else:
is_succeed = True
        print 'SlapOS initialisation script successfully applied on database.'
sys.stdout.flush()
sys.stderr.flush()
time.sleep(sleep)
<Location %(location)s>
Order Deny,Allow
Deny from all
Allow from %(allow_string)s
</Location>
SSLEngine on
SSLCertificateFile %(login_certificate)s
SSLCertificateKeyFile %(login_key)s
SSLRandomSeed startup builtin
SSLRandomSeed connect builtin
# Apache configuration file for Zope
# Automatically generated
# Basic server configuration
PidFile "%(pid_file)s"
LockFile "%(lock_file)s"
Listen %(ip)s:%(port)s
ServerAdmin %(server_admin)s
DefaultType text/plain
TypesConfig conf/mime.types
AddType application/x-compress .Z
AddType application/x-gzip .gz .tgz
# As the backend trusts the REMOTE_USER header, always unset it
RequestHeader unset REMOTE_USER
# SSL Configuration
%(ssl_snippet)s
# Log configuration
ErrorLog "%(error_log)s"
LogLevel warn
LogFormat "%%h %%{REMOTE_USER}i %%l %%u %%t \"%%r\" %%>s %%b \"%%{Referer}i\" \"%%{User-Agent}i\"" combined
LogFormat "%%h %%{REMOTE_USER}i %%l %%u %%t \"%%r\" %%>s %%b" common
CustomLog "%(access_log)s" common
# Directory protection
<Directory />
Options FollowSymLinks
AllowOverride None
Order deny,allow
Deny from all
</Directory>
%(path_enable)s
# Magic of Zope related rewrite
RewriteEngine On
%(rewrite_rule)s
# List of modules
LoadModule authz_host_module modules/mod_authz_host.so
LoadModule log_config_module modules/mod_log_config.so
LoadModule setenvif_module modules/mod_setenvif.so
LoadModule version_module modules/mod_version.so
LoadModule proxy_module modules/mod_proxy.so
LoadModule proxy_http_module modules/mod_proxy_http.so
LoadModule ssl_module modules/mod_ssl.so
LoadModule mime_module modules/mod_mime.so
LoadModule dav_module modules/mod_dav.so
LoadModule dav_fs_module modules/mod_dav_fs.so
LoadModule negotiation_module modules/mod_negotiation.so
LoadModule rewrite_module modules/mod_rewrite.so
LoadModule headers_module modules/mod_headers.so
LoadModule antiloris_module modules/mod_antiloris.so
# Path protected
<Location %(path)s>
Order Deny,Allow
Deny from all
Allow from %(access_control_string)s
</Location>
# Path enabled
<Location %(path)s>
Order Allow,Deny
Allow from all
</Location>
[app:main]
use = egg:cloudooo
#
## System config
#
debug_mode = True
# Folder where pid files, lock files and virtual frame buffer mappings
# are stored. A "tmp" subfolder must be created inside it, because it is
# used for all temporary documents.
working_path = %(working_path)s
# Folder where UNO library is installed
uno_path = %(uno_path)s
# Folder where soffice.bin is installed
office_binary_path = %(office_binary_path)s
#
## Monitor Settings
#
# Maximum number of requests handled by one OpenOffice instance. Once the
# limit is reached, the instance is stopped and another one is started.
limit_number_request = 100
# Interval to check the factory
monitor_interval = 10
timeout_response = 180
enable_memory_monitor = True
# Set the limit in MB
# e.g 1000 = 1 GB, 100 = 100 MB
limit_memory_used = 3000
#
## OOFactory Settings
#
# The pool consists of several OpenOffice.org instances
application_hostname = %(ip)s
# OpenOffice Port
openoffice_port = %(openoffice_port)s
# LD_LIBRARY_PATH passed to OpenOffice
env-LD_LIBRARY_PATH = %(LD_LIBRARY_PATH)s
#
# Mimetype Registry
# It is used to select the handler that will be used for a conversion.
# Priority matters: the first matching line takes precedence over the
# following ones.
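# For example (illustration only, assuming the columns are source mimetype,
# destination mimetype and handler): converting a text/csv document to
# application/pdf matches the "text/* * ooo" line first, so the ooo handler
# is selected.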
mimetype_registry =
application/pdf * pdf
application/vnd.oasis.opendocument* * ooo
application/vnd.sun.xml* * ooo
text/* * ooo
image/* image/* imagemagick
video/* * ffmpeg
* application/vnd.oasis.opendocument* ooo
[server:main]
use = egg:PasteScript#wsgiutils
host = %(ip)s
port = %(port)s
global
maxconn 4096
defaults
log global
mode http
option httplog
option dontlognull
retries 1
option redispatch
maxconn 2000
timeout server 3000s
timeout queue 5s
timeout connect 10s
timeout client 3600s
listen %(name)s %(ip)s:%(port)s
cookie SERVERID insert
balance roundrobin
%(server_text)s
option httpchk GET %(server_check_path)s
stats uri /haproxy
stats realm Global\ statistics
CREATE DATABASE IF NOT EXISTS %(mysql_database)s;
GRANT ALL PRIVILEGES ON %(mysql_database)s.* TO %(mysql_user)s@'%%' IDENTIFIED BY '%(mysql_password)s';
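The %(name)s placeholders in the templates above and below are filled with Python %-style named substitution by the recipes; a minimal illustration with hypothetical values (not the actual recipe code):
# Hypothetical values; the real ones come from the buildout section options.
mysql_init_template = (
    "CREATE DATABASE IF NOT EXISTS %(mysql_database)s;\n"
    "GRANT ALL PRIVILEGES ON %(mysql_database)s.* TO %(mysql_user)s@'%%' "
    "IDENTIFIED BY '%(mysql_password)s';\n")
print mysql_init_template % {
    'mysql_database': 'erp5',
    'mysql_user': 'erp5_user',
    'mysql_password': 'insecure_example_password',
}
Literal percent signs, such as the MySQL '%' wildcard host above or the Apache LogFormat and Zope %define directives elsewhere in these templates, are escaped as %% so that substitution leaves a single % in the rendered file.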
#!/bin/sh
exec %(kumo_gateway_binary)s -F -E -m %(kumo_manager_ip)s:%(kumo_manager_port)s -t %(kumo_gateway_ip)s:%(kumo_gateway_port)s -o %(kumo_gateway_log)s
#!/bin/sh
exec %(kumo_manager_binary)s -a -l %(kumo_manager_ip)s:%(kumo_manager_port)s -o %(kumo_manager_log)s
#!/bin/sh
exec %(kumo_server_binary)s -l %(kumo_server_ip)s:%(kumo_server_port)s -L %(kumo_server_listen_port)s -m %(kumo_manager_ip)s:%(kumo_manager_port)s -s %(kumo_server_storage)s -o %(kumo_server_log)s
%(file_list)s {
daily
dateext
rotate 30
compress
notifempty
sharedscripts
create
postrotate
%(postrotate)s
endscript
olddir %(olddir)s
}
#!/bin/sh
exec %(memcached_binary)s -p %(memcached_port)s -U %(memcached_port)s -l %(memcached_ip)s
# ERP5 buildout my.cnf template based on my-huge.cnf shipped with mysql
# The MySQL server
[mysqld]
# ERP5 requires InnoDB storage by default. MySQL falls back by default to a
# different engine, like MyISAM, so tables requested as InnoDB are silently
# created with the MyISAM engine, which only generates problems.
#
# Failing loudly is required in such a case.
sql-mode="NO_ENGINE_SUBSTITUTION"
skip-show-database
port = %(tcp_port)s
bind-address = %(ip)s
socket = %(socket)s
datadir = %(data_directory)s
pid-file = %(pid_file)s
log-error = %(error_log)s
log-slow-file = %(slow_query_log)s
long_query_time = 5
max_allowed_packet = 128M
query_cache_size = 32M
plugin-load = ha_innodb_plugin.so
# The following are important to configure and depend a lot on the size of
# your database and the available resources.
#innodb_buffer_pool_size = 4G
#innodb_log_file_size = 256M
#innodb_log_buffer_size = 8M
# Some dangerous settings you may want to uncomment if you only care about
# performance or less disk access. Useful for unit tests.
#innodb_flush_log_at_trx_commit = 0
#innodb_flush_method = nosync
#innodb_doublewrite = 0
#sync_frm = 0
# Uncomment the following if you need binary logging, which is recommended
# on production instances (either for replication or incremental backups).
#log-bin=mysql-bin
# Force utf8 usage
collation_server = utf8_unicode_ci
character_set_server = utf8
skip-character-set-client-handshake
[mysql]
no-auto-rehash
socket = %(socket)s
[mysqlhotcopy]
interactive-timeout
#
# OpenSSL example configuration file.
# This is mostly being used for generation of certificate requests.
#
# This definition stops the following lines choking if HOME isn't
# defined.
HOME = .
RANDFILE = $ENV::HOME/.rnd
# Extra OBJECT IDENTIFIER info:
#oid_file = $ENV::HOME/.oid
oid_section = new_oids
# To use this configuration file with the "-extfile" option of the
# "openssl x509" utility, name here the section containing the
# X.509v3 extensions to use:
# extensions =
# (Alternatively, use a configuration file that has only
# X.509v3 extensions in its main [= default] section.)
[ new_oids ]
# We can add new OIDs in here for use by 'ca', 'req' and 'ts'.
# Add a simple OID like this:
# testoid1=1.2.3.4
# Or use config file substitution like this:
# testoid2=${testoid1}.5.6
# Policies used by the TSA examples.
tsa_policy1 = 1.2.3.4.1
tsa_policy2 = 1.2.3.4.5.6
tsa_policy3 = 1.2.3.4.5.7
####################################################################
[ ca ]
default_ca = CA_default # The default ca section
####################################################################
[ CA_default ]
dir = %(working_directory)s # Where everything is kept
certs = $dir/certs # Where the issued certs are kept
crl_dir = $dir/crl # Where the issued crl are kept
database = $dir/index.txt # database index file.
#unique_subject = no # Set to 'no' to allow creation of
# several certificates with the same subject.
new_certs_dir = $dir/newcerts # default place for new certs.
certificate = $dir/cacert.pem # The CA certificate
serial = $dir/serial # The current serial number
crlnumber = $dir/crlnumber # the current crl number
# must be commented out to leave a V1 CRL
crl = $dir/crl.pem # The current CRL
private_key = $dir/private/cakey.pem # The private key
RANDFILE = $dir/private/.rand # private random number file
x509_extensions = usr_cert # The extensions to add to the cert
# Comment out the following two lines for the "traditional"
# (and highly broken) format.
name_opt = ca_default # Subject Name options
cert_opt = ca_default # Certificate field options
# Extension copying option: use with caution.
# copy_extensions = copy
# Extensions to add to a CRL. Note: Netscape communicator chokes on V2 CRLs
# so this is commented out by default to leave a V1 CRL.
# crlnumber must also be commented out to leave a V1 CRL.
# crl_extensions = crl_ext
default_days = 3650 # how long to certify for
default_crl_days= 30 # how long before next CRL
default_md = default # use public key default MD
preserve = no # keep passed DN ordering
# A few different ways of specifying how similar the request should look.
# For type CA, the listed attributes must be the same, and the optional
# and supplied fields are just that :-)
policy = policy_match
# For the CA policy
[ policy_match ]
countryName = match
stateOrProvinceName = match
organizationName = match
organizationalUnitName = optional
commonName = supplied
emailAddress = optional
# For the 'anything' policy
# At this point in time, you must list all acceptable 'object'
# types.
[ policy_anything ]
countryName = optional
stateOrProvinceName = optional
localityName = optional
organizationName = optional
organizationalUnitName = optional
commonName = supplied
emailAddress = optional
####################################################################
[ req ]
default_bits = 2048
default_md = sha1
default_keyfile = privkey.pem
distinguished_name = req_distinguished_name
#attributes = req_attributes
x509_extensions = v3_ca # The extensions to add to the self-signed cert
# Passwords for private keys; if not present, they will be prompted for
# input_password = secret
# output_password = secret
# This sets a mask for permitted string types. There are several options.
# default: PrintableString, T61String, BMPString.
# pkix : PrintableString, BMPString (PKIX recommendation before 2004)
# utf8only: only UTF8Strings (PKIX recommendation after 2004).
# nombstr : PrintableString, T61String (no BMPStrings or UTF8Strings).
# MASK:XXXX a literal mask value.
# WARNING: ancient versions of Netscape crash on BMPStrings or UTF8Strings.
string_mask = utf8only
# req_extensions = v3_req # The extensions to add to a certificate request
[ req_distinguished_name ]
countryName = Country Name (2 letter code)
countryName_value = %(country_code)s
countryName_min = 2
countryName_max = 2
stateOrProvinceName = State or Province Name (full name)
stateOrProvinceName_value = %(state)s
localityName = Locality Name (eg, city)
localityName_value = %(city)s
0.organizationName = Organization Name (eg, company)
0.organizationName_value = %(company)s
# we can do this but it is not needed normally :-)
#1.organizationName = Second Organization Name (eg, company)
#1.organizationName_default = World Wide Web Pty Ltd
commonName = Common Name (eg, your name or your server\'s hostname)
commonName_max = 64
emailAddress = Email Address
emailAddress_value = %(email_address)s
emailAddress_max = 64
# SET-ex3 = SET extension number 3
#[ req_attributes ]
#challengePassword = A challenge password
#challengePassword_min = 4
#challengePassword_max = 20
#
#unstructuredName = An optional company name
[ usr_cert ]
# These extensions are added when 'ca' signs a request.
# This goes against PKIX guidelines but some CAs do it and some software
# requires this to avoid interpreting an end user certificate as a CA.
basicConstraints=CA:FALSE
# Here are some examples of the usage of nsCertType. If it is omitted
# the certificate can be used for anything *except* object signing.
# This is OK for an SSL server.
# nsCertType = server
# For an object signing certificate this would be used.
# nsCertType = objsign
# For normal client use this is typical
# nsCertType = client, email
# and for everything including object signing:
# nsCertType = client, email, objsign
# This is typical in keyUsage for a client certificate.
# keyUsage = nonRepudiation, digitalSignature, keyEncipherment
# This will be displayed in Netscape's comment listbox.
nsComment = "OpenSSL Generated Certificate"
# PKIX recommendations harmless if included in all certificates.
subjectKeyIdentifier=hash
authorityKeyIdentifier=keyid,issuer
# This stuff is for subjectAltName and issuerAltname.
# Import the email address.
# subjectAltName=email:copy
# An alternative to produce certificates that aren't
# deprecated according to PKIX.
# subjectAltName=email:move
# Copy subject details
# issuerAltName=issuer:copy
#nsCaRevocationUrl = http://www.domain.dom/ca-crl.pem
#nsBaseUrl
#nsRevocationUrl
#nsRenewalUrl
#nsCaPolicyUrl
#nsSslServerName
# This is required for TSA certificates.
# extendedKeyUsage = critical,timeStamping
[ v3_req ]
# Extensions to add to a certificate request
basicConstraints = CA:FALSE
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
[ v3_ca ]
# Extensions for a typical CA
# PKIX recommendation.
subjectKeyIdentifier=hash
authorityKeyIdentifier=keyid:always,issuer
# This is what PKIX recommends but some broken software chokes on critical
# extensions.
#basicConstraints = critical,CA:true
# So we do this instead.
basicConstraints = CA:true
# Key usage: this is typical for a CA certificate. However, since it will
# prevent it being used as a test self-signed certificate, it is best
# left out by default.
# keyUsage = cRLSign, keyCertSign
# Some might want this also
# nsCertType = sslCA, emailCA
# Include email address in subject alt name: another PKIX recommendation
# subjectAltName=email:copy
# Copy issuer details
# issuerAltName=issuer:copy
# DER hex encoding of an extension: beware experts only!
# obj=DER:02:03
# Where 'obj' is a standard or added object
# You can even override a supported extension:
# basicConstraints= critical, DER:30:03:01:01:FF
[ crl_ext ]
# CRL extensions.
# Only issuerAltName and authorityKeyIdentifier make any sense in a CRL.
# issuerAltName=issuer:copy
authorityKeyIdentifier=keyid:always
[ proxy_cert_ext ]
# These extensions should be added when creating a proxy certificate
# This goes against PKIX guidelines but some CAs do it and some software
# requires this to avoid interpreting an end user certificate as a CA.
basicConstraints=CA:FALSE
# Here are some examples of the usage of nsCertType. If it is omitted
# the certificate can be used for anything *except* object signing.
# This is OK for an SSL server.
# nsCertType = server
# For an object signing certificate this would be used.
# nsCertType = objsign
# For normal client use this is typical
# nsCertType = client, email
# and for everything including object signing:
# nsCertType = client, email, objsign
# This is typical in keyUsage for a client certificate.
# keyUsage = nonRepudiation, digitalSignature, keyEncipherment
# This will be displayed in Netscape's comment listbox.
nsComment = "OpenSSL Generated Certificate"
# PKIX recommendations harmless if included in all certificates.
subjectKeyIdentifier=hash
authorityKeyIdentifier=keyid,issuer
# This stuff is for subjectAltName and issuerAltname.
# Import the email address.
# subjectAltName=email:copy
# An alternative to produce certificates that aren't
# deprecated according to PKIX.
# subjectAltName=email:move
# Copy subject details
# issuerAltName=issuer:copy
#nsCaRevocationUrl = http://www.domain.dom/ca-crl.pem
#nsBaseUrl
#nsRevocationUrl
#nsRenewalUrl
#nsCaPolicyUrl
#nsSslServerName
# This really needs to be in place for it to be a proxy certificate.
proxyCertInfo=critical,language:id-ppl-anyLanguage,pathlen:3,policy:foo
####################################################################
[ tsa ]
default_tsa = tsa_config1 # the default TSA section
[ tsa_config1 ]
# These are used by the TSA reply generation only.
dir = /etc/pki/tls # TSA root directory
serial = $dir/tsaserial # The current serial number (mandatory)
crypto_device = builtin # OpenSSL engine to use for signing
signer_cert = $dir/tsacert.pem # The TSA signing certificate
# (optional)
certs = $dir/cacert.pem # Certificate chain to include in reply
# (optional)
signer_key = $dir/private/tsakey.pem # The TSA private key (optional)
default_policy = tsa_policy1 # Policy if request did not specify it
# (optional)
other_policies = tsa_policy2, tsa_policy3 # acceptable policies (optional)
digests = md5, sha1 # Acceptable message digests (mandatory)
accuracy = secs:1, millisecs:500, microsecs:100 # (optional)
clock_precision_digits = 0 # number of digits after dot. (optional)
ordering = yes # Is ordering defined for timestamps?
# (optional, default: no)
tsa_name = yes # Must the TSA name be included in the reply?
# (optional, default: no)
ess_cert_id_chain = no # Must the ESS cert id chain be included?
# (optional, default: no)
known_tid_storage_identifier_dict = %(known_tid_storage_identifier_dict)s
base_url = '%(base_url)s'
address = '%(host)s'
port = %(port)s
#fork = False
#setuid = None
#setgid = None
burst_period = 30
full_dump_period = 300
timestamp_file_path = '%(timestamp_file_path)s'
logfile_name = '%(logfile)s'
pidfile_name = '%(pidfile)s'
status_file = '%(statusfile)s'
# ZEO configuration file generated by SlapOS
<zeo>
address %(zeo_ip)s:%(zeo_port)s
read-only false
invalidation-queue-size 100
pid-filename %(zeo_pid)s
</zeo>
%(zeo_filestorage_snippet)s
<eventlog>
<logfile>
path %(zeo_event_log)s
</logfile>
</eventlog>
# DeadlockDebugger configuration
<product-config DeadlockDebugger>
dump_url %(dump_url)s
secret %(secret)s
</product-config>
# TIDStorage connection
<product-config TIDStorage>
backend-ip %(host)s
backend-port %(port)s
</product-config>
<zodb_db %(storage_name)s>
mount-point %(mount_point)s
<zeoclient>
server %(address)s
storage %(storage_name)s
name %(storage_name)s
</zeoclient>
</zodb_db>
<zodb_db root>
<filestorage>
path %(zodb_root_path)s
</filestorage>
mount-point /
</zodb_db>
## Zope 2 configuration file generated by SlapOS
# Some defines
%%define INSTANCE %(instance)s
instancehome $INSTANCE
# Used products
%(products)s
# Environment override
<environment>
TMP %(tmp_directory)s
TMPDIR %(tmp_directory)s
HOME %(tmp_directory)s
PATH %(path)s
</environment>
# No need to debug
debug-mode off
# One thread is safe enough
zserver-threads %(thread_amount)s
# File location
pid-filename %(pid-filename)s
lock-filename %(lock-filename)s
# Temporary storage database (for sessions)
<zodb_db temporary>
<temporarystorage>
name temporary storage for sessioning
</temporarystorage>
mount-point /temp_folder
container-class Products.TemporaryFolder.TemporaryContainer
</zodb_db>
# Logging configuration
<eventlog>
<logfile>
path %(event_log)s
</logfile>
</eventlog>
<logger access>
<logfile>
path %(z2_log)s
</logfile>
</logger>
# Serving configuration
<http-server>
address %(address)s
</http-server>
# ZODB configuration
%(zodb_configuration_string)s
<zoperunner>
program $INSTANCE/bin/runzope
</zoperunner>
# ERP5 Timer Service
%%import timerserver
<timer-server>
interval 5
</timer-server>
import os
import sys
def runTestSuite(args):
env = os.environ.copy()
d = args[0]
env['OPENSSL_BINARY'] = d['openssl_binary']
env['TEST_CA_PATH'] = d['test_ca_path']
env['PATH'] = ':'.join([d['prepend_path']] + os.environ['PATH'].split(':'))
env['INSTANCE_HOME'] = d['instance_home']
env['REAL_INSTANCE_HOME'] = d['instance_home']
# Deal with the kernel's shebang length limitation: read the interpreter
# path from the wrapped script's first line and exec it directly, passing
# the script itself as an argument.
executable_filepath = d['call_list'][0]
file_object = open(executable_filepath, 'r')
line = file_object.readline()
file_object.close()
argument_list = []
if line[:2] == '#!':
executable_filepath = line[2:].strip()
argument_list.append(executable_filepath)
argument_list.extend(d['call_list'])
argument_list.extend(sys.argv[1:])
argument_list.append(env)
os.execle(executable_filepath, *argument_list)
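A hypothetical invocation of the function above, showing the dictionary it expects as the single element of args; all paths are placeholders, the real values are generated by the buildout recipe. Since os.execle() replaces the current process, the call never returns.
runTestSuite([{
    'openssl_binary': '/example/parts/openssl/bin/openssl',  # placeholder
    'test_ca_path': '/example/instance/srv/test_ca',         # placeholder
    'prepend_path': '/example/parts/bin',                    # placeholder
    'instance_home': '/example/instance',                    # placeholder
    'call_list': ['/example/bin/runTestSuite'],              # wrapped script
}])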
import os
import sys
def runUnitTest(args):
env = os.environ.copy()
d = args[0]
env['OPENSSL_BINARY'] = d['openssl_binary']
env['TEST_CA_PATH'] = d['test_ca_path']
env['PATH'] = ':'.join([d['prepend_path']] + os.environ['PATH'].split(':'))
env['INSTANCE_HOME'] = d['instance_home']
env['REAL_INSTANCE_HOME'] = d['instance_home']
# Deal with the kernel's shebang length limitation: read the interpreter
# path from the wrapped script's first line and exec it directly, passing
# the script itself as an argument.
executable_filepath = d['call_list'][0]
file_object = open(executable_filepath, 'r')
line = file_object.readline()
file_object.close()
argument_list = []
if line[:2] == '#!':
executable_filepath = line[2:].strip()
argument_list.append(executable_filepath)
argument_list.extend(d['call_list'])
argument_list.extend(sys.argv[1:])
argument_list.append(env)
os.execle(executable_filepath, *argument_list)
[buildout]
extensions =
slapos.tool.rebootstrap
slapos.tool.networkcache
slapos.zcbworkarounds
mr.developer
find-links =
http://www.nexedi.org/static/packages/source/slapos.buildout/
http://dist.repoze.org
http://www.nexedi.org/static/packages/source/
extends =
# Exact version of Zope
http://svn.zope.org/repos/main/Zope/tags/2.12.18/versions.cfg
profile/logrotate.cfg
profile/dcron.cfg
profile/file.cfg
profile/fonts.cfg
profile/ghostscript.cfg
profile/git.cfg
profile/glib.cfg
profile/graphviz.cfg
profile/haproxy.cfg
profile/hookbox.cfg
profile/imagemagick.cfg
profile/kumo.cfg
profile/libreoffice-bin.cfg
profile/lxml-python.cfg
profile/mariadb.cfg
profile/memcached.cfg
profile/mysql-python.cfg
profile/pdftk.cfg
profile/pysvn-python.cfg
profile/python-2.6.cfg
profile/python-2.7.cfg
profile/python-ldap-python.cfg
profile/rdiff-backup.cfg
profile/stunnel.cfg
profile/subversion.cfg
profile/tesseract.cfg
profile/varnish.cfg
profile/w3-validator.cfg
profile/w3m.cfg
profile/xorg.cfg
profile/xpdf.cfg
profile/xtrabackup.cfg
profile/zabbix.cfg
profile/sed.cfg
profile/coreutils.cfg
profile/grep.cfg
versions = versions
parts =
itools-build
rdiff-backup
apache
apache-antiloris
file
graphviz
memcached
haproxy
varnish-2.1
stunnel
w3m
xpdf
libpng12
ghostscript
mariadb
imagemagick
kumo
libreoffice-bin
w3-validator
tesseract-eng-traineddata-unzip
tesseract
hookbox
bootstrap2.6
zabbix-agent
pdftk
dcron
# Buildoutish
eggs
instance-recipe-egg
testrunner
test_suite_runner
# basic Xorg
libXdmcp
libXext
libXau
libX11
# fonts
liberation-fonts
ipaex-fonts
# Zope products
products-deps
# Cloudooo specific part
cloudooo
# get git repositories
erp5
# Create instance template
template
[instance-recipe]
# Note: in case a specific instantiation recipe is used, this is the place to
# put its name
name = slapos.recipe.erp5
[products]
# XXX: ERP5 related products are not defined as python distributions, so it is
# required to configure them in a declarative manner
list = ${products-deps:location} ${erp5:location}/product
[bootstrap2.6]
python = python2.6
[rebootstrap]
# Default first version of rebootstrapped python
version = 2
section = python2.7
[template]
# Default template for erp5 instance.
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
md5sum = 16d09f1964101bbe128a81c7ffcf996e
output = ${buildout:directory}/template.cfg
mode = 0644
[itools]
pkgname = itools-0.50.8
location = ${buildout:parts-directory}/${:_buildout_section_name_}
lib = ${:location}/lib
source = ${:location}/src
# ugly itools building sections until itools and setuptools learn to get
# along:
[itools-download]
recipe = hexagonit.recipe.download
strip-top-level-dir = true
url = http://download.hforge.org/itools/0.50/${itools:pkgname}.tar.gz
md5sum = 12704cc354e2201e0921a832bf6b1556
destination = ${itools:source}
[itools-build]
recipe = plone.recipe.command
# reference itools-download:destination instead of itools:source to trigger its
# execution
# here, we unset PYTHONPATH to invoke a different python than the buildout one.
command =
cd ${itools-download:destination} &&
PYTHONPATH= ${python2.6:executable} setup.py build_ext -L ${glib:location}/lib -I ${glib:location}/include/glib-2.0:${glib:location}/lib/glib-2.0/include -R ${glib:location}/lib &&
PYTHONPATH= ${python2.6:executable} setup.py install_lib -d ${itools:lib}
update-command = ${:command}
location = ${itools:location}
stop-on-error = true
[products-deps]
# Recipe minitage.recipe.fetch is disabled, as it relies on the PATH variable
# and it is not possible to change its environment to use the locally delivered
# subversion or git. plone.recipe.command can do the same job while letting us
# control which binary is used.
recipe = plone.recipe.command
svn_param =--trust-server-cert --non-interactive --quiet
location = ${buildout:parts-directory}/${:_buildout_section_name_}
stop-on-error = true
command =
${subversion:location}/bin/svn checkout ${:svn_param} https://svn.plone.org/svn/collective/ExtFile/trunk ${:location}/ExtFile &&
${git:location}/bin/git clone --quiet git://git.hforge.org/Localizer.git ${:location}/Localizer
update-command =
if ${buildout:newest}; then
${subversion:location}/bin/svn checkout ${:svn_param} https://svn.plone.org/svn/collective/ExtFile/trunk ${:location}/ExtFile &&
cd ${:location}/Localizer &&
${git:location}/bin/git pull --quiet
fi
[erp5_repository_list]
repository_id_list = erp5
[erp5]
# Recipe zerokspot.recipe.git is disabled, as it is not possible to change its
# environment to use the locally delivered git.
# plone.recipe.command can do the same job while letting us control which
# binary is used.
recipe = plone.recipe.command
location = ${buildout:parts-directory}/${:_buildout_section_name_}
stop-on-error = true
repository = http://git.erp5.org/repos/erp5.git
branch = master
command = ${git:location}/bin/git clone --quiet -b ${:branch} ${:repository} ${:location}
update-command = cd ${:location} && ${git:location}/bin/git pull --quiet
[products]
# XXX: ERP5 related products are not defined as python distributions, so it is
# required to configure them in a declarative manner
list = ${products-deps:location} ${buildout:parts-directory}/erp5/product
[testrunner]
# XXX: Workaround for the fact that ERP5Type is not a distribution and does
# not expose an entry point for the test runner
recipe = zc.recipe.egg
python = python2.6
eggs = ${eggs:eggs}
extra-paths = ${eggs:extra-paths}
entry-points =
runUnitTest=Products.ERP5Type.tests.runUnitTest:main
scripts = runUnitTest
initialization =
import os
import sys
import Products
[Products.__path__.insert(0, p) for p in reversed(os.environ.get('INSERT_PRODUCTS_PATH', '').split(':')) if p]
Products.__path__.extend(reversed('''${products:list}'''.split()))
import Zope2
os.environ['SOFTWARE_HOME'] = os.path.abspath(os.path.dirname(os.path.dirname(Zope2.__file__)))
os.environ['ZOPE_SCRIPTS'] = ''
temp_bt5_path_list = ['/'.join(['''${buildout:parts-directory}''', x, 'bt5']) for x in '''${erp5_repository_list:repository_id_list}'''.split(' ')]
bt5_path_list = []
[bt5_path_list.extend([bt5_path, '%s/*' % bt5_path]) for bt5_path in temp_bt5_path_list]
os.environ['erp5_tests_bt5_path'] = ','.join(bt5_path_list)
sys.path[0:0] = ['/'.join(['''${buildout:parts-directory}''', x, 'tests']) for x in '''${erp5_repository_list:repository_id_list}'''.split(' ')]
import glob
product_test_path_list = []
[product_test_path_list.extend(glob.glob('/'.join(['''${buildout:parts-directory}''', x, 'product/*/tests']))) for x in '''${erp5_repository_list:repository_id_list}'''.split(' ')]
sys.path[0:0] = product_test_path_list
[test_suite_runner]
# XXX: Workaround for the fact that ERP5Type is not a distribution and does
# not expose an entry point for the test runner
recipe = zc.recipe.egg
python = python2.6
eggs = ${eggs:eggs}
extra-paths = ${eggs:extra-paths}
entry-points =
runTestSuite=Products.ERP5Type.tests.runTestSuite:main
scripts = runTestSuite
initialization =
import os
import sys
import Products
[Products.__path__.insert(0, p) for p in reversed(os.environ.get('INSERT_PRODUCTS_PATH', '').split(':')) if p]
Products.__path__.extend(reversed('''${products:list}'''.split()))
import Zope2
os.environ['SOFTWARE_HOME'] = os.path.abspath(os.path.dirname(os.path.dirname(Zope2.__file__)))
os.environ['ZOPE_SCRIPTS'] = ''
sys.path[0:0] = ['/'.join(['''${buildout:parts-directory}''', x]) for x in '''${erp5_repository_list:repository_id_list}'''.split(' ')]
[instance-recipe-egg]
recipe = zc.recipe.egg
python = python2.6
eggs =
# instantiation egg
${instance-recipe:name}
erp5.conflictresolver
[eggs]
recipe = zc.recipe.egg
python = python2.6
eggs =
${mysql-python:egg}
${lxml-python:egg}
${python-ldap-python:egg}
${pysvn-python:egg}
pycrypto
PyXML
SOAPpy
cElementTree
chardet
elementtree
erp5diff
ipdb
mechanize
numpy
ordereddict
paramiko
ply
python-magic
python-memcached
pytz
threadframe
timerserver
urlnorm
uuid
xml_marshaller
xupdate_processor
feedparser
argparse
# Zope 2.12 with patched acquisition
ZODB3
Zope2
Acquisition
# Other Zope 2 packages
Products.PluggableAuthService
Products.PluginRegistry
# CMF 2.2
Products.CMFActionIcons
Products.CMFCalendar
Products.CMFCore
Products.CMFDefault
Products.CMFTopic
Products.CMFUid
Products.DCWorkflow
Products.GenericSetup
five.localsitemanager
# Other products
Products.DCWorkflowGraph
Products.MimetypesRegistry
Products.ExternalEditor
Products.TIDStorage
Products.Zelenium
# Currently forked in our repository
# Products.PortalTransforms
# Dependency for our fork of PortalTransforms
StructuredText
# parameterizing the generated python interpreter name by the python section
# version would create a dependency between this egg section and the
# installation of python, which we don't want on an instance
interpreter = python2.6
scripts =
repozo
runzope
runzeo
tidstoraged
tidstorage_repozo
extra-paths =
${itools:location}/lib
[mysql-python]
python = python2.6
[lxml-python]
python = python2.6
[python-ldap-python]
python = python2.6
[pysvn-python]
python = python2.6
[cloudooo]
recipe = zc.recipe.egg
python = python2.6
eggs =
${lxml-python:egg}
cloudooo.handler.ffmpeg
cloudooo.handler.imagemagick
cloudooo.handler.ooo
cloudooo.handler.pdf
cloudooo
PasteScript
scripts =
paster=cloudooo_paster
[versions]
# Use SlapOS patched zc.buildout
zc.buildout = 1.5.3-dev-SlapOS-001
# pin Acquisition and Products.DCWorkflow to Nexedi flavour of eggs
Acquisition = 2.13.7nxd001
Products.DCWorkflow = 2.2.3nxd002
# Known version which works
numpy = 1.3.0
# Patched version which supports python 2.6
PyXML = 0.8.4nxd001
# modified version that works fine for buildout installation
SOAPpy = 0.12.0nxd001
# we are still using this old stable version.
rdiff-backup = 1.0.5
# The Zope Known Good Set is incompatible with buildout 1.5.0 regarding
# distribute and zc.buildout; as ERP5 Appliance trunk wants to use the newest
# versions of the buildout infrastructure, clear the pins
distribute =
setuptools =
# the official pysvn egg does not support --include-dirs and
# --library-dirs, so we use our modified version
pysvn = 1.7.4nxd006
# Note: This is only a backward-compatibility profile link.
# Note: Please switch directly to forking/branching http://git.erp5.org/gitweb/slapos.git
extends = http://git.erp5.org/gitweb/slapos.git/blob_plain/refs/heads/erp5:/software/erp5/software.cfg
[buildout]
parts =
testnode
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
[testnode]
recipe = erp5.recipe.testnode
buildbot_binary = ${buildout:bin-directory}/buildbot
slapgrid_partition_binary = ${buildout:bin-directory}/slapgrid-cp
slapgrid_software_binary = ${buildout:bin-directory}/slapgrid-sr
slapproxy_binary = ${buildout:bin-directory}/slapproxy
svn_binary = ${subversion:location}/bin/svn
git_binary = ${git:location}/bin/git
svnversion_binary = ${subversion:location}/bin/svnversion
zip_binary = ${zip:location}/bin/zip
[buildout]
extensions =
slapos.tool.rebootstrap
slapos.tool.networkcache
slapos.zcbworkarounds
mr.developer
find-links = http://www.nexedi.org/static/packages/source/slapos.buildout/
http://dist.repoze.org
http://www.nexedi.org/static/packages/source/
versions = versions
rebootstrap-section = python2.6
extends =
profile/python-2.6.cfg
profile/subversion.cfg
profile/git.cfg
profile/lxml-python.cfg
profile/zip.cfg
parts =
template
bootstrap
eggs
subversion
zip
git
[bootstrap]
recipe = zc.recipe.egg
eggs = zc.buildout
suffix =
scripts =
buildout=bootstrap2.6
arguments = sys.argv[1:] + ["bootstrap"]
[rebootstrap]
section = python2.6
version = 1
[versions]
# Use SlapOS patched zc.buildout
zc.buildout = 1.5.3-dev-SlapOS-001
[eggs]
recipe = zc.recipe.egg
eggs =
${lxml-python:egg}
erp5.recipe.testnode
slapos.tool.grid
slapos.tool.proxy
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/testnode-instance.cfg
md5sum = 0b44b72c9e21d760f7e27a89e9d10187
output = ${buildout:directory}/template.cfg
mode = 0644