Commit 5a5d84fc authored by Rafael Monnerat

Update Release Candidate

/reviewed-on nexedi/slapos!159
parents 8ca30103 3f4b983a
@@ -11,3 +11,6 @@ slapos.cookbook.egg-info
.*.swp
*~
\#*\#
.eggs/
*.egg/
TEST_KNOWN_HOSTS
@@ -4,6 +4,7 @@ parts =
apache-antiloris
extends =
../nghttp2/buildout.cfg
../gdbm/buildout.cfg
../libexpat/buildout.cfg
../libuuid/buildout.cfg
@@ -17,15 +18,15 @@ extends =
../zlib/buildout.cfg
[apr]
-recipe = hexagonit.recipe.download
-ignore-existing = true
+recipe = slapos.recipe.build:download-unpacked
+strip-top-level-dir = false
version = 1.5.2
md5sum = 4e9769f3349fe11fc0a5e1b224c236aa
url = https://archive.apache.org/dist/apr/apr-${:version}.tar.bz2
[apr-util]
-recipe = hexagonit.recipe.download
-ignore-existing = true
+recipe = slapos.recipe.build:download-unpacked
+strip-top-level-dir = false
version = 1.5.4
url = https://archive.apache.org/dist/apr/apr-util-${:version}.tar.bz2
md5sum = 2202b18f269ad606d70e1864857ed93c
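Note: throughout this changeset the download parts move from hexagonit.recipe.download (with ignore-existing = true) to slapos.recipe.build:download-unpacked. A minimal sketch of a part using the new recipe, with an illustrative section name, URL and checksum (not taken from this commit):

[example-tarball]
recipe = slapos.recipe.build:download-unpacked
url = https://example.org/example-1.0.tar.bz2
md5sum = <md5 of the archive>
strip-top-level-dir = true

Only options that already appear in the sections above (url, md5sum, strip-top-level-dir) are used.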
@@ -50,6 +51,7 @@ configure-options = --disable-static
--enable-disk-cache
--enable-mem-cache
--enable-echo
--enable-http2
--enable-exception-hook
--enable-mods-shared=all
--enable-optional-fn-export
@@ -76,6 +78,7 @@ configure-options = --disable-static
--with-pcre=${pcre:location}
--with-sqlite3=${sqlite3:location}
--with-gdbm=${gdbm:location}
--with-nghttp2=${nghttp2:location}
--without-lber
--without-ldap
--without-ndbm
diff -ur autoconf-2.69/bin/Makefile.in autoconf-2.69/bin/Makefile.in
--- autoconf-2.69/bin/Makefile.in 2012-04-25 04:40:26.000000000 +0200
+++ autoconf-2.69/bin/Makefile.in 2017-04-12 16:47:38.029273723 +0200
@@ -214,7 +214,7 @@
# apply to us.
MY_AUTOM4TE = \
autom4te_perllibdir='$(top_srcdir)'/lib \
- AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_builddir)/bin/autom4te \
+ AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_builddir)/bin/autom4te \
-B '$(top_builddir)'/lib -B '$(top_srcdir)'/lib # keep ` '
diff -ur autoconf-2.69/lib/autoconf/Makefile.in autoconf-2.69/lib/autoconf/Makefile.in
--- autoconf-2.69/lib/autoconf/Makefile.in 2012-04-25 04:40:26.000000000 +0200
+++ autoconf-2.69/lib/autoconf/Makefile.in 2017-04-12 16:47:38.033273747 +0200
@@ -230,7 +230,7 @@
# apply to us.
MY_AUTOM4TE = \
autom4te_perllibdir='$(top_srcdir)'/lib \
- AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_builddir)/bin/autom4te \
+ AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_builddir)/bin/autom4te \
-B '$(top_builddir)'/lib -B '$(top_srcdir)'/lib # keep ` '
diff -ur autoconf-2.69/lib/autoscan/Makefile.in autoconf-2.69/lib/autoscan/Makefile.in
--- autoconf-2.69/lib/autoscan/Makefile.in 2012-04-25 04:40:26.000000000 +0200
+++ autoconf-2.69/lib/autoscan/Makefile.in 2017-04-12 16:47:38.029273723 +0200
@@ -216,7 +216,7 @@
# apply to us.
MY_AUTOM4TE = \
autom4te_perllibdir='$(top_srcdir)'/lib \
- AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_builddir)/bin/autom4te \
+ AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_builddir)/bin/autom4te \
-B '$(top_builddir)'/lib -B '$(top_srcdir)'/lib # keep ` '
diff -ur autoconf-2.69/lib/autotest/Makefile.in autoconf-2.69/lib/autotest/Makefile.in
--- autoconf-2.69/lib/autotest/Makefile.in 2012-04-25 04:40:26.000000000 +0200
+++ autoconf-2.69/lib/autotest/Makefile.in 2017-04-12 16:47:38.029273723 +0200
@@ -223,7 +223,7 @@
# apply to us.
MY_AUTOM4TE = \
autom4te_perllibdir='$(top_srcdir)'/lib \
- AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_builddir)/bin/autom4te \
+ AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_builddir)/bin/autom4te \
-B '$(top_builddir)'/lib -B '$(top_srcdir)'/lib # keep ` '
diff -ur autoconf-2.69/lib/m4sugar/Makefile.in autoconf-2.69/lib/m4sugar/Makefile.in
--- autoconf-2.69/lib/m4sugar/Makefile.in 2012-04-25 04:40:26.000000000 +0200
+++ autoconf-2.69/lib/m4sugar/Makefile.in 2017-04-12 16:47:38.033273747 +0200
@@ -228,7 +228,7 @@
# apply to us.
MY_AUTOM4TE = \
autom4te_perllibdir='$(top_srcdir)'/lib \
- AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_builddir)/bin/autom4te \
+ AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_builddir)/bin/autom4te \
-B '$(top_builddir)'/lib -B '$(top_srcdir)'/lib # keep ` '
diff -ur autoconf-2.69/tests/Makefile.in autoconf-2.69/tests/Makefile.in
--- autoconf-2.69/tests/Makefile.in 2012-04-25 04:40:26.000000000 +0200
+++ autoconf-2.69/tests/Makefile.in 2017-04-12 16:47:38.025273698 +0200
@@ -201,7 +201,7 @@
# apply to us.
MY_AUTOM4TE = \
autom4te_perllibdir='$(top_srcdir)'/lib \
- AUTOM4TE_CFG='$(AUTOM4TE_CFG)' $(top_builddir)/bin/autom4te \
+ AUTOM4TE_CFG='$(AUTOM4TE_CFG)' perl $(top_builddir)/bin/autom4te \
-B '$(top_builddir)'/lib -B '$(top_srcdir)'/lib # keep ` '
[buildout]
extends =
../m4/buildout.cfg
../patch/buildout.cfg
../perl/buildout.cfg
parts =
autoconf
@@ -9,6 +10,9 @@ parts =
recipe = slapos.recipe.cmmi
url = http://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz
md5sum = 82d05e03b93e45f5a39b828dc9c6c29b
patch-options = -p1
patches =
${:_profile_base_location_}/autoconf-2.69-shebang_workaround.patch#9d286e6f9c271dff361891e381be706d
environment =
M4=${m4:location}/bin/m4
-PATH=${perl:location}/bin:%(PATH)s
+PATH=${patch:location}/bin:${perl:location}/bin:%(PATH)s
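Note: the shebang-workaround patch above is wired in through the usual slapos.recipe.cmmi convention: each entry of patches is a URL suffixed with #<md5sum>, patch-options is passed to the patch program, and the patch binary is added to PATH. A sketch with illustrative names (the autoconf and automake sections in this commit are the real instances):

[example-part]
recipe = slapos.recipe.cmmi
url = http://example.org/example-1.0.tar.gz
md5sum = <md5 of the tarball>
patch-options = -p1
patches =
  ${:_profile_base_location_}/example.patch#<md5 of the patch>
environment =
  PATH=${patch:location}/bin:%(PATH)s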
--- automake-1.15/doc/help2man 2014-12-30 14:49:41.000000000 +0100
+++ automake-1.15/doc/help2man 2017-04-12 10:40:16.965600755 +0200
@@ -632,11 +632,13 @@
sub get_option_value
{
my ($prog, $opt) = @_;
+ $prog = `which $prog`;
+ chomp($prog);
my $stderr = $discard_stderr ? '/dev/null' : '&1';
my $value = join '',
map { s/ +$//; expand $_ }
map { dec $_ }
- `$prog $opt 2>$stderr`;
+ `perl $prog $opt 2>$stderr`;
unless ($value)
{
[buildout]
extends =
../autoconf/buildout.cfg
../patch/buildout.cfg
../perl/buildout.cfg
../xz-utils/buildout.cfg
parts =
@@ -10,5 +11,8 @@ parts =
recipe = slapos.recipe.cmmi
md5sum = 9a1ddb0e053474d9d1105cfe39b0c48d
url = http://ftp.gnu.org/gnu/automake/automake-1.15.tar.xz
patch-options = -p1
patches =
${:_profile_base_location_}/automake-1.15-shebang_workaround.patch#203f9199b0e629de3630b5959f8cf73e
environment =
-PATH =${autoconf:location}/bin:${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
+PATH=${autoconf:location}/bin:${patch:location}/bin:${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
@@ -2,27 +2,18 @@
extends =
../gcc/buildout.cfg
../unzip/buildout.cfg
../zip/buildout.cfg
parts =
bazel
# The bazel binary contains a zip file. It must not be stripped.
do-not-strip-path = ${buildout:parts-directory}/bazel/bin/bazel
-# [jdk]
-# recipe = slapos.recipe.build
-# url = http://download.oracle.com/otn-pub/java/jdk/8u112-b15/jdk-8u112-linux-x64.tar.gz
-# md5sum = de9b7a90f0f5a13cfcaa3b01451d0337
-# location = ${buildout:parts-directory}/${:_buildout_section_name_}
-# java_home = ${:location}/java_home
-# script =
-# from zc.buildout.download import check_md5sum, ChecksumError
-# download_dir = tempfile.mkdtemp()
-# download_path = os.path.join(download_dir, 'jdk.tar.gz')
-# self.cleanup_list.append(download_dir)
-# call(['wget', '-q', '-O', download_path, '--header', 'Cookie: oraclelicense=accept-securebackup-cookie', self.options['url']])
-# if not check_md5sum(download_path, self.options['md5sum']):raise ChecksumError()
-# extract_dir = self.extract(download_path)
-# java_home = '%(location)s/java_home'
-# self.copyTree(os.path.join(extract_dir, 'jdk1.8.0_112'), java_home)
+[zulu]
+recipe = hexagonit.recipe.download
+ignore-existing = true
+url = http://cdn.azul.com/zulu/bin/zulu8.20.0.5-jdk8.0.121-linux_x64.tar.gz
+md5sum = e5f4b1d997e50ffe4998c68c8ec45403
+strip-top-level-dir = true
[template-bazel-crosstool]
recipe = slapos.recipe.template:jinja2
@@ -43,14 +34,6 @@ context =
key include_path template-bazel-crosstool:include_path
key gcc_lib64_path template-bazel-crosstool:gcc_lib64_path
[python2.7]
# link to gcc libraries to solve GLIBXXX undefined reference error.
environment =
PATH=${patch:location}/bin:${xz-utils:location}/bin:%(PATH)s
CPPFLAGS=-I${zlib:location}/include -I${readline:location}/include -I${libexpat:location}/include -I${libffi:location}/include -I${ncurses:location}/include -I${ncurses:location}/include/ncursesw -I${bzip2:location}/include -I${gdbm:location}/include -I${openssl:location}/include -I${sqlite3:location}/include -I${gettext:location}/include
LDFLAGS=-L${zlib:location}/lib -L${readline:location}/lib -L${libexpat:location}/lib -L${libffi:location}/lib -L${ncurses:location}/lib -L${bzip2:location}/lib -L${gdbm:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${sqlite3:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${file:location}/lib -Wl,-rpath=${gcc:location}/lib64
[bazel]
recipe = slapos.recipe.build
url = https://github.com/bazelbuild/bazel/releases/download/0.4.3/bazel-0.4.3-dist.zip
@@ -63,7 +46,7 @@ zip-bin = ${zip:location}/bin
gcc-bin = ${gcc:location}/bin
gcc-lib = ${gcc:location}/lib
gcc-lib64 = ${gcc:location}/lib64
-java_home = PLEASE_INSTALL_JDK8_BY_YOURSELF_AND_SET_THE_PATH
+java_home = ${zulu:location}
script =
extract_dir = self.extract(self.download(self.options['url'], self.options['md5sum']))
crosstool_path = os.path.join(extract_dir, 'tools', 'cpp', 'CROSSTOOL')
[buildout]
extends =
../gmp/buildout.cfg
parts = binutils
[mpfr]
recipe = slapos.recipe.cmmi
url = http://ftp.gnu.org/gnu/mpfr/mpfr-3.1.3.tar.xz
md5sum = 6969398cd2fbc56a6af570b5273c56a9
configure-options =
--with-gmp=${gmp:location}
--disable-static
environment =
PATH=${xz-utils:location}/bin:%(PATH)s
LDFLAGS=-Wl,-rpath=${gmp:location}/lib
[mpc]
recipe = slapos.recipe.cmmi
url = http://ftp.gnu.org/gnu/mpc/mpc-1.0.3.tar.gz
md5sum = d6a1d5f8ddea3abd2cc3e98f58352d26
configure-options =
--with-gmp=${gmp:location}
--with-mpfr=${mpfr:location}
--disable-static
environment =
LDFLAGS=-Wl,-rpath=${gmp:location}/lib -Wl,-rpath=${mpfr:location}/lib
[isl]
recipe = slapos.recipe.cmmi
url = ftp://gcc.gnu.org/pub/gcc/infrastructure/isl-0.16.1.tar.bz2
md5sum = ac1f25a0677912952718a51f5bc20f32
configure-options =
--with-gmp-prefix=${gmp:location}
--disable-static
environment =
LDFLAGS=-Wl,-rpath=${gmp:location}/lib
[binutils]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnu.org/gnu/binutils/binutils-2.27.tar.gz
-md5sum = 41b053ed4fb2c6a8173ef421460fbb28
\ No newline at end of file
+url = http://ftp.gnu.org/gnu/binutils/binutils-2.27.tar.bz2
+md5sum = 2869c9bf3e60ee97c74ac2a6bf4e9d68
configure-options =
--disable-bootstrap
--with-mpc=${mpc:location}
--with-mpfr=${mpfr:location}
--with-gmp=${gmp:location}
--with-isl=${isl:location}
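Note: the [mpfr], [mpc] and [isl] parts added above are the same ones removed from the gcc profile later in this changeset, so both toolchain profiles can share them. A sketch of how another profile could reuse them (paths and section name are illustrative):

[buildout]
extends =
  ../binutils/buildout.cfg

[example-part]
recipe = slapos.recipe.cmmi
configure-options =
  --with-gmp=${gmp:location}
  --with-mpfr=${mpfr:location}
environment =
  LDFLAGS=-Wl,-rpath=${gmp:location}/lib -Wl,-rpath=${mpfr:location}/lib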
[buildout]
extends =
../scipy/buildout.cfg
../protobuf-python/buildout.cfg
../h5py/buildout.cfg
../pillow/buildout.cfg
../tensorflow/buildout.cfg
parts =
chainer-egg
[chainer-env]
CUDA_PATH=${cuda:cuda_toolkit_path}
[chainer]
recipe = zc.recipe.egg:custom
egg = chainer
setup-eggs =
${scipy:egg}
${numpy:egg}
${protobuf-python:egg}
${h5py:egg}
${pillow-python:egg}
environment = chainer-env
[chainer-egg]
recipe = zc.recipe.egg
initialization =
import scipy.spatial.ckdtree # load our own libstdc++ explicitly at the very beginning
eggs =
${scipy:egg}
${numpy:egg}
${protobuf-python:egg}
filelock
nose
six
${chainer:egg}
${h5py:egg}
${pillow-python:egg}
interpreter = chainer-python
scripts = chainer-python
[versions]
chainer = 1.22.0
filelock = 2.0.7
nose = 1.3.7
six = 1.10.0
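Note: [chainer-egg] builds a dedicated chainer-python interpreter whose initialization imports scipy.spatial.ckdtree first so that the bundled libstdc++ is loaded before anything else. A sketch of a software profile pulling this component in (the relative path is illustrative):

[buildout]
extends =
  ../../component/chainer/buildout.cfg
parts =
  chainer-egg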
@@ -7,8 +7,8 @@ parts =
[cmake]
recipe = slapos.recipe.cmmi
-url = http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
-md5sum = 9d38cd4e2c94c3cea97d0e2924814acc
+url = https://cmake.org/files/v3.7/cmake-3.7.2.tar.gz
+md5sum = 79bd7e65cd81ea3aa2619484ad6ff25a
environment =
CMAKE_INCLUDE_PATH=${ncurses:location}/include
CMAKE_LIBRARY_PATH=${ncurses:location}/lib
[buildout]
parts =
cryptopp
[cryptopp]
recipe = slapos.recipe.cmmi
location = ${buildout:parts-directory}/${:_buildout_section_name_}
url = https://github.com/weidai11/cryptopp/archive/CRYPTOPP_5_6_3.tar.gz
md5sum = 5bad00942de603ba6dd191d84eac2509
configure-command = true
make-options = PREFIX=${:location}
make-targets = static shared install
[cryptopp-5]
<= cryptopp
[cryptopp-5.6]
<= cryptopp-5
[cryptopp-5.6.3]
<= cryptopp-5.6
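Note: [cryptopp-5], [cryptopp-5.6] and [cryptopp-5.6.3] are buildout macros: <= copies every option of the referenced section, so a profile can pin a version level and still override individual options. A hypothetical example (version, URL and checksum are not part of this commit):

[cryptopp-5.6.4]
<= cryptopp-5.6
url = https://github.com/weidai11/cryptopp/archive/CRYPTOPP_5_6_4.tar.gz
md5sum = <md5 of that archive>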
[buildout]
parts =
depot_tools
[depot_tools]
recipe = slapos.recipe.build:gitclone
repository = https://chromium.googlesource.com/chromium/tools/depot_tools.git
branch = master
git-executable = ${git:location}/bin/git
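Note: slapos.recipe.build:gitclone checks the repository out at build time; with only repository, branch and git-executable set, the clone follows the tip of master. The same recipe pointed at another repository would look like this (values are illustrative):

[example-clone]
recipe = slapos.recipe.build:gitclone
repository = https://example.org/example.git
branch = master
git-executable = ${git:location}/bin/git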
@@ -21,6 +21,9 @@ parts =
# fail to run if exists.
[firefox]
<= firefox-51
[firefox-51]
recipe = slapos.recipe.build
slapos_promise =
file:firefox
@@ -91,6 +94,73 @@ script =
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${xz-utils:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${zlib:location}/lib"
export PATH=${fontconfig:location}/bin:$PATH
exec %(location)s/firefox "$@" 2> /dev/null
""")
wrapper.close()
os.chmod(wrapper_location, 0755)
[firefox-45]
recipe = slapos.recipe.build
slapos_promise =
file:firefox
file:firefox-bin
depends =
${liberation-fonts:location}
${ipaex-fonts:location}
version = 45.0.1
# MD5SUMs are available at :
# https://ftp.mozilla.org/pub/mozilla.org/firefox/releases/${:version}/MD5SUMS
x86 = http://download-installer.cdn.mozilla.net/pub/firefox/releases/${:version}/linux-i686/en-US/firefox-${:version}.tar.bz2 3756c8d06d6f915a3dff1dae643ee74b
x86-64 = http://download-installer.cdn.mozilla.net/pub/firefox/releases/${:version}/linux-x86_64/en-US/firefox-${:version}.tar.bz2 0409177ef649ec90ffe7a421a19bc156
script =
if not self.options.get('url'): self.options['url'], self.options['md5sum'] = self.options[guessPlatform()].split(' ')
extract_dir = self.extract(self.download(self.options['url'], self.options.get('md5sum')))
workdir = guessworkdir(extract_dir)
self.copyTree(workdir, "%(location)s")
wrapper_location = os.path.join("%(location)s", "firefox-slapos")
wrapper = open(wrapper_location, 'w')
wrapper.write("""#!${dash:location}/bin/dash
cd %(location)s
export LD_LIBRARY_PATH="%(location)s"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${alsa:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${atk:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${bzip2:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${cairo:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${dbus:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${dbus-glib:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${fontconfig:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${freetype:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${gdk-pixbuf:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${gettext:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${glib:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${gtk-2:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${harfbuzz:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libICE:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libSM:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libX11:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXau:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXcomposite:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXcursor:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXext:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXrender:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXt:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libffi:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libpng:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libtool:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libuuid:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libxcb:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libxml2:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${p11-kit:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${pango:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${pixman:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${xdamage:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${xfixes:location}/lib"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${zlib:location}/lib"
export PATH=${fontconfig:location}/bin:$PATH
exec %(location)s/firefox $*""")
wrapper.close()
os.chmod(wrapper_location, 0755)
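Note: both firefox parts follow the same slapos.recipe.build pattern: pick the per-architecture URL and checksum, download and extract, copy the tree into the part, then write a dash wrapper that exports LD_LIBRARY_PATH for every bundled dependency before running the binary. Reduced to the helpers actually used above, the shape is roughly (section name, URLs and checksums are illustrative):

[example-binary]
recipe = slapos.recipe.build
slapos_promise =
  file:example
x86 = http://example.org/example-linux-i686.tar.bz2 <md5>
x86-64 = http://example.org/example-linux-x86_64.tar.bz2 <md5>
script =
  if not self.options.get('url'): self.options['url'], self.options['md5sum'] = self.options[guessPlatform()].split(' ')
  extract_dir = self.extract(self.download(self.options['url'], self.options.get('md5sum')))
  workdir = guessworkdir(extract_dir)
  self.copyTree(workdir, "%(location)s")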
@@ -62,21 +62,16 @@ environment =
[gobject-introspection]
recipe = slapos.recipe.cmmi
-url = https://github.com/GNOME/gobject-introspection/archive/GOBJECT_INTROSPECTION_1_45_2.tar.gz
-pre-configure =
-libtoolize -c -f
-aclocal -I${pkgconfig:location}/share/aclocal -I${gettext:location}/share/aclocal -I${libtool:location}/share/aclocal
-./autogen.sh
+url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/gobject-introspection-1.50.0.tar.xz
+md5sum = 5af8d724f25d0c9cfbe6df41b77e5dc0
configure-options =
--disable-static
environment =
-PATH=${autoconf:location}/bin:${automake:location}/bin:${pkgconfig:location}/bin:${libtool:location}/bin:${intltool:location}/bin:${gettext:location}/bin:${glib:location}/bin:${flex:location}/bin:${bison:location}/bin:%(PATH)s
+PATH=${pkgconfig:location}/bin:${gettext:location}/bin:${glib:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
CPPFLAGS=-I${glib:location}/include/glib-2.0 -I${glib:location}/lib/glib-2.0/include
LDFLAGS=-L${glib:location}/lib -Wl,-rpath=${glib:location}/lib -L${libffi:location}/lib -Wl,-rpath=${libffi:location}/lib -lffi
-M4=${m4:location}/bin/m4
-ACLOCAL_PATH=${pkgconfig:location}/share/aclocal:${gettext:location}/share/aclocal:${libtool:location}/share/aclocal:${glib:location}/share/aclocal:${intltool:location}/share/aclocal
PYTHON=${python2.7:location}/bin/python2.7
GLIB_CFLAGS=-I${glib:location}/include/glib-2.0 -I${glib:location}/lib/glib-2.0/include
GLIB_LIBS=-L${glib:location}/lib -lglib-2.0 -lintl -lgobject-2.0
@@ -86,9 +81,9 @@ environment =
[pygobject3]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/GNOME/sources/pygobject/3.10/pygobject-3.10.2.tar.xz
+url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/pygobject-3.22.0.tar.xz
python-egg = ${buildout:parts-directory}/${:_buildout_section_name_}/lib/python2.7/site-packages
-md5sum = f311155be8510df6ad8e4edf1cb463d4
+md5sum = ed4117ed5d554d25fd7718807fbf819f
pre-configure =
sed -i 's#/usr/local#${gobject-introspection:location}#g' ${gobject-introspection:location}/lib/pkgconfig/gobject-introspection-1.0.pc
configure-options =
@@ -10,8 +10,8 @@ recipe = rubygemsrecipe
url = https://rubygems.org/rubygems/rubygems-2.4.8.zip
ruby-executable = ${ruby:location}/bin/ruby
gems =
-fluentd==0.12.24
-fluent-plugin-td==0.10.27
+fluentd==0.14.14
+fluent-plugin-td==0.10.29
gem-options = --no-ri --no-rdoc --with-icu-lib=${icu:location}/lib/ --with-icu-dir=${icu:location}/
environment =
LDFLAGS = -L${icu:location}/lib -Wl,-rpath=${icu:location}/lib
@@ -14,8 +14,8 @@ location = ${buildout:parts-directory}/${:_buildout_section_name_}
[fonts-base]
destination = ${fonts:location}/${:_buildout_section_name_}
-recipe = hexagonit.recipe.download
-ignore-existing = true
location = ${:destination}
+recipe = slapos.recipe.build:download-unpacked
+strip-top-level-dir = true
# Liberation(tm) Fonts - a font family which aims at metric
@@ -13,8 +13,8 @@ parts =
[freetype]
recipe = slapos.recipe.cmmi
-url = http://download.savannah.gnu.org/releases/freetype/freetype-2.6.1.tar.bz2
-md5sum = 35cb8f4d9e5906847901bb39324c2f80
+url = http://download.savannah.gnu.org/releases/freetype/freetype-2.7.1.tar.bz2
+md5sum = b3230110e0cab777e0df7631837ac36e
pkg_config_depends = ${zlib:location}/lib/pkgconfig:${libpng:location}/lib/pkgconfig
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-options =
@@ -11,38 +11,6 @@ extends =
parts =
gcc
-[mpfr]
-recipe = slapos.recipe.cmmi
-url = http://ftp.gnu.org/gnu/mpfr/mpfr-3.1.3.tar.xz
-md5sum = 6969398cd2fbc56a6af570b5273c56a9
-configure-options =
---with-gmp=${gmp:location}
---disable-static
-environment =
-PATH=${xz-utils:location}/bin:%(PATH)s
-LDFLAGS=-Wl,-rpath=${gmp:location}/lib
-[mpc]
-recipe = slapos.recipe.cmmi
-url = http://ftp.gnu.org/gnu/mpc/mpc-1.0.3.tar.gz
-md5sum = d6a1d5f8ddea3abd2cc3e98f58352d26
-configure-options =
---with-gmp=${gmp:location}
---with-mpfr=${mpfr:location}
---disable-static
-environment =
-LDFLAGS=-Wl,-rpath=${gmp:location}/lib -Wl,-rpath=${mpfr:location}/lib
-[isl]
-recipe = slapos.recipe.cmmi
-url = ftp://gcc.gnu.org/pub/gcc/infrastructure/isl-0.16.1.tar.bz2
-md5sum = ac1f25a0677912952718a51f5bc20f32
-configure-options =
---with-gmp-prefix=${gmp:location}
---disable-static
-environment =
-LDFLAGS=-Wl,-rpath=${gmp:location}/lib
[gcc-common]
recipe = slapos.recipe.cmmi
url = http://ftp.gnu.org/gnu/gcc/gcc-5.4.0/gcc-5.4.0.tar.bz2
@@ -58,14 +26,13 @@ configure-options =
--with-gmp=${gmp:location}
--with-mpfr=${mpfr:location}
--with-mpc=${mpc:location}
---enable-languages="c,c++"
+--enable-languages="c,c++,fortran"
--with-isl=${isl:location}
--with-ld=${binutils:location}/bin/ld
--with-nm=${binutils:location}/bin/nm
--with-as=${binutils:location}/bin/as
environment =
LDFLAGS=-Wl,-rpath=${gmp:location}/lib -Wl,-rpath=${isl:location}/lib -Wl,-rpath=${mpc:location}/lib -Wl,-rpath=${mpfr:location}/lib
-PATH=${perl:location}/bin:${tar:location}/bin:%(PATH)s
+PATH=${binutils:location}/bin:${perl:location}/bin:${tar:location}/bin:%(PATH)s
[gcc-minimal]
<= gcc-common
@@ -81,17 +48,3 @@ configure-options =
environment =
LDFLAGS=-Wl,-rpath=${gmp:location}/lib -Wl,-rpath=${mpc:location}/lib -Wl,-rpath=${mpfr:location}/lib
PATH=${perl:location}/bin:${tar:location}/bin:%(PATH)s
-[gcc-fortran]
-<= gcc-common
-configure-options =
---disable-bootstrap
---disable-multilib
---with-gmp=${gmp:location}
---with-mpfr=${mpfr:location}
---with-mpc=${mpc:location}
---enable-languages="c,c++,fortran"
---with-isl=${isl:location}
-environment =
-LDFLAGS=-Wl,-rpath=${gmp:location}/lib -Wl,-rpath=${isl:location}/lib -Wl,-rpath=${mpc:location}/lib -Wl,-rpath=${mpfr:location}/lib
-PATH=${perl:location}/bin:${tar:location}/bin:%(PATH)s
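Note: with fortran now listed in [gcc-common] (--enable-languages="c,c++,fortran") and mpfr/mpc/isl moved next to binutils, the separate [gcc-fortran] part above is dropped; parts that used it can presumably reference [gcc] directly. A sketch with an illustrative section name, reusing only patterns already present in this profile:

[example-fortran-user]
recipe = slapos.recipe.cmmi
environment =
  PATH=${gcc:location}/bin:%(PATH)s
  LDFLAGS=-Wl,-rpath=${gcc:location}/lib -Wl,-rpath=${gcc:location}/lib64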
@@ -37,7 +37,7 @@ url = https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs
md5sum = 8f3d383d48da22345937b66b01ab2960
[ghostscript-fonts]
-recipe = hexagonit.recipe.download
-ignore-existing = true
+recipe = slapos.recipe.build:download-unpacked
+strip-top-level-dir = false
url = http://downloads.ghostscript.com/public/fonts/urw-base35-v1.10.zip
md5sum = 66e8bbd8228519d5dba82b9433a61bb0
@@ -11,10 +11,11 @@ parts =
[glib]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/gnome/core/3.20/3.20.2/sources/glib-2.48.1.tar.xz
-md5sum = 67bd3b75c9f6d5587b457dc01cdcd5bb
+url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/glib-2.50.2.tar.xz
+md5sum = 5eeb2bfaf78a07be59585e8b6e80b1d6
configure-options =
--with-python=${python2.7:location}/bin/python2.7
--disable-libmount
--disable-static
--disable-selinux
--disable-fam
[buildout]
extends =
../gcc/buildout.cfg
../gettext/buildout.cfg
../glib/buildout.cfg
../libsigc/buildout.cfg
@@ -11,14 +12,14 @@ parts =
[glibmm]
recipe = slapos.recipe.cmmi
-# we keep using glibmm-2.44 that is buildable with non-C++11 compiler
-url = http://ftp.gnome.org/pub/gnome/core/3.16/3.16.2/sources/glibmm-2.44.0.tar.xz
-md5sum = 32ee4150b436d097fe2506d0b0b13a75
+url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/glibmm-2.50.0.tar.xz
+md5sum = e7416beff6cba1f38c2bccd0dc8c3278
pkg_config_depends = ${glib:location}/lib/pkgconfig:${libsigc:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
configure-options =
--disable-documentation
environment =
-PATH=${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:${glib:location}/bin:%(PATH)s
+PATH=${gcc:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:${glib:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${:pkg_config_depends}
CPPFLAGS=-I${gettext:location}/include
-LDFLAGS=-L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib
+LD_LIBRARY_PATH=${gcc:location}/lib:${gcc:location}/lib64
+LDFLAGS=-Wl,-rpath=${gcc:location}/lib -Wl,-rpath=${gcc:location}/lib64 -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib
@@ -14,8 +14,8 @@ extends =
[groonga]
recipe = slapos.recipe.cmmi
-url = http://packages.groonga.org/source/groonga/groonga-6.1.1.tar.gz
-md5sum = f2dedb4b1a536a5e11a2b9a35664125b
+url = http://packages.groonga.org/source/groonga/groonga-7.0.0.tar.gz
+md5sum = 1b5383359ba1901e66b9e0910712569f
# temporary patch to respect more tokens in natural language mode.
patches =
${:_profile_base_location_}/groonga.patch#9ed02fbe8400402d3eab47eee149978b
@@ -61,8 +61,8 @@ environment =
[pango]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/gnome/core/3.20/3.20.2/sources/pango-1.40.1.tar.xz
-md5sum = 6fc88c6529890d6c8e03074d57a3eceb
+url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/pango-1.40.3.tar.xz
+md5sum = 17c26720f5a862a12f7e1745e2f1d966
pkg_config_depends = ${harfbuzz:location}/lib/pkgconfig:${harfbuzz:pkg_config_depends}
configure-options =
--disable-static
......@@ -75,8 +75,8 @@ environment =
[gdk-pixbuf]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/gnome/core/3.20/3.20.2/sources/gdk-pixbuf-2.35.1.tar.xz
-md5sum = 72e0c924d5dc96bfde58a3b65ed83744
+url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/gdk-pixbuf-2.36.0.tar.xz
+md5sum = 1a3baf91956c7923dab49ee3de100ce1
pkg_config_depends = ${glib:location}/lib/pkgconfig:${libX11:location}/lib/pkgconfig:${libX11:pkg_config_depends}:${pcre:location}/lib/pkgconfig
configure-options =
--disable-static
@@ -84,7 +84,7 @@ configure-options =
--without-libintl-prefix
--with-x11
environment =
-PATH=${glib:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
+PATH=${gettext:location}/bin:${glib:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${:pkg_config_depends}
CPPFLAGS=-I${libtiff:location}/include -I${libjpeg:location}/include -I${libpng:location}/include
LDFLAGS=-L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -L${glib:location}/lib -Wl,-rpath=${glib:location}/lib -Wl,-rpath=${jbigkit:location}/lib -L${libtiff:location}/lib -Wl,-rpath=${libtiff:location}/lib -L${libjpeg:location}/lib -Wl,-rpath=${libjpeg:location}/lib -L${libpng:location}/lib -Wl,-rpath=${libpng:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
@@ -92,21 +92,21 @@ environment =
[atk]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/gnome/core/3.20/3.20.2/sources/atk-2.20.0.tar.xz
-md5sum = 5187b0972f4d3905f285540b31395e20
+url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/atk-2.22.0.tar.xz
+md5sum = c7f2adcf75e4058727174cde970e9129
configure-options =
--with-python=${python2.7:location}/bin/python2.7
--disable-gtk-doc-html
environment =
-PATH=${glib:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
+PATH=${gettext:location}/bin:${glib:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
LDFLAGS=-L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -L${glib:location}/lib -Wl,-rpath=${gettext:location}/lib
LD_LIBRARY_PATH=${glib:location}/lib:${gettext:location}/lib
[gtk-2]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/gnome/core/3.20/3.20.2/sources/gtk+-2.24.30.tar.xz
-md5sum = 04568ba5c58b75e3c7543e45628ad789
+url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/gtk+-2.24.31.tar.xz
+md5sum = 526a1008586094a2cbb4592fd3f9ee10
pkg_config_depends = ${pango:location}/lib/pkgconfig:${pango:pkg_config_depends}:${atk:location}/lib/pkgconfig:${gdk-pixbuf:location}/lib/pkgconfig
configure-options =
--disable-static
@@ -117,7 +117,7 @@ configure-options =
--disable-xinerama
--disable-gtk-doc-html
environment =
-PATH=${gdk-pixbuf:location}/bin:${glib:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
+PATH=${gdk-pixbuf:location}/bin:${gettext:location}/bin:${glib:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${:pkg_config_depends}
# not taken from pkg-config result...
CPPFLAGS=-I${libX11:location}/include/ -I${xproto:location}/include -I${kbproto:location}/include -I${libXrender:location}/include -I${render:location}/include -I${libXext:location}/include
@@ -13,8 +13,8 @@ extends =
[at-spi2-core]
recipe = slapos.recipe.cmmi
-url =http://ftp.gnome.org/pub/gnome/core/3.20/3.20.2/sources/at-spi2-core-2.20.1.tar.xz
-md5sum = cd11cba463e8f5e1f39ba69555a7382f
+url =http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/at-spi2-core-2.22.0.tar.xz
+md5sum = 3da5fe62a653e49dad1c47f9a46fee56
depends =
${perl-XML-Parser:location}
configure-options =
@@ -23,20 +23,20 @@ environment =
PATH=${dbus:location}/bin:${gettext:location}/bin:${glib:location}/bin:${intltool:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${dbus:location}/lib/pkgconfig:${glib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
CPPFLAGS=-I${inputproto:location}/include -I${kbproto:location}/include -I${libX11:location}/include -I${libXi:location}/include -I${libXtst:location}/include -I${xextproto:location}/include -I${xproto:location}/include
-LDFLAGS=-L${libX11:location}/lib -Wl,-rpath=${libX11:location}/lib -L${libXi:location}/lib -Wl,-rpath=${libXi:location}/lib -L${libXtst:location}/lib -Wl,-rpath=${libXtst:location}/lib
+LDFLAGS=-L${libX11:location}/lib -Wl,-rpath=${libX11:location}/lib -L${libXi:location}/lib -Wl,-rpath=${libXi:location}/lib -L${libXtst:location}/lib -Wl,-rpath=${libXtst:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
[at-spi2-atk]
recipe = slapos.recipe.cmmi
-url =http://ftp.gnome.org/pub/gnome/core/3.20/3.20.2/sources/at-spi2-atk-2.20.1.tar.xz
-md5sum = 23309b6f8e1623871ace6347fb734dce
+url =http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/at-spi2-atk-2.22.0.tar.xz
+md5sum = aa62aed21b8e03dc44ab81ae49d893ca
environment =
PATH=${intltool:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${atk:location}/lib/pkgconfig:${at-spi2-core:location}/lib/pkgconfig:${dbus:location}/lib/pkgconfig:${glib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
[gtk-3]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/gnome/core/3.20/3.20.2/sources/gtk+-3.20.4.tar.xz
-md5sum = 0cceee599f2910c25bf4b9dde4ab2fb6
+url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/gtk+-3.22.3.tar.xz
+md5sum = f0e6492896a2ca244501142319adaa95
pkg_config_depends = ${at-spi2-atk:location}/lib/pkgconfig:${at-spi2-core:location}/lib/pkgconfig:${dbus:location}/lib/pkgconfig:${glib:location}/lib/pkgconfig:${libepoxy:location}/lib/pkgconfig:${pango:location}/lib/pkgconfig:${pango:pkg_config_depends}:${atk:location}/lib/pkgconfig:${gdk-pixbuf:location}/lib/pkgconfig:${libXi:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
configure-options =
--disable-static
@@ -2,6 +2,8 @@
extends =
../bzip2/buildout.cfg
../freetype/buildout.cfg
../gcc/buildout.cfg
../gettext/buildout.cfg
../glibmm/buildout.cfg
../gtk-2/buildout.cfg
../perl/buildout.cfg
@@ -14,55 +16,57 @@ parts =
[cairomm]
recipe = slapos.recipe.cmmi
-url = http://cairographics.org/releases/cairomm-1.11.2.tar.gz
-md5sum = 732a3ff5b57401eb5dfeef795a2a0c52
+url = http://cairographics.org/releases/cairomm-1.13.1.tar.gz
+md5sum = 21fe892652741b7544f52da6965d27fc
pkg_config_depends = ${cairo:location}/lib/pkgconfig:${cairo:pkg_config_depends}:${libsigc:location}/lib/pkgconfig
configure-options =
--disable-static
--disable-documentation
environment =
-PATH=${freetype:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
+PATH=${freetype:location}/bin:${gcc:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${:pkg_config_depends}
CPPFLAGS=-I${libX11:location}/include -I${libXrender:location}/include -I${render:location}/include -I${xproto:location}/include
-LDFLAGS=-L${libX11:location}/lib -Wl,-rpath=${libX11:location}/lib -L${libXext:location}/lib -Wl,-rpath=${libXext:location}/lib -L${libXrender:location}/lib -Wl,-rpath=${libXrender:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
+LD_LIBRARY_PATH=${gcc:location}/lib:${gcc:location}/lib64
+LDFLAGS=-Wl,-rpath=${gcc:location}/lib -Wl,-rpath=${gcc:location}/lib64 -L${libX11:location}/lib -Wl,-rpath=${libX11:location}/lib -L${libXext:location}/lib -Wl,-rpath=${libXext:location}/lib -L${libXrender:location}/lib -Wl,-rpath=${libXrender:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
[pangomm]
recipe = slapos.recipe.cmmi
-# we keep using pangomm-2.36 for glibmm-2.44 that is buildable with non-C++11 compiler
-url = http://ftp.gnome.org/pub/gnome/core/3.16/3.16.2/sources/pangomm-2.36.0.tar.xz
-md5sum = 62910723211d86ab825b666b479871c9
+url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/pangomm-2.40.1.tar.xz
+md5sum = 874eadd9434613dbacf0272c82c3ac23
pkg_config_depends = ${pango:location}/lib/pkgconfig:${pango:pkg_config_depends}:${glibmm:location}/lib/pkgconfig:${glibmm:pkg_config_depends}:${cairomm:location}/lib/pkgconfig
configure-options =
--disable-static
--disable-documentation
environment =
-PATH=${glib:location}/bin:${freetype:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
+PATH=${gcc:location}/bin:${glib:location}/bin:${freetype:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${:pkg_config_depends}
-LDFLAGS=-L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib
+LD_LIBRARY_PATH=${gcc:location}/lib:${gcc:location}/lib64
+LDFLAGS=-Wl,-rpath=${gcc:location}/lib -Wl,-rpath=${gcc:location}/lib64 -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib
[atkmm]
recipe = slapos.recipe.cmmi
-# we keep using atkmm-2.22 for glibmm-2.44 that is buildable with non-C++11 compiler
-url = http://ftp.gnome.org/pub/gnome/core/3.12/3.12.2/sources/atkmm-2.22.7.tar.xz
-md5sum = fec7db3fc47ba2e0c95d130ec865a236
+url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/atkmm-2.24.2.tar.xz
+md5sum = d53b60b0f1be597e86070954a49cf0c3
pkg_config_depends = ${atk:location}/lib/pkgconfig:${glib:location}/lib/pkgconfig:${glibmm:location}/lib/pkgconfig:${libsigc:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
configure-options =
--disable-static
--disable-documentation
environment =
-PATH=${glib:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
+PATH=${gcc:location}/bin:${glib:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${:pkg_config_depends}
-LDFLAGS=-L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib
+LD_LIBRARY_PATH=${gcc:location}/lib:${gcc:location}/lib64
+LDFLAGS=-Wl,-rpath=${gcc:location}/lib -Wl,-rpath=${gcc:location}/lib64 -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib
[gtkmm]
recipe = slapos.recipe.cmmi
-url = http://ftp.gnome.org/pub/GNOME/sources/gtkmm/2.24/gtkmm-2.24.4.tar.xz
-md5sum = b9ac60c90959a71095f07f84dd39961d
+url = http://ftp.gnome.org/pub/GNOME/sources/gtkmm/2.24/gtkmm-2.24.5.tar.xz
+md5sum = 6c59ae8bbff48fad9132f23af347acf1
pkg_config_depends = ${pangomm:location}/lib/pkgconfig:${pangomm:pkg_config_depends}:${atkmm:location}/lib/pkgconfig:${atkmm:pkg_config_depends}:${gtk-2:location}/lib/pkgconfig:${gtk-2:pkg_config_depends}
configure-options =
--disable-static
--disable-documentation
environment =
-PATH=${gdk-pixbuf:location}/bin:${glib:location}/bin:${gtk-2:location}/bin:${pango:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
+PATH=${gcc:location}/bin:${gdk-pixbuf:location}/bin:${glib:location}/bin:${gtk-2:location}/bin:${pango:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${:pkg_config_depends}
-LDFLAGS=-L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
+LD_LIBRARY_PATH=${gcc:location}/lib:${gcc:location}/lib64
+LDFLAGS=-L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${gcc:location}/lib -Wl,-rpath=${gcc:location}/lib64 -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
@@ -15,3 +15,11 @@ configure-options =
[icu]
<= icu4c
[icu4c-58.2]
<= icu4c
[icu4c-55.1]
<= icu4c
url = http://download.icu-project.org/files/icu4c/55.1/icu4c-55_1-src.tgz
md5sum = e2d523df79d6cb7855c2fbe284f4db29
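Note: [icu] keeps pointing at the current [icu4c], while the new [icu4c-58.2] and [icu4c-55.1] macros keep older releases addressable ([icu4c-55.1] carries its own url and md5sum). A profile that needs the older ABI could pin it with a hypothetical override such as:

[icu]
<= icu4c-55.1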
@@ -3,13 +3,17 @@ parts = inkscape
extends =
../boost-lib/buildout.cfg
../bzip2/buildout.cfg
../cmake/buildout.cfg
../freetype/buildout.cfg
../garbage-collector/buildout.cfg
../gcc/buildout.cfg
../gettext/buildout.cfg
../glibmm/buildout.cfg
../gtk-2/buildout.cfg
../gtkmm/buildout.cfg
../intltool/buildout.cfg
../lcms/buildout.cfg
../libjpeg/buildout.cfg
../libpng/buildout.cfg
../libsigc/buildout.cfg
../libxml2/buildout.cfg
@@ -23,36 +27,36 @@ extends =
[gsl]
recipe = slapos.recipe.cmmi
-url = ftp://ftp.gnu.org/gnu/gsl/gsl-1.16.tar.gz
-md5sum = e49a664db13d81c968415cd53f62bc8b
+url = http://ftp.gnu.org/gnu/gsl/gsl-2.3.tar.gz
+md5sum = 905fcbbb97bc552d1037e34d200931a0
configure-options =
--disable-static
environment =
-PATH=${glib:location}/bin:${pkgconfig:location}/bin:%(PATH)s
+PATH=${gcc:location}/bin:${glib:location}/bin:${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${atk:location}/lib/pkgconfig:${glib:location}/lib/pkgconfig:${glibmm:location}/lib/pkgconfig:${libsigc:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
-LDFLAGS=-L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib
+LD_LIBRARY_PATH=${gcc:location}/lib:${gcc:location}/lib64
+LDFLAGS=-Wl,-rpath=${gcc:location}/lib -Wl,-rpath=${gcc:location}/lib64 -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib
[inkscape]
recipe = slapos.recipe.cmmi
-url = https://inkscape.org/en/gallery/item/3860/inkscape-0.91.tar.bz2
-md5sum = 278dfa4514adcde23546370ec2c84581
-patch-options = -p0
-# based on
-# http://bazaar.launchpad.net/~inkscape.dev/inkscape/trunk/revision/15017
-patches =
-${:_profile_base_location_}/inkscape-0.91_gcc6.patch#ce3ddbb90f7e5e81fd322469194b6c3c
-pkg_config_depends = ${gtkmm:location}/lib/pkgconfig:${gtkmm:pkg_config_depends}:${gsl:location}/lib/pkgconfig:${popt:location}/lib/pkgconfig:${garbage-collector:location}/lib/pkgconfig:${libxslt:location}/lib/pkgconfig
+url = https://inkscape.org/gallery/item/10682/inkscape-0.92.1.tar_XlpI7qT.bz2
+md5sum = db2eb2a566cf35ff949fc9ccb172889a
+location = ${buildout:parts-directory}/${:_buildout_section_name_}
+pkg_config_depends = ${freetype:location}/lib/pkgconfig:${gtkmm:location}/lib/pkgconfig:${gtkmm:pkg_config_depends}:${gsl:location}/lib/pkgconfig:${popt:location}/lib/pkgconfig:${garbage-collector:location}/lib/pkgconfig:${libxslt:location}/lib/pkgconfig
+configure-command = ${cmake:location}/bin/cmake
configure-options =
---disable-static
---disable-openmp
---disable-lcms
---disable-wpg
---disable-visio
---disable-cdr
---without-gnome-vfs
+-DCMAKE_INSTALL_PREFIX=${:location}
+-DCMAKE_C_COMPILER=${gcc:location}/bin/gcc
+-DCMAKE_CXX_COMPILER=${gcc:location}/bin/g++
+-DENABLE_POPPLER=OFF
+-DWITH_GNOME_VFS=OFF
+-DWITH_IMAGE_MAGICK=OFF
+-DWITH_LIBCDR=OFF
+-DWITH_LIBVISIO=OFF
+-DWITH_LIBWPG=OFF
environment =
-PATH=${freetype:location}/bin:${gdk-pixbuf:location}/bin:${gettext:location}/bin:${glib:location}/bin:${intltool:location}/bin:${libxml2:location}/bin:${pango:location}/bin:${patch:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:%(PATH)s
+PATH=${cmake:location}/bin:${freetype:location}/bin:${gcc:location}/bin:${gdk-pixbuf:location}/bin:${gettext:location}/bin:${glib:location}/bin:${intltool:location}/bin:${libxml2:location}/bin:${pango:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${:pkg_config_depends}
CPPFLAGS=-I${boost-lib:location}/include -I${cairo:location}/include -I${garbage-collector:location}/include -I${libpng:location}/include -I${popt:location}/include -I${zlib:location}/include
# rpath seems not taken from pkgconfig...
LDFLAGS=-L${atk:location}/lib -Wl,-rpath=${atk:location}/lib -L${atkmm:location}/lib -Wl,-rpath=${atkmm:location}/lib -L${boost-lib:location}/lib -Wl,-rpath=${boost-lib:location}/lib -L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib -L${cairo:location}/lib -Wl,-rpath=${cairo:location}/lib -L${cairomm:location}/lib -Wl,-rpath=${cairomm:location}/lib -L${fontconfig:location}/lib -Wl,-rpath=${fontconfig:location}/lib -L${freetype:location}/lib -Wl,-rpath=${freetype:location}/lib -L${garbage-collector:location}/lib -Wl,-rpath=${garbage-collector:location}/lib -L${gdk-pixbuf:location}/lib -Wl,-rpath=${gdk-pixbuf:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -L${glib:location}/lib -Wl,-rpath=${glib:location}/lib -L${glibmm:location}/lib -Wl,-rpath=${glibmm:location}/lib -L${gsl:location}/lib -Wl,-rpath=${gsl:location}/lib -L${gtk-2:location}/lib -Wl,-rpath=${gtk-2:location}/lib -L${gtkmm:location}/lib -Wl,-rpath=${gtkmm:location}/lib -L${libX11:location}/lib -Wl,-rpath=${libX11:location}/lib -L${libXext:location}/lib -Wl,-rpath=${libXext:location}/lib -L${libpng:location}/lib -Wl,-rpath=${libpng:location}/lib -L${libsigc:location}/lib -Wl,-rpath=${libsigc:location}/lib -L${libxml2:location}/lib -Wl,-rpath=${libxml2:location}/lib -L${libxslt:location}/lib -Wl,-rpath=${libxslt:location}/lib -L${pango:location}/lib -Wl,-rpath=${pango:location}/lib -L${pangomm:location}/lib -Wl,-rpath=${pangomm:location}/lib -L${popt:location}/lib -Wl,-rpath=${popt:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
CMAKE_INCLUDE_PATH=${boost-lib:location}/include:${freetype:location}/include:${garbage-collector:location}/include:${libjpeg:location}/include:${lcms2:location}/include:${libpng:location}/include:${zlib:location}/include
CMAKE_LIBRARY_PATH=${boost-lib:location}/lib:${freetype:location}/lib:${garbage-collector:location}/lib:${gcc:location}/lib:${gcc:location}/lib64:${lcms2:location}/lib:${libjpeg:location}/lib:${libpng:location}/lib:${zlib:location}/lib
LDFLAGS=-L${atk:location}/lib -Wl,-rpath=${atk:location}/lib -L${atkmm:location}/lib -Wl,-rpath=${atkmm:location}/lib -L${boost-lib:location}/lib -Wl,-rpath=${boost-lib:location}/lib -L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib -L${cairo:location}/lib -Wl,-rpath=${cairo:location}/lib -L${cairomm:location}/lib -Wl,-rpath=${cairomm:location}/lib -L${fontconfig:location}/lib -Wl,-rpath=${fontconfig:location}/lib -L${freetype:location}/lib -Wl,-rpath=${freetype:location}/lib -L${garbage-collector:location}/lib -Wl,-rpath=${garbage-collector:location}/lib -Wl,-rpath=${gcc:location}/lib -Wl,-rpath=${gcc:location}/lib64 -L${gdk-pixbuf:location}/lib -Wl,-rpath=${gdk-pixbuf:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -L${glib:location}/lib -Wl,-rpath=${glib:location}/lib -L${glibmm:location}/lib -Wl,-rpath=${glibmm:location}/lib -L${gsl:location}/lib -Wl,-rpath=${gsl:location}/lib -L${gtk-2:location}/lib -Wl,-rpath=${gtk-2:location}/lib -L${gtkmm:location}/lib -Wl,-rpath=${gtkmm:location}/lib -L${lcms2:location}/lib -Wl,-rpath=${lcms2:location}/lib -L${libX11:location}/lib -Wl,-rpath=${libX11:location}/lib -L${libXext:location}/lib -Wl,-rpath=${libXext:location}/lib -L${libjpeg:location}/lib -Wl,-rpath=${libjpeg:location}/lib -L${libpng:location}/lib -Wl,-rpath=${libpng:location}/lib -L${libsigc:location}/lib -Wl,-rpath=${libsigc:location}/lib -L${libxml2:location}/lib -Wl,-rpath=${libxml2:location}/lib -L${libxslt:location}/lib -Wl,-rpath=${libxslt:location}/lib -L${pango:location}/lib -Wl,-rpath=${pango:location}/lib -L${pangomm:location}/lib -Wl,-rpath=${pangomm:location}/lib -L${popt:location}/lib -Wl,-rpath=${popt:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
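Note: inkscape 0.92 switches from autotools to CMake: configure-command now points at ${cmake:location}/bin/cmake, the old --disable-*/--without-* flags become -D cache options, and header/library lookup is additionally passed through CMAKE_INCLUDE_PATH and CMAKE_LIBRARY_PATH. Mirroring only what the inkscape part above uses, the general shape of a CMake-based slapos.recipe.cmmi part is roughly (section name and the zlib entries are illustrative):

[example-cmake-part]
recipe = slapos.recipe.cmmi
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-command = ${cmake:location}/bin/cmake
configure-options =
  -DCMAKE_INSTALL_PREFIX=${:location}
environment =
  PATH=${cmake:location}/bin:%(PATH)s
  CMAKE_INCLUDE_PATH=${zlib:location}/include
  CMAKE_LIBRARY_PATH=${zlib:location}/lib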
--- src/ui/dialog/layer-properties.cpp 2014-12-21 21:58:32 +0000
+++ src/ui/dialog/layer-properties.cpp 2016-07-18 17:19:25 +0000
@@ -146,7 +146,7 @@
destroy_();
Glib::signal_idle().connect(
sigc::bind_return(
- sigc::bind(sigc::ptr_fun(&::operator delete), this),
+ sigc::bind(sigc::ptr_fun<void*, void>(&::operator delete), this),
false
)
);
Description: Fix for CVE-2011-4516 and CVE-2011-4517
This patch fixes a possible denial of service and code execution via
heap-based buffer overflows.
Author: Michael Gilbert <michael.s.gilbert@gmail.com>
Origin: Patch thanks to Red Hat
Bug-Debian: http://bugs.debian.org/652649
Index: jasper-1.900.1/src/libjasper/jpc/jpc_cs.c
===================================================================
--- jasper-1.900.1.orig/src/libjasper/jpc/jpc_cs.c 2011-12-19 09:35:34.186909298 -0500
+++ jasper-1.900.1/src/libjasper/jpc/jpc_cs.c 2011-12-19 09:35:51.198909832 -0500
@@ -744,6 +744,10 @@
return -1;
}
compparms->numrlvls = compparms->numdlvls + 1;
+ if (compparms->numrlvls > JPC_MAXRLVLS) {
+ jpc_cox_destroycompparms(compparms);
+ return -1;
+ }
if (prtflag) {
for (i = 0; i < compparms->numrlvls; ++i) {
if (jpc_getuint8(in, &tmp)) {
@@ -1331,7 +1335,7 @@
jpc_crgcomp_t *comp;
uint_fast16_t compno;
crg->numcomps = cstate->numcomps;
- if (!(crg->comps = jas_alloc2(cstate->numcomps, sizeof(uint_fast16_t)))) {
+ if (!(crg->comps = jas_alloc2(cstate->numcomps, sizeof(jpc_crgcomp_t)))) {
return -1;
}
for (compno = 0, comp = crg->comps; compno < cstate->numcomps;
Description: CVE-2014-8137: double-free in in jas_iccattrval_destroy()
Origin: vendor, https://bugzilla.redhat.com/attachment.cgi?id=967283,
https://bugzilla.redhat.com/attachment.cgi?id=967284
Bug-Debian: https://bugs.debian.org/773463
Bug-RedHat: https://bugzilla.redhat.com/show_bug.cgi?id=1173157
Forwarded: no
Author: Tomas Hoger <thoger@redhat.com>
Last-Update: 2014-12-20
--- a/src/libjasper/base/jas_icc.c
+++ b/src/libjasper/base/jas_icc.c
@@ -1010,7 +1010,6 @@ static int jas_icccurv_input(jas_iccattr
return 0;
error:
- jas_icccurv_destroy(attrval);
return -1;
}
@@ -1128,7 +1127,6 @@ static int jas_icctxtdesc_input(jas_icca
#endif
return 0;
error:
- jas_icctxtdesc_destroy(attrval);
return -1;
}
@@ -1207,8 +1205,6 @@ static int jas_icctxt_input(jas_iccattrv
goto error;
return 0;
error:
- if (txt->string)
- jas_free(txt->string);
return -1;
}
@@ -1329,7 +1325,6 @@ static int jas_icclut8_input(jas_iccattr
goto error;
return 0;
error:
- jas_icclut8_destroy(attrval);
return -1;
}
@@ -1498,7 +1493,6 @@ static int jas_icclut16_input(jas_iccatt
goto error;
return 0;
error:
- jas_icclut16_destroy(attrval);
return -1;
}
--- a/src/libjasper/jp2/jp2_dec.c
+++ b/src/libjasper/jp2/jp2_dec.c
@@ -291,7 +291,10 @@ jas_image_t *jp2_decode(jas_stream_t *in
case JP2_COLR_ICC:
iccprof = jas_iccprof_createfrombuf(dec->colr->data.colr.iccp,
dec->colr->data.colr.iccplen);
- assert(iccprof);
+ if (!iccprof) {
+ jas_eprintf("error: failed to parse ICC profile\n");
+ goto error;
+ }
jas_iccprof_gethdr(iccprof, &icchdr);
jas_eprintf("ICC Profile CS %08x\n", icchdr.colorspc);
jas_image_setclrspc(dec->image, fromiccpcs(icchdr.colorspc));
Description: CVE-2014-8138: heap overflow in jp2_decode()
Origin: vendor, https://bugzilla.redhat.com/attachment.cgi?id=967280
Bug-Debian: https://bugs.debian.org/773463
Bug-RedHat: https://bugzilla.redhat.com/show_bug.cgi?id=1173162
Forwarded: no
Author: Tomas Hoger <thoger@redhat.com>
Last-Update: 2014-12-20
--- a/src/libjasper/jp2/jp2_dec.c
+++ b/src/libjasper/jp2/jp2_dec.c
@@ -389,6 +389,11 @@ jas_image_t *jp2_decode(jas_stream_t *in
/* Determine the type of each component. */
if (dec->cdef) {
for (i = 0; i < dec->numchans; ++i) {
+ /* Is the channel number reasonable? */
+ if (dec->cdef->data.cdef.ents[i].channo >= dec->numchans) {
+ jas_eprintf("error: invalid channel number in CDEF box\n");
+ goto error;
+ }
jas_image_setcmpttype(dec->image,
dec->chantocmptlut[dec->cdef->data.cdef.ents[i].channo],
jp2_getct(jas_image_clrspc(dec->image),
Description: CVE-2014-8157: dec->numtiles off-by-one check in jpc_dec_process_sot()
Origin: vendor, http://pkgs.fedoraproject.org/cgit/jasper.git/tree/jasper-CVE-2014-8157.patch
Bug-RedHat: https://bugzilla.redhat.com/show_bug.cgi?id=1179282
Bug-Debian: https://bugs.debian.org/775970
Forwarded: not-needed
Author: Salvatore Bonaccorso <carnil@debian.org>
Last-Update: 2015-01-22
--- a/src/libjasper/jpc/jpc_dec.c
+++ b/src/libjasper/jpc/jpc_dec.c
@@ -489,7 +489,7 @@ static int jpc_dec_process_sot(jpc_dec_t
dec->curtileendoff = 0;
}
- if (JAS_CAST(int, sot->tileno) > dec->numtiles) {
+ if (JAS_CAST(int, sot->tileno) >= dec->numtiles) {
jas_eprintf("invalid tile number in SOT marker segment\n");
return -1;
}
Description: CVE-2014-8158: unrestricted stack memory use in jpc_qmfb.c
Origin: vendor, http://pkgs.fedoraproject.org/cgit/jasper.git/tree/jasper-CVE-2014-8158.patch
Bug-RedHat: https://bugzilla.redhat.com/show_bug.cgi?id=1179298
Bug-Debian: https://bugs.debian.org/775970
Forwarded: not-needed
Author: Salvatore Bonaccorso <carnil@debian.org>
Last-Update: 2015-01-22
--- a/src/libjasper/jpc/jpc_qmfb.c
+++ b/src/libjasper/jpc/jpc_qmfb.c
@@ -306,11 +306,7 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in
{
int bufsize = JPC_CEILDIVPOW2(numcols, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE];
-#else
- jpc_fix_t splitbuf[bufsize];
-#endif
jpc_fix_t *buf = splitbuf;
register jpc_fix_t *srcptr;
register jpc_fix_t *dstptr;
@@ -318,7 +314,6 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in
register int m;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -326,7 +321,6 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in
abort();
}
}
-#endif
if (numcols >= 2) {
hstartcol = (numcols + 1 - parity) >> 1;
@@ -360,12 +354,10 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in
}
}
-#if !defined(HAVE_VLA)
/* If the split buffer was allocated on the heap, free this memory. */
if (buf != splitbuf) {
jas_free(buf);
}
-#endif
}
@@ -374,11 +366,7 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in
{
int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE];
-#else
- jpc_fix_t splitbuf[bufsize];
-#endif
jpc_fix_t *buf = splitbuf;
register jpc_fix_t *srcptr;
register jpc_fix_t *dstptr;
@@ -386,7 +374,6 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in
register int m;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -394,7 +381,6 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in
abort();
}
}
-#endif
if (numrows >= 2) {
hstartcol = (numrows + 1 - parity) >> 1;
@@ -428,12 +414,10 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in
}
}
-#if !defined(HAVE_VLA)
/* If the split buffer was allocated on the heap, free this memory. */
if (buf != splitbuf) {
jas_free(buf);
}
-#endif
}
@@ -442,11 +426,7 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a,
{
int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE * JPC_QMFB_COLGRPSIZE];
-#else
- jpc_fix_t splitbuf[bufsize * JPC_QMFB_COLGRPSIZE];
-#endif
jpc_fix_t *buf = splitbuf;
jpc_fix_t *srcptr;
jpc_fix_t *dstptr;
@@ -457,7 +437,6 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a,
int m;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -465,7 +444,6 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a,
abort();
}
}
-#endif
if (numrows >= 2) {
hstartcol = (numrows + 1 - parity) >> 1;
@@ -517,12 +495,10 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a,
}
}
-#if !defined(HAVE_VLA)
/* If the split buffer was allocated on the heap, free this memory. */
if (buf != splitbuf) {
jas_free(buf);
}
-#endif
}
@@ -531,11 +507,7 @@ void jpc_qmfb_split_colres(jpc_fix_t *a,
{
int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE * JPC_QMFB_COLGRPSIZE];
-#else
- jpc_fix_t splitbuf[bufsize * numcols];
-#endif
jpc_fix_t *buf = splitbuf;
jpc_fix_t *srcptr;
jpc_fix_t *dstptr;
@@ -546,7 +518,6 @@ void jpc_qmfb_split_colres(jpc_fix_t *a,
int m;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -554,7 +525,6 @@ void jpc_qmfb_split_colres(jpc_fix_t *a,
abort();
}
}
-#endif
if (numrows >= 2) {
hstartcol = (numrows + 1 - parity) >> 1;
@@ -606,12 +576,10 @@ void jpc_qmfb_split_colres(jpc_fix_t *a,
}
}
-#if !defined(HAVE_VLA)
/* If the split buffer was allocated on the heap, free this memory. */
if (buf != splitbuf) {
jas_free(buf);
}
-#endif
}
@@ -619,18 +587,13 @@ void jpc_qmfb_join_row(jpc_fix_t *a, int
{
int bufsize = JPC_CEILDIVPOW2(numcols, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t joinbuf[QMFB_JOINBUFSIZE];
-#else
- jpc_fix_t joinbuf[bufsize];
-#endif
jpc_fix_t *buf = joinbuf;
register jpc_fix_t *srcptr;
register jpc_fix_t *dstptr;
register int n;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Allocate memory for the join buffer from the heap. */
if (bufsize > QMFB_JOINBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -638,7 +601,6 @@ void jpc_qmfb_join_row(jpc_fix_t *a, int
abort();
}
}
-#endif
hstartcol = (numcols + 1 - parity) >> 1;
@@ -670,12 +632,10 @@ void jpc_qmfb_join_row(jpc_fix_t *a, int
++srcptr;
}
-#if !defined(HAVE_VLA)
/* If the join buffer was allocated on the heap, free this memory. */
if (buf != joinbuf) {
jas_free(buf);
}
-#endif
}
@@ -684,18 +644,13 @@ void jpc_qmfb_join_col(jpc_fix_t *a, int
{
int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t joinbuf[QMFB_JOINBUFSIZE];
-#else
- jpc_fix_t joinbuf[bufsize];
-#endif
jpc_fix_t *buf = joinbuf;
register jpc_fix_t *srcptr;
register jpc_fix_t *dstptr;
register int n;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Allocate memory for the join buffer from the heap. */
if (bufsize > QMFB_JOINBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -703,7 +658,6 @@ void jpc_qmfb_join_col(jpc_fix_t *a, int
abort();
}
}
-#endif
hstartcol = (numrows + 1 - parity) >> 1;
@@ -735,12 +689,10 @@ void jpc_qmfb_join_col(jpc_fix_t *a, int
++srcptr;
}
-#if !defined(HAVE_VLA)
/* If the join buffer was allocated on the heap, free this memory. */
if (buf != joinbuf) {
jas_free(buf);
}
-#endif
}
@@ -749,11 +701,7 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a,
{
int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t joinbuf[QMFB_JOINBUFSIZE * JPC_QMFB_COLGRPSIZE];
-#else
- jpc_fix_t joinbuf[bufsize * JPC_QMFB_COLGRPSIZE];
-#endif
jpc_fix_t *buf = joinbuf;
jpc_fix_t *srcptr;
jpc_fix_t *dstptr;
@@ -763,7 +711,6 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a,
register int i;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Allocate memory for the join buffer from the heap. */
if (bufsize > QMFB_JOINBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, JPC_QMFB_COLGRPSIZE * sizeof(jpc_fix_t)))) {
@@ -771,7 +718,6 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a,
abort();
}
}
-#endif
hstartcol = (numrows + 1 - parity) >> 1;
@@ -821,12 +767,10 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a,
srcptr += JPC_QMFB_COLGRPSIZE;
}
-#if !defined(HAVE_VLA)
/* If the join buffer was allocated on the heap, free this memory. */
if (buf != joinbuf) {
jas_free(buf);
}
-#endif
}
@@ -835,11 +779,7 @@ void jpc_qmfb_join_colres(jpc_fix_t *a,
{
int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t joinbuf[QMFB_JOINBUFSIZE * JPC_QMFB_COLGRPSIZE];
-#else
- jpc_fix_t joinbuf[bufsize * numcols];
-#endif
jpc_fix_t *buf = joinbuf;
jpc_fix_t *srcptr;
jpc_fix_t *dstptr;
@@ -849,7 +789,6 @@ void jpc_qmfb_join_colres(jpc_fix_t *a,
register int i;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Allocate memory for the join buffer from the heap. */
if (bufsize > QMFB_JOINBUFSIZE) {
if (!(buf = jas_alloc3(bufsize, numcols, sizeof(jpc_fix_t)))) {
@@ -857,7 +796,6 @@ void jpc_qmfb_join_colres(jpc_fix_t *a,
abort();
}
}
-#endif
hstartcol = (numrows + 1 - parity) >> 1;
@@ -907,12 +845,10 @@ void jpc_qmfb_join_colres(jpc_fix_t *a,
srcptr += numcols;
}
-#if !defined(HAVE_VLA)
/* If the join buffer was allocated on the heap, free this memory. */
if (buf != joinbuf) {
jas_free(buf);
}
-#endif
}
Description: CVE-2014-9029: Heap overflows in libjasper
Origin: vendor
Forwarded: no
Author: Tomas Hoger <thoger@redhat.com>
Reviewed-by: Salvatore Bonaccorso <carnil@debian.org>
Last-Update: 2014-11-28
--- a/src/libjasper/jpc/jpc_dec.c
+++ b/src/libjasper/jpc/jpc_dec.c
@@ -1280,7 +1280,7 @@ static int jpc_dec_process_coc(jpc_dec_t
jpc_coc_t *coc = &ms->parms.coc;
jpc_dec_tile_t *tile;
- if (JAS_CAST(int, coc->compno) > dec->numcomps) {
+ if (JAS_CAST(int, coc->compno) >= dec->numcomps) {
jas_eprintf("invalid component number in COC marker segment\n");
return -1;
}
@@ -1306,7 +1306,7 @@ static int jpc_dec_process_rgn(jpc_dec_t
jpc_rgn_t *rgn = &ms->parms.rgn;
jpc_dec_tile_t *tile;
- if (JAS_CAST(int, rgn->compno) > dec->numcomps) {
+ if (JAS_CAST(int, rgn->compno) >= dec->numcomps) {
jas_eprintf("invalid component number in RGN marker segment\n");
return -1;
}
@@ -1355,7 +1355,7 @@ static int jpc_dec_process_qcc(jpc_dec_t
jpc_qcc_t *qcc = &ms->parms.qcc;
jpc_dec_tile_t *tile;
- if (JAS_CAST(int, qcc->compno) > dec->numcomps) {
+ if (JAS_CAST(int, qcc->compno) >= dec->numcomps) {
jas_eprintf("invalid component number in QCC marker segment\n");
return -1;
}
Description: CVE-2016-1577: Prevent double-free in jas_iccattrval_destroy()
Origin: vendor, http://www.openwall.com/lists/oss-security/2016/03/03/12
Bug-Ubuntu: https://launchpad.net/bugs/1547865
Bug-Debian: https://bugs.debian.org/816625
Forwarded: not-needed
Author: Tyler Hicks <tyhicks@canonical.com>
Reviewed-by: Salvatore Bonaccorso <carnil@debian.org>
Last-Update: 2016-03-05
--- a/src/libjasper/base/jas_icc.c
+++ b/src/libjasper/base/jas_icc.c
@@ -300,6 +300,7 @@ jas_iccprof_t *jas_iccprof_load(jas_stre
if (jas_iccprof_setattr(prof, tagtabent->tag, attrval))
goto error;
jas_iccattrval_destroy(attrval);
+ attrval = 0;
} else {
#if 0
jas_eprintf("warning: skipping unknown tag type\n");
Description: CVE-2016-2089: matrix rows_ NULL pointer dereference in jas_matrix_clip()
Origin: vendor
Bug-RedHat: https://bugzilla.redhat.com/show_bug.cgi?id=1302636
Bug-Debian: https://bugs.debian.org/812978
Forwarded: not-needed
Author: Tomas Hoger <thoger@redhat.com>
Reviewed-by: Salvatore Bonaccorso <carnil@debian.org>
Last-Update: 2016-03-05
--- a/src/libjasper/base/jas_image.c
+++ b/src/libjasper/base/jas_image.c
@@ -426,6 +426,10 @@ int jas_image_readcmpt(jas_image_t *imag
return -1;
}
+ if (!data->rows_) {
+ return -1;
+ }
+
if (jas_matrix_numrows(data) != height || jas_matrix_numcols(data) != width) {
if (jas_matrix_resize(data, height, width)) {
return -1;
@@ -479,6 +483,10 @@ int jas_image_writecmpt(jas_image_t *ima
return -1;
}
+ if (!data->rows_) {
+ return -1;
+ }
+
if (jas_matrix_numrows(data) != height || jas_matrix_numcols(data) != width) {
return -1;
}
--- a/src/libjasper/base/jas_seq.c
+++ b/src/libjasper/base/jas_seq.c
@@ -262,6 +262,10 @@ void jas_matrix_divpow2(jas_matrix_t *ma
int rowstep;
jas_seqent_t *data;
+ if (!matrix->rows_) {
+ return;
+ }
+
rowstep = jas_matrix_rowstep(matrix);
for (i = matrix->numrows_, rowstart = matrix->rows_[0]; i > 0; --i,
rowstart += rowstep) {
@@ -282,6 +286,10 @@ void jas_matrix_clip(jas_matrix_t *matri
jas_seqent_t *data;
int rowstep;
+ if (!matrix->rows_) {
+ return;
+ }
+
rowstep = jas_matrix_rowstep(matrix);
for (i = matrix->numrows_, rowstart = matrix->rows_[0]; i > 0; --i,
rowstart += rowstep) {
@@ -306,6 +314,10 @@ void jas_matrix_asr(jas_matrix_t *matrix
int rowstep;
jas_seqent_t *data;
+ if (!matrix->rows_) {
+ return;
+ }
+
assert(n >= 0);
rowstep = jas_matrix_rowstep(matrix);
for (i = matrix->numrows_, rowstart = matrix->rows_[0]; i > 0; --i,
@@ -325,6 +337,10 @@ void jas_matrix_asl(jas_matrix_t *matrix
int rowstep;
jas_seqent_t *data;
+ if (!matrix->rows_) {
+ return;
+ }
+
rowstep = jas_matrix_rowstep(matrix);
for (i = matrix->numrows_, rowstart = matrix->rows_[0]; i > 0; --i,
rowstart += rowstep) {
@@ -367,6 +383,10 @@ void jas_matrix_setall(jas_matrix_t *mat
int rowstep;
jas_seqent_t *data;
+ if (!matrix->rows_) {
+ return;
+ }
+
rowstep = jas_matrix_rowstep(matrix);
for (i = matrix->numrows_, rowstart = matrix->rows_[0]; i > 0; --i,
rowstart += rowstep) {
Description: CVE-2016-2116: Prevent jas_stream_t memory leak in jas_iccprof_createfrombuf()
Origin: vendor, http://www.openwall.com/lists/oss-security/2016/03/03/12
Bug-Debian: https://bugs.debian.org/816626
Forwarded: not-needed
Author: Tyler Hicks <tyhicks@canonical.com>
Reviewed-by: Salvatore Bonaccorso <carnil@debian.org>
Last-Update: 2016-03-05
--- a/src/libjasper/base/jas_icc.c
+++ b/src/libjasper/base/jas_icc.c
@@ -1693,6 +1693,8 @@ jas_iccprof_t *jas_iccprof_createfrombuf
jas_stream_close(in);
return prof;
error:
+ if (in)
+ jas_stream_close(in);
return 0;
}
......@@ -11,17 +11,7 @@ url = http://www.ece.uvic.ca/~mdadams/jasper/software/jasper-1.900.1.zip
md5sum = a342b2b4495b3e1394e161eb5d85d754
patch-options = -p1
patches =
${:_profile_base_location_}/misc-fixes.patch#1202be8418907dfe58f819f7b47da24f
${:_profile_base_location_}/fix-filename-buffer-overflow.patch#38403f9c82a18547beca16c9c6f4ce7a
${:_profile_base_location_}/CVE-2011-4516-and-CVE-2011-4517.patch#a9676718ed016f66a3c76acf764c9e72
${:_profile_base_location_}/CVE-2014-9029.patch#d69195cf17878f024cc0b580045ec314
${:_profile_base_location_}/CVE-2014-8137.patch#bc5103b9a33315538106bf6652383a10
${:_profile_base_location_}/CVE-2014-8138.patch#bfb9604fe84b6e686fea29bd760cf34d
${:_profile_base_location_}/CVE-2014-8157.patch#2fb5f62ba8a9f8afffc95a07d1194783
${:_profile_base_location_}/CVE-2014-8158.patch#9036077a1fab5de8819f210ea7b57a38
${:_profile_base_location_}/CVE-2016-1577.patch#bc970cf3e8535559454781ec54db2d15
${:_profile_base_location_}/CVE-2016-2089.patch#9b73eda015b04a6da493de89ce9b5685
${:_profile_base_location_}/CVE-2016-2116.patch#387df217963281827e006ab4f14f869a
${:_profile_base_location_}/jasper_1.900.1-debian1-2.4+deb8u3.patch#1cf61d1ebc87d355523d3484169671f7
configure-options =
--disable-static
--enable-shared
......
Description: Filename buffer overflow fix
This patch fixes a security hole caused by bad buffer size handling.
Author: Roland Stigge <stigge@antcom.de>
Bug-Debian: http://bugs.debian.org/645118
--- a/src/libjasper/include/jasper/jas_stream.h
+++ b/src/libjasper/include/jasper/jas_stream.h
@@ -77,6 +77,7 @@
#include <jasper/jas_config.h>
#include <stdio.h>
+#include <limits.h>
#if defined(HAVE_FCNTL_H)
#include <fcntl.h>
#endif
@@ -99,6 +100,12 @@ extern "C" {
#define O_BINARY 0
#endif
+#ifdef PATH_MAX
+#define JAS_PATH_MAX PATH_MAX
+#else
+#define JAS_PATH_MAX 4096
+#endif
+
/*
* Stream open flags.
*/
@@ -251,7 +258,7 @@ typedef struct {
typedef struct {
int fd;
int flags;
- char pathname[L_tmpnam + 1];
+ char pathname[JAS_PATH_MAX + 1];
} jas_stream_fileobj_t;
#define JAS_STREAM_FILEOBJ_DELONCLOSE 0x01
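The idea behind the JAS_PATH_MAX change above is simply to size the pathname field from the platform's real path limit (falling back to a conservative constant when PATH_MAX is undefined) instead of the unrelated L_tmpnam, and to reject names that do not fit. A minimal standalone sketch of that sizing idiom, with illustrative demo_* names that are not part of JasPer:

#include <limits.h>
#include <stdio.h>
#include <string.h>

#ifdef PATH_MAX
#define DEMO_PATH_MAX PATH_MAX
#else
#define DEMO_PATH_MAX 4096
#endif

/* Copy a file name into a fixed-size buffer, refusing names that would
 * overflow it instead of silently truncating or overrunning the buffer. */
static int demo_set_pathname(char *dst, size_t dstsize, const char *src)
{
    if (strlen(src) >= dstsize) {
        return -1;                     /* no room for the name plus its NUL */
    }
    strcpy(dst, src);
    return 0;
}

int main(void)
{
    char pathname[DEMO_PATH_MAX + 1];
    if (demo_set_pathname(pathname, sizeof(pathname), "/tmp/example.jp2") == 0) {
        printf("stored: %s\n", pathname);
    }
    return 0;
}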
......@@ -3090,3 +3090,1323 @@ Author: Roland Stigge <stigge@antcom.de>
If the version you run ($0) is already up to date, please
send the following data and any information you think might be
Description: Filename buffer overflow fix
This patch fixes a security hole caused by bad buffer size handling.
Author: Roland Stigge <stigge@antcom.de>
Bug-Debian: http://bugs.debian.org/645118
--- a/src/libjasper/include/jasper/jas_stream.h
+++ b/src/libjasper/include/jasper/jas_stream.h
@@ -77,6 +77,7 @@
#include <jasper/jas_config.h>
#include <stdio.h>
+#include <limits.h>
#if defined(HAVE_FCNTL_H)
#include <fcntl.h>
#endif
@@ -99,6 +100,12 @@ extern "C" {
#define O_BINARY 0
#endif
+#ifdef PATH_MAX
+#define JAS_PATH_MAX PATH_MAX
+#else
+#define JAS_PATH_MAX 4096
+#endif
+
/*
* Stream open flags.
*/
@@ -251,7 +258,7 @@ typedef struct {
typedef struct {
int fd;
int flags;
- char pathname[L_tmpnam + 1];
+ char pathname[JAS_PATH_MAX + 1];
} jas_stream_fileobj_t;
#define JAS_STREAM_FILEOBJ_DELONCLOSE 0x01
Description: Fix for CVE-2011-4516 and CVE-2011-4517
This patch fixes a possible denial of service and code execution via
heap-based buffer overflows.
Author: Michael Gilbert <michael.s.gilbert@gmail.com>
Origin: Patch thanks to Red Hat
Bug-Debian: http://bugs.debian.org/652649
Index: jasper-1.900.1/src/libjasper/jpc/jpc_cs.c
===================================================================
--- jasper-1.900.1.orig/src/libjasper/jpc/jpc_cs.c 2011-12-19 09:35:34.186909298 -0500
+++ jasper-1.900.1/src/libjasper/jpc/jpc_cs.c 2011-12-19 09:35:51.198909832 -0500
@@ -744,6 +744,10 @@
return -1;
}
compparms->numrlvls = compparms->numdlvls + 1;
+ if (compparms->numrlvls > JPC_MAXRLVLS) {
+ jpc_cox_destroycompparms(compparms);
+ return -1;
+ }
if (prtflag) {
for (i = 0; i < compparms->numrlvls; ++i) {
if (jpc_getuint8(in, &tmp)) {
@@ -1331,7 +1335,7 @@
jpc_crgcomp_t *comp;
uint_fast16_t compno;
crg->numcomps = cstate->numcomps;
- if (!(crg->comps = jas_alloc2(cstate->numcomps, sizeof(uint_fast16_t)))) {
+ if (!(crg->comps = jas_alloc2(cstate->numcomps, sizeof(jpc_crgcomp_t)))) {
return -1;
}
for (compno = 0, comp = crg->comps; compno < cstate->numcomps;
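The sizeof() change in the last hunk is the classic element-size mismatch: the array holds jpc_crgcomp_t records but was sized for uint_fast16_t, so later writes ran past the allocation. A hypothetical sketch (types and names invented for illustration) of why sizing an allocation from the pointed-to object avoids that class of bug:

#include <stdio.h>
#include <stdlib.h>

struct comp {              /* stand-in for a per-component record */
    unsigned short id;
    unsigned short rlvls;
    int offset;
};

int main(void)
{
    size_t n = 16;
    /* Risky: if the type named in sizeof() drifts out of sync with the
     * array's real element type, the buffer is too small and every write
     * past the allocated size corrupts the heap:
     *     struct comp *c = malloc(n * sizeof(unsigned short));
     */
    struct comp *c = calloc(n, sizeof(*c));   /* size always matches *c */
    if (!c) {
        return 1;
    }
    for (size_t i = 0; i < n; ++i) {
        c[i].id = (unsigned short)i;
    }
    printf("allocated %zu records of %zu bytes each\n", n, sizeof(*c));
    free(c);
    return 0;
}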
Description: CVE-2014-9029: Heap overflows in libjasper
Origin: vendor, https://bugzilla.redhat.com/attachment.cgi?id=961994&action=diff
Bug-Debian: https://bugs.debian.org/772036
Bug-RedHat: https://bugzilla.redhat.com/show_bug.cgi?id=1167537
Forwarded: no
Author: Tomas Hoger <thoger@redhat.com>
Reviewed-by: Salvatore Bonaccorso <carnil@debian.org>
Last-Update: 2014-11-28
--- a/src/libjasper/jpc/jpc_dec.c
+++ b/src/libjasper/jpc/jpc_dec.c
@@ -1280,7 +1280,7 @@ static int jpc_dec_process_coc(jpc_dec_t
jpc_coc_t *coc = &ms->parms.coc;
jpc_dec_tile_t *tile;
- if (JAS_CAST(int, coc->compno) > dec->numcomps) {
+ if (JAS_CAST(int, coc->compno) >= dec->numcomps) {
jas_eprintf("invalid component number in COC marker segment\n");
return -1;
}
@@ -1306,7 +1306,7 @@ static int jpc_dec_process_rgn(jpc_dec_t
jpc_rgn_t *rgn = &ms->parms.rgn;
jpc_dec_tile_t *tile;
- if (JAS_CAST(int, rgn->compno) > dec->numcomps) {
+ if (JAS_CAST(int, rgn->compno) >= dec->numcomps) {
jas_eprintf("invalid component number in RGN marker segment\n");
return -1;
}
@@ -1355,7 +1355,7 @@ static int jpc_dec_process_qcc(jpc_dec_t
jpc_qcc_t *qcc = &ms->parms.qcc;
jpc_dec_tile_t *tile;
- if (JAS_CAST(int, qcc->compno) > dec->numcomps) {
+ if (JAS_CAST(int, qcc->compno) >= dec->numcomps) {
jas_eprintf("invalid component number in QCC marker segment\n");
return -1;
}
Description: CVE-2014-8137: double-free in in jas_iccattrval_destroy()
Origin: vendor, https://bugzilla.redhat.com/attachment.cgi?id=967283,
https://bugzilla.redhat.com/attachment.cgi?id=967284
Bug-Debian: https://bugs.debian.org/773463
Bug-RedHat: https://bugzilla.redhat.com/show_bug.cgi?id=1173157
Forwarded: no
Author: Tomas Hoger <thoger@redhat.com>
Last-Update: 2014-12-20
--- a/src/libjasper/base/jas_icc.c
+++ b/src/libjasper/base/jas_icc.c
@@ -1010,7 +1010,6 @@ static int jas_icccurv_input(jas_iccattr
return 0;
error:
- jas_icccurv_destroy(attrval);
return -1;
}
@@ -1128,7 +1127,6 @@ static int jas_icctxtdesc_input(jas_icca
#endif
return 0;
error:
- jas_icctxtdesc_destroy(attrval);
return -1;
}
@@ -1207,8 +1205,6 @@ static int jas_icctxt_input(jas_iccattrv
goto error;
return 0;
error:
- if (txt->string)
- jas_free(txt->string);
return -1;
}
@@ -1329,7 +1325,6 @@ static int jas_icclut8_input(jas_iccattr
goto error;
return 0;
error:
- jas_icclut8_destroy(attrval);
return -1;
}
@@ -1498,7 +1493,6 @@ static int jas_icclut16_input(jas_iccatt
goto error;
return 0;
error:
- jas_icclut16_destroy(attrval);
return -1;
}
--- a/src/libjasper/jp2/jp2_dec.c
+++ b/src/libjasper/jp2/jp2_dec.c
@@ -291,7 +291,10 @@ jas_image_t *jp2_decode(jas_stream_t *in
case JP2_COLR_ICC:
iccprof = jas_iccprof_createfrombuf(dec->colr->data.colr.iccp,
dec->colr->data.colr.iccplen);
- assert(iccprof);
+ if (!iccprof) {
+ jas_eprintf("error: failed to parse ICC profile\n");
+ goto error;
+ }
jas_iccprof_gethdr(iccprof, &icchdr);
jas_eprintf("ICC Profile CS %08x\n", icchdr.colorspc);
jas_image_setclrspc(dec->image, fromiccpcs(icchdr.colorspc));
Description: CVE-2014-8138: heap overflow in jp2_decode()
Origin: vendor, https://bugzilla.redhat.com/attachment.cgi?id=967280
Bug-Debian: https://bugs.debian.org/773463
Bug-RedHat: https://bugzilla.redhat.com/show_bug.cgi?id=1173162
Forwarded: no
Author: Tomas Hoger <thoger@redhat.com>
Last-Update: 2014-12-20
--- a/src/libjasper/jp2/jp2_dec.c
+++ b/src/libjasper/jp2/jp2_dec.c
@@ -389,6 +389,11 @@ jas_image_t *jp2_decode(jas_stream_t *in
/* Determine the type of each component. */
if (dec->cdef) {
for (i = 0; i < dec->numchans; ++i) {
+ /* Is the channel number reasonable? */
+ if (dec->cdef->data.cdef.ents[i].channo >= dec->numchans) {
+ jas_eprintf("error: invalid channel number in CDEF box\n");
+ goto error;
+ }
jas_image_setcmpttype(dec->image,
dec->chantocmptlut[dec->cdef->data.cdef.ents[i].channo],
jp2_getct(jas_image_clrspc(dec->image),
Description: CVE-2014-8157: dec->numtiles off-by-one check in jpc_dec_process_sot()
Origin: vendor, http://pkgs.fedoraproject.org/cgit/jasper.git/tree/jasper-CVE-2014-8157.patch
Bug-RedHat: https://bugzilla.redhat.com/show_bug.cgi?id=1179282
Bug-Debian: https://bugs.debian.org/775970
Forwarded: not-needed
Author: Salvatore Bonaccorso <carnil@debian.org>
Last-Update: 2015-01-22
--- a/src/libjasper/jpc/jpc_dec.c
+++ b/src/libjasper/jpc/jpc_dec.c
@@ -489,7 +489,7 @@ static int jpc_dec_process_sot(jpc_dec_t
dec->curtileendoff = 0;
}
- if (JAS_CAST(int, sot->tileno) > dec->numtiles) {
+ if (JAS_CAST(int, sot->tileno) >= dec->numtiles) {
jas_eprintf("invalid tile number in SOT marker segment\n");
return -1;
}
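The one-character change from > to >= matters because an array of dec->numtiles entries only has valid indices 0 through numtiles - 1; an index equal to the count must be rejected as well. A tiny self-contained illustration of the bound, using made-up names rather than JasPer's:

#include <stdio.h>

#define NUMTILES 4

/* Accept only tile numbers in 0 .. NUMTILES-1, i.e. reject tileno >= NUMTILES. */
static int tileno_is_valid(int tileno)
{
    return tileno >= 0 && tileno < NUMTILES;
}

int main(void)
{
    int tiles[NUMTILES] = {0};
    for (int tileno = 0; tileno <= NUMTILES; ++tileno) {   /* deliberately goes one too far */
        if (!tileno_is_valid(tileno)) {
            fprintf(stderr, "rejecting tile number %d\n", tileno);
            continue;
        }
        tiles[tileno] = 1;             /* only reached for 0 .. NUMTILES-1 */
    }
    printf("last valid tile flag: %d\n", tiles[NUMTILES - 1]);
    return 0;
}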
Description: CVE-2014-8158: unrestricted stack memory use in jpc_qmfb.c
Origin: vendor, http://pkgs.fedoraproject.org/cgit/jasper.git/tree/jasper-CVE-2014-8158.patch
Bug-RedHat: https://bugzilla.redhat.com/show_bug.cgi?id=1179298
Bug-Debian: https://bugs.debian.org/775970
Forwarded: not-needed
Author: Salvatore Bonaccorso <carnil@debian.org>
Last-Update: 2015-01-22
--- a/src/libjasper/jpc/jpc_qmfb.c
+++ b/src/libjasper/jpc/jpc_qmfb.c
@@ -306,11 +306,7 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in
{
int bufsize = JPC_CEILDIVPOW2(numcols, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE];
-#else
- jpc_fix_t splitbuf[bufsize];
-#endif
jpc_fix_t *buf = splitbuf;
register jpc_fix_t *srcptr;
register jpc_fix_t *dstptr;
@@ -318,7 +314,6 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in
register int m;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -326,7 +321,6 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in
abort();
}
}
-#endif
if (numcols >= 2) {
hstartcol = (numcols + 1 - parity) >> 1;
@@ -360,12 +354,10 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in
}
}
-#if !defined(HAVE_VLA)
/* If the split buffer was allocated on the heap, free this memory. */
if (buf != splitbuf) {
jas_free(buf);
}
-#endif
}
@@ -374,11 +366,7 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in
{
int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE];
-#else
- jpc_fix_t splitbuf[bufsize];
-#endif
jpc_fix_t *buf = splitbuf;
register jpc_fix_t *srcptr;
register jpc_fix_t *dstptr;
@@ -386,7 +374,6 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in
register int m;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -394,7 +381,6 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in
abort();
}
}
-#endif
if (numrows >= 2) {
hstartcol = (numrows + 1 - parity) >> 1;
@@ -428,12 +414,10 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in
}
}
-#if !defined(HAVE_VLA)
/* If the split buffer was allocated on the heap, free this memory. */
if (buf != splitbuf) {
jas_free(buf);
}
-#endif
}
@@ -442,11 +426,7 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a,
{
int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE * JPC_QMFB_COLGRPSIZE];
-#else
- jpc_fix_t splitbuf[bufsize * JPC_QMFB_COLGRPSIZE];
-#endif
jpc_fix_t *buf = splitbuf;
jpc_fix_t *srcptr;
jpc_fix_t *dstptr;
@@ -457,7 +437,6 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a,
int m;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -465,7 +444,6 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a,
abort();
}
}
-#endif
if (numrows >= 2) {
hstartcol = (numrows + 1 - parity) >> 1;
@@ -517,12 +495,10 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a,
}
}
-#if !defined(HAVE_VLA)
/* If the split buffer was allocated on the heap, free this memory. */
if (buf != splitbuf) {
jas_free(buf);
}
-#endif
}
@@ -531,11 +507,7 @@ void jpc_qmfb_split_colres(jpc_fix_t *a,
{
int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE * JPC_QMFB_COLGRPSIZE];
-#else
- jpc_fix_t splitbuf[bufsize * numcols];
-#endif
jpc_fix_t *buf = splitbuf;
jpc_fix_t *srcptr;
jpc_fix_t *dstptr;
@@ -546,7 +518,6 @@ void jpc_qmfb_split_colres(jpc_fix_t *a,
int m;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -554,7 +525,6 @@ void jpc_qmfb_split_colres(jpc_fix_t *a,
abort();
}
}
-#endif
if (numrows >= 2) {
hstartcol = (numrows + 1 - parity) >> 1;
@@ -606,12 +576,10 @@ void jpc_qmfb_split_colres(jpc_fix_t *a,
}
}
-#if !defined(HAVE_VLA)
/* If the split buffer was allocated on the heap, free this memory. */
if (buf != splitbuf) {
jas_free(buf);
}
-#endif
}
@@ -619,18 +587,13 @@ void jpc_qmfb_join_row(jpc_fix_t *a, int
{
int bufsize = JPC_CEILDIVPOW2(numcols, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t joinbuf[QMFB_JOINBUFSIZE];
-#else
- jpc_fix_t joinbuf[bufsize];
-#endif
jpc_fix_t *buf = joinbuf;
register jpc_fix_t *srcptr;
register jpc_fix_t *dstptr;
register int n;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Allocate memory for the join buffer from the heap. */
if (bufsize > QMFB_JOINBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -638,7 +601,6 @@ void jpc_qmfb_join_row(jpc_fix_t *a, int
abort();
}
}
-#endif
hstartcol = (numcols + 1 - parity) >> 1;
@@ -670,12 +632,10 @@ void jpc_qmfb_join_row(jpc_fix_t *a, int
++srcptr;
}
-#if !defined(HAVE_VLA)
/* If the join buffer was allocated on the heap, free this memory. */
if (buf != joinbuf) {
jas_free(buf);
}
-#endif
}
@@ -684,18 +644,13 @@ void jpc_qmfb_join_col(jpc_fix_t *a, int
{
int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t joinbuf[QMFB_JOINBUFSIZE];
-#else
- jpc_fix_t joinbuf[bufsize];
-#endif
jpc_fix_t *buf = joinbuf;
register jpc_fix_t *srcptr;
register jpc_fix_t *dstptr;
register int n;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Allocate memory for the join buffer from the heap. */
if (bufsize > QMFB_JOINBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -703,7 +658,6 @@ void jpc_qmfb_join_col(jpc_fix_t *a, int
abort();
}
}
-#endif
hstartcol = (numrows + 1 - parity) >> 1;
@@ -735,12 +689,10 @@ void jpc_qmfb_join_col(jpc_fix_t *a, int
++srcptr;
}
-#if !defined(HAVE_VLA)
/* If the join buffer was allocated on the heap, free this memory. */
if (buf != joinbuf) {
jas_free(buf);
}
-#endif
}
@@ -749,11 +701,7 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a,
{
int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t joinbuf[QMFB_JOINBUFSIZE * JPC_QMFB_COLGRPSIZE];
-#else
- jpc_fix_t joinbuf[bufsize * JPC_QMFB_COLGRPSIZE];
-#endif
jpc_fix_t *buf = joinbuf;
jpc_fix_t *srcptr;
jpc_fix_t *dstptr;
@@ -763,7 +711,6 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a,
register int i;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Allocate memory for the join buffer from the heap. */
if (bufsize > QMFB_JOINBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, JPC_QMFB_COLGRPSIZE * sizeof(jpc_fix_t)))) {
@@ -771,7 +718,6 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a,
abort();
}
}
-#endif
hstartcol = (numrows + 1 - parity) >> 1;
@@ -821,12 +767,10 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a,
srcptr += JPC_QMFB_COLGRPSIZE;
}
-#if !defined(HAVE_VLA)
/* If the join buffer was allocated on the heap, free this memory. */
if (buf != joinbuf) {
jas_free(buf);
}
-#endif
}
@@ -835,11 +779,7 @@ void jpc_qmfb_join_colres(jpc_fix_t *a,
{
int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t joinbuf[QMFB_JOINBUFSIZE * JPC_QMFB_COLGRPSIZE];
-#else
- jpc_fix_t joinbuf[bufsize * numcols];
-#endif
jpc_fix_t *buf = joinbuf;
jpc_fix_t *srcptr;
jpc_fix_t *dstptr;
@@ -849,7 +789,6 @@ void jpc_qmfb_join_colres(jpc_fix_t *a,
register int i;
int hstartcol;
-#if !defined(HAVE_VLA)
/* Allocate memory for the join buffer from the heap. */
if (bufsize > QMFB_JOINBUFSIZE) {
if (!(buf = jas_alloc3(bufsize, numcols, sizeof(jpc_fix_t)))) {
@@ -857,7 +796,6 @@ void jpc_qmfb_join_colres(jpc_fix_t *a,
abort();
}
}
-#endif
hstartcol = (numrows + 1 - parity) >> 1;
@@ -907,12 +845,10 @@ void jpc_qmfb_join_colres(jpc_fix_t *a,
srcptr += numcols;
}
-#if !defined(HAVE_VLA)
/* If the join buffer was allocated on the heap, free this memory. */
if (buf != joinbuf) {
jas_free(buf);
}
-#endif
}
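All of the hunks above replace the same pattern: a variable-length array whose size comes straight from the code stream. The replacement idiom is a small fixed stack buffer for the common case with an explicit heap fallback for large requests. A minimal sketch of that idiom, independent of JasPer (the names and the 256-entry threshold are arbitrary):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define WORKBUF_SIZE 256

static int process(const int *samples, size_t n)
{
    int workbuf[WORKBUF_SIZE];
    int *buf = workbuf;

    /* Fall back to the heap when the request exceeds the stack buffer,
     * instead of declaring int buf[n] and letting attacker-sized input
     * exhaust or smash the stack. A production version would also guard
     * n * sizeof(*buf) against multiplication overflow. */
    if (n > WORKBUF_SIZE) {
        if (!(buf = malloc(n * sizeof(*buf)))) {
            return -1;
        }
    }
    memcpy(buf, samples, n * sizeof(*buf));

    /* ... transform buf[0 .. n-1] here ... */

    if (buf != workbuf) {
        free(buf);                     /* only free what was heap-allocated */
    }
    return 0;
}

int main(void)
{
    int data[8] = {1, 2, 3, 4, 5, 6, 7, 8};
    return process(data, 8) == 0 ? 0 : 1;
}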
Description: CVE-2016-1577: Prevent double-free in jas_iccattrval_destroy()
Origin: vendor, http://www.openwall.com/lists/oss-security/2016/03/03/12
Bug-Ubuntu: https://launchpad.net/bugs/1547865
Bug-Debian: https://bugs.debian.org/816625
Forwarded: not-needed
Author: Tyler Hicks <tyhicks@canonical.com>
Reviewed-by: Salvatore Bonaccorso <carnil@debian.org>
Last-Update: 2016-03-05
--- a/src/libjasper/base/jas_icc.c
+++ b/src/libjasper/base/jas_icc.c
@@ -300,6 +300,7 @@ jas_iccprof_t *jas_iccprof_load(jas_stre
if (jas_iccprof_setattr(prof, tagtabent->tag, attrval))
goto error;
jas_iccattrval_destroy(attrval);
+ attrval = 0;
} else {
#if 0
jas_eprintf("warning: skipping unknown tag type\n");
Description: CVE-2016-2089: matrix rows_ NULL pointer dereference in jas_matrix_clip()
Origin: vendor
Bug-RedHat: https://bugzilla.redhat.com/show_bug.cgi?id=1302636
Bug-Debian: https://bugs.debian.org/812978
Forwarded: not-needed
Author: Tomas Hoger <thoger@redhat.com>
Reviewed-by: Salvatore Bonaccorso <carnil@debian.org>
Last-Update: 2016-03-05
--- a/src/libjasper/base/jas_image.c
+++ b/src/libjasper/base/jas_image.c
@@ -426,6 +426,10 @@ int jas_image_readcmpt(jas_image_t *imag
return -1;
}
+ if (!data->rows_) {
+ return -1;
+ }
+
if (jas_matrix_numrows(data) != height || jas_matrix_numcols(data) != width) {
if (jas_matrix_resize(data, height, width)) {
return -1;
@@ -479,6 +483,10 @@ int jas_image_writecmpt(jas_image_t *ima
return -1;
}
+ if (!data->rows_) {
+ return -1;
+ }
+
if (jas_matrix_numrows(data) != height || jas_matrix_numcols(data) != width) {
return -1;
}
--- a/src/libjasper/base/jas_seq.c
+++ b/src/libjasper/base/jas_seq.c
@@ -262,6 +262,10 @@ void jas_matrix_divpow2(jas_matrix_t *ma
int rowstep;
jas_seqent_t *data;
+ if (!matrix->rows_) {
+ return;
+ }
+
rowstep = jas_matrix_rowstep(matrix);
for (i = matrix->numrows_, rowstart = matrix->rows_[0]; i > 0; --i,
rowstart += rowstep) {
@@ -282,6 +286,10 @@ void jas_matrix_clip(jas_matrix_t *matri
jas_seqent_t *data;
int rowstep;
+ if (!matrix->rows_) {
+ return;
+ }
+
rowstep = jas_matrix_rowstep(matrix);
for (i = matrix->numrows_, rowstart = matrix->rows_[0]; i > 0; --i,
rowstart += rowstep) {
@@ -306,6 +314,10 @@ void jas_matrix_asr(jas_matrix_t *matrix
int rowstep;
jas_seqent_t *data;
+ if (!matrix->rows_) {
+ return;
+ }
+
assert(n >= 0);
rowstep = jas_matrix_rowstep(matrix);
for (i = matrix->numrows_, rowstart = matrix->rows_[0]; i > 0; --i,
@@ -325,6 +337,10 @@ void jas_matrix_asl(jas_matrix_t *matrix
int rowstep;
jas_seqent_t *data;
+ if (!matrix->rows_) {
+ return;
+ }
+
rowstep = jas_matrix_rowstep(matrix);
for (i = matrix->numrows_, rowstart = matrix->rows_[0]; i > 0; --i,
rowstart += rowstep) {
@@ -367,6 +383,10 @@ void jas_matrix_setall(jas_matrix_t *mat
int rowstep;
jas_seqent_t *data;
+ if (!matrix->rows_) {
+ return;
+ }
+
rowstep = jas_matrix_rowstep(matrix);
for (i = matrix->numrows_, rowstart = matrix->rows_[0]; i > 0; --i,
rowstart += rowstep) {
Description: CVE-2016-2116: Prevent jas_stream_t memory leak in jas_iccprof_createfrombuf()
Origin: vendor, http://www.openwall.com/lists/oss-security/2016/03/03/12
Bug-Debian: https://bugs.debian.org/816626
Forwarded: not-needed
Author: Tyler Hicks <tyhicks@canonical.com>
Reviewed-by: Salvatore Bonaccorso <carnil@debian.org>
Last-Update: 2016-03-05
--- a/src/libjasper/base/jas_icc.c
+++ b/src/libjasper/base/jas_icc.c
@@ -1693,6 +1693,8 @@ jas_iccprof_t *jas_iccprof_createfrombuf
jas_stream_close(in);
return prof;
error:
+ if (in)
+ jas_stream_close(in);
return 0;
}
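The two added lines follow the usual single-exit cleanup pattern for C: every failure jumps to one label that releases whatever was acquired so far, so the success and error paths cannot drift apart. A standalone sketch of the pattern with invented names (load_profile here is not the JasPer function):

#include <stdio.h>
#include <stdlib.h>

struct profile { int dummy; };

static struct profile *load_profile(const char *path)
{
    FILE *in = NULL;
    struct profile *prof = NULL;

    if (!(in = fopen(path, "rb"))) {
        goto error;
    }
    if (!(prof = malloc(sizeof(*prof)))) {
        goto error;
    }
    /* ... parse the stream into prof ... */
    fclose(in);
    return prof;

error:
    if (in) {
        fclose(in);    /* the line the fix adds: release the stream on failure too */
    }
    free(prof);        /* free(NULL) is a harmless no-op */
    return NULL;
}

int main(void)
{
    struct profile *p = load_profile("/nonexistent/profile.icc");
    if (!p) {
        fprintf(stderr, "load failed (expected in this demo)\n");
        return 0;
    }
    free(p);
    return 0;
}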
diff -Naur jasper-1.900.1-debian1.orig/src/libjasper/base/jas_stream.c jasper-1.900.1-debian1/src/libjasper/base/jas_stream.c
--- jasper-1.900.1-debian1.orig/src/libjasper/base/jas_stream.c 2017-02-07 22:46:28.000000000 +0100
+++ jasper-1.900.1-debian1/src/libjasper/base/jas_stream.c 2017-02-07 22:46:07.040456152 +0100
@@ -985,8 +985,9 @@
{
unsigned char *buf;
- assert(m->buf_);
- if (!(buf = jas_realloc(m->buf_, bufsize))) {
+ //assert(m->buf_);
+ assert(bufsize >= 0);
+ if (!(buf = jas_realloc(m->buf_, bufsize)) && bufsize) {
return -1;
}
m->buf_ = buf;
diff -Naur jasper-1.900.1-debian1.orig/src/libjasper/jpc/jpc_cs.c jasper-1.900.1-debian1/src/libjasper/jpc/jpc_cs.c
--- jasper-1.900.1-debian1.orig/src/libjasper/jpc/jpc_cs.c 2017-02-07 22:46:28.000000000 +0100
+++ jasper-1.900.1-debian1/src/libjasper/jpc/jpc_cs.c 2017-02-07 22:46:02.168382641 +0100
@@ -512,6 +512,16 @@
jas_free(siz->comps);
return -1;
}
+ if (siz->comps[i].hsamp == 0 || siz->comps[i].hsamp > 255) {
+ jas_eprintf("invalid XRsiz value %d\n", siz->comps[i].hsamp);
+ jas_free(siz->comps);
+ return -1;
+ }
+ if (siz->comps[i].vsamp == 0 || siz->comps[i].vsamp > 255) {
+ jas_eprintf("invalid YRsiz value %d\n", siz->comps[i].vsamp);
+ jas_free(siz->comps);
+ return -1;
+ }
siz->comps[i].sgnd = (tmp >> 7) & 1;
siz->comps[i].prec = (tmp & 0x7f) + 1;
}
diff -Naur jasper-1.900.1-debian1.orig/src/libjasper/jpc/jpc_dec.c jasper-1.900.1-debian1/src/libjasper/jpc/jpc_dec.c
--- jasper-1.900.1-debian1.orig/src/libjasper/jpc/jpc_dec.c 2017-02-07 22:46:28.000000000 +0100
+++ jasper-1.900.1-debian1/src/libjasper/jpc/jpc_dec.c 2017-02-07 22:46:13.736555435 +0100
@@ -674,7 +674,7 @@
uint_fast32_t tmpxend;
uint_fast32_t tmpyend;
jpc_dec_cp_t *cp;
- jpc_tsfb_band_t bnds[64];
+ jpc_tsfb_band_t bnds[JPC_MAXBANDS];
jpc_pchg_t *pchg;
int pchgno;
jpc_dec_cmpt_t *cmpt;
@@ -989,23 +989,23 @@
}
if (tile->cp) {
jpc_dec_cp_destroy(tile->cp);
- tile->cp = 0;
+ //tile->cp = 0;
}
if (tile->tcomps) {
jas_free(tile->tcomps);
- tile->tcomps = 0;
+ //tile->tcomps = 0;
}
if (tile->pi) {
jpc_pi_destroy(tile->pi);
- tile->pi = 0;
+ //tile->pi = 0;
}
if (tile->pkthdrstream) {
jas_stream_close(tile->pkthdrstream);
- tile->pkthdrstream = 0;
+ //tile->pkthdrstream = 0;
}
if (tile->pptstab) {
jpc_ppxstab_destroy(tile->pptstab);
- tile->pptstab = 0;
+ //tile->pptstab = 0;
}
tile->state = JPC_TILE_DONE;
@@ -1148,7 +1148,11 @@
return -1;
}
}
- jpc_dec_tilefini(dec, tile);
+ /* If the tile has not yet been finalized, finalize it. */
+ // OLD CODE: jpc_dec_tilefini(dec, tile);
+ if (tile->state != JPC_TILE_DONE) {
+ jpc_dec_tilefini(dec, tile);
+ }
}
/* We are done processing the code stream. */
@@ -1204,6 +1208,8 @@
dec->numhtiles = JPC_CEILDIV(dec->xend - dec->tilexoff, dec->tilewidth);
dec->numvtiles = JPC_CEILDIV(dec->yend - dec->tileyoff, dec->tileheight);
dec->numtiles = dec->numhtiles * dec->numvtiles;
+ JAS_DBGLOG(10, ("numtiles = %d; numhtiles = %d; numvtiles = %d;\n",
+ dec->numtiles, dec->numhtiles, dec->numvtiles));
if (!(dec->tiles = jas_alloc2(dec->numtiles, sizeof(jpc_dec_tile_t)))) {
return -1;
}
@@ -1228,6 +1234,7 @@
tile->pkthdrstreampos = 0;
tile->pptstab = 0;
tile->cp = 0;
+ tile->pi = 0;
if (!(tile->tcomps = jas_alloc2(dec->numcomps,
sizeof(jpc_dec_tcomp_t)))) {
return -1;
diff -Naur jasper-1.900.1-debian1.orig/src/libjasper/jpc/jpc_qmfb.c jasper-1.900.1-debian1/src/libjasper/jpc/jpc_qmfb.c
--- jasper-1.900.1-debian1.orig/src/libjasper/jpc/jpc_qmfb.c 2017-02-07 22:46:28.000000000 +0100
+++ jasper-1.900.1-debian1/src/libjasper/jpc/jpc_qmfb.c 2017-02-07 22:45:59.328339293 +0100
@@ -372,7 +372,7 @@
register jpc_fix_t *dstptr;
register int n;
register int m;
- int hstartcol;
+ int hstartrow;
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
@@ -383,8 +383,9 @@
}
if (numrows >= 2) {
- hstartcol = (numrows + 1 - parity) >> 1;
- m = (parity) ? hstartcol : (numrows - hstartcol);
+ hstartrow = (numrows + 1 - parity) >> 1;
+ m = (parity) ? hstartrow : (numrows - hstartrow);
+
/* Save the samples destined for the highpass channel. */
n = m;
dstptr = buf;
@@ -404,7 +405,7 @@
srcptr += stride << 1;
}
/* Copy the saved samples into the highpass channel. */
- dstptr = &a[hstartcol * stride];
+ dstptr = &a[hstartrow * stride];
srcptr = buf;
n = m;
while (n-- > 0) {
@@ -435,19 +436,20 @@
register int n;
register int i;
int m;
- int hstartcol;
+ int hstartrow;
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
- if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
+ if (!(buf = jas_alloc3(bufsize, JPC_QMFB_COLGRPSIZE, sizeof(jpc_fix_t)))) {
/* We have no choice but to commit suicide in this case. */
abort();
}
}
if (numrows >= 2) {
- hstartcol = (numrows + 1 - parity) >> 1;
- m = (parity) ? hstartcol : (numrows - hstartcol);
+ hstartrow = (numrows + 1 - parity) >> 1;
+ m = (parity) ? hstartrow : (numrows - hstartrow);
+
/* Save the samples destined for the highpass channel. */
n = m;
dstptr = buf;
@@ -479,7 +481,7 @@
srcptr += stride << 1;
}
/* Copy the saved samples into the highpass channel. */
- dstptr = &a[hstartcol * stride];
+ dstptr = &a[hstartrow * stride];
srcptr = buf;
n = m;
while (n-- > 0) {
@@ -520,7 +522,7 @@
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
- if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
+ if (!(buf = jas_alloc3(bufsize, numcols, sizeof(jpc_fix_t)))) {
/* We have no choice but to commit suicide in this case. */
abort();
}
diff -Naur jasper-1.900.1-debian1.orig/src/libjasper/jpc/jpc_t2cod.c jasper-1.900.1-debian1/src/libjasper/jpc/jpc_t2cod.c
--- jasper-1.900.1-debian1.orig/src/libjasper/jpc/jpc_t2cod.c 2017-02-07 22:46:28.000000000 +0100
+++ jasper-1.900.1-debian1/src/libjasper/jpc/jpc_t2cod.c 2017-02-07 22:45:54.464331040 +0100
@@ -429,7 +429,7 @@
}
for (pi->compno = pchg->compnostart, pi->picomp =
- &pi->picomps[pi->compno]; pi->compno < JAS_CAST(int, pchg->compnoend); ++pi->compno,
+ &pi->picomps[pi->compno]; pi->compno < JAS_CAST(int, pchg->compnoend) && pi->compno < pi->numcomps; ++pi->compno,
++pi->picomp) {
pirlvl = pi->picomp->pirlvls;
pi->xstep = pi->picomp->hsamp * (1 << (pirlvl->prcwidthexpn +
From 03fe49ab96bf65fea784cdc256507ea88267fc7c Mon Sep 17 00:00:00 2001
From: Michael Adams <mdadams@ece.uvic.ca>
Date: Thu, 2 Mar 2017 08:07:04 -0800
Subject: [PATCH] Fixed some potential double-free problems in the JPC codec.
---
src/libjasper/jpc/jpc_enc.c | 75 +++++++++++++++++++++++++++++++++++++--------
1 file changed, 62 insertions(+), 13 deletions(-)
diff --git a/src/libjasper/jpc/jpc_enc.c b/src/libjasper/jpc/jpc_enc.c
index 03646d6..b8b47f1 100644
--- a/src/libjasper/jpc/jpc_enc.c
+++ b/src/libjasper/jpc/jpc_enc.c
@@ -1140,8 +1140,9 @@ static int jpc_enc_encodemainbody(jpc_enc_t *enc)
tilex = tileno % cp->numhtiles;
tiley = tileno / cp->numhtiles;
- if (!(enc->curtile = jpc_enc_tile_create(enc->cp, enc->image, tileno))) {
- abort();
+ if (!(enc->curtile = jpc_enc_tile_create(enc->cp, enc->image,
+ tileno))) {
+ return -1;
}
tile = enc->curtile;
@@ -2036,6 +2037,8 @@ jpc_enc_tile_t *jpc_enc_tile_create(jpc_enc_cp_t *cp, jas_image_t *image, int ti
return 0;
}
+/* Note: I don't think that it is necessary to mark destroyed subobjects
+as such in this function. */
void jpc_enc_tile_destroy(jpc_enc_tile_t *tile)
{
jpc_enc_tcmpt_t *tcmpt;
@@ -2047,16 +2050,21 @@ void jpc_enc_tile_destroy(jpc_enc_tile_t *tile)
tcmpt_destroy(tcmpt);
}
jas_free(tile->tcmpts);
+ /* tile->tcmpts = NULL; */
}
if (tile->lyrsizes) {
jas_free(tile->lyrsizes);
+ /* tile->lyrsizes = NULL; */
}
if (tile->pi) {
jpc_pi_destroy(tile->pi);
+ /* tile->pi = NULL; */
}
jas_free(tile);
+ /* tile = NULL; */
}
+/* Note: This constructor creates the object in place. */
static jpc_enc_tcmpt_t *tcmpt_create(jpc_enc_tcmpt_t *tcmpt, jpc_enc_cp_t *cp,
jas_image_t *image, jpc_enc_tile_t *tile)
{
@@ -2152,6 +2160,10 @@ static jpc_enc_tcmpt_t *tcmpt_create(jpc_enc_tcmpt_t *tcmpt, jpc_enc_cp_t *cp,
}
+/* Note: Since jpc_enc_tcmpt_t objects are created in-place, they might
+potentially be destroyed multiple times at different levels in the call
+chain. So, destroyed subobjects must be marked as destroyed to prevent
+problems such as double frees. */
static void tcmpt_destroy(jpc_enc_tcmpt_t *tcmpt)
{
jpc_enc_rlvl_t *rlvl;
@@ -2163,16 +2175,20 @@ static void tcmpt_destroy(jpc_enc_tcmpt_t *tcmpt)
rlvl_destroy(rlvl);
}
jas_free(tcmpt->rlvls);
+ tcmpt->rlvls = NULL;
}
if (tcmpt->data) {
jas_seq2d_destroy(tcmpt->data);
+ tcmpt->data = NULL;
}
if (tcmpt->tsfb) {
jpc_tsfb_destroy(tcmpt->tsfb);
+ tcmpt->tsfb = NULL;
}
}
+/* Note: This constructor creates the object in place. */
static jpc_enc_rlvl_t *rlvl_create(jpc_enc_rlvl_t *rlvl, jpc_enc_cp_t *cp,
jpc_enc_tcmpt_t *tcmpt, jpc_tsfb_band_t *bandinfos)
{
@@ -2254,6 +2270,10 @@ static jpc_enc_rlvl_t *rlvl_create(jpc_enc_rlvl_t *rlvl, jpc_enc_cp_t *cp,
return 0;
}
+/* Note: Since jpc_enc_rlvl_t objects are created in-place, they might
+potentially be destroyed multiple times at different levels in the call
+chain. So, destroyed subobjects must be marked as destroyed to prevent
+problems such as double frees. */
static void rlvl_destroy(jpc_enc_rlvl_t *rlvl)
{
jpc_enc_band_t *band;
@@ -2265,9 +2285,11 @@ static void rlvl_destroy(jpc_enc_rlvl_t *rlvl)
band_destroy(band);
}
jas_free(rlvl->bands);
+ rlvl->bands = NULL;
}
}
+/* Note: This constructor creates the object in place. */
static jpc_enc_band_t *band_create(jpc_enc_band_t *band, jpc_enc_cp_t *cp,
jpc_enc_rlvl_t *rlvl, jpc_tsfb_band_t *bandinfos)
{
@@ -2335,6 +2357,10 @@ if (band->data) {
return 0;
}
+/* Note: Since jpc_enc_band_t objects are created in-place, they might
+potentially be destroyed multiple times at different levels in the call
+chain. So, destroyed subobjects must be marked as destroyed to prevent
+problems such as double frees. */
static void band_destroy(jpc_enc_band_t *band)
{
jpc_enc_prc_t *prc;
@@ -2348,12 +2374,15 @@ static void band_destroy(jpc_enc_band_t *band)
prc_destroy(prc);
}
jas_free(band->prcs);
+ band->prcs = NULL;
}
if (band->data) {
jas_seq2d_destroy(band->data);
+ band->data = NULL;
}
}
+/* Note: This constructor creates the object in place. */
static jpc_enc_prc_t *prc_create(jpc_enc_prc_t *prc, jpc_enc_cp_t *cp, jpc_enc_band_t *band)
{
uint_fast32_t prcno;
@@ -2383,21 +2412,21 @@ static jpc_enc_prc_t *prc_create(jpc_enc_prc_t *prc, jpc_enc_cp_t *cp, jpc_enc_b
rlvl = band->rlvl;
tcmpt = rlvl->tcmpt;
-rlvlno = rlvl - tcmpt->rlvls;
+ rlvlno = rlvl - tcmpt->rlvls;
prcno = prc - band->prcs;
prcxind = prcno % rlvl->numhprcs;
prcyind = prcno / rlvl->numhprcs;
prc->band = band;
-tlprctlx = JPC_FLOORTOMULTPOW2(rlvl->tlx, rlvl->prcwidthexpn);
-tlprctly = JPC_FLOORTOMULTPOW2(rlvl->tly, rlvl->prcheightexpn);
-if (!rlvlno) {
- tlcbgtlx = tlprctlx;
- tlcbgtly = tlprctly;
-} else {
- tlcbgtlx = JPC_CEILDIVPOW2(tlprctlx, 1);
- tlcbgtly = JPC_CEILDIVPOW2(tlprctly, 1);
-}
+ tlprctlx = JPC_FLOORTOMULTPOW2(rlvl->tlx, rlvl->prcwidthexpn);
+ tlprctly = JPC_FLOORTOMULTPOW2(rlvl->tly, rlvl->prcheightexpn);
+ if (!rlvlno) {
+ tlcbgtlx = tlprctlx;
+ tlcbgtly = tlprctly;
+ } else {
+ tlcbgtlx = JPC_CEILDIVPOW2(tlprctlx, 1);
+ tlcbgtly = JPC_CEILDIVPOW2(tlprctly, 1);
+ }
/* Compute the coordinates of the top-left and bottom-right
corners of the precinct. */
@@ -2479,6 +2508,10 @@ if (!rlvlno) {
return 0;
}
+/* Note: Since jpc_enc_prc_t objects are created in-place, they might
+potentially be destroyed multiple times at different levels in the call
+chain. So, destroyed subobjects must be marked as destroyed to prevent
+problems such as double frees. */
static void prc_destroy(jpc_enc_prc_t *prc)
{
jpc_enc_cblk_t *cblk;
@@ -2490,22 +2523,29 @@ static void prc_destroy(jpc_enc_prc_t *prc)
cblk_destroy(cblk);
}
jas_free(prc->cblks);
+ prc->cblks = NULL;
}
if (prc->incltree) {
jpc_tagtree_destroy(prc->incltree);
+ prc->incltree = NULL;
}
if (prc->nlibtree) {
jpc_tagtree_destroy(prc->nlibtree);
+ prc->nlibtree = NULL;
}
if (prc->savincltree) {
jpc_tagtree_destroy(prc->savincltree);
+ prc->savincltree = NULL;
}
if (prc->savnlibtree) {
jpc_tagtree_destroy(prc->savnlibtree);
+ prc->savnlibtree = NULL;
}
}
-static jpc_enc_cblk_t *cblk_create(jpc_enc_cblk_t *cblk, jpc_enc_cp_t *cp, jpc_enc_prc_t *prc)
+/* Note: This constructor creates the object in place. */
+static jpc_enc_cblk_t *cblk_create(jpc_enc_cblk_t *cblk, jpc_enc_cp_t *cp,
+ jpc_enc_prc_t *prc)
{
jpc_enc_band_t *band;
uint_fast32_t cblktlx;
@@ -2563,6 +2603,10 @@ static jpc_enc_cblk_t *cblk_create(jpc_enc_cblk_t *cblk, jpc_enc_cp_t *cp, jpc_e
return 0;
}
+/* Note: Since jpc_enc_cblk_t objects are created in-place, they might
+potentially be destroyed multiple times at different levels in the call
+chain. So, destroyed subobjects must be marked as destroyed to prevent
+problems such as double frees. */
static void cblk_destroy(jpc_enc_cblk_t *cblk)
{
uint_fast16_t passno;
@@ -2573,18 +2617,23 @@ static void cblk_destroy(jpc_enc_cblk_t *cblk)
pass_destroy(pass);
}
jas_free(cblk->passes);
+ cblk->passes = NULL;
}
if (cblk->stream) {
jas_stream_close(cblk->stream);
+ cblk->stream = NULL;
}
if (cblk->mqenc) {
jpc_mqenc_destroy(cblk->mqenc);
+ cblk->mqenc = NULL;
}
if (cblk->data) {
jas_seq2d_destroy(cblk->data);
+ cblk->data = NULL;
}
if (cblk->flags) {
jas_seq2d_destroy(cblk->flags);
+ cblk->flags = NULL;
}
}
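The pattern applied throughout this patch is to make every destructor idempotent: each subobject pointer is cleared immediately after it is freed, so a second teardown (for instance from an outer error path) becomes a no-op instead of a double free. A short standalone sketch with invented types:

#include <stdlib.h>

struct block {
    int *passes;
    int *flags;
};

/* Safe to call any number of times on the same object. */
static void block_destroy(struct block *blk)
{
    if (blk->passes) {
        free(blk->passes);
        blk->passes = NULL;   /* mark as destroyed so a second call does nothing */
    }
    if (blk->flags) {
        free(blk->flags);
        blk->flags = NULL;
    }
}

int main(void)
{
    struct block blk = { malloc(16 * sizeof(int)), malloc(16 * sizeof(int)) };
    block_destroy(&blk);
    block_destroy(&blk);      /* second teardown, e.g. from an outer error path */
    return 0;
}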
Backport of 988f8365f7d8ad8073b6786e433d34c553ecf568
From: Michael Adams <mdadams@ece.uvic.ca>
Also backport jas_safe_size_mul()
diff -aur jasper-1.900.1-debian1.orig/src/libjasper/base/jas_seq.c jasper-1.900.1-debian1/src/libjasper/base/jas_seq.c
--- jasper-1.900.1-debian1.orig/src/libjasper/base/jas_seq.c 2017-03-15 16:30:46.000000000 +0100
+++ jasper-1.900.1-debian1/src/libjasper/base/jas_seq.c 2017-03-15 16:36:57.410704785 +0100
@@ -101,9 +101,16 @@
{
jas_matrix_t *matrix;
int i;
+ size_t size;
+ matrix = 0;
+
+ if (numrows < 0 || numcols < 0) {
+ goto error;
+ }
+
if (!(matrix = jas_malloc(sizeof(jas_matrix_t)))) {
- return 0;
+ goto error;
}
matrix->flags_ = 0;
matrix->numrows_ = numrows;
@@ -111,21 +118,25 @@
matrix->rows_ = 0;
matrix->maxrows_ = numrows;
matrix->data_ = 0;
- matrix->datasize_ = numrows * numcols;
+ matrix->datasize_ = 0;
+
+ // matrix->datasize_ = numrows * numcols;
+ if (!jas_safe_size_mul(numrows, numcols, &size)) {
+ goto error;
+ }
+ matrix->datasize_ = size;
if (matrix->maxrows_ > 0) {
if (!(matrix->rows_ = jas_alloc2(matrix->maxrows_,
sizeof(jas_seqent_t *)))) {
- jas_matrix_destroy(matrix);
- return 0;
+ goto error;
}
}
if (matrix->datasize_ > 0) {
if (!(matrix->data_ = jas_alloc2(matrix->datasize_,
sizeof(jas_seqent_t)))) {
- jas_matrix_destroy(matrix);
- return 0;
+ goto error;
}
}
@@ -143,6 +154,12 @@
matrix->yend_ = matrix->numrows_;
return matrix;
+
+error:
+ if (matrix) {
+ jas_matrix_destroy(matrix);
+ }
+ return 0;
}
void jas_matrix_destroy(jas_matrix_t *matrix)
--- jasper-1.900.1.orig/src/libjasper/include/jasper/jas_math.h 2016-11-30 15:16:36.376026487 +0100
+++ jasper-1.900.1/src/libjasper/include/jasper/jas_math.h 2016-11-30 15:17:11.011071690 +0100
@@ -79,6 +79,7 @@
#include <assert.h>
#include <stdio.h>
#include <string.h>
+#include <stdint.h>
#ifdef __cplusplus
extern "C" {
@@ -110,6 +111,19 @@
#define JAS_ONES(n) \
((1 << (n)) - 1)
+inline static int jas_safe_size_mul(size_t x, size_t y, size_t *result)
+{
+ /* Check if overflow would occur */
+ if (x && y > SIZE_MAX / x) {
+ /* Overflow would occur. */
+ return 0;
+ }
+ if (result) {
+ *result = x * y;
+ }
+ return 1;
+}
+
#ifdef __cplusplus
}
#endif
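For reference, a self-contained version of the overflow check being backported, together with a typical way it would guard an allocation; the demo_ prefix marks names that are ours, not JasPer's:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Returns 1 and stores x * y in *result if the product fits in size_t,
 * otherwise returns 0 and leaves *result untouched. */
static int demo_safe_size_mul(size_t x, size_t y, size_t *result)
{
    if (x && y > SIZE_MAX / x) {
        return 0;                      /* x * y would not fit in size_t */
    }
    if (result) {
        *result = x * y;
    }
    return 1;
}

int main(void)
{
    size_t numrows = 1024, numcols = 1024, size;
    if (!demo_safe_size_mul(numrows, numcols, &size)) {
        fprintf(stderr, "matrix dimensions overflow size_t\n");
        return 1;
    }
    int *data = malloc(size * sizeof(*data));   /* a full version would check this product too */
    if (!data) {
        return 1;
    }
    free(data);
    return 0;
}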
From 1f0dfe5a42911b6880a1445f13f6d615ddb55387 Mon Sep 17 00:00:00 2001
From: Michael Adams <mdadams@ece.uvic.ca>
Date: Fri, 4 Nov 2016 07:20:23 -0700
Subject: [PATCH] Fixed an integer overflow problem in the JPC codec that later
resulted in the use of uninitialized data.
---
src/libjasper/jpc/jpc_t2cod.c | 20 ++++++++++----------
src/libjasper/jpc/jpc_t2cod.h | 20 ++++++++++----------
2 files changed, 20 insertions(+), 20 deletions(-)
diff --git a/src/libjasper/jpc/jpc_t2cod.c b/src/libjasper/jpc/jpc_t2cod.c
index 08315dd..174442a 100644
--- a/src/libjasper/jpc/jpc_t2cod.c
+++ b/src/libjasper/jpc/jpc_t2cod.c
@@ -432,18 +432,18 @@ static int jpc_pi_nextcprl(register jpc_pi_t *pi)
&pi->picomps[pi->compno]; pi->compno < JAS_CAST(int, pchg->compnoend) && pi->compno < pi->numcomps; ++pi->compno,
++pi->picomp) {
pirlvl = pi->picomp->pirlvls;
- pi->xstep = pi->picomp->hsamp * (1 << (pirlvl->prcwidthexpn +
- pi->picomp->numrlvls - 1));
- pi->ystep = pi->picomp->vsamp * (1 << (pirlvl->prcheightexpn +
- pi->picomp->numrlvls - 1));
+ pi->xstep = pi->picomp->hsamp * (JAS_CAST(uint_fast32_t, 1) <<
+ (pirlvl->prcwidthexpn + pi->picomp->numrlvls - 1));
+ pi->ystep = pi->picomp->vsamp * (JAS_CAST(uint_fast32_t, 1) <<
+ (pirlvl->prcheightexpn + pi->picomp->numrlvls - 1));
for (rlvlno = 1, pirlvl = &pi->picomp->pirlvls[1];
rlvlno < pi->picomp->numrlvls; ++rlvlno, ++pirlvl) {
- pi->xstep = JAS_MIN(pi->xstep, pi->picomp->hsamp * (1 <<
- (pirlvl->prcwidthexpn + pi->picomp->numrlvls -
- rlvlno - 1)));
- pi->ystep = JAS_MIN(pi->ystep, pi->picomp->vsamp * (1 <<
- (pirlvl->prcheightexpn + pi->picomp->numrlvls -
- rlvlno - 1)));
+ pi->xstep = JAS_MIN(pi->xstep, pi->picomp->hsamp *
+ (JAS_CAST(uint_fast32_t, 1) << (pirlvl->prcwidthexpn +
+ pi->picomp->numrlvls - rlvlno - 1)));
+ pi->ystep = JAS_MIN(pi->ystep, pi->picomp->vsamp *
+ (JAS_CAST(uint_fast32_t, 1) << (pirlvl->prcheightexpn +
+ pi->picomp->numrlvls - rlvlno - 1)));
}
for (pi->y = pi->ystart; pi->y < pi->yend;
pi->y += pi->ystep - (pi->y % pi->ystep)) {
diff --git a/src/libjasper/jpc/jpc_t2cod.h b/src/libjasper/jpc/jpc_t2cod.h
index 0a176c9..690e031 100644
--- a/src/libjasper/jpc/jpc_t2cod.h
+++ b/src/libjasper/jpc/jpc_t2cod.h
@@ -129,10 +129,10 @@ typedef struct {
jpc_pirlvl_t *pirlvls;
/* The horizontal sampling period. */
- int hsamp;
+ uint_fast32_t hsamp;
/* The vertical sampling period. */
- int vsamp;
+ uint_fast32_t vsamp;
} jpc_picomp_t;
@@ -171,32 +171,32 @@ typedef struct {
int lyrno;
/* The x-coordinate of the current position. */
- int x;
+ uint_fast32_t x;
/* The y-coordinate of the current position. */
- int y;
+ uint_fast32_t y;
/* The horizontal step size. */
- int xstep;
+ uint_fast32_t xstep;
/* The vertical step size. */
- int ystep;
+ uint_fast32_t ystep;
/* The x-coordinate of the top-left corner of the tile on the reference
grid. */
- int xstart;
+ uint_fast32_t xstart;
/* The y-coordinate of the top-left corner of the tile on the reference
grid. */
- int ystart;
+ uint_fast32_t ystart;
/* The x-coordinate of the bottom-right corner of the tile on the
reference grid (plus one). */
- int xend;
+ uint_fast32_t xend;
/* The y-coordinate of the bottom-right corner of the tile on the
reference grid (plus one). */
- int yend;
+ uint_fast32_t yend;
/* The current progression change. */
jpc_pchg_t *pchg;
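The casts added above address a shift on a plain int constant: once prcwidthexpn + numrlvls - 1 reaches 31, 1 << n overflows a 32-bit signed int, which is undefined behaviour, so the constant has to be widened before shifting. An isolated example of the difference (variable names are not taken from the codec):

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    unsigned shift = 31;                               /* smallest count where a plain int overflows */

    /* int bad = 1 << shift;          signed overflow: undefined behaviour */
    uint_fast32_t step = (uint_fast32_t)1 << shift;    /* well defined: 2147483648 */

    printf("step = %" PRIuFAST32 "\n", step);
    return 0;
}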
[buildout]
extends =
../tensorflow/buildout.cfg
../python-PyYAML/buildout.cfg
../h5py/buildout.cfg
../pillow/buildout.cfg
parts =
......@@ -8,20 +9,23 @@ parts =
[keras-egg]
recipe = zc.recipe.egg
initialization =
import scipy.spatial.ckdtree # load our own libstdc++ explicitly at the very beginning
eggs =
${scipy:egg}
${numpy:egg}
${tensorflow-build-install-egg:egg}
${protobuf-python:egg}
Keras
${python-PyYAML:egg}
six
keras
${h5py:egg}
${pillow-python:egg}
interpreter = keras-python
scripts = keras-python
[versions]
Keras = 1.2.1
Theano = 0.8.2
tensorflow = 0.12.0
Keras = 2.0.1
tensorflow = 1.0.1
h5py = 2.7.0rc2
Cython = 0.25.2
......@@ -25,6 +25,6 @@ configure-options =
environment =
CPPFLAGS=-I${zlib:location}/include -I${openssl:location}/include
LD_LIBRARY_PATH=${gcc-fortran:location}/lib:${gcc-fortran:location}/lib64
LDFLAGS=-Wl,-rpath=${gcc-fortran:location}/lib -Wl,-rpath=${gcc-fortran:location}/lib64 -L${zlib:location}/lib -L${openssl:location}/lib -Wl,-rpath=${tokyocabinet:location}/lib -Wl,-rpath=${messagepack:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${openssl:location}/lib
PATH=${gcc-fortran:location}/bin:${patch:location}/bin:%(PATH)s
LD_LIBRARY_PATH=${gcc:location}/lib:${gcc:location}/lib64
LDFLAGS=-Wl,-rpath=${gcc:location}/lib -Wl,-rpath=${gcc:location}/lib64 -L${zlib:location}/lib -L${openssl:location}/lib -Wl,-rpath=${tokyocabinet:location}/lib -Wl,-rpath=${messagepack:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${openssl:location}/lib
PATH=${gcc:location}/bin:${patch:location}/bin:%(PATH)s
......@@ -39,5 +39,5 @@ configure-options =
make-options =
-C build
environment =
PATH=${gcc-fortran:location}/bin:${patch:location}/bin:%(PATH)s
LD_LIBRARY_PATH=${gcc-fortran:location}/lib:${gcc-fortran:location}/lib64
PATH=${gcc:location}/bin:${patch:location}/bin:%(PATH)s
LD_LIBRARY_PATH=${gcc:location}/lib:${gcc:location}/lib64
......@@ -21,8 +21,8 @@ environment =
[librsvg]
recipe = slapos.recipe.cmmi
url = http://ftp.gnome.org/pub/gnome/core/3.20/3.20.2/sources/librsvg-2.40.15.tar.xz
md5sum = 3a66ab5b4fe1fb43b471708e4ff39a0e
url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/librsvg-2.40.16.tar.xz
md5sum = f474fe37177a2bf8050787df2046095c
pkg_config_depends = ${pango:location}/lib/pkgconfig:${pango:pkg_config_depends}:${zlib:location}/lib/pkgconfig:${gdk-pixbuf:location}/lib/pkgconfig:${libcroco:location}/lib/pkgconfig
configure-options =
--disable-static
......
[buildout]
extends =
../gcc/buildout.cfg
../m4/buildout.cfg
../perl/buildout.cfg
../xz-utils/buildout.cfg
......@@ -9,10 +10,11 @@ parts =
[libsigc]
recipe = slapos.recipe.cmmi
# we keep using libsigc++-2.4 for glibmm-2.44 that is buildable with non-C++11 compiler
url = http://ftp.gnome.org/pub/gnome/core/3.16/3.16.2/sources/libsigc++-2.4.1.tar.xz
md5sum = 55945ba6e1652f89999e910f6b52047c
url = http://ftp.gnome.org/pub/gnome/core/3.22/3.22.2/sources/libsigc++-2.10.0.tar.xz
md5sum = 70bcbde2c900e4925d6ef4bf50954195
configure-options =
--disable-documentation
environment =
PATH=${m4:location}/bin:${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
PATH=${gcc:location}/bin:${m4:location}/bin:${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
LD_LIBRARY_PATH=${gcc:location}/lib:${gcc:location}/lib64
LDFLAGS=-Wl,-rpath=${gcc:location}/lib -Wl,-rpath=${gcc:location}/lib64
......@@ -2,6 +2,7 @@
extends =
../libjpeg/buildout.cfg
../jbigkit/buildout.cfg
../patch/buildout.cfg
../zlib/buildout.cfg
parts =
......@@ -15,6 +16,9 @@ configure-options =
--disable-static
--without-x
--disable-lzma
patch-options = -p1
patches =
${:_profile_base_location_}/tiff_4.0.7-5.debian.patch#f132ec0dc5cde03f9bbbe90b7a79fed4
environment =
CPPFLAGS=-I${libjpeg:location}/include -I${jbigkit:location}/include -I${zlib:location}/include
LDFLAGS=-L${libjpeg:location}/lib -Wl,-rpath=${libjpeg:location}/lib -L${jbigkit:location}/lib -Wl,-rpath=${jbigkit:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
......
commit 8c101323f5789ef6a0db952d53794e9c4ba48207
Author: erouault <erouault>
Date: Fri Dec 2 21:56:56 2016 +0000
* libtiff/tif_read.c, libtiff/tiffiop.h: fix uint32 overflow in
TIFFReadEncodedStrip() that caused an integer division by zero.
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2596
diff --git a/ChangeLog b/ChangeLog
index 46a5d7c..668b66a 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,10 @@
+2016-12-02 Even Rouault <even.rouault at spatialys.com>
+
+ * libtiff/tif_read.c, libtiff/tiffiop.h: fix uint32 overflow in
+ TIFFReadEncodedStrip() that caused an integer division by zero.
+ Reported by Agostino Sarubbo.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2596
+
2016-11-19 Bob Friesenhahn <bfriesen@simple.dallas.tx.us>
* libtiff 4.0.7 released.
diff --git a/libtiff/tif_read.c b/libtiff/tif_read.c
index 8003592..29a311d 100644
--- a/libtiff/tif_read.c
+++ b/libtiff/tif_read.c
@@ -1,4 +1,4 @@
-/* $Id: tif_read.c,v 1.49 2016-07-10 18:00:21 erouault Exp $ */
+/* $Id: tif_read.c,v 1.50 2016-12-02 21:56:56 erouault Exp $ */
/*
* Copyright (c) 1988-1997 Sam Leffler
@@ -346,7 +346,7 @@ TIFFReadEncodedStrip(TIFF* tif, uint32 strip, void* buf, tmsize_t size)
rowsperstrip=td->td_rowsperstrip;
if (rowsperstrip>td->td_imagelength)
rowsperstrip=td->td_imagelength;
- stripsperplane=((td->td_imagelength+rowsperstrip-1)/rowsperstrip);
+ stripsperplane= TIFFhowmany_32_maxuint_compat(td->td_imagelength, rowsperstrip);
stripinplane=(strip%stripsperplane);
plane=(uint16)(strip/stripsperplane);
rows=td->td_imagelength-stripinplane*rowsperstrip;
diff --git a/libtiff/tiffiop.h b/libtiff/tiffiop.h
index 8bcd0c1..5294ee7 100644
--- a/libtiff/tiffiop.h
+++ b/libtiff/tiffiop.h
@@ -1,4 +1,4 @@
-/* $Id: tiffiop.h,v 1.89 2016-01-23 21:20:34 erouault Exp $ */
+/* $Id: tiffiop.h,v 1.90 2016-12-02 21:56:56 erouault Exp $ */
/*
* Copyright (c) 1988-1997 Sam Leffler
@@ -250,6 +250,10 @@ struct tiff {
#define TIFFhowmany_32(x, y) (((uint32)x < (0xffffffff - (uint32)(y-1))) ? \
((((uint32)(x))+(((uint32)(y))-1))/((uint32)(y))) : \
0U)
+/* Variant of TIFFhowmany_32() that doesn't return 0 if x close to MAXUINT. */
+/* Caution: TIFFhowmany_32_maxuint_compat(x,y)*y might overflow */
+#define TIFFhowmany_32_maxuint_compat(x, y) \
+ (((uint32)(x) / (uint32)(y)) + ((((uint32)(x) % (uint32)(y)) != 0) ? 1 : 0))
#define TIFFhowmany8_32(x) (((x)&0x07)?((uint32)(x)>>3)+1:(uint32)(x)>>3)
#define TIFFroundup_32(x, y) (TIFFhowmany_32(x,y)*(y))
#define TIFFhowmany_64(x, y) ((((uint64)(x))+(((uint64)(y))-1))/((uint64)(y)))
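The distinction between the two macros is worth spelling out: the addition-based TIFFhowmany_32() deliberately returns 0 when x + (y-1) would overflow a uint32, and that 0 later feeds a modulo, hence the division by zero; the divide-then-round-up form cannot overflow and never yields 0 for a nonzero x. A standalone comparison (plain functions rather than libtiff's macros):

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Addition-based ceiling division: returns 0 if x + (y-1) would overflow. */
static uint32_t howmany_checked(uint32_t x, uint32_t y)
{
    return (x < UINT32_MAX - (y - 1)) ? (x + (y - 1)) / y : 0;
}

/* Division-based ceiling division: cannot overflow, never 0 for x > 0. */
static uint32_t howmany_compat(uint32_t x, uint32_t y)
{
    return x / y + ((x % y != 0) ? 1 : 0);
}

int main(void)
{
    uint32_t imagelength = UINT32_MAX - 2, rowsperstrip = 8;
    uint32_t a = howmany_checked(imagelength, rowsperstrip);
    uint32_t b = howmany_compat(imagelength, rowsperstrip);
    printf("checked=%" PRIu32 " compat=%" PRIu32 "\n", a, b);
    if (a == 0) {
        printf("dividing or taking a modulo by the checked result here would crash\n");
    }
    return 0;
}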
commit b412777317cabbf8ed89ca38fb180991cca89b8c
Author: erouault <erouault>
Date: Fri Dec 2 22:13:32 2016 +0000
* tools/tiffcp.c: avoid uint32 underflow in cpDecodedStrips that
can cause various issues, such as buffer overflows in the library.
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2598
diff --git a/ChangeLog b/ChangeLog
index 668b66a..0f154d6 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,12 @@
2016-12-02 Even Rouault <even.rouault at spatialys.com>
+ * tools/tiffcp.c: avoid uint32 underflow in cpDecodedStrips that
+ can cause various issues, such as buffer overflows in the library.
+ Reported by Agostino Sarubbo.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2598
+
+2016-12-02 Even Rouault <even.rouault at spatialys.com>
+
* libtiff/tif_read.c, libtiff/tiffiop.h: fix uint32 overflow in
TIFFReadEncodedStrip() that caused an integer division by zero.
Reported by Agostino Sarubbo.
diff --git a/tools/tiffcp.c b/tools/tiffcp.c
index 338a3d1..6dfb9a9 100644
--- a/tools/tiffcp.c
+++ b/tools/tiffcp.c
@@ -1,4 +1,4 @@
-/* $Id: tiffcp.c,v 1.55 2016-10-08 15:54:57 erouault Exp $ */
+/* $Id: tiffcp.c,v 1.56 2016-12-02 22:13:32 erouault Exp $ */
/*
* Copyright (c) 1988-1997 Sam Leffler
@@ -985,7 +985,7 @@ DECLAREcpFunc(cpDecodedStrips)
tstrip_t s, ns = TIFFNumberOfStrips(in);
uint32 row = 0;
_TIFFmemset(buf, 0, stripsize);
- for (s = 0; s < ns; s++) {
+ for (s = 0; s < ns && row < imagelength; s++) {
tsize_t cc = (row + rowsperstrip > imagelength) ?
TIFFVStripSize(in, imagelength - row) : stripsize;
if (TIFFReadEncodedStrip(in, s, buf, cc) < 0
commit da99990ba6e1203798a59eb836fc6433ed6e3d66
Author: erouault <erouault>
Date: Fri Dec 2 23:05:51 2016 +0000
* libtiff/tif_pixarlog.c, libtiff/tif_luv.c: fix heap-based buffer
overflow on generation of PixarLog / LUV compressed files, with
ColorMap, TransferFunction attached and nasty plays with bitspersample.
The fix for LUV has not been tested, but suffers from the same kind
of issue as PixarLog.
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2604
diff --git a/ChangeLog b/ChangeLog
index 0f154d6..93c01f8 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,13 @@
+2016-12-03 Even Rouault <even.rouault at spatialys.com>
+
+ * libtiff/tif_pixarlog.c, libtiff/tif_luv.c: fix heap-based buffer
+ overflow on generation of PixarLog / LUV compressed files, with
+ ColorMap, TransferFunction attached and nasty plays with bitspersample.
+ The fix for LUV has not been tested, but suffers from the same kind
+ of issue as PixarLog.
+ Reported by Agostino Sarubbo.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2604
+
2016-12-02 Even Rouault <even.rouault at spatialys.com>
* tools/tiffcp.c: avoid uint32 underflow in cpDecodedStrips that
diff --git a/libtiff/tif_luv.c b/libtiff/tif_luv.c
index ca08f30..f42ac01 100644
--- a/libtiff/tif_luv.c
+++ b/libtiff/tif_luv.c
@@ -1,4 +1,4 @@
-/* $Id: tif_luv.c,v 1.43 2016-09-04 21:32:56 erouault Exp $ */
+/* $Id: tif_luv.c,v 1.44 2016-12-02 23:05:51 erouault Exp $ */
/*
* Copyright (c) 1997 Greg Ward Larson
@@ -158,6 +158,7 @@
typedef struct logLuvState LogLuvState;
struct logLuvState {
+ int encoder_state; /* 1 if encoder correctly initialized */
int user_datafmt; /* user data format */
int encode_meth; /* encoding method */
int pixel_size; /* bytes per pixel */
@@ -1552,6 +1553,7 @@ LogLuvSetupEncode(TIFF* tif)
td->td_photometric, "must be either LogLUV or LogL");
break;
}
+ sp->encoder_state = 1;
return (1);
notsupported:
TIFFErrorExt(tif->tif_clientdata, module,
@@ -1563,19 +1565,27 @@ notsupported:
static void
LogLuvClose(TIFF* tif)
{
+ LogLuvState* sp = (LogLuvState*) tif->tif_data;
TIFFDirectory *td = &tif->tif_dir;
+ assert(sp != 0);
/*
* For consistency, we always want to write out the same
* bitspersample and sampleformat for our TIFF file,
* regardless of the data format being used by the application.
* Since this routine is called after tags have been set but
* before they have been recorded in the file, we reset them here.
+ * Note: this is really a nasty approach. See PixarLogClose
*/
- td->td_samplesperpixel =
- (td->td_photometric == PHOTOMETRIC_LOGL) ? 1 : 3;
- td->td_bitspersample = 16;
- td->td_sampleformat = SAMPLEFORMAT_INT;
+ if( sp->encoder_state )
+ {
+ /* See PixarLogClose. Might avoid issues with tags whose size depends
+ * on those below, but not completely sure this is enough. */
+ td->td_samplesperpixel =
+ (td->td_photometric == PHOTOMETRIC_LOGL) ? 1 : 3;
+ td->td_bitspersample = 16;
+ td->td_sampleformat = SAMPLEFORMAT_INT;
+ }
}
static void
diff --git a/libtiff/tif_pixarlog.c b/libtiff/tif_pixarlog.c
index f4af2ba..9836dce 100644
--- a/libtiff/tif_pixarlog.c
+++ b/libtiff/tif_pixarlog.c
@@ -1,4 +1,4 @@
-/* $Id: tif_pixarlog.c,v 1.48 2016-09-23 22:12:18 erouault Exp $ */
+/* $Id: tif_pixarlog.c,v 1.49 2016-12-02 23:05:51 erouault Exp $ */
/*
* Copyright (c) 1996-1997 Sam Leffler
@@ -1233,8 +1233,10 @@ PixarLogPostEncode(TIFF* tif)
static void
PixarLogClose(TIFF* tif)
{
+ PixarLogState* sp = (PixarLogState*) tif->tif_data;
TIFFDirectory *td = &tif->tif_dir;
+ assert(sp != 0);
/* In a really sneaky (and really incorrect, and untruthful, and
* troublesome, and error-prone) maneuver that completely goes against
* the spirit of TIFF, and breaks TIFF, on close, we covertly
@@ -1243,8 +1245,19 @@ PixarLogClose(TIFF* tif)
* readers that don't know about PixarLog, or how to set
* the PIXARLOGDATFMT pseudo-tag.
*/
- td->td_bitspersample = 8;
- td->td_sampleformat = SAMPLEFORMAT_UINT;
+
+ if (sp->state&PLSTATE_INIT) {
+ /* We test the state to avoid an issue such as in
+ * http://bugzilla.maptools.org/show_bug.cgi?id=2604
+ * What happens in that case is that the bitspersample is 1 and
+ * a TransferFunction is set. The size of the TransferFunction
+ * depends on 1<<bitspersample. So if we increase it, an access
+ * out of the buffer will happen at directory flushing.
+ * Another option would be to clear those tags.
+ */
+ td->td_bitspersample = 8;
+ td->td_sampleformat = SAMPLEFORMAT_UINT;
+ }
}
static void
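The shape of both fixes is the same: remember whether the encoder was actually set up, and only perform the tag rewriting on close when it was, so a half-initialized state never drives writes sized from bogus tag values. A much-reduced standalone sketch of that guard (the struct and functions are invented, not libtiff's):

#include <stdio.h>
#include <string.h>

struct codec_state {
    int encoder_state;   /* nonzero once setup completed successfully */
    int bitspersample;
};

static int setup_encode(struct codec_state *sp, int bitspersample)
{
    if (bitspersample != 8 && bitspersample != 16) {
        return 0;                      /* refuse unsupported input: state stays 0 */
    }
    sp->bitspersample = bitspersample;
    sp->encoder_state = 1;
    return 1;
}

static void codec_close(struct codec_state *sp)
{
    /* Only rewrite the directory field if the encoder really ran; otherwise we
     * would "fix up" values the encoder never owned or validated. */
    if (sp->encoder_state) {
        sp->bitspersample = 8;
    }
}

int main(void)
{
    struct codec_state sp;
    memset(&sp, 0, sizeof(sp));
    if (!setup_encode(&sp, 1)) {       /* bogus bitspersample: setup refuses */
        fprintf(stderr, "setup refused\n");
    }
    codec_close(&sp);                  /* safe: no blind tag rewrite */
    return 0;
}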
commit c533d200ecc45e00892a94f9bb2e762a5aa0b2ce
Author: erouault <erouault>
Date: Sat Dec 3 11:02:15 2016 +0000
* libtiff/tif_dirread.c: modify ChopUpSingleUncompressedStrip() to
compute nstrips as TIFFhowmany_32(td->td_imagelength, rowsperstrip),
instead of a logic based on the total size of data, which is faulty if
the total size of data is not sufficient to fill the whole image, and thus
results in reading outside of the StripByteCounts/StripOffsets arrays when
using TIFFReadScanline().
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2608.
* libtiff/tif_strip.c: revert the change in TIFFNumberOfStrips() done
for http://bugzilla.maptools.org/show_bug.cgi?id=2587 / CVE-2016-9273 since
the above change is a better fix that makes it unnecessary.
diff --git a/ChangeLog b/ChangeLog
index 93c01f8..9dbc7a0 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,20 @@
2016-12-03 Even Rouault <even.rouault at spatialys.com>
+ * libtiff/tif_dirread.c: modify ChopUpSingleUncompressedStrip() to
+ compute nstrips as TIFFhowmany_32(td->td_imagelength, rowsperstrip),
+ instead of a logic based on the total size of data, which is faulty if
+ the total size of data is not sufficient to fill the whole image, and thus
+ results in reading outside of the StripByteCounts/StripOffsets arrays when
+ using TIFFReadScanline().
+ Reported by Agostino Sarubbo.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2608.
+
+ * libtiff/tif_strip.c: revert the change in TIFFNumberOfStrips() done
+ for http://bugzilla.maptools.org/show_bug.cgi?id=2587 / CVE-2016-9273 since
+ the above change is a better fix that makes it unnecessary.
+
+2016-12-03 Even Rouault <even.rouault at spatialys.com>
+
* libtiff/tif_pixarlog.c, libtiff/tif_luv.c: fix heap-based buffer
overflow on generation of PixarLog / LUV compressed files, with
ColorMap, TransferFunction attached and nasty plays with bitspersample.
diff --git a/libtiff/tif_dirread.c b/libtiff/tif_dirread.c
index 01070f2..f290528 100644
--- a/libtiff/tif_dirread.c
+++ b/libtiff/tif_dirread.c
@@ -1,4 +1,4 @@
-/* $Id: tif_dirread.c,v 1.204 2016-11-16 15:14:15 erouault Exp $ */
+/* $Id: tif_dirread.c,v 1.205 2016-12-03 11:02:15 erouault Exp $ */
/*
* Copyright (c) 1988-1997 Sam Leffler
@@ -5502,8 +5502,7 @@ ChopUpSingleUncompressedStrip(TIFF* tif)
uint64 rowblockbytes;
uint64 stripbytes;
uint32 strip;
- uint64 nstrips64;
- uint32 nstrips32;
+ uint32 nstrips;
uint32 rowsperstrip;
uint64* newcounts;
uint64* newoffsets;
@@ -5534,18 +5533,17 @@ ChopUpSingleUncompressedStrip(TIFF* tif)
return;
/*
- * never increase the number of strips in an image
+ * never increase the number of rows per strip
*/
if (rowsperstrip >= td->td_rowsperstrip)
return;
- nstrips64 = TIFFhowmany_64(bytecount, stripbytes);
- if ((nstrips64==0)||(nstrips64>0xFFFFFFFF)) /* something is wonky, do nothing. */
- return;
- nstrips32 = (uint32)nstrips64;
+ nstrips = TIFFhowmany_32(td->td_imagelength, rowsperstrip);
+ if( nstrips == 0 )
+ return;
- newcounts = (uint64*) _TIFFCheckMalloc(tif, nstrips32, sizeof (uint64),
+ newcounts = (uint64*) _TIFFCheckMalloc(tif, nstrips, sizeof (uint64),
"for chopped \"StripByteCounts\" array");
- newoffsets = (uint64*) _TIFFCheckMalloc(tif, nstrips32, sizeof (uint64),
+ newoffsets = (uint64*) _TIFFCheckMalloc(tif, nstrips, sizeof (uint64),
"for chopped \"StripOffsets\" array");
if (newcounts == NULL || newoffsets == NULL) {
/*
@@ -5562,18 +5560,18 @@ ChopUpSingleUncompressedStrip(TIFF* tif)
* Fill the strip information arrays with new bytecounts and offsets
* that reflect the broken-up format.
*/
- for (strip = 0; strip < nstrips32; strip++) {
+ for (strip = 0; strip < nstrips; strip++) {
if (stripbytes > bytecount)
stripbytes = bytecount;
newcounts[strip] = stripbytes;
- newoffsets[strip] = offset;
+ newoffsets[strip] = stripbytes ? offset : 0;
offset += stripbytes;
bytecount -= stripbytes;
}
/*
* Replace old single strip info with multi-strip info.
*/
- td->td_stripsperimage = td->td_nstrips = nstrips32;
+ td->td_stripsperimage = td->td_nstrips = nstrips;
TIFFSetField(tif, TIFFTAG_ROWSPERSTRIP, rowsperstrip);
_TIFFfree(td->td_stripbytecount);
diff --git a/libtiff/tif_strip.c b/libtiff/tif_strip.c
index b6098dd..6e9f2ef 100644
--- a/libtiff/tif_strip.c
+++ b/libtiff/tif_strip.c
@@ -1,4 +1,4 @@
-/* $Id: tif_strip.c,v 1.37 2016-11-09 23:00:49 erouault Exp $ */
+/* $Id: tif_strip.c,v 1.38 2016-12-03 11:02:15 erouault Exp $ */
/*
* Copyright (c) 1991-1997 Sam Leffler
@@ -63,15 +63,6 @@ TIFFNumberOfStrips(TIFF* tif)
TIFFDirectory *td = &tif->tif_dir;
uint32 nstrips;
- /* If the value was already computed and store in td_nstrips, then return it,
- since ChopUpSingleUncompressedStrip might have altered and resized the
- since the td_stripbytecount and td_stripoffset arrays to the new value
- after the initial affectation of td_nstrips = TIFFNumberOfStrips() in
- tif_dirread.c ~line 3612.
- See http://bugzilla.maptools.org/show_bug.cgi?id=2587 */
- if( td->td_nstrips )
- return td->td_nstrips;
-
nstrips = (td->td_rowsperstrip == (uint32) -1 ? 1 :
TIFFhowmany_32(td->td_imagelength, td->td_rowsperstrip));
if (td->td_planarconfig == PLANARCONFIG_SEPARATE)
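For readers unfamiliar with the macro used by the patch above, here is a minimal, self-contained sketch of the ceiling-division idea behind TIFFhowmany_32; this is an illustration only, the real libtiff macro additionally returns 0 when the uint32 addition would overflow.
#include <stdint.h>
#include <stdio.h>

/* Ceiling division: number of strips needed to cover `imagelength`
 * rows when each strip holds `rowsperstrip` rows. The real libtiff
 * macro also guards against uint32 overflow of the addition. */
static uint32_t howmany_32(uint32_t imagelength, uint32_t rowsperstrip)
{
    if (rowsperstrip == 0)
        return 0;                 /* avoid division by zero */
    return (imagelength + rowsperstrip - 1) / rowsperstrip;
}

int main(void)
{
    /* 1000 rows split into strips of 64 rows each -> 16 strips */
    printf("%u\n", (unsigned)howmany_32(1000, 64));
    return 0;
}
The point of the fix is that the strip count is derived from the image geometry rather than from the declared data size, which a malformed file can understate.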
commit 43576568ed4af4bd43409b7ff36939340141dfd6
Author: erouault <erouault>
Date: Sat Dec 3 11:15:18 2016 +0000
* libtiff/tif_ojpeg.c: make OJPEGDecode() exit early in case of failure in
OJPEGPreDecode(). This avoids a divide by zero and other potential issues.
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2611
diff --git a/ChangeLog b/ChangeLog
index 9dbc7a0..5b23665 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,12 @@
2016-12-03 Even Rouault <even.rouault at spatialys.com>
+ * libtiff/tif_ojpeg.c: make OJPEGDecode() early exit in case of failure in
+ OJPEGPreDecode(). This will avoid a divide by zero, and potential other issues.
+ Reported by Agostino Sarubbo.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2611
+
+2016-12-03 Even Rouault <even.rouault at spatialys.com>
+
* libtiff/tif_dirread.c: modify ChopUpSingleUncompressedStrip() to
instanciate compute ntrips as TIFFhowmany_32(td->td_imagelength, rowsperstrip),
instead of a logic based on the total size of data. Which is faulty is
diff --git a/libtiff/tif_ojpeg.c b/libtiff/tif_ojpeg.c
index 30a1812..93839d8 100644
--- a/libtiff/tif_ojpeg.c
+++ b/libtiff/tif_ojpeg.c
@@ -1,4 +1,4 @@
-/* $Id: tif_ojpeg.c,v 1.65 2016-09-04 21:32:56 erouault Exp $ */
+/* $Id: tif_ojpeg.c,v 1.66 2016-12-03 11:15:18 erouault Exp $ */
/* WARNING: The type of JPEG encapsulation defined by the TIFF Version 6.0
specification is now totally obsolete and deprecated for new applications and
@@ -244,6 +244,7 @@ typedef enum {
typedef struct {
TIFF* tif;
+ int decoder_ok;
#ifndef LIBJPEG_ENCAP_EXTERNAL
JMP_BUF exit_jmpbuf;
#endif
@@ -722,6 +723,7 @@ OJPEGPreDecode(TIFF* tif, uint16 s)
}
sp->write_curstrile++;
}
+ sp->decoder_ok = 1;
return(1);
}
@@ -784,8 +786,14 @@ OJPEGPreDecodeSkipScanlines(TIFF* tif)
static int
OJPEGDecode(TIFF* tif, uint8* buf, tmsize_t cc, uint16 s)
{
+ static const char module[]="OJPEGDecode";
OJPEGState* sp=(OJPEGState*)tif->tif_data;
(void)s;
+ if( !sp->decoder_ok )
+ {
+ TIFFErrorExt(tif->tif_clientdata,module,"Cannot decode: decoder not correctly initialized");
+ return 0;
+ }
if (sp->libjpeg_jpeg_query_style==0)
{
if (OJPEGDecodeRaw(tif,buf,cc)==0)
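The pattern introduced above, recording a success flag in the pre-decode step and refusing to decode until it is set, can be illustrated with a small hypothetical state machine; the names below are illustrative and are not the libtiff API.
#include <stdio.h>

/* Hypothetical codec state: decode() must not run before a
 * successful setup(), mirroring the sp->decoder_ok guard above. */
typedef struct {
    int decoder_ok;   /* set to 1 only when setup succeeds */
} CodecState;

static int codec_setup(CodecState *sp, int input_is_valid)
{
    if (!input_is_valid)
        return 0;             /* leave decoder_ok at 0 */
    sp->decoder_ok = 1;
    return 1;
}

static int codec_decode(CodecState *sp)
{
    if (!sp->decoder_ok) {
        fprintf(stderr, "Cannot decode: decoder not correctly initialized\n");
        return 0;             /* early exit instead of failing later, e.g. on a division by zero */
    }
    /* ... real decoding would happen here ... */
    return 1;
}

int main(void)
{
    CodecState sp = {0};
    codec_setup(&sp, 0);      /* simulated setup failure */
    return codec_decode(&sp) ? 0 : 1;
}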
commit 4307dad7fc153baefeb6bb677060e9c5157e5d42
Author: erouault <erouault>
Date: Sat Dec 3 11:35:56 2016 +0000
* tools/tiffcrop.c: fix readContigStripsIntoBuffer() in -i (ignore) mode so
that the output buffer is correctly incremented to avoid a write outside bounds.
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2620
diff --git a/ChangeLog b/ChangeLog
index 5b23665..d6a416b 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,12 @@
2016-12-03 Even Rouault <even.rouault at spatialys.com>
+ * tools/tiffcrop.c: fix readContigStripsIntoBuffer() in -i (ignore) mode so
+ that the output buffer is correctly incremented to avoid write outside bounds.
+ Reported by Agostino Sarubbo.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2620
+
+2016-12-03 Even Rouault <even.rouault at spatialys.com>
+
* libtiff/tif_ojpeg.c: make OJPEGDecode() early exit in case of failure in
OJPEGPreDecode(). This will avoid a divide by zero, and potential other issues.
Reported by Agostino Sarubbo.
diff --git a/tools/tiffcrop.c b/tools/tiffcrop.c
index 722b132..bdcbd63 100644
--- a/tools/tiffcrop.c
+++ b/tools/tiffcrop.c
@@ -1,4 +1,4 @@
-/* $Id: tiffcrop.c,v 1.46 2016-11-18 14:58:46 erouault Exp $ */
+/* $Id: tiffcrop.c,v 1.47 2016-12-03 11:35:56 erouault Exp $ */
/* tiffcrop.c -- a port of tiffcp.c extended to include manipulations of
* the image data through additional options listed below
@@ -3698,7 +3698,7 @@ static int readContigStripsIntoBuffer (TIFF* in, uint8* buf)
(unsigned long) strip, (unsigned long)rows);
return 0;
}
- bufp += bytes_read;
+ bufp += stripsize;
}
return 1;
commit 17d56c24c10ed300233164cc51380979124d6dd8
Author: erouault <erouault>
Date: Sat Dec 3 12:19:32 2016 +0000
* tools/tiffcrop.c: add 3 extra bytes at end of strip buffer in
readSeparateStripsIntoBuffer() to avoid read outside of heap allocated buffer.
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2621
diff --git a/ChangeLog b/ChangeLog
index d6a416b..50db803 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,12 @@
2016-12-03 Even Rouault <even.rouault at spatialys.com>
+ * tools/tiffcrop.c: add 3 extra bytes at end of strip buffer in
+ readSeparateStripsIntoBuffer() to avoid read outside of heap allocated buffer.
+ Reported by Agostino Sarubbo.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2621
+
+2016-12-03 Even Rouault <even.rouault at spatialys.com>
+
* tools/tiffcrop.c: fix readContigStripsIntoBuffer() in -i (ignore) mode so
that the output buffer is correctly incremented to avoid write outside bounds.
Reported by Agostino Sarubbo.
diff --git a/tools/tiffcrop.c b/tools/tiffcrop.c
index bdcbd63..9122aab 100644
--- a/tools/tiffcrop.c
+++ b/tools/tiffcrop.c
@@ -1,4 +1,4 @@
-/* $Id: tiffcrop.c,v 1.47 2016-12-03 11:35:56 erouault Exp $ */
+/* $Id: tiffcrop.c,v 1.48 2016-12-03 12:19:32 erouault Exp $ */
/* tiffcrop.c -- a port of tiffcp.c extended to include manipulations of
* the image data through additional options listed below
@@ -4815,10 +4815,17 @@ static int readSeparateStripsIntoBuffer (TIFF *in, uint8 *obuf, uint32 length,
nstrips = TIFFNumberOfStrips(in);
strips_per_sample = nstrips /spp;
+ /* Add 3 padding bytes for combineSeparateSamples32bits */
+ if( (size_t) stripsize > 0xFFFFFFFFU - 3U )
+ {
+ TIFFError("readSeparateStripsIntoBuffer", "Integer overflow when calculating buffer size.");
+ exit(-1);
+ }
+
for (s = 0; (s < spp) && (s < MAX_SAMPLES); s++)
{
srcbuffs[s] = NULL;
- buff = _TIFFmalloc(stripsize);
+ buff = _TIFFmalloc(stripsize + 3);
if (!buff)
{
TIFFError ("readSeparateStripsIntoBuffer",
@@ -4827,6 +4834,9 @@ static int readSeparateStripsIntoBuffer (TIFF *in, uint8 *obuf, uint32 length,
_TIFFfree (srcbuffs[i]);
return 0;
}
+ buff[stripsize] = 0;
+ buff[stripsize+1] = 0;
+ buff[stripsize+2] = 0;
srcbuffs[s] = buff;
}
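A condensed sketch of the allocation pattern introduced above: check that adding the padding cannot overflow, then allocate and zero the extra tail bytes. Plain malloc and size_t are used here instead of _TIFFmalloc and the 0xFFFFFFFFU bound of the actual patch, so this is an approximation of the idea, not the tiffcrop code itself.
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

/* Allocate `stripsize` bytes plus 3 zeroed padding bytes, so that code
 * reading up to 32 bits at a time never runs past the end of the
 * buffer. Returns NULL on overflow or allocation failure. */
static unsigned char *alloc_padded_strip(size_t stripsize)
{
    if (stripsize > SIZE_MAX - 3)        /* the addition would overflow */
        return NULL;
    unsigned char *buff = malloc(stripsize + 3);
    if (buff != NULL)
        memset(buff + stripsize, 0, 3);  /* zero the padding tail */
    return buff;
}

int main(void)
{
    unsigned char *buf = alloc_padded_strip(1024);
    if (buf == NULL)
        return 1;
    free(buf);
    return 0;
}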
commit fc9eedf265394eb8a5633160a8fcdb7ece072701
Author: erouault <erouault>
Date: Sat Dec 3 13:00:03 2016 +0000
* tools/tiffcrop.c: fix integer division by zero when BitsPerSample is missing.
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2619
diff --git a/ChangeLog b/ChangeLog
index 50db803..2940828 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,11 @@
2016-12-03 Even Rouault <even.rouault at spatialys.com>
+ * tools/tiffcrop.c: fix integer division by zero when BitsPerSample is missing.
+ Reported by Agostino Sarubbo.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2619
+
+2016-12-03 Even Rouault <even.rouault at spatialys.com>
+
* tools/tiffcrop.c: add 3 extra bytes at end of strip buffer in
readSeparateStripsIntoBuffer() to avoid read outside of heap allocated buffer.
Reported by Agostino Sarubbo.
diff --git a/tools/tiffcrop.c b/tools/tiffcrop.c
index 9122aab..21dd087 100644
--- a/tools/tiffcrop.c
+++ b/tools/tiffcrop.c
@@ -1,4 +1,4 @@
-/* $Id: tiffcrop.c,v 1.48 2016-12-03 12:19:32 erouault Exp $ */
+/* $Id: tiffcrop.c,v 1.49 2016-12-03 13:00:04 erouault Exp $ */
/* tiffcrop.c -- a port of tiffcp.c extended to include manipulations of
* the image data through additional options listed below
@@ -1164,7 +1164,7 @@ writeBufferToSeparateStrips (TIFF* out, uint8* buf,
tdata_t obuf;
(void) TIFFGetFieldDefaulted(out, TIFFTAG_ROWSPERSTRIP, &rowsperstrip);
- (void) TIFFGetField(out, TIFFTAG_BITSPERSAMPLE, &bps);
+ (void) TIFFGetFieldDefaulted(out, TIFFTAG_BITSPERSAMPLE, &bps);
bytes_per_sample = (bps + 7) / 8;
if( width == 0 ||
(uint32)bps * (uint32)spp > TIFF_UINT32_MAX / width ||
@@ -4760,7 +4760,7 @@ static int readSeparateStripsIntoBuffer (TIFF *in, uint8 *obuf, uint32 length,
int i, bytes_per_sample, bytes_per_pixel, shift_width, result = 1;
uint32 j;
int32 bytes_read = 0;
- uint16 bps, planar;
+ uint16 bps = 0, planar;
uint32 nstrips;
uint32 strips_per_sample;
uint32 src_rowsize, dst_rowsize, rows_processed, rps;
@@ -4780,7 +4780,7 @@ static int readSeparateStripsIntoBuffer (TIFF *in, uint8 *obuf, uint32 length,
}
memset (srcbuffs, '\0', sizeof(srcbuffs));
- TIFFGetField(in, TIFFTAG_BITSPERSAMPLE, &bps);
+ TIFFGetFieldDefaulted(in, TIFFTAG_BITSPERSAMPLE, &bps);
TIFFGetFieldDefaulted(in, TIFFTAG_PLANARCONFIG, &planar);
TIFFGetFieldDefaulted(in, TIFFTAG_ROWSPERSTRIP, &rps);
if (rps > length)
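The two-part defence in the patch above, initializing the variable, asking for a defaulted tag value, and still guarding before any division, boils down to the following sketch. The tag lookup is simulated and the helper name is illustrative; only the shape of the check matches the tiffcrop change.
#include <stdio.h>
#include <stdint.h>

/* Simulated tag lookup: returns 0 and leaves *bps untouched when the
 * tag is absent, mirroring a lookup on a broken file. */
static int get_bits_per_sample(int tag_present, uint16_t *bps)
{
    if (!tag_present)
        return 0;
    *bps = 8;
    return 1;
}

int main(void)
{
    uint16_t bps = 0;                 /* initialize, as the patch does */
    (void) get_bits_per_sample(0, &bps);
    if (bps == 0) {                   /* guard before dividing by bps */
        fprintf(stderr, "Error, cannot read BitsPerSample\n");
        return 1;
    }
    printf("bytes per sample: %u\n", (unsigned)((bps + 7) / 8));
    return 0;
}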
commit a1d523c27dafafadf589c77e834c52661f1c78fc
Author: erouault <erouault>
Date: Sat Dec 3 14:18:48 2016 +0000
* tools/tiffinfo.c: fix null pointer dereference in -r mode when the image has
no StripByteCount tag.
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2594
diff --git a/ChangeLog b/ChangeLog
index dd27c7f..e41d00c 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,12 @@
2016-12-03 Even Rouault <even.rouault at spatialys.com>
+ * tools/tiffinfo.c: fix null pointer dereference in -r mode when the image has
+ no StripByteCount tag.
+ Reported by Agostino Sarubbo.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2594
+
+2016-12-03 Even Rouault <even.rouault at spatialys.com>
+
* tools/tiffcrop.c: fix integer division by zero when BitsPerSample is missing.
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2619
diff --git a/tools/tiffinfo.c b/tools/tiffinfo.c
index b02c7d4..4d58055 100644
--- a/tools/tiffinfo.c
+++ b/tools/tiffinfo.c
@@ -1,4 +1,4 @@
-/* $Id: tiffinfo.c,v 1.25 2016-11-12 20:06:05 bfriesen Exp $ */
+/* $Id: tiffinfo.c,v 1.26 2016-12-03 14:18:49 erouault Exp $ */
/*
* Copyright (c) 1988-1997 Sam Leffler
@@ -417,7 +417,7 @@ TIFFReadRawData(TIFF* tif, int bitrev)
uint64* stripbc=NULL;
TIFFGetField(tif, TIFFTAG_STRIPBYTECOUNTS, &stripbc);
- if (nstrips > 0) {
+ if (stripbc != NULL && nstrips > 0) {
uint32 bufsize = (uint32) stripbc[0];
tdata_t buf = _TIFFmalloc(bufsize);
tstrip_t s;
commit 6d3ef98b2415b2edfa36a5ba600d5a824c094309
Author: erouault <erouault>
Date: Sat Dec 3 14:42:40 2016 +0000
* tools/tiffcp.c: avoid a potential division by zero if the BitsPerSample tag is
missing.
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2597
diff --git a/ChangeLog b/ChangeLog
index e41d00c..0d7b12d 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,12 @@
2016-12-03 Even Rouault <even.rouault at spatialys.com>
+ * tools/tiffcp.c: avoid potential division by zero if BitsPerSamples tag is
+ missing.
+ Reported by Agostino Sarubbo.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2597
+
+2016-12-03 Even Rouault <even.rouault at spatialys.com>
+
* tools/tiffinfo.c: fix null pointer dereference in -r mode when the image has
no StripByteCount tag.
Reported by Agostino Sarubbo.
diff --git a/tools/tiffcp.c b/tools/tiffcp.c
index 6dfb9a9..c8e48c3 100644
--- a/tools/tiffcp.c
+++ b/tools/tiffcp.c
@@ -1,4 +1,4 @@
-/* $Id: tiffcp.c,v 1.56 2016-12-02 22:13:32 erouault Exp $ */
+/* $Id: tiffcp.c,v 1.57 2016-12-03 14:42:40 erouault Exp $ */
/*
* Copyright (c) 1988-1997 Sam Leffler
@@ -1378,7 +1378,7 @@ DECLAREreadFunc(readSeparateTilesIntoBuffer)
uint8* bufp = (uint8*) buf;
uint32 tw, tl;
uint32 row;
- uint16 bps, bytes_per_sample;
+ uint16 bps = 0, bytes_per_sample;
tilebuf = _TIFFmalloc(tilesize);
if (tilebuf == 0)
@@ -1387,6 +1387,12 @@ DECLAREreadFunc(readSeparateTilesIntoBuffer)
(void) TIFFGetField(in, TIFFTAG_TILEWIDTH, &tw);
(void) TIFFGetField(in, TIFFTAG_TILELENGTH, &tl);
(void) TIFFGetField(in, TIFFTAG_BITSPERSAMPLE, &bps);
+ if( bps == 0 )
+ {
+ TIFFError(TIFFFileName(in), "Error, cannot read BitsPerSample");
+ status = 0;
+ goto done;
+ }
assert( bps % 8 == 0 );
bytes_per_sample = bps/8;
commit 9991b31a7c651e7b87a3ccd73b3dc5c67dcfdd60
Author: erouault <erouault>
Date: Sat Dec 3 15:30:31 2016 +0000
* libtiff/tif_dir.c: when TIFFGetField(, TIFFTAG_NUMBEROFINKS, ) is called,
limit the returned number of inks to SamplesPerPixel, so that code that parses
ink names doesn't go past the end of the buffer.
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2599
diff --git a/ChangeLog b/ChangeLog
index 0d7b12d..fb9fc0e 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,13 @@
2016-12-03 Even Rouault <even.rouault at spatialys.com>
+ * tools/tif_dir.c: when TIFFGetField(, TIFFTAG_NUMBEROFINKS, ) is called,
+ limit the return number of inks to SamplesPerPixel, so that code that parses
+ ink names doesn't go past the end of the buffer.
+ Reported by Agostino Sarubbo.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2599
+
+2016-12-03 Even Rouault <even.rouault at spatialys.com>
+
* tools/tiffcp.c: avoid potential division by zero if BitsPerSamples tag is
missing.
Reported by Agostino Sarubbo.
diff --git a/libtiff/tif_dir.c b/libtiff/tif_dir.c
index ad21655..2574e74 100644
--- a/libtiff/tif_dir.c
+++ b/libtiff/tif_dir.c
@@ -1,4 +1,4 @@
-/* $Id: tif_dir.c,v 1.127 2016-10-25 21:35:15 erouault Exp $ */
+/* $Id: tif_dir.c,v 1.128 2016-12-03 15:30:31 erouault Exp $ */
/*
* Copyright (c) 1988-1997 Sam Leffler
@@ -854,6 +854,32 @@ _TIFFVGetField(TIFF* tif, uint32 tag, va_list ap)
if( fip == NULL ) /* cannot happen since TIFFGetField() already checks it */
return 0;
+ if( tag == TIFFTAG_NUMBEROFINKS )
+ {
+ int i;
+ for (i = 0; i < td->td_customValueCount; i++) {
+ uint16 val;
+ TIFFTagValue *tv = td->td_customValues + i;
+ if (tv->info->field_tag != tag)
+ continue;
+ val = *(uint16 *)tv->value;
+ /* Truncate to SamplesPerPixel, since the */
+ /* setting code for INKNAMES assume that there are SamplesPerPixel */
+ /* inknames. */
+ /* Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2599 */
+ if( val > td->td_samplesperpixel )
+ {
+ TIFFWarningExt(tif->tif_clientdata,"_TIFFVGetField",
+ "Truncating NumberOfInks from %u to %u",
+ val, td->td_samplesperpixel);
+ val = td->td_samplesperpixel;
+ }
+ *va_arg(ap, uint16*) = val;
+ return 1;
+ }
+ return 0;
+ }
+
/*
* We want to force the custom code to be used for custom
* fields even if the tag happens to match a well known
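The clamping logic above reduces to a small helper: never report more inks than there are samples per pixel, because code that walks the InkNames string expects one name per reported ink. The standalone sketch below illustrates the check; it is not the libtiff code itself.
#include <stdio.h>
#include <stdint.h>

/* Return the NumberOfInks value to hand back to callers, truncated to
 * SamplesPerPixel so that a loop over ink names stays inside the
 * InkNames buffer. */
static uint16_t clamp_number_of_inks(uint16_t stored_inks,
                                     uint16_t samples_per_pixel)
{
    if (stored_inks > samples_per_pixel) {
        fprintf(stderr, "Truncating NumberOfInks from %u to %u\n",
                (unsigned)stored_inks, (unsigned)samples_per_pixel);
        return samples_per_pixel;
    }
    return stored_inks;
}

int main(void)
{
    /* A malicious file claims 40 inks but only has 4 samples per pixel. */
    printf("%u\n", (unsigned)clamp_number_of_inks(40, 4));   /* prints 4 */
    return 0;
}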
commit 307a31765cb01245e3655ce168385dd7d51e14bd
Author: erouault <erouault>
Date: Sat Dec 3 15:44:15 2016 +0000
* tools/tiffcp.c: avoid a potential division by zero if the BitsPerSample tag is
missing.
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2607
diff --git a/ChangeLog b/ChangeLog
index ac2d922..94be038 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,12 @@
2016-12-03 Even Rouault <even.rouault at spatialys.com>
+ * tools/tiffcp.c: avoid potential division by zero if BitsPerSamples tag is
+ missing.
+ Reported by Agostino Sarubbo.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2607
+
+2016-12-03 Even Rouault <even.rouault at spatialys.com>
+
* tools/tif_dir.c: when TIFFGetField(, TIFFTAG_NUMBEROFINKS, ) is called,
limit the return number of inks to SamplesPerPixel, so that code that parses
ink names doesn't go past the end of the buffer.
diff --git a/tools/tiffcp.c b/tools/tiffcp.c
index c8e48c3..142cbb0 100644
--- a/tools/tiffcp.c
+++ b/tools/tiffcp.c
@@ -1,4 +1,4 @@
-/* $Id: tiffcp.c,v 1.57 2016-12-03 14:42:40 erouault Exp $ */
+/* $Id: tiffcp.c,v 1.58 2016-12-03 15:44:15 erouault Exp $ */
/*
* Copyright (c) 1988-1997 Sam Leffler
@@ -1569,7 +1569,7 @@ DECLAREwriteFunc(writeBufferToSeparateTiles)
uint8* bufp = (uint8*) buf;
uint32 tl, tw;
uint32 row;
- uint16 bps, bytes_per_sample;
+ uint16 bps = 0, bytes_per_sample;
obuf = _TIFFmalloc(TIFFTileSize(out));
if (obuf == NULL)
@@ -1578,6 +1578,12 @@ DECLAREwriteFunc(writeBufferToSeparateTiles)
(void) TIFFGetField(out, TIFFTAG_TILELENGTH, &tl);
(void) TIFFGetField(out, TIFFTAG_TILEWIDTH, &tw);
(void) TIFFGetField(out, TIFFTAG_BITSPERSAMPLE, &bps);
+ if( bps == 0 )
+ {
+ TIFFError(TIFFFileName(out), "Error, cannot read BitsPerSample");
+ _TIFFfree(obuf);
+ return 0;
+ }
assert( bps % 8 == 0 );
bytes_per_sample = bps/8;
commit b34209fede77aa203cd5adcd9638ccc70731a50f
Author: erouault <erouault>
Date: Sat Dec 3 16:40:01 2016 +0000
* tools/tiffcp.c: fix uint32 underflow/overflow that can cause heap-based
buffer overflow.
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2610
diff --git a/ChangeLog b/ChangeLog
index 94be038..8ee76c0 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,12 @@
2016-12-03 Even Rouault <even.rouault at spatialys.com>
+ * tools/tiffcp.c: fix uint32 underflow/overflow that can cause heap-based
+ buffer overflow.
+ Reported by Agostino Sarubbo.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2610
+
+2016-12-03 Even Rouault <even.rouault at spatialys.com>
+
* tools/tiffcp.c: avoid potential division by zero if BitsPerSamples tag is
missing.
Reported by Agostino Sarubbo.
diff --git a/tools/tiffcp.c b/tools/tiffcp.c
index 142cbb0..6d96bb8 100644
--- a/tools/tiffcp.c
+++ b/tools/tiffcp.c
@@ -1,4 +1,4 @@
-/* $Id: tiffcp.c,v 1.58 2016-12-03 15:44:15 erouault Exp $ */
+/* $Id: tiffcp.c,v 1.59 2016-12-03 16:40:01 erouault Exp $ */
/*
* Copyright (c) 1988-1997 Sam Leffler
@@ -1163,7 +1163,7 @@ bad:
static void
cpStripToTile(uint8* out, uint8* in,
- uint32 rows, uint32 cols, int outskew, int inskew)
+ uint32 rows, uint32 cols, int outskew, int64 inskew)
{
while (rows-- > 0) {
uint32 j = cols;
@@ -1320,7 +1320,7 @@ DECLAREreadFunc(readContigTilesIntoBuffer)
tdata_t tilebuf;
uint32 imagew = TIFFScanlineSize(in);
uint32 tilew = TIFFTileRowSize(in);
- int iskew = imagew - tilew;
+ int64 iskew = (int64)imagew - (int64)tilew;
uint8* bufp = (uint8*) buf;
uint32 tw, tl;
uint32 row;
@@ -1348,7 +1348,7 @@ DECLAREreadFunc(readContigTilesIntoBuffer)
status = 0;
goto done;
}
- if (colb + tilew > imagew) {
+ if (colb > iskew) {
uint32 width = imagew - colb;
uint32 oskew = tilew - width;
cpStripToTile(bufp + colb,
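The widening to int64 above avoids a classic uint32 wraparound: when a tile row is wider than the image scanline, imagew - tilew underflows to a huge unsigned value, and later pointer arithmetic based on it walks far outside the buffer. A small demonstration of the difference, with illustrative values:
#include <stdio.h>
#include <stdint.h>

int main(void)
{
    uint32_t imagew = 100;            /* bytes per image scanline */
    uint32_t tilew  = 256;            /* bytes per tile row (wider than image) */

    /* Old computation: unsigned subtraction wraps around. */
    uint32_t bad_skew = imagew - tilew;

    /* Patched computation: widen first, keep the sign. */
    int64_t  good_skew = (int64_t)imagew - (int64_t)tilew;

    printf("uint32 skew: %u\n", (unsigned)bad_skew);        /* 4294967140 */
    printf("int64  skew: %lld\n", (long long)good_skew);    /* -156 */
    return 0;
}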
commit 18bca4cf3057681689efb502175cbe5f01cb68c3
Author: erouault <erouault>
Date: Sat Dec 3 16:50:02 2016 +0000
* tools/tiffcp.c: replace assert( (bps % 8) == 0 ) by a non-assert check.
Reported by Agostino Sarubbo.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2605
diff --git a/ChangeLog b/ChangeLog
index 8ee76c0..025eb72 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,11 @@
2016-12-03 Even Rouault <even.rouault at spatialys.com>
+ * tools/tiffcp.c: replace assert( (bps % 8) == 0 ) by a non assert check.
+ Reported by Agostino Sarubbo.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2605
+
+2016-12-03 Even Rouault <even.rouault at spatialys.com>
+
* tools/tiffcp.c: fix uint32 underflow/overflow that can cause heap-based
buffer overflow.
Reported by Agostino Sarubbo.
diff --git a/tools/tiffcp.c b/tools/tiffcp.c
index 6d96bb8..49c9d37 100644
--- a/tools/tiffcp.c
+++ b/tools/tiffcp.c
@@ -1,4 +1,4 @@
-/* $Id: tiffcp.c,v 1.59 2016-12-03 16:40:01 erouault Exp $ */
+/* $Id: tiffcp.c,v 1.60 2016-12-03 16:50:02 erouault Exp $ */
/*
* Copyright (c) 1988-1997 Sam Leffler
@@ -45,7 +45,6 @@
#include <string.h>
#include <ctype.h>
-#include <assert.h>
#ifdef HAVE_UNISTD_H
# include <unistd.h>
@@ -1393,7 +1392,12 @@ DECLAREreadFunc(readSeparateTilesIntoBuffer)
status = 0;
goto done;
}
- assert( bps % 8 == 0 );
+ if( (bps % 8) != 0 )
+ {
+ TIFFError(TIFFFileName(in), "Error, cannot handle BitsPerSample that is not a multiple of 8");
+ status = 0;
+ goto done;
+ }
bytes_per_sample = bps/8;
for (row = 0; row < imagelength; row += tl) {
@@ -1584,7 +1588,12 @@ DECLAREwriteFunc(writeBufferToSeparateTiles)
_TIFFfree(obuf);
return 0;
}
- assert( bps % 8 == 0 );
+ if( (bps % 8) != 0 )
+ {
+ TIFFError(TIFFFileName(out), "Error, cannot handle BitsPerSample that is not a multiple of 8");
+ _TIFFfree(obuf);
+ return 0;
+ }
bytes_per_sample = bps/8;
for (row = 0; row < imagelength; row += tl) {
commit 8c9dbee088d8b43cdae47b9c5f711058bd1f17f1
Author: erouault <erouault>
Date: Tue Dec 13 18:15:48 2016 +0000
* libtiff/tif_fax3.h: revert the change done on 2016-01-09 that made the
Param member of the TIFFFaxTabEnt structure a uint16 to reduce the size of
the binary. It happens that the Hylafax software uses the tables that
follow this typedef (TIFFFaxMainTable, TIFFFaxWhiteTable,
TIFFFaxBlackTable), even though they are not in a public libtiff header.
Raised by Lee Howard.
Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2636
diff --git a/ChangeLog b/ChangeLog
index ee6fd802..558db20e 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,13 @@
+2016-12-13 Even Rouault <even.rouault at spatialys.com>
+
+ * libtiff/tif_fax3.h: revert change done on 2016-01-09 that made
+ Param member of TIFFFaxTabEnt structure a uint16 to reduce size of
+ the binary. It happens that the Hylafax software uses the tables that
+ follow this typedef (TIFFFaxMainTable, TIFFFaxWhiteTable,
+ TIFFFaxBlackTable), also they are not in a public libtiff header.
+ Raised by Lee Howard.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2636
+
2016-12-03 Even Rouault <even.rouault at spatialys.com>
* tools/tiffcp.c: replace assert( (bps % 8) == 0 ) by a non assert check.
diff --git a/libtiff/tif_fax3.h b/libtiff/tif_fax3.h
index e0b2ca6b..45ce43f1 100644
--- a/libtiff/tif_fax3.h
+++ b/libtiff/tif_fax3.h
@@ -1,4 +1,4 @@
-/* $Id: tif_fax3.h,v 1.11 2016-01-23 21:20:34 erouault Exp $ */
+/* $Id: tif_fax3.h,v 1.12 2016-12-13 18:15:48 erouault Exp $ */
/*
* Copyright (c) 1990-1997 Sam Leffler
@@ -81,10 +81,12 @@ extern void _TIFFFax3fillruns(unsigned char*, uint32*, uint32*, uint32);
#define S_MakeUp 11
#define S_EOL 12
+/* WARNING: do not change the layout of this structure as the Halyfax software */
+/* really depends on it. See http://bugzilla.maptools.org/show_bug.cgi?id=2636 */
typedef struct { /* state table entry */
unsigned char State; /* see above */
unsigned char Width; /* width of code in bits */
- uint16 Param; /* unsigned 16-bit run length in bits */
+ uint32 Param; /* unsigned 32-bit run length in bits (holds on 16 bit actually, but cannot be changed. See above warning) */
} TIFFFaxTabEnt;
extern const TIFFFaxTabEnt TIFFFaxMainTable[];
From c7153361a4041260719b340f73f2f76b0969235c Mon Sep 17 00:00:00 2001
From: erouault <erouault>
Date: Tue, 20 Dec 2016 17:28:17 +0000
Subject: [PATCH] * tools/tiff2pdf.c: avoid potential heap-based overflow in
t2p_readwrite_pdf_image_tile(). Fixes
http://bugzilla.maptools.org/show_bug.cgi?id=2640
---
ChangeLog | 6 ++++++
tools/tiff2pdf.c | 2 +-
2 files changed, 7 insertions(+), 1 deletion(-)
diff --git a/ChangeLog b/ChangeLog
index 6be3602..91ba4e6 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,9 @@
+2016-12-20 Even Rouault <even.rouault at spatialys.com>
+
+ * tools/tiff2pdf.c: avoid potential heap-based overflow in
+ t2p_readwrite_pdf_image_tile().
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2640
+
2016-12-13 Even Rouault <even.rouault at spatialys.com>
* libtiff/tif_fax3.h: revert change done on 2016-01-09 that made
diff --git a/tools/tiff2pdf.c b/tools/tiff2pdf.c
index 47d7629..db196e0 100644
--- a/tools/tiff2pdf.c
+++ b/tools/tiff2pdf.c
@@ -2895,7 +2895,7 @@ tsize_t t2p_readwrite_pdf_image_tile(T2P
return(0);
}
if(TIFFGetField(input, TIFFTAG_JPEGTABLES, &count, &jpt) != 0) {
- if (count >= 4) {
+ if (count > 4) {
/* Ignore EOI marker of JpegTables */
_TIFFmemcpy(buffer, jpt, count - 2);
bufferoffset += count - 2;
From 5c080298d59efa53264d7248bbe3a04660db6ef7 Mon Sep 17 00:00:00 2001
From: erouault <erouault>
Date: Wed, 11 Jan 2017 19:25:44 +0000
Subject: [PATCH] * tools/tiffcp.c: error out cleanly in cpContig2SeparateByRow
and cpSeparate2ContigByRow if BitsPerSample != 8 to avoid heap based
overflow. Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2656 and
http://bugzilla.maptools.org/show_bug.cgi?id=2657
---
ChangeLog | 7 +++++++
tools/tiffcp.c | 24 ++++++++++++++++++++++--
2 files changed, 29 insertions(+), 2 deletions(-)
diff --git a/ChangeLog b/ChangeLog
index f78cad0..064f25b 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,10 @@
+2017-01-11 Even Rouault <even.rouault at spatialys.com>
+
+ * tools/tiffcp.c: error out cleanly in cpContig2SeparateByRow and
+ cpSeparate2ContigByRow if BitsPerSample != 8 to avoid heap based overflow.
+ Fixes http://bugzilla.maptools.org/show_bug.cgi?id=2656 and
+ http://bugzilla.maptools.org/show_bug.cgi?id=2657
+
2016-12-20 Even Rouault <even.rouault at spatialys.com>
* tools/tiff2pdf.c: avoid potential heap-based overflow in
diff --git a/tools/tiffcp.c b/tools/tiffcp.c
index bdf754c..8bbcd52 100644
--- a/tools/tiffcp.c
+++ b/tools/tiffcp.c
@@ -591,7 +591,7 @@ static copyFunc pickCopyFunc(TIFF*, TIFF*, uint16, uint16);
static int
tiffcp(TIFF* in, TIFF* out)
{
- uint16 bitspersample, samplesperpixel = 1;
+ uint16 bitspersample = 1, samplesperpixel = 1;
uint16 input_compression, input_photometric = PHOTOMETRIC_MINISBLACK;
copyFunc cf;
uint32 width, length;
@@ -1067,6 +1067,16 @@ DECLAREcpFunc(cpContig2SeparateByRow)
register uint32 n;
uint32 row;
tsample_t s;
+ uint16 bps = 0;
+
+ (void) TIFFGetField(in, TIFFTAG_BITSPERSAMPLE, &bps);
+ if( bps != 8 )
+ {
+ TIFFError(TIFFFileName(in),
+ "Error, can only handle BitsPerSample=8 in %s",
+ "cpContig2SeparateByRow");
+ return 0;
+ }
inbuf = _TIFFmalloc(scanlinesizein);
outbuf = _TIFFmalloc(scanlinesizeout);
@@ -1120,6 +1130,16 @@ DECLAREcpFunc(cpSeparate2ContigByRow)
register uint32 n;
uint32 row;
tsample_t s;
+ uint16 bps = 0;
+
+ (void) TIFFGetField(in, TIFFTAG_BITSPERSAMPLE, &bps);
+ if( bps != 8 )
+ {
+ TIFFError(TIFFFileName(in),
+ "Error, can only handle BitsPerSample=8 in %s",
+ "cpSeparate2ContigByRow");
+ return 0;
+ }
inbuf = _TIFFmalloc(scanlinesizein);
outbuf = _TIFFmalloc(scanlinesizeout);
@@ -1784,7 +1804,7 @@ pickCopyFunc(TIFF* in, TIFF* out, uint16 bitspersample, uint16 samplesperpixel)
uint32 w, l, tw, tl;
int bychunk;
- (void) TIFFGetField(in, TIFFTAG_PLANARCONFIG, &shortv);
+ (void) TIFFGetFieldDefaulted(in, TIFFTAG_PLANARCONFIG, &shortv);
if (shortv != config && bitspersample != 8 && samplesperpixel > 1) {
fprintf(stderr,
"%s: Cannot handle different planar configuration w/ bits/sample != 8\n",
......@@ -2,17 +2,17 @@
extends =
../patch/buildout.cfg
../popt/buildout.cfg
../xz-utils/buildout.cfg
parts = logrotate
[logrotate]
recipe = slapos.recipe.cmmi
url = https://fedorahosted.org/releases/l/o/logrotate/logrotate-3.7.9.tar.gz
md5sum = eeba9dbca62a9210236f4b83195e4ea5
patch-options = -p1
patches =
${:_profile_base_location_}/logrotate-3.7.9-O_CLOEXEC.optional.patch#6beac248c978b767d4bccc1b7eebe6bd
configure-command = true
make-options = PREFIX=${buildout:parts-directory}/${:_buildout_section_name_}
url = https://github.com/logrotate/logrotate/releases/download/3.11.0/logrotate-3.11.0.tar.xz
md5sum = 3a9280e4caeb837427a2d54518fbcdac
# BBB this is only for backward-compatibility.
post-install =
ln -s . ${buildout:parts-directory}/${:_buildout_section_name_}/usr
environment =
PATH=${patch:location}/bin:%(PATH)s
POPT_DIR=${popt:location}/include -L${popt:location}/lib -Wl,-rpath=${popt:location}/lib
PATH=${xz-utils:location}/bin:%(PATH)s
CPPFLAGS=-I${popt:location}/include
LDFLAGS=-L${popt:location}/lib -Wl,-rpath=${popt:location}/lib
......@@ -22,13 +22,12 @@ parts =
[mariadb]
recipe = slapos.recipe.cmmi
version = 10.1.21
url = http://ftp.osuosl.org/pub/mariadb/mariadb-10.1.21/source/mariadb-10.1.21.tar.gz
url = https://downloads.mariadb.com/MariaDB/mariadb-10.1.21/source/mariadb-10.1.21.tar.gz
md5sum = ff8b96edeeb22425117309a249055310
location = ${buildout:parts-directory}/${:_buildout_section_name_}
patch-options = -p0
patches =
${:_profile_base_location_}/mariadb_10.1.21_create_system_tables__no_test.patch#3c76aa9564a162f13aced7c0a3f783b3
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-command = ${cmake:location}/bin/cmake
configure-options =
-DCMAKE_INSTALL_PREFIX=${:location}
......@@ -55,7 +54,8 @@ configure-options =
-DWITHOUT_DAEMON_EXAMPLE=1
-DCMAKE_C_FLAGS="-I${jemalloc:location}/include -I${libaio:location}/include -I${libxml2:location}/include -I${ncurses:location}/include -I${openssl:location}/include -I${pcre:location}/include -I${readline5:location}/include -I${xz-utils:location}/include -I${zlib:location}/include"
-DCMAKE_CXX_FLAGS="-I${jemalloc:location}/include -I${libaio:location}/include -I${libxml2:location}/include -I${ncurses:location}/include -I${openssl:location}/include -I${pcre:location}/include -I${readline5:location}/include -I${xz-utils:location}/include -I${zlib:location}/include"
-DCMAKE_INSTALL_RPATH=${jemalloc:location}/lib:${libaio:location}/lib:${libxml2:location}/lib:${ncurses:location}/lib:${openssl:location}/lib:${pcre:location}/lib:${readline5:location}/lib:${xz-utils:location}/lib:${zlib:location}/lib
-DCMAKE_INSTALL_RPATH=${:CMAKE_INSTALL_RPATH}
CMAKE_INSTALL_RPATH=${jemalloc:location}/lib:${libaio:location}/lib:${libxml2:location}/lib:${ncurses:location}/lib:${openssl:location}/lib:${pcre:location}/lib:${readline5:location}/lib:${xz-utils:location}/lib:${zlib:location}/lib
environment =
CMAKE_PROGRAM_PATH=${cmake:location}/bin
CMAKE_INCLUDE_PATH=${libaio:location}/include:${libaio:location}/include:${libxml2:location}/include:${ncurses:location}/include:${openssl:location}/include:${pcre:location}/include:${readline5:location}/include:${xz-utils:location}/include:${zlib:location}/include
......@@ -70,8 +70,8 @@ post-install =
# mroonga - a storage engine for MySQL. It provides a fast fulltext search feature to all MySQL users.
# http://mroonga.github.com/
recipe = slapos.recipe.cmmi
url = http://packages.groonga.org/source/mroonga/mroonga-6.11.tar.gz
md5sum = 1c30bb6d89dbee0cc2f9b0a5cfaaccf0
url = http://packages.groonga.org/source/mroonga/mroonga-7.00.tar.gz
md5sum = befcbb7de8f5dd27366ea513dc092f3f
pre-configure =
mkdir fake_mariadb_source &&
ln -s ${mariadb:location}/include/mysql/private fake_mariadb_source/sql
......
[buildout]
extends =
buildout.cfg
[mariarocks-source]
recipe = slapos.recipe.build:gitclone
git-executable = ${git:location}/bin/git
repository = https://github.com/MariaDB/server.git
branch = 10.2-mariarocks
[mariarocks]
<= mariadb
url =
md5sum =
patches =
path = ${mariarocks-source:location}
[mariadb]
url = https://downloads.mariadb.com/MariaDB/mariadb-10.2.5/source/mariadb-10.2.5.tar.gz
md5sum = 16fb01bb375c8d8a91a9ecf491397d51
pre-configure =
${git:location}/bin/git submodule update --init
post-install =
make clean
sed -i -e '/(rc == -1 || errno == EINTR)/s/||/\&\&/' libmariadb/plugins/pvio/pvio_socket.c
sed -i -e 's,set(SYSTEM_LIBS $${CMAKE_THREAD_LIBS_INIT}),set(SYSTEM_LIBS $${CMAKE_THREAD_LIBS_INIT} $${LIBRT}),' storage/rocksdb/build_rocksdb.cmake
configure-options +=
-DPLUGIN_DAEMON_EXAMPLE=NO
-DPLUGIN_EXAMPLE=NO
-DPLUGIN_MROONGA=NO
-DCMAKE_C_COMPILER=${gcc:location}/bin/gcc
-DCMAKE_CXX_COMPILER=${gcc:location}/bin/g++
-DCMAKE_INSTALL_RPATH=${gcc:location}/lib:${gcc:location}/lib64:${:CMAKE_INSTALL_RPATH}
[buildout]
extends =
../automake/buildout.cfg
../libtool/buildout.cfg
../pkgconfig/buildout.cfg
parts =
nghttp2
[nghttp2]
recipe = slapos.recipe.cmmi
url = https://github.com/nghttp2/nghttp2/archive/v1.20.0.zip
md5sum = e77319851b27c2366aa1cc8a8eabe73e
location = ${buildout:parts-directory}/${:_buildout_section_name_}
pre-configure =
autoreconf -fisv -I ${libtool:location}/share/aclocal -I ${pkgconfig:location}/share/aclocal
automake
autoconf
environment =
PATH=${autoconf:location}/bin:${automake:location}/bin:${git:location}/bin:${libtool:location}/bin:${m4:location}/bin:%(PATH)s
......@@ -8,8 +8,11 @@ parts = nspr
recipe = slapos.recipe.cmmi
url = https://ftp.mozilla.org/pub/nspr/releases/v4.13.1/src/nspr-4.13.1.tar.gz
md5sum = 9c44298a6fc478b3c0a4e98f4f9981ed
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-command = nspr/configure
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--enable-64bit
--enable-ipv6
environment=
LDFLAGS=-Wl,-rpath=${:location}/lib
......@@ -19,6 +19,7 @@ make-options =
SQLITE_INCLUDE_DIR=${sqlite3:location}/include
SQLITE_LIB_DIR=${sqlite3:location}/lib
LDFLAGS="-Wl,-rpath=${:location}/lib -Wl,-rpath=${nspr:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib"
RPATH="-Wl,-rpath=${:location}/lib -Wl,-rpath=${nspr:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib"
all
post-install =
cp -aLr dist/*OBJ/* ${:location}/
......
......@@ -6,7 +6,7 @@ extends =
../openblas/buildout.cfg
[numpy-env]
PATH = ${gcc-fortran:location}/bin:%(PATH)s
PATH = ${gcc:location}/bin:%(PATH)s
OPENBLAS = ${openblas:location}/lib/libopenblas.so
LAPACK = ${openblas:location}/lib/libopenblas.so
ATLAS = ${openblas:location}/lib/libopenblas.so
......@@ -14,6 +14,6 @@ ATLAS = ${openblas:location}/lib/libopenblas.so
[numpy]
environment = numpy-env
rpath =
${gcc-fortran:location}/lib
${gcc-fortran:location}/lib64
${gcc:location}/lib
${gcc:location}/lib64
${openblas:location}/lib
......@@ -36,6 +36,6 @@ make-options =
make-targets =
PREFIX="${buildout:parts-directory}/${:_buildout_section_name_}" install
environment =
PATH=${gcc-fortran:location}/bin:${patch:location}/bin:${perl:location}/bin:%(PATH)s
LD_LIBRARY_PATH=${gcc-fortran:location}/lib:${gcc-fortran:location}/lib64
LDFLAGS=-Wl,-rpath=${gcc-fortran:location}/lib -Wl,-rpath=${gcc-fortran:location}/lib64
PATH=${gcc:location}/bin:${patch:location}/bin:${perl:location}/bin:%(PATH)s
LD_LIBRARY_PATH=${gcc:location}/lib:${gcc:location}/lib64
LDFLAGS=-Wl,-rpath=${gcc:location}/lib -Wl,-rpath=${gcc:location}/lib64
......@@ -16,8 +16,8 @@ parts =
[openssl]
recipe = slapos.recipe.cmmi
url = https://www.openssl.org/source/openssl-1.0.2j.tar.gz
md5sum = 96322138f0b69e61b7212bc53d5e912b
url = https://www.openssl.org/source/openssl-1.0.2k.tar.gz
md5sum = f965fc0bf01bf882b31314b61391ae65
location = ${buildout:parts-directory}/${:_buildout_section_name_}
# 'prefix' option to override --openssldir/--prefix (which is useful
# when combined with INSTALL_PREFIX). Used by slapos.package.git/obs
......@@ -37,6 +37,8 @@ configure-options =
shared no-idea no-mdc2 no-rc5 zlib
-Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${:location}/lib
&& make depend
make-options =
SHARED_LDFLAGS='-Wl,-rpath=${:location}/lib -Wl,-rpath=${zlib:location}/lib'
make-targets =
install_sw && x=${:location}/etc/ssl/certs && rm -f $x/* &&
for i in ${ca-certificates:location}/certs/*/*.crt; do
......
......@@ -4,8 +4,8 @@ parts =
[pcre]
recipe = slapos.recipe.cmmi
url = http://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre-8.39.tar.bz2
md5sum = e3fca7650a0556a2647821679d81f585
url = http://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre-8.40.tar.bz2
md5sum = 41a842bf7dcecd6634219336e2167d1d
configure-options =
--disable-static
--enable-unicode-properties
......@@ -11,5 +11,13 @@ recipe = slapos.recipe.cmmi
version = 2.2.15
url = http://www.percona.com/redir/downloads/percona-toolkit/${:version}/tarball/percona-toolkit-${:version}.tar.gz
md5sum = 022f40dadaea9025820530ea1f986192
depends =
${perl-DBI:location}
${perl-DBD-mariadb:location}
pre-configure =
sed -i -e 's,^#!/usr/bin/env perl,#!perl,' bin/*
configure-command =
${perl:location}/bin/perl Makefile.PL
location = ${buildout:parts-directory}/${:_buildout_section_name_}
make-options =
INSTALLSITESCRIPT=${:location}/bin
......@@ -10,3 +10,6 @@ url = http://www.cpan.org/modules/by-module/DBI/DBI-1.631.tar.gz
md5sum = 444d3c305e86597e11092b517794a840
configure-command =
${perl:location}/bin/perl Makefile.PL
location = ${buildout:parts-directory}/${:_buildout_section_name_}
make-options =
INSTALLSITESCRIPT=${:location}/bin
--- Encode-HanExtra-0.23/Makefile.PL.orig 2017-04-14 13:22:30.511213646 +0200
+++ Encode-HanExtra-0.23/Makefile.PL 2017-04-14 13:22:41.935282773 +0200
@@ -179,7 +179,7 @@
my $plib = $self->{PERL_CORE} ? '"-I$(PERL_LIB)"' : '';
my $ucopts = '-"Q"';
$str .=
- qq{\n\t\$(PERL) $plib $enc2xs $ucopts -o \$\@ -f $table.fnm\n\n};
+ qq{\n\t$enc2xs $ucopts -o \$\@ -f $table.fnm\n\n};
open(FILELIST, ">$table.fnm")
|| die "Could not open $table.fnm: $!";
foreach my $file (@{ $tables{$table} }) {
[buildout]
extends =
../patch/buildout.cfg
../perl/buildout.cfg
parts =
perl-Encode-HanExtra
......@@ -10,3 +11,8 @@ url = http://www.cpan.org/modules/by-module/Encode/Encode-HanExtra-0.23.tar.gz
md5sum = e1d3bc32c1c8ee304235a06fbcd5d5a4
configure-command =
${perl:location}/bin/perl Makefile.PL
patch-options = -p1
patches =
${:_profile_base_location_}/Encode-HanExtra-0.23_use_shebang.patch#4ac804013a68a3e6b7c3bc4115d25a5e
environment =
PATH=${patch:location}/bin:%(PATH)s
......@@ -10,3 +10,6 @@ url = http://www.cpan.org/modules/by-module/Net/Net-IP-1.26.tar.gz
md5sum = 3a98e3ac45d69ea38a63a7e678bd716d
configure-command =
${perl:location}/bin/perl Makefile.PL
location = ${buildout:parts-directory}/${:_buildout_section_name_}
make-options =
INSTALLSITESCRIPT=${:location}/bin
......@@ -35,3 +35,6 @@ url = http://www.cpan.org/modules/by-module/LWP/libwww-perl-6.06.tar.gz
md5sum = 292496bd2a13da95903b383f59009200
configure-command =
${perl:location}/bin/perl Makefile.PL
location = ${buildout:parts-directory}/${:_buildout_section_name_}
make-options =
INSTALLSITESCRIPT=${:location}/bin
[buildout]
parts =
pole
[pole]
recipe = slapos.recipe.cmmi
location = ${buildout:parts-directory}/${:_buildout_section_name_}
url = https://bitbucket.org/dimin/pole/get/c15e513.tar.gz
md5sum = 691a5d80863df868804c5a9defa0c8a6
configure-command = true
make-binary =
make-options =
make-targets = true
post-install =
set -e -x
mkdir bin
cd pole
g++ -o ../bin/poledump pole.cpp poledump.cpp
mv ../bin '${:location}'
......@@ -9,7 +9,6 @@ extends =
../zlib/buildout.cfg
parts =
gcc-fortran
r-language
[r-language]
......@@ -33,6 +32,6 @@ configure-options =
--without-ICU
--without-x
environment =
PATH=${gcc-fortran:location}/bin:%(PATH)s
PATH=${gcc:location}/bin:%(PATH)s
CPPFLAGS=-I${bzip2:location}/include -I${openblas:location}/include -I${pcre:location}/include -I${readline:location}/include -I${ncurses:location}/include -I${xz-utils:location}/include -I${zlib:location}/include
LDFLAGS=-L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib -L${gcc-fortran:location}/lib -Wl,-rpath=${gcc-fortran:location}/lib -L${gcc-fortran:location}/lib64 -Wl,-rpath=${gcc-fortran:location}/lib64 -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${openblas:location}/lib -Wl,-rpath=${openblas:location}/lib -L${pcre:location}/lib -Wl,-rpath=${pcre:location}/lib -L${readline:location}/lib -Wl,-rpath=${readline:location}/lib -L${xz-utils:location}/lib -Wl,-rpath=${xz-utils:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
LDFLAGS=-L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib -L${gcc:location}/lib -Wl,-rpath=${gcc:location}/lib -L${gcc:location}/lib64 -Wl,-rpath=${gcc:location}/lib64 -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${openblas:location}/lib -Wl,-rpath=${openblas:location}/lib -L${pcre:location}/lib -Wl,-rpath=${pcre:location}/lib -L${readline:location}/lib -Wl,-rpath=${readline:location}/lib -L${xz-utils:location}/lib -Wl,-rpath=${xz-utils:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
......@@ -8,7 +8,6 @@ extends =
parts =
eggs
gcc-fortran
r-language
[eggs]
......
......@@ -10,7 +10,7 @@ extends =
# see ../manpy/buildout.cfg as an example
[rpy2_env]
PATH = ${gcc-fortran:location}/bin:${r-language:location}/bin:%(PATH)s
PATH = ${gcc:location}/bin:${r-language:location}/bin:%(PATH)s
[rpy2]
recipe = zc.recipe.egg:custom
......@@ -21,8 +21,8 @@ initialization =
import os
os.environ['PATH'] = '${r-language:location}/bin' + os.pathsep + os.environ.get('PATH', '')
rpath =
${gcc-fortran:location}/lib
${gcc-fortran:location}/lib64
${gcc:location}/lib
${gcc:location}/lib64
${pcre:location}/lib
${readline:location}/lib
${xz-utils:location}/lib
......
[buildout]
extends =
../numpy/buildout.cfg
../cython/buildout.cfg
../numpy/openblas.cfg
../scipy/buildout.cfg
parts =
......@@ -16,6 +17,28 @@ environment = scikit-learn-env
setup-eggs =
${numpy:egg}
${scipy:egg}
rpath =
${gcc:location}/lib
${gcc:location}/lib64
${openblas:location}/lib
[scikit-learn-repository]
recipe = slapos.recipe.build:gitclone
git-executable = ${git:location}/bin/git
repository = https://github.com/scikit-learn/scikit-learn.git
location = ${buildout:parts-directory}/scikit-learn
[scikit-learn-develop]
# This only works if we use zc.recipe.egg from zc.buildout 2 patched
# by nexedi (zc.recipe.egg = 2.0.3+slapos001 and zc.buildout = 2.5.2+slapos003)
recipe = zc.recipe.egg:develop
egg = scikit-learn
setup = ${scikit-learn-repository:location}
environment = scikit-learn-env
setup-eggs =
${cython:egg}
${numpy:egg}
${scipy:egg}
rpath =
${gcc-fortran:location}/lib
${gcc-fortran:location}/lib64
......
......@@ -28,13 +28,7 @@ parts =
py
firewalld-patch
extensions -=
buildout-versions
extensions +=
slapos.rebootstrap
show-picked-versions = true
rebootstrap-directory = rebootstrap
[environment]
# Note: For now original PATH is appended to the end, as not all tools are
......@@ -129,9 +123,3 @@ eggs =
${slapos:eggs}
interpreter = py
scripts = py
[versions]
setuptools = 28.8.0
slapos.rebootstrap = 3.7
zc.buildout = 2.5.2+slapos005
zc.recipe.egg = 2.0.3+slapos002
......@@ -13,7 +13,7 @@ parts =
[numpy-egg]
recipe = zc.recipe.egg
initialization =
import scipy.stats # load our own libstdc++ explicitly at the very beginning
import scipy.spatial.ckdtree # load our own libstdc++ explicitly at the very beginning
eggs =
setuptools
${scipy:egg}
......@@ -26,28 +26,16 @@ location = ${buildout:parts-directory}/${:_buildout_section_name_}
recipe = plone.recipe.command
stop-on-error = true
repository = https://github.com/tensorflow/tensorflow
tag = v0.12.0
tag = v1.0.1
git-binary = ${git:location}/bin/git
patch-binary = ${patch:location}/bin/patch
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = export HOME=${:location}; (${:git-binary} clone --recurse-submodules --quiet -b ${:tag} ${:repository} ${:location}; cd ${buildout:parts-directory} ; ${:patch-binary} -p1 -d ${:_buildout_section_name_} < ${:_profile_base_location_}/tensorflow-v0.12.0.patch ) || (rm -fr ${:location}; exit 1)
[template-tensorflow-bzl]
recipe = slapos.recipe.template:jinja2
location = ${buildout:parts-directory}/${:_buildout_section_name_}
mode = 640
filename = tensorflow.bzl
template = ${:_profile_base_location_}/${:filename}.in
rendered = ${:location}/${:filename}
tf_extension_linkopts = '-lrt', '-Wl,-rpath=${gcc:location}/lib64'
context =
key tf_extension_linkopts template-tensorflow-bzl:tf_extension_linkopts
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = export HOME=${:location}; (${:git-binary} clone --recurse-submodules --quiet -b ${:tag} ${:repository} ${:location}; cd ${buildout:parts-directory} ; ${:patch-binary} -p1 -d ${:_buildout_section_name_} < ${:_profile_base_location_}/tensorflow-v1.0.1.patch ) || (rm -fr ${:location}; exit 1)
[cuda]
tf_need_cuda = 1
tf_cuda_version = 8.0
tf_cudnn_version = 6.5
tf_cudnn_version = 5.1.10
tf_cuda_compute_capabilities = 5.2,6.1
cuda_toolkit_path = /usr/local/cuda
cudnn_install_path = /usr/local/cuda
......@@ -55,7 +43,6 @@ cudnn_install_path = /usr/local/cuda
[tensorflow-build]
recipe = slapos.recipe.build
location = ${buildout:parts-directory}/${:_buildout_section_name_}
tensorflow-bzl-file-path = ${template-tensorflow-bzl:rendered}
workdir = ${tensorflow-repository:location}
gcc-bin = ${gcc:location}/bin
gcc-lib = ${gcc:location}/lib
......@@ -79,6 +66,9 @@ script =
os.environ.get('LIBRARY_PATH') or '']),
'PYTHON_BIN_PATH':self.options['numpy-python-command'],
'PYTHON_LIB_PATH':self.options['python27-lib'],
'CC_OPT_FLAGS':'-march=native',
'TF_NEED_JEMALLOC':'1',
'TF_ENABLE_XLA':'0',
'TF_NEED_GCP':'0',
'TF_NEED_HDFS':'0',
'TF_NEED_OPENCL':'0',
......@@ -96,8 +86,6 @@ script =
if not os.path.exists(env['CUDA_TOOLKIT_PATH']): env['TF_NEED_CUDA'] = '0'
env['LD_LIBRARY_PATH'] = env['LIBRARY_PATH']
env['LDFLAGS'] = '-lW,-rpath='+self.options['gcc-lib64']
shutil.copy(self.options['tensorflow-bzl-file-path'],
os.path.join(workdir, 'tensorflow', 'tensorflow.bzl'))
call(['./configure'], cwd=workdir, env=env)
bazel_command = (env['TF_NEED_CUDA'] == '1' and
['bazel', 'build', '-c', 'opt', '--copt', '-march=native', '--config', 'cuda', '-s', '--verbose_failures', '//tensorflow/tools/pip_package:build_pip_package']
......@@ -116,7 +104,7 @@ egg = tensorflow
script =
os.makedirs(location)
workdir = self.options['tensorflow-repository-path']
egg_name = 'tensorflow-0.12.0-py2.7-linux-x86_64.egg'
egg_name = 'tensorflow-1.0.1-py2.7-linux-x86_64.egg'
dist_dir = os.path.join(workdir, 'dist')
dest_dir = os.path.join(self.buildout['buildout']['eggs-directory'], egg_name)
call(['bazel-bin/tensorflow/tools/pip_package/build_pip_package', dist_dir], cwd=workdir)
......
diff -N -u -r tensorflow.orig/tensorflow/tools/pip_package/build_pip_package.sh tensorflow/tensorflow/tools/pip_package/build_pip_package.sh
--- tensorflow.orig/tensorflow/tools/pip_package/build_pip_package.sh 2017-01-04 12:16:06.540748399 +0900
+++ tensorflow/tensorflow/tools/pip_package/build_pip_package.sh 2017-01-04 12:16:19.852748187 +0900
@@ -136,7 +136,7 @@
pushd ${TMPDIR}
rm -f MANIFEST
echo $(date) : "=== Building wheel"
- "${PYTHON_BIN_PATH:-python}" setup.py bdist_wheel ${GPU_FLAG} >/dev/null
+ "${PYTHON_BIN_PATH:-python}" setup.py bdist_egg ${GPU_FLAG} >/dev/null
mkdir -p ${DEST}
cp dist/* ${DEST}
popd
diff -N -u -r tensorflow.orig/tensorflow/tools/pip_package/setup.py tensorflow/tensorflow/tools/pip_package/setup.py
--- tensorflow.orig/tensorflow/tools/pip_package/setup.py 2017-01-04 12:16:06.540748399 +0900
+++ tensorflow/tensorflow/tools/pip_package/setup.py 2017-01-04 12:17:29.836747073 +0900
@@ -34,7 +34,7 @@
REQUIRED_PACKAGES = [
'numpy >= 1.11.0',
'six >= 1.10.0',
- 'protobuf == 3.1.0',
+ 'protobuf == 3.1.0.post1',
]
project_name = 'tensorflow'
diff -N -u -r tensorflow.orig/tensorflow/workspace.bzl tensorflow/tensorflow/workspace.bzl
--- tensorflow.orig/tensorflow/workspace.bzl 2017-01-04 12:16:06.568748399 +0900
+++ tensorflow/tensorflow/workspace.bzl 2017-01-04 12:16:19.852748187 +0900
@@ -228,9 +228,9 @@
native.new_http_archive(
name = "zlib_archive",
- url = "http://zlib.net/zlib-1.2.8.tar.gz",
- sha256 = "36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d",
- strip_prefix = "zlib-1.2.8",
+ url = "https://github.com/madler/zlib/archive/v1.2.9.zip",
+ sha256 = "819936ec260b679726f21dd9c1ef9c553ce0281988842c24a9c42652cbca698a",
+ strip_prefix = "zlib-1.2.9",
build_file = str(Label("//:zlib.BUILD")),
)
diff --git a/tensorflow/tools/pip_package/build_pip_package.sh b/tensorflow/tools/pip_package/build_pip_package.sh
index e2bee21..1df90c1 100755
--- a/tensorflow/tools/pip_package/build_pip_package.sh
+++ b/tensorflow/tools/pip_package/build_pip_package.sh
@@ -136,7 +136,7 @@ function main() {
pushd ${TMPDIR}
rm -f MANIFEST
echo $(date) : "=== Building wheel"
- "${PYTHON_BIN_PATH:-python}" setup.py bdist_wheel ${GPU_FLAG} >/dev/null
+ "${PYTHON_BIN_PATH:-python}" setup.py bdist_egg ${GPU_FLAG} >/dev/null
mkdir -p ${DEST}
cp dist/* ${DEST}
popd
# -*- Python -*-
# Parse the bazel version string from `native.bazel_version`.
def _parse_bazel_version(bazel_version):
# Remove commit from version.
version = bazel_version.split(" ", 1)[0]
# Split into (release, date) parts and only return the release
# as a tuple of integers.
parts = version.split('-', 1)
# Turn "release" into a tuple of strings
version_tuple = ()
for number in parts[0].split('.'):
version_tuple += (str(number),)
return version_tuple
# Given a source file, generate a test name.
# i.e. "common_runtime/direct_session_test.cc" becomes
# "common_runtime_direct_session_test"
def src_to_test_name(src):
return src.replace("/", "_").split(".")[0]
# Check that a specific bazel version is being used.
def check_version(bazel_version):
if "bazel_version" not in dir(native):
fail("\nCurrent Bazel version is lower than 0.2.1, expected at least %s\n" % bazel_version)
elif not native.bazel_version:
print("\nCurrent Bazel is not a release version, cannot check for compatibility.")
print("Make sure that you are running at least Bazel %s.\n" % bazel_version)
else:
current_bazel_version = _parse_bazel_version(native.bazel_version)
minimum_bazel_version = _parse_bazel_version(bazel_version)
if minimum_bazel_version > current_bazel_version:
fail("\nCurrent Bazel version is {}, expected at least {}\n".format(
native.bazel_version, bazel_version))
pass
# Return the options to use for a C++ library or binary build.
# Uses the ":optmode" config_setting to pick the options.
load(
"//tensorflow/core:platform/default/build_config_root.bzl",
"tf_cuda_tests_tags",
"tf_sycl_tests_tags",
)
load(
"@local_config_cuda//cuda:build_defs.bzl",
"if_cuda",
)
# List of proto files for android builds
def tf_android_core_proto_sources():
return ["//tensorflow/core:" + p
for p in tf_android_core_proto_sources_relative()]
# As tf_android_core_proto_sources, but paths relative to
# //third_party/tensorflow/core.
def tf_android_core_proto_sources_relative():
return [
"example/example.proto",
"example/feature.proto",
"framework/allocation_description.proto",
"framework/attr_value.proto",
"framework/cost_graph.proto",
"framework/device_attributes.proto",
"framework/function.proto",
"framework/graph.proto",
"framework/kernel_def.proto",
"framework/log_memory.proto",
"framework/node_def.proto",
"framework/op_def.proto",
"framework/resource_handle.proto",
"framework/step_stats.proto",
"framework/summary.proto",
"framework/tensor.proto",
"framework/tensor_description.proto",
"framework/tensor_shape.proto",
"framework/tensor_slice.proto",
"framework/types.proto",
"framework/versions.proto",
"lib/core/error_codes.proto",
"protobuf/config.proto",
"protobuf/tensor_bundle.proto",
"protobuf/saver.proto",
"util/memmapped_file_system.proto",
"util/saved_tensor_slice.proto",
]
# Returns the list of pb.h and proto.h headers that are generated for
# tf_android_core_proto_sources().
def tf_android_core_proto_headers():
return (["//tensorflow/core/" + p.replace(".proto", ".pb.h")
for p in tf_android_core_proto_sources_relative()] +
["//tensorflow/core/" + p.replace(".proto", ".proto.h")
for p in tf_android_core_proto_sources_relative()])
# Returns the list of protos for which proto_text headers should be generated.
def tf_proto_text_protos_relative():
return [p for p in tf_android_core_proto_sources_relative()]
def if_android_arm(a):
return select({
"//tensorflow:android_arm": a,
"//conditions:default": [],
})
def if_android_arm64(a):
return select({
"//tensorflow:android_arm64": a,
"//conditions:default": [],
})
def if_not_android(a):
return select({
"//tensorflow:android": [],
"//conditions:default": a,
})
def if_android(a):
return select({
"//tensorflow:android": a,
"//conditions:default": [],
})
def if_ios(a):
return select({
"//tensorflow:ios": a,
"//conditions:default": [],
})
def if_mobile(a):
return select({
"//tensorflow:android": a,
"//tensorflow:ios": a,
"//conditions:default": [],
})
def if_not_mobile(a):
return select({
"//tensorflow:android": [],
"//tensorflow:ios": [],
"//conditions:default": a,
})
def if_not_windows(a):
return select({
"//tensorflow:windows": [],
"//conditions:default": a,
})
def tf_copts():
return (["-DEIGEN_AVOID_STL_ARRAY",
"-Iexternal/gemmlowp",
"-Wno-sign-compare",
"-fno-exceptions"] +
if_cuda(["-DGOOGLE_CUDA=1"]) +
if_android_arm(["-mfpu=neon"]) +
select({
"//tensorflow:android": [
"-std=c++11",
"-DTF_LEAN_BINARY",
"-O2",
],
"//tensorflow:darwin": [],
"//tensorflow:windows": [
"/DLANG_CXX11",
"/D__VERSION__=\\\"MSVC\\\"",
"/DPLATFORM_WINDOWS",
"/DEIGEN_HAS_C99_MATH",
"/DTENSORFLOW_USE_EIGEN_THREADPOOL",
],
"//tensorflow:ios": ["-std=c++11"],
"//conditions:default": ["-pthread"]}))
def tf_opts_nortti_if_android():
return if_android([
"-fno-rtti",
"-DGOOGLE_PROTOBUF_NO_RTTI",
"-DGOOGLE_PROTOBUF_NO_STATIC_INITIALIZER",
])
# Given a list of "op_lib_names" (a list of files in the ops directory
# without their .cc extensions), generate a library for that file.
def tf_gen_op_libs(op_lib_names, deps=None):
# Make library out of each op so it can also be used to generate wrappers
# for various languages.
if not deps:
deps = []
for n in op_lib_names:
native.cc_library(name=n + "_op_lib",
copts=tf_copts(),
srcs=["ops/" + n + ".cc"],
deps=deps + ["//tensorflow/core:framework"],
visibility=["//visibility:public"],
alwayslink=1,
linkstatic=1,)
def tf_gen_op_wrapper_cc(name, out_ops_file, pkg="",
op_gen="//tensorflow/cc:cc_op_gen_main"):
# Construct an op generator binary for these ops.
tool = out_ops_file + "_gen_cc"
native.cc_binary(
name = tool,
copts = tf_copts(),
linkopts = ["-lm"],
linkstatic = 1, # Faster to link this one-time-use binary dynamically
deps = ([op_gen, pkg + ":" + name + "_op_lib"])
)
# Run the op generator.
if name == "sendrecv_ops" or name == "function_ops":
include_internal = "1"
else:
include_internal = "0"
native.genrule(
name=name + "_genrule",
outs=[out_ops_file + ".h", out_ops_file + ".cc"],
tools=[":" + tool],
cmd=("$(location :" + tool + ") $(location :" + out_ops_file + ".h) " +
"$(location :" + out_ops_file + ".cc) " + include_internal))
# Given a list of "op_lib_names" (a list of files in the ops directory
# without their .cc extensions), generate individual C++ .cc and .h
# files for each of the ops files mentioned, and then generate a
# single cc_library called "name" that combines all the
# generated C++ code.
#
# For example, for:
# tf_gen_op_wrappers_cc("tf_ops_lib", [ "array_ops", "math_ops" ])
#
# This will ultimately generate ops/* files and a library like:
#
# cc_library(name = "tf_ops_lib",
# srcs = [ "ops/array_ops.cc",
# "ops/math_ops.cc" ],
# hdrs = [ "ops/array_ops.h",
# "ops/math_ops.h" ],
# deps = [ ... ])
def tf_gen_op_wrappers_cc(name,
op_lib_names=[],
other_srcs=[],
other_hdrs=[],
pkg="",
deps=[
"//tensorflow/cc:ops",
"//tensorflow/cc:scope",
"//tensorflow/cc:const_op",
],
op_gen="//tensorflow/cc:cc_op_gen_main",
visibility=None):
subsrcs = other_srcs
subhdrs = other_hdrs
for n in op_lib_names:
tf_gen_op_wrapper_cc(n, "ops/" + n, pkg=pkg, op_gen=op_gen)
subsrcs += ["ops/" + n + ".cc"]
subhdrs += ["ops/" + n + ".h"]
native.cc_library(name=name,
srcs=subsrcs,
hdrs=subhdrs,
deps=deps + [
"//tensorflow/core:core_cpu",
"//tensorflow/core:framework",
"//tensorflow/core:lib",
"//tensorflow/core:protos_all_cc",
],
copts=tf_copts(),
alwayslink=1,
visibility=visibility)
# Invoke this rule in .../tensorflow/python to build the wrapper library.
def tf_gen_op_wrapper_py(name, out=None, hidden=None, visibility=None, deps=[],
require_shape_functions=False, hidden_file=None,
generated_target_name=None):
# Construct a cc_binary containing the specified ops.
tool_name = "gen_" + name + "_py_wrappers_cc"
if not deps:
deps = ["//tensorflow/core:" + name + "_op_lib"]
native.cc_binary(
name = tool_name,
linkopts = ["-lm"],
copts = tf_copts(),
linkstatic = 1, # Faster to link this one-time-use binary dynamically
deps = (["//tensorflow/core:framework",
"//tensorflow/python:python_op_gen_main"] + deps),
visibility = ["//tensorflow:internal"],
)
# Invoke the previous cc_binary to generate a python file.
if not out:
out = "ops/gen_" + name + ".py"
if hidden:
# `hidden` is a list of op names to be hidden in the generated module.
native.genrule(
name=name + "_pygenrule",
outs=[out],
tools=[tool_name],
cmd=("$(location " + tool_name + ") " + ",".join(hidden)
+ " " + ("1" if require_shape_functions else "0") + " > $@"))
elif hidden_file:
# `hidden_file` is a file containing a list of op names to be hidden in the
# generated module.
native.genrule(
name=name + "_pygenrule",
outs=[out],
srcs=[hidden_file],
tools=[tool_name],
cmd=("$(location " + tool_name + ") @$(location "
+ hidden_file + ") " + ("1" if require_shape_functions else "0")
+ " > $@"))
else:
# No ops should be hidden in the generated module.
native.genrule(
name=name + "_pygenrule",
outs=[out],
tools=[tool_name],
cmd=("$(location " + tool_name + ") "
+ ("1" if require_shape_functions else "0") + " > $@"))
# Make a py_library out of the generated python file.
if not generated_target_name:
generated_target_name = name
native.py_library(name=generated_target_name,
srcs=[out],
srcs_version="PY2AND3",
visibility=visibility,
deps=[
"//tensorflow/python:framework_for_generated_wrappers",
],)
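# Hedged usage sketch for tf_gen_op_wrapper_py (the op library name is an
# assumption): tf_gen_op_wrapper_py(name="array_ops",
# require_shape_functions=True) defaults deps to
# ["//tensorflow/core:array_ops_op_lib"], builds the
# "gen_array_ops_py_wrappers_cc" tool, generates "ops/gen_array_ops.py" and
# wraps it in a py_library named "array_ops".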
# Define a bazel macro that creates cc_test for tensorflow.
# TODO(opensource): we need to enable this to work around the hidden symbol
# __cudaRegisterFatBinary error. Needs more investigation.
def tf_cc_test(name, srcs, deps, linkstatic=0, tags=[], data=[], size="medium",
suffix="", args=None, linkopts=[]):
native.cc_test(name="%s%s" % (name, suffix),
srcs=srcs,
size=size,
args=args,
copts=tf_copts(),
data=data,
deps=deps,
linkopts=["-lpthread", "-lm"] + linkopts,
linkstatic=linkstatic,
tags=tags)
# Part of the testing workflow requires a distinguishable name for the build
# rules that involve a GPU, even if otherwise identical to the base rule.
def tf_cc_test_gpu(name, srcs, deps, linkstatic=0, tags=[], data=[],
size="medium", suffix="", args=None):
tf_cc_test(name, srcs, deps, linkstatic=linkstatic, tags=tags, data=data,
size=size, suffix=suffix, args=args)
def tf_cuda_cc_test(name, srcs, deps, tags=[], data=[], size="medium",
linkstatic=0, args=[], linkopts=[]):
tf_cc_test(name=name,
srcs=srcs,
deps=deps,
tags=tags + ["manual"],
data=data,
size=size,
linkstatic=linkstatic,
linkopts=linkopts,
args=args)
tf_cc_test(name=name,
srcs=srcs,
suffix="_gpu",
deps=deps + if_cuda(["//tensorflow/core:gpu_runtime"]),
linkstatic=if_cuda(1, 0),
tags=tags + tf_cuda_tests_tags(),
data=data,
size=size,
linkopts=linkopts,
args=args)
# Create a cc_test for each of the tensorflow tests listed in "tests"
def tf_cc_tests(srcs, deps, linkstatic=0, tags=[], size="medium",
args=None, linkopts=[]):
for src in srcs:
tf_cc_test(
name=src_to_test_name(src),
srcs=[src],
deps=deps,
linkstatic=linkstatic,
tags=tags,
size=size,
args=args,
linkopts=linkopts)
def tf_cc_tests_gpu(srcs, deps, linkstatic=0, tags=[], size="medium",
args=None):
tf_cc_tests(srcs, deps, linkstatic, tags=tags, size=size, args=args)
def tf_cuda_cc_tests(srcs, deps, tags=[], size="medium", linkstatic=0,
args=None, linkopts=[]):
for src in srcs:
tf_cuda_cc_test(
name=src_to_test_name(src),
srcs=[src],
deps=deps,
tags=tags,
size=size,
linkstatic=linkstatic,
args=args,
linkopts=linkopts)
def _cuda_copts():
"""Gets the appropriate set of copts for (maybe) CUDA compilation.
If we're doing CUDA compilation, returns copts for our particular CUDA
compiler. If we're not doing CUDA compilation, returns an empty list.
"""
common_cuda_opts = ["-x", "cuda", "-DGOOGLE_CUDA=1"]
return select({
"//conditions:default": [],
"@local_config_cuda//cuda:using_nvcc": (
common_cuda_opts +
[
"-nvcc_options=expt-relaxed-constexpr",
"-nvcc_options=ftz=true",
]
),
"@local_config_cuda//cuda:using_clang": (
common_cuda_opts +
[
"-fcuda-flush-denormals-to-zero",
"--cuda-path=external/local_config_cuda/cuda",
"--cuda-gpu-arch=sm_35",
]
),
}) + select({
# Pass -O3 when building CUDA code with clang; some important
# optimizations are not enabled at O2.
"@local_config_cuda//cuda:using_clang_opt": ["-O3"],
"//conditions:default": [],
})
# Build defs for TensorFlow kernels
# When this target is built using --config=cuda, a cc_library is built
# that passes -DGOOGLE_CUDA=1 and '-x cuda', linking in additional
# libraries needed by GPU kernels.
def tf_gpu_kernel_library(srcs, copts=[], cuda_copts=[], deps=[], hdrs=[],
**kwargs):
copts = copts + _cuda_copts() + if_cuda(cuda_copts) + tf_copts()
native.cc_library(
srcs = srcs,
hdrs = hdrs,
copts = copts,
deps = deps + if_cuda([
"//tensorflow/core:cuda",
"//tensorflow/core:gpu_lib",
]),
alwayslink=1,
**kwargs)
def tf_cuda_library(deps=None, cuda_deps=None, copts=None, **kwargs):
"""Generate a cc_library with a conditional set of CUDA dependencies.
When the library is built with --config=cuda:
- both deps and cuda_deps are used as dependencies
- the cuda runtime is added as a dependency (if necessary)
- The library additionally passes -DGOOGLE_CUDA=1 to the list of copts
Args:
- cuda_deps: BUILD dependencies which will be linked if and only if:
'--config=cuda' is passed to the bazel command line.
- deps: dependencies which will always be linked.
- copts: copts always passed to the cc_library.
- kwargs: Any other argument to cc_library.
"""
if not deps:
deps = []
if not cuda_deps:
cuda_deps = []
if not copts:
copts = []
native.cc_library(
deps = deps + if_cuda(cuda_deps + ["//tensorflow/core:cuda"]),
copts = copts + if_cuda(["-DGOOGLE_CUDA=1"]),
**kwargs)
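# Illustrative sketch for tf_cuda_library (the "my_gpu_helper" names are
# assumptions; the core labels appear elsewhere in this file):
# tf_cuda_library(
#     name = "my_gpu_helper",
#     srcs = ["my_gpu_helper.cc"],
#     deps = ["//tensorflow/core:lib"],
#     cuda_deps = ["//tensorflow/core:gpu_lib"],
# )
# With --config=cuda, gpu_lib and //tensorflow/core:cuda are linked and
# -DGOOGLE_CUDA=1 is added to copts; otherwise only the plain deps are used.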
def tf_kernel_library(name, prefix=None, srcs=None, gpu_srcs=None, hdrs=None,
deps=None, alwayslink=1, **kwargs):
"""A rule to build a TensorFlow OpKernel.
May either specify srcs/hdrs or prefix. Similar to tf_cuda_library,
but with alwayslink=1 by default. If prefix is specified:
* prefix*.cc (except *.cu.cc) is added to srcs
* prefix*.h (except *.cu.h) is added to hdrs
* prefix*.cu.cc and prefix*.h (including *.cu.h) are added to gpu_srcs.
With the exception that test files are excluded.
For example, with prefix = "cast_op",
* srcs = ["cast_op.cc"]
* hdrs = ["cast_op.h"]
* gpu_srcs = ["cast_op_gpu.cu.cc", "cast_op.h"]
* "cast_op_test.cc" is excluded
With prefix = "cwise_op"
* srcs = ["cwise_op_abs.cc", ..., "cwise_op_tanh.cc"],
* hdrs = ["cwise_ops.h", "cwise_ops_common.h"],
* gpu_srcs = ["cwise_op_gpu_abs.cu.cc", ..., "cwise_op_gpu_tanh.cu.cc",
"cwise_ops.h", "cwise_ops_common.h",
"cwise_ops_gpu_common.cu.h"]
* "cwise_ops_test.cc" is excluded
"""
if not srcs:
srcs = []
if not hdrs:
hdrs = []
if not deps:
deps = []
if prefix:
if native.glob([prefix + "*.cu.cc"], exclude = ["*test*"]):
if not gpu_srcs:
gpu_srcs = []
gpu_srcs = gpu_srcs + native.glob([prefix + "*.cu.cc", prefix + "*.h"],
exclude = ["*test*"])
srcs = srcs + native.glob([prefix + "*.cc"],
exclude = ["*test*", "*.cu.cc"])
hdrs = hdrs + native.glob([prefix + "*.h"], exclude = ["*test*", "*.cu.h"])
cuda_deps = ["//tensorflow/core:gpu_lib"]
if gpu_srcs:
for gpu_src in gpu_srcs:
if gpu_src.endswith(".cc") and not gpu_src.endswith(".cu.cc"):
fail("{} not allowed in gpu_srcs. .cc sources must end with .cu.cc".format(gpu_src))
tf_gpu_kernel_library(
name = name + "_gpu",
srcs = gpu_srcs,
deps = deps,
**kwargs)
cuda_deps.extend([":" + name + "_gpu"])
tf_cuda_library(
name = name,
srcs = srcs,
hdrs = hdrs,
copts = tf_copts(),
cuda_deps = cuda_deps,
linkstatic = 1, # Needed since alwayslink is broken in bazel b/27630669
alwayslink = alwayslink,
deps = deps,
**kwargs)
def tf_kernel_libraries(name, prefixes, deps=None, libs=None, **kwargs):
"""Makes one target per prefix, and one target that includes them all.
Args:
name: The name of the omnibus cc_library target that depends on each
generated tf_kernel_library target.
prefixes: A list of source file name prefixes used to generate individual
libraries. See the definition of tf_kernel_library for details.
deps: The dependencies list associated with each generated target.
libs: Additional tf_kernel_library targets that should be included in the
omnibus cc_library target but not as deps of individual libraries.
This can be used, for example, if a library that was previously
generated by this rule is refactored into a separate definition
in order to specify more or fewer deps for it.
Other attributes are forwarded to each individual target but not to the
omnibus cc_library target.
"""
for p in prefixes:
tf_kernel_library(name=p, prefix=p, deps=deps, **kwargs)
native.cc_library(name=name,
deps=[":" + p for p in prefixes] + (libs or []))
# Bazel rules for building swig files.
def _py_wrap_cc_impl(ctx):
srcs = ctx.files.srcs
if len(srcs) != 1:
fail("Exactly one SWIG source file label must be specified.", "srcs")
module_name = ctx.attr.module_name
src = ctx.files.srcs[0]
inputs = set([src])
inputs += ctx.files.swig_includes
for dep in ctx.attr.deps:
inputs += dep.cc.transitive_headers
inputs += ctx.files._swiglib
swig_include_dirs = set(_get_repository_roots(ctx, inputs))
swig_include_dirs += sorted([f.dirname for f in ctx.files._swiglib])
args = ["-c++",
"-python",
"-module", module_name,
"-o", ctx.outputs.cc_out.path,
"-outdir", ctx.outputs.py_out.dirname]
args += ["-l" + f.path for f in ctx.files.swig_includes]
args += ["-I" + i for i in swig_include_dirs]
args += [src.path]
outputs = [ctx.outputs.cc_out,
ctx.outputs.py_out]
ctx.action(executable=ctx.executable._swig,
arguments=args,
inputs=list(inputs),
outputs=outputs,
mnemonic="PythonSwig",
progress_message="SWIGing " + src.path)
return struct(files=set(outputs))
_py_wrap_cc = rule(
attrs = {
"srcs": attr.label_list(
mandatory = True,
allow_files = True,
),
"swig_includes": attr.label_list(
cfg = "data",
allow_files = True,
),
"deps": attr.label_list(
allow_files = True,
providers = ["cc"],
),
"module_name": attr.string(mandatory = True),
"py_module_name": attr.string(mandatory = True),
"_swig": attr.label(
default = Label("@swig//:swig"),
executable = True,
cfg = "host",
),
"_swiglib": attr.label(
default = Label("@swig//:templates"),
allow_files = True,
),
},
outputs = {
"cc_out": "%{module_name}.cc",
"py_out": "%{py_module_name}.py",
},
implementation = _py_wrap_cc_impl,
)
def _get_repository_roots(ctx, files):
"""Returns abnormal root directories under which files reside.
When running a ctx.action, source files within the main repository are all
relative to the current directory; however, files that are generated or exist
in remote repositories will have their root directory be a subdirectory,
e.g. bazel-out/local-fastbuild/genfiles/external/jpeg_archive. This function
returns the set of these devious directories, ranked and sorted by popularity
in order to hopefully minimize the number of I/O system calls within the
compiler, because includes have quadratic complexity.
"""
result = {}
for f in files:
root = f.root.path
if root:
if root not in result:
result[root] = 0
result[root] -= 1
work = f.owner.workspace_root
if work:
if root:
root += "/"
root += work
if root:
if root not in result:
result[root] = 0
result[root] -= 1
return [k for v, k in sorted([(v, k) for k, v in result.items()])]
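# Worked example (hedged, the concrete paths are assumptions based on the
# docstring above): a source file in the main repository has an empty root and
# contributes nothing; a generated file contributes its root, e.g.
# "bazel-out/local-fastbuild/genfiles"; a file owned by an external workspace
# also contributes its workspace root, prefixed by the file root when that is
# non-empty. Roots are returned with the most frequently seen ones first.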
# Bazel rule for collecting the header files that a target depends on.
def _transitive_hdrs_impl(ctx):
outputs = set()
for dep in ctx.attr.deps:
outputs += dep.cc.transitive_headers
return struct(files=outputs)
_transitive_hdrs = rule(
attrs = {
"deps": attr.label_list(
allow_files = True,
providers = ["cc"],
),
},
implementation = _transitive_hdrs_impl,
)
def transitive_hdrs(name, deps=[], **kwargs):
_transitive_hdrs(name=name + "_gather",
deps=deps)
native.filegroup(name=name,
srcs=[":" + name + "_gather"])
# Create a header only library that includes all the headers exported by
# the libraries in deps.
def cc_header_only_library(name, deps=[], **kwargs):
_transitive_hdrs(name=name + "_gather",
deps=deps)
native.cc_library(name=name,
hdrs=[":" + name + "_gather"],
**kwargs)
def tf_custom_op_library_additional_deps():
return [
"@protobuf//:protobuf",
"//third_party/eigen3",
"//tensorflow/core:framework_headers_lib",
]
# Traverse the dependency graph along the "deps" attribute of the
# target and return a struct with one field called 'tf_collected_deps'.
# tf_collected_deps will be the union of the deps of the current target
# and the tf_collected_deps of the dependencies of this target.
def _collect_deps_aspect_impl(target, ctx):
alldeps = set()
if hasattr(ctx.rule.attr, "deps"):
for dep in ctx.rule.attr.deps:
alldeps = alldeps | set([dep.label])
if hasattr(dep, "tf_collected_deps"):
alldeps = alldeps | dep.tf_collected_deps
return struct(tf_collected_deps=alldeps)
collect_deps_aspect = aspect(
implementation=_collect_deps_aspect_impl,
attr_aspects=["deps"])
def _dep_label(dep):
label = dep.label
return label.package + ":" + label.name
# This rule checks that the transitive dependencies of targets listed
# in the 'deps' attribute don't depend on the targets listed in
# the 'disallowed_deps' attribute.
def _check_deps_impl(ctx):
disallowed_deps = ctx.attr.disallowed_deps
for input_dep in ctx.attr.deps:
if not hasattr(input_dep, "tf_collected_deps"):
continue
for dep in input_dep.tf_collected_deps:
for disallowed_dep in disallowed_deps:
if dep == disallowed_dep.label:
fail(_dep_label(input_dep) + " cannot depend on " +
_dep_label(disallowed_dep))
return struct()
check_deps = rule(
_check_deps_impl,
attrs = {
"deps": attr.label_list(
aspects=[collect_deps_aspect],
mandatory = True,
allow_files = True
),
"disallowed_deps": attr.label_list(
mandatory = True,
allow_files = True
)},
)
# Helper to build a dynamic library (.so) from the sources containing
# implementations of custom ops and kernels.
def tf_custom_op_library(name, srcs=[], gpu_srcs=[], deps=[]):
cuda_deps = [
"//tensorflow/core:stream_executor_headers_lib",
"@local_config_cuda//cuda:cudart_static",
]
deps = deps + tf_custom_op_library_additional_deps()
if gpu_srcs:
basename = name.split(".")[0]
native.cc_library(
name = basename + "_gpu",
srcs = gpu_srcs,
copts = _cuda_copts(),
deps = deps + if_cuda(cuda_deps))
cuda_deps.extend([":" + basename + "_gpu"])
check_deps(name=name+"_check_deps",
deps=deps + if_cuda(cuda_deps),
disallowed_deps=["//tensorflow/core:framework",
"//tensorflow/core:lib"])
native.cc_binary(name=name,
srcs=srcs,
deps=deps + if_cuda(cuda_deps),
data=[name + "_check_deps"],
copts=tf_copts(),
linkshared=1,
linkopts = select({
"//conditions:default": [
"-lm",
],
"//tensorflow:darwin": [],
}),
)
def tf_extension_linkopts():
return [{{ tf_extension_linkopts }}] # No extension link opts
def tf_extension_copts():
return [] # No extension c opts
def tf_py_wrap_cc(name, srcs, swig_includes=[], deps=[], copts=[], **kwargs):
module_name = name.split("/")[-1]
# Convert a rule name such as foo/bar/baz to foo/bar/_baz.so
# and use that as the name for the rule producing the .so file.
cc_library_name = "/".join(name.split("/")[:-1] + ["_" + module_name + ".so"])
cc_library_pyd_name = "/".join(name.split("/")[:-1] + ["_" + module_name + ".pyd"])
extra_deps = []
_py_wrap_cc(name=name + "_py_wrap",
srcs=srcs,
swig_includes=swig_includes,
deps=deps + extra_deps,
module_name=module_name,
py_module_name=name)
extra_linkopts = select({
"@local_config_cuda//cuda:darwin": [
"-Wl,-exported_symbols_list",
"//tensorflow:tf_exported_symbols.lds"
],
"//tensorflow:windows": [
],
"//conditions:default": [
"-Wl,--version-script",
"//tensorflow:tf_version_script.lds"
]})
extra_deps += select({
"@local_config_cuda//cuda:darwin": [
"//tensorflow:tf_exported_symbols.lds"
],
"//tensorflow:windows": [
],
"//conditions:default": [
"//tensorflow:tf_version_script.lds"
]
})
native.cc_binary(
name=cc_library_name,
srcs=[module_name + ".cc"],
copts=(copts + ["-Wno-self-assign",
"-Wno-sign-compare",
"-Wno-write-strings"]
+ tf_extension_copts()),
linkopts=tf_extension_linkopts() + extra_linkopts,
linkstatic=1,
linkshared=1,
deps=deps + extra_deps)
native.genrule(
name = "gen_" + cc_library_pyd_name,
srcs = [":" + cc_library_name],
outs = [cc_library_pyd_name],
cmd = "cp $< $@",
)
native.py_library(name=name,
srcs=[":" + name + ".py"],
srcs_version="PY2AND3",
data=select({
"//tensorflow:windows": [":" + cc_library_pyd_name],
"//conditions:default": [":" + cc_library_name],
}))
def tf_py_test(name, srcs, size="medium", data=[], main=None, args=[],
tags=[], shard_count=1, additional_deps=[], flaky=0):
native.py_test(
name=name,
size=size,
srcs=srcs,
main=main,
args=args,
tags=tags,
visibility=["//tensorflow:internal"],
shard_count=shard_count,
data=data,
deps=[
"//tensorflow/python:extra_py_tests_deps",
"//tensorflow/python:gradient_checker",
] + additional_deps,
flaky=flaky,
srcs_version="PY2AND3")
def cuda_py_test(name, srcs, size="medium", data=[], main=None, args=[],
shard_count=1, additional_deps=[], tags=[], flaky=0):
test_tags = tags + tf_cuda_tests_tags()
tf_py_test(name=name,
size=size,
srcs=srcs,
data=data,
main=main,
args=args,
tags=test_tags,
shard_count=shard_count,
additional_deps=additional_deps,
flaky=flaky)
def sycl_py_test(name, srcs, size="medium", data=[], main=None, args=[],
shard_count=1, additional_deps=[], tags=[], flaky=0):
test_tags = tags + tf_sycl_tests_tags()
tf_py_test(name=name,
size=size,
srcs=srcs,
data=data,
main=main,
args=args,
tags=test_tags,
shard_count=shard_count,
additional_deps=additional_deps,
flaky=flaky)
def py_tests(name,
srcs,
size="medium",
additional_deps=[],
data=[],
tags=[],
shard_count=1,
prefix=""):
for src in srcs:
test_name = src.split("/")[-1].split(".")[0]
if prefix:
test_name = "%s_%s" % (prefix, test_name)
tf_py_test(name=test_name,
size=size,
srcs=[src],
main=src,
tags=tags,
shard_count=shard_count,
data=data,
additional_deps=additional_deps)
def cuda_py_tests(name, srcs, size="medium", additional_deps=[], data=[],
shard_count=1, tags=[], prefix=""):
test_tags = tags + tf_cuda_tests_tags()
py_tests(name=name, size=size, srcs=srcs, additional_deps=additional_deps,
data=data, tags=test_tags, shard_count=shard_count,prefix=prefix)
# Creates a genrule named <name> for running tools/proto_text's generator to
# make the proto_text functions, for the protos passed in <srcs>.
#
# Return a struct with fields (hdrs, srcs) containing the names of the
# generated files.
def tf_generate_proto_text_sources(name, srcs_relative_dir, srcs):
out_hdrs = ([p.replace(".proto", ".pb_text.h") for p in srcs] +
[p.replace(".proto", ".pb_text-impl.h") for p in srcs])
out_srcs = [p.replace(".proto", ".pb_text.cc") for p in srcs]
native.genrule(
name = name,
srcs = srcs + ["//tensorflow/tools/proto_text:placeholder.txt"],
outs = out_hdrs + out_srcs,
cmd = "$(location //tensorflow/tools/proto_text:gen_proto_text_functions) " +
"$(@D) " + srcs_relative_dir + " $(SRCS)",
tools = ["//tensorflow/tools/proto_text:gen_proto_text_functions"],
)
return struct(hdrs=out_hdrs, srcs=out_srcs)
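# Illustrative sketch, reusing a proto listed near the top of this file (the
# target name and srcs_relative_dir are assumptions):
# tf_generate_proto_text_sources(
#     name = "core_proto_text",
#     srcs_relative_dir = "tensorflow/core/",
#     srcs = ["protobuf/config.proto"],
# )
# returns struct(hdrs = ["protobuf/config.pb_text.h",
#                        "protobuf/config.pb_text-impl.h"],
#                srcs = ["protobuf/config.pb_text.cc"])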
def tf_genrule_cmd_append_to_srcs(to_append):
return ("cat $(SRCS) > $(@) && " +
"echo >> $(@) && " +
"echo " + to_append + " >> $(@)")
def tf_version_info_genrule():
native.genrule(
name = "version_info_gen",
srcs = [
"//tensorflow/tools/git:gen/spec.json",
"//tensorflow/tools/git:gen/head",
"//tensorflow/tools/git:gen/branch_ref",
],
outs = ["util/version_info.cc"],
cmd = "$(location //tensorflow/tools/git:gen_git_source.py) --generate $(SRCS) \"$@\"",
local = 1,
tools = ["//tensorflow/tools/git:gen_git_source.py"],
)
......@@ -46,8 +46,7 @@ make-options =
LIBTOOL=${libtool:location}/bin/libtool
[tesseract-eng-traineddata]
recipe = hexagonit.recipe.download
ignore-existing = true
recipe = slapos.recipe.build:download-unpacked
strip-top-level-dir = true
url = http://tesseract-ocr.googlecode.com/files/tesseract-ocr-3.01.eng.tar.gz
md5sum = 89c139a73e0e7b1225809fc7b226b6c9
[buildout]
extends =
../gettext/buildout.cfg
../garbage-collector/buildout.cfg
../ncurses/buildout.cfg
../openssl/buildout.cfg
......@@ -39,7 +40,7 @@ patches =
${:_profile_base_location_}/w3m-0.5.2-gc72.patch#5a74b6379240c8dd1372efa7926c8bdc
environment =
PATH=${patch:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:%(PATH)s
PATH=${gettext:location}/bin:${patch:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${garbage-collector:location}/lib/pkgconfig
CPPFLAGS=-I${ncurses:location}/include/ -I${zlib:location}/include/ -I${garbage-collector:location}/include
LDFLAGS=-Wl,--as-needed -L${garbage-collector:location}/lib -Wl,-rpath=${garbage-collector:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
[buildout]
extends =
../gettext/buildout.cfg
../openssl/buildout.cfg
../patch/buildout.cfg
../pcre/buildout.cfg
../perl/buildout.cfg
../pkgconfig/buildout.cfg
../xz-utils/buildout.cfg
../zlib/buildout.cfg
......@@ -28,7 +30,7 @@ patches =
${:_profile_base_location_}/wget-doc.makefile.patch#0d23cf1ee81268a94699aebbb26058e6
environment =
PATH=${patch:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PATH=${gettext:location}/bin:${patch:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${openssl:location}/lib -L${pcre:location}/lib -Wl,-rpath=${pcre:location}/lib -Wl,-rpath=${openssl:location}/lib
CPPFLAGS=-I${zlib:location}/include -I${openssl:location}/include -I${pcre:location}/include
......@@ -194,7 +194,7 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
if [ -e %(backup_signature)s ]; then
cd $BACKUP_DIR
find -type f ! -name backup.signature ! -wholename "./rdiff-backup-data/*" -print0 | xargs -P4 -0 sha256sum | LC_ALL=C sort -k 66 > ../proof.signature
find -type f ! -name backup.signature ! -wholename "./rdiff-backup-data/*" -print0 | xargs -0 sha256sum | LC_ALL=C sort -k 66 > ../proof.signature
cmp backup.signature ../proof.signature || SUCCEEDED=false
diff -ruw backup.signature ../proof.signature > ../backup.diff
# XXX If there is a difference on the backup, we should publish the
......
......@@ -120,7 +120,7 @@ class Password(object):
if not passwd:
passwd = self.generatePassword(int(options_get('bytes', '8')))
self.update = self.install
self.passwd = passwd
self.passwd = passwd.strip('\n')
# Password must not go into the .installed file, for 2 reasons:
# security of course, but also to prevent buildout from always reinstalling.
options.get = lambda option, *args, **kw: passwd \
......
......@@ -168,18 +168,6 @@ class Recipe(GenericBaseRecipe):
self.generateCertificate()
wrapper = self.createFile(self.options['wrapper'], self.substituteTemplate(
self.getTemplateFilename('registry-run.in'), dict(
parameter='@%s' % self.options['config-file'],
pid_file=self.options['pid-file'],
command=self.options['command']
)
)
)
os.chmod(self.options['wrapper'], stat.S_IRWXU)
path_list.append(wrapper)
registry_url = 'http://%s:%s/' % (self.options['ipv4'], self.options['port'])
token_dict, add_token_dict, rm_token_dict = self.generateSlaveTokenList(
self.slave_list, token_save_path)
......@@ -204,13 +192,12 @@ class Recipe(GenericBaseRecipe):
service_dict = dict(token_base_path=token_list_path,
token_json=token_save_path,
db=self.options['db-path'],
partition_id=self.computer_partition_id,
computer_id=self.computer_id,
registry_url=registry_url)
service_dict['server_url'] = self.server_url
service_dict['cert_file'] = self.cert_file
service_dict['key_file'] = self.key_file
registry_url=registry_url,
server_url=self.server_url,
cert_file=self.cert_file,
key_file=self.key_file)
request_add = self.createPythonScript(
self.options['manager-wrapper'].strip(),
......@@ -218,55 +205,5 @@ class Recipe(GenericBaseRecipe):
)
path_list.append(request_add)
request_drop = self.createPythonScript(
self.options['drop-service-wrapper'].strip(),
'%s.re6stnet.requestRemoveToken' % __name__, service_dict
)
path_list.append(request_drop)
request_check = self.createPythonScript(
self.options['check-service-wrapper'].strip(),
'%s.re6stnet.checkService' % __name__, service_dict
)
path_list.append(request_check)
revoke_check = self.createPythonScript(
self.options['revoke-service-wrapper'].strip(),
'%s.re6stnet.requestRevoqueCertificate' % __name__, service_dict
)
path_list.append(revoke_check)
# Send connection parameters of slave instances
if token_dict:
self.slap.initializeConnection(self.server_url, self.key_file,
self.cert_file)
computer_partition = self.slap.registerComputerPartition(
self.computer_id,
self.computer_partition_id)
for slave_reference, token in token_dict.iteritems():
try:
status_file = os.path.join(token_list_path, '%s.status' % slave_reference)
status = self.readFile(status_file) or 'New token requested'
msg = status
if status == 'TOKEN_ADDED':
msg = 'Token is ready for use'
elif status == 'TOKEN_USED':
msg = 'Token not available, it has been used to generate a re6stnet certificate.'
ipv6_file = os.path.join(token_list_path, '%s.ipv6' % slave_reference)
ipv6 = self.readFile(ipv6_file) or '::'
ipv4_file = os.path.join(token_list_path, '%s.ipv4' % slave_reference)
node_ipv4 = self.readFile(ipv4_file) or '0.0.0.0'
computer_partition.setConnectionDict(
{'token':token, '1_info':msg, 'ipv6': ipv6, 'ipv4': node_ipv4},
slave_reference)
except Exception:
self.logger.fatal("Error while sending slave %s informations: %s",
slave_reference, traceback.format_exc())
return path_list
# -*- coding: utf-8 -*-
import httplib
import logging
import json
import os
import time
import sqlite3
import slapos
import traceback
import logging
import socket
import select
from re6st import tunnel, ctl, registry, utils, x509
from OpenSSL import crypto
from re6st import registry
log = logging.getLogger('SLAPOS-RE6STNET')
logging.basicConfig(level=logging.INFO)
logging.trace = logging.debug
class iterRoutes(object):
_waiting = True
def __new__(cls, control_socket, network):
self = object.__new__(cls)
c = ctl.Babel(control_socket, self, network)
c.request_dump()
while self._waiting:
args = {}, {}, ()
c.select(*args)
utils.select(*args)
return (prefix
for neigh_routes in c.neighbours.itervalues()
for prefix in neigh_routes[1]
if prefix)
def babel_dump(self):
self._waiting = False
def loadJsonFile(path):
if os.path.exists(path):
with open(path, 'r') as f:
......@@ -57,84 +33,75 @@ def readFile(path):
return content
return ''
def getDb(db_path):
db = sqlite3.connect(db_path, isolation_level=None,
check_same_thread=False)
db.text_factory = str
return db.cursor()
def updateFile(file_path, value):
if readFile(file_path) != value:
writeFile(file_path, value)
return True
return False
def bang(args):
computer_guid = args['computer_id']
partition_id = args['partition_id']
def getComputerPartition(server_url, key_file, cert_file, computer_guid, partition_id):
slap = slapos.slap.slap()
# Redeploy instance to update published information
slap.initializeConnection(args['server_url'], args['key_file'],
args['cert_file'])
partition = slap.registerComputerPartition(computer_guid=computer_guid,
partition_id=partition_id)
partition.bang(message='Published parameters changed!')
log.info("Bang with message 'parameters changed'...")
slap.initializeConnection(server_url,
key_file,
cert_file)
return slap.registerComputerPartition(computer_guid=computer_guid,
partition_id=partition_id)
def requestAddToken(args, can_bang=True):
def requestAddToken(client, base_token_path):
time.sleep(3)
registry_url = args['registry_url']
base_token_path = args['token_base_path']
path_list = [x for x in os.listdir(base_token_path) if x.endswith('.add')]
log.info("Searching tokens to add at %s and found %s." % (base_token_path, path_list))
if not path_list:
log.info("No new token to add. Exiting...")
return
client = registry.RegistryClient(registry_url)
call_bang = False
for reference_key in path_list:
request_file = os.path.join(base_token_path, reference_key)
token = readFile(request_file)
log.info("Including token %s for %s" % (token, reference_key))
if token :
reference = reference_key.split('.')[0]
# email is unique as reference is also unique
email = '%s@slapos' % reference.lower()
try:
result = client.requestAddToken(token, email)
result = client.addToken(email, token)
except Exception:
log.debug('Request add token failed for %s... \n %s' % (request_file,
log.info('Request add token failed for %s... \n %s' % (request_file,
traceback.format_exc()))
continue
if result and result == token:
if result in (token, None):
# update information
log.info("New token added for slave instance %s. Updating file status..." %
reference)
writeFile(os.path.join(base_token_path, '%s.status' % reference),
'TOKEN_ADDED')
status_file = os.path.join(base_token_path, '%s.status' % reference)
updateFile(status_file, 'TOKEN_ADDED')
os.unlink(request_file)
call_bang = True
else:
log.debug('Bad token. Request add token failed for %s...' % request_file)
if can_bang and call_bang:
bang(args)
def requestRemoveToken(args):
base_token_path = args['token_base_path']
def requestRemoveToken(client, base_token_path):
path_list = [x for x in os.listdir(base_token_path) if x.endswith('.remove')]
if not path_list:
log.info("No token to delete. Exiting...")
return
client = registry.RegistryClient(args['registry_url'])
for reference_key in path_list:
request_file = os.path.join(base_token_path, reference_key)
token = readFile(request_file)
if token :
reference = reference_key.split('.')[0]
try:
result = client.requestDeleteToken(token)
result = client.deleteToken(token)
except httplib.NOTFOUND:
# Token is already removed.
result = True
except Exception:
log.debug('Request delete token failed for %s... \n %s' % (request_file,
traceback.format_exc()))
......@@ -142,10 +109,12 @@ def requestRemoveToken(args):
else:
# certificate is invalidated, it will be revoked
writeFile(os.path.join(base_token_path, '%s.revoke' % reference), '')
if result == 'True':
if result in (True, 'True'):
# update information
log.info("Token deleted for slave instance %s. Clean up file status..." %
reference)
if result in ['True', 'False']:
os.unlink(request_file)
status_file = os.path.join(base_token_path, '%s.status' % reference)
......@@ -159,198 +128,109 @@ def requestRemoveToken(args):
log.debug('Bad token. Request add token failed for %s...' % request_file)
def requestRevoqueCertificate(args):
base_token_path = args['token_base_path']
db = getDb(args['db'])
path_list = [x for x in os.listdir(base_token_path) if x.endswith('.revoke')]
client = registry.RegistryClient(args['registry_url'])
for reference_key in path_list:
reference = reference_key.split('.')[0]
# XXX - email is always unique
email = '%s@slapos' % reference.lower()
cert_string = ''
try:
cert_string, = db.execute("SELECT cert FROM cert WHERE email = ?",
(email,)).next()
except StopIteration:
# Certificate was not generated yet !!!
pass
try:
if cert_string:
cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_string)
cn = x509.subnetFromCert(cert)
result = client.revoke(str(cn))
time.sleep(2)
except Exception:
log.debug('Request revoke certificate failed for %s... \n %s' % (reference,
traceback.format_exc()))
continue
else:
if revokeByMail(args['registry_url'],
'%s@slapos' % reference.lower(),
args['db']):
os.unlink(os.path.join(base_token_path, reference_key))
log.info("Certificate revoked for slave instance %s." % reference)
return
log.info("Failed to revoke email for %s" % reference)
def dumpIPv6Network(slave_reference, db, network, ipv6_file):
email = '%s@slapos' % slave_reference.lower()
try:
cert_string, = db.execute("SELECT cert FROM cert WHERE email = ?",
(email,)).next()
except StopIteration:
# Certificate was not generated yet !!!
pass
try:
if cert_string:
cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_string)
cn = x509.subnetFromCert(cert)
subnet = network + utils.binFromSubnet(cn)
ipv6 = utils.ipFromBin(subnet)
changed = readFile(ipv6_file) != ipv6
writeFile(ipv6_file, ipv6)
return ipv6, utils.binFromSubnet(cn), changed
except Exception:
log.debug('XXX for %s... \n %s' % (slave_reference,
traceback.format_exc()))
def checkService(client, base_token_path, token_json, computer_partition):
token_dict = loadJsonFile(token_json)
updated = False
if not token_dict:
return
def sendto(sock, prefix, code):
return sock.sendto("%s\0%c" % (prefix, code), ('::1', tunnel.PORT))
# Check token status
for slave_reference, token in token_dict.iteritems():
log.info("%s %s" % (slave_reference, token))
status_file = os.path.join(base_token_path, '%s.status' % slave_reference)
if not os.path.exists(status_file):
# This token is not added yet!
log.info("Token %s dont exist yet." % status_file)
continue
def recv(sock, code):
try:
prefix, msg = sock.recv(1<<16).split('\0', 1)
int(prefix, 2)
except ValueError:
pass
else:
if msg and ord(msg[0]) == code:
return prefix, msg[1:]
return None, None
if not client.isToken(str(token)):
# Token is used to register client
updateFile(status_file, 'TOKEN_USED')
log.info("Token status of %s updated to 'used'." % slave_reference)
def dumpIPv4Network(ipv6_prefix, network, ipv4_file, sock, peer_prefix_list):
try:
status = readFile(status_file)
log.info("Token %s has %s State." % (status_file, status))
if ipv6_prefix == "00000000000000000000000000000000":
# workaround to ignore the first node
ipv6 = "::"
ipv4 = "0.0.0.0"
changed = readFile(ipv4_file) != ipv4
writeFile(ipv4_file, ipv4)
return ipv4, changed
peers = []
peer_list = [prefix for prefix in peer_prefix_list if prefix == ipv6_prefix ]
msg = status
if status == 'TOKEN_ADDED':
msg = 'Token is ready for use'
elif status == 'TOKEN_USED':
msg = 'Token not available, it has been used to generate a re6stnet certificate.'
if len(peer_list) == 0:
raise ValueError("Unable to find such prefix on database")
peer = peer_list[0]
sendto(sock, peer, 1)
s = sock,
timeout = 15
end = timeout + time.time()
while select.select(s, (), (), timeout)[0]:
prefix, msg = recv(sock, 1)
if prefix == peer:
break
timeout = max(0, end - time.time())
else:
logging.info("Timeout while querying address for %s/%s", int(peer, 2), len(peer))
msg = ""
email = '%s@slapos' % slave_reference.lower()
if status == 'TOKEN_USED':
try:
ipv6 = client.getIPv6Address(str(email))
except Exception:
log.info('Error while dumping ipv6 for %s... \n %s' % (slave_reference,
traceback.format_exc()))
if "," in msg:
ipv4 = msg.split(',')[0]
else:
ipv4 = "0.0.0.0"
changed = readFile(ipv4_file) != ipv4
writeFile(ipv4_file, ipv4)
return ipv4, changed
log.info("%s, IPV6 = %s" % (slave_reference, ipv6))
try:
ipv4 = client.getIPv4Information(str(email)) or "0.0.0.0"
except Exception:
log.info('XXX for %s... \n %s' % (ipv6_prefix,
log.info('Error while dumping ipv4 for %s... \n %s' % (slave_reference,
traceback.format_exc()))
return "0.0.0.0", False
def checkService(args, can_bang=True):
base_token_path = args['token_base_path']
token_dict = loadJsonFile(args['token_json'])
log.info("%s, IPV4 = %s" % (slave_reference, ipv4))
try:
log.info("Update parameters for %s" % slave_reference)
# Normalise the values as simple strings to be on the same format that
# the values which come from master.
computer_partition.setConnectionDict({'token': str(token),
'1_info': str(msg),
'ipv6': str(ipv6),
'ipv4': str(ipv4)},
slave_reference)
except Exception:
log.fatal("Error while sending slave %s informations: %s",
slave_reference, traceback.format_exc())
if not token_dict:
return
db = getDb(args['db'])
call_bang = False
def manage(args, can_bang=True):
computer_guid = args['computer_id']
partition_id = args['partition_id']
slap = slapos.slap.slap()
client = registry.RegistryClient(args['registry_url'])
ca = client.getCa()
network = x509.networkFromCa(crypto.load_certificate(crypto.FILETYPE_PEM, ca))
sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
server_url = args['server_url']
key_file = args['key_file']
cert_file = args['cert_file']
peer_prefix_list = [prefix for prefix in
iterRoutes("/var/run/re6stnet/babeld.sock", network)]
# Check token status
for slave_reference, token in token_dict.iteritems():
status_file = os.path.join(base_token_path, '%s.status' % slave_reference)
ipv6_file = os.path.join(base_token_path, '%s.ipv6' % slave_reference)
ipv4_file = os.path.join(base_token_path, '%s.ipv4' % slave_reference)
if not os.path.exists(status_file):
# This token is not added yet!
log.info("Token %s dont exist yet." % status_file)
continue
msg = readFile(status_file)
log.info("Token %s has %s State." % (status_file, msg))
if msg == 'TOKEN_USED':
log.info("Dumping ipv6...")
ipv6, ipv6_prefix, ipv6_changed = dumpIPv6Network(slave_reference, db, network, ipv6_file)
log.info("%s, IPV6 = %s, IPV6_PREFIX = %s" % (slave_reference, ipv6, ipv6_prefix))
_, ipv4_changed = dumpIPv4Network(ipv6_prefix, network, ipv4_file, sock, peer_prefix_list)
if ipv4_changed or ipv6_changed:
call_bang = True
continue
# Check if token is not in the database
status = False
try:
token_found, = db.execute("SELECT token FROM token WHERE token = ?",
(token,)).next()
if token_found == token:
status = True
except StopIteration:
pass
if not status:
# Token is used to register client
call_bang = True
try:
writeFile(status_file, 'TOKEN_USED')
dumpIPv6Network(slave_reference, db, network, ipv6_file)
dumpIPv4Network(ipv6_prefix, network, ipv4_file, sock, peer_prefix_list)
log.info("Token status of %s updated to 'used'." % slave_reference)
except IOError:
# XXX- this file should always exists
log.debug('Error when writing in file %s. Could not update status of %s...' %
(status_file, slave_reference))
if call_bang and can_bang:
bang(args)
client = registry.RegistryClient(args['registry_url'])
base_token_path = args['token_base_path']
token_json = args['token_json']
def manage(args):
log.info("ADD TOKEN")
# Request Add new tokens
requestAddToken(args)
requestAddToken(client, base_token_path)
log.info("Remove TOKEN")
# Request delete removed token
requestRemoveToken(args)
requestRemoveToken(client, base_token_path)
computer_partition = getComputerPartition(server_url, key_file,
cert_file, computer_guid, partition_id)
log.info("Update Services")
# check status of all token
checkService(args)
checkService(client, base_token_path,
token_json, computer_partition)
......@@ -25,6 +25,7 @@
#
##############################################################################
from getpass import getpass
import hmac
import pwd
import grp
import os
......@@ -33,19 +34,24 @@ import shlex
from slapos.recipe.librecipe import GenericBaseRecipe
def login_shell(args):
password = args['password']
password_file = args['password-file']
if password_file:
with open(password_file, 'r') as password_file:
password = password_file.read()
if (password != ''):
entered_password = getpass()
else:
entered_password = ''
if entered_password != password:
if not hmac.compare_digest(entered_password, password):
return 1
else:
commandline = shlex.split(args['shell'])
path = commandline[0]
os.execv(path, commandline)
else:
return 1
def shellinabox(args):
certificate_dir = args['certificate_dir']
......@@ -95,7 +101,7 @@ class Recipe(GenericBaseRecipe):
self.options['login-shell'],
'%s.login_shell' % __name__,
{
'password': self.options['password'],
'password-file': self.options['password-file'],
'shell': self.options['shell']
}
)
......
......@@ -33,7 +33,6 @@ class Re6stnetTest(unittest.TestCase):
'ipv4': '127.0.0.1',
'port': '9201',
'pid-file': '/path/to/pid/file',
'db-path': '/path/to/db',
'command': '/path/to/command',
'manager-wrapper': os.path.join(self.base_dir, 'manager_wrapper'),
'drop-service-wrapper': os.path.join(self.base_dir, 'drop_wrapper'),
......@@ -86,22 +85,10 @@ class Re6stnetTest(unittest.TestCase):
self.assertIn("'key_file': '/path/to/key'", content)
self.assertIn("'cert_file': '/path/to/cert'", content)
self.assertIn("'server_url': 'http://server.com'", content)
self.assertIn("'db': '%s'" % self.options['db-path'], content)
self.assertIn("'token_base_path': '%s'" % self.token_dir, content)
self.assertIn("'registry_url': 'http://%s:%s/'" % (self.options['ipv4'],
self.options['port']), content)
def checkRegistryWrapper(self):
path = os.path.join(self.base_dir, 'wrapper')
self.assertTrue(os.path.exists(path))
content = ""
config_file = os.path.join(self.base_dir, 'config')
with open(path, 'r') as f:
content = f.read()
self.assertIn("@%s" % config_file, content)
self.assertIn("/path/to/pid/file", content)
self.assertIn("/path/to/command", content)
def fake_generateCertificates(self):
return
......@@ -182,10 +169,6 @@ class Re6stnetTest(unittest.TestCase):
self.assertEqual(token_dict['SOFTINST-58778'], second_add)
self.checkWrapper(os.path.join(self.base_dir, 'manager_wrapper'))
self.checkWrapper(os.path.join(self.base_dir, 'drop_wrapper'))
self.checkWrapper(os.path.join(self.base_dir, 'check_wrapper'))
self.checkWrapper(os.path.join(self.base_dir, 'revoke_wrapper'))
self.checkRegistryWrapper()
# Remove one element
self.options.update({
......@@ -226,8 +209,4 @@ class Re6stnetTest(unittest.TestCase):
self.assertItemsEqual(os.listdir(self.options['token-dir']), [])
self.checkWrapper(os.path.join(self.base_dir, 'manager_wrapper'))
self.checkWrapper(os.path.join(self.base_dir, 'drop_wrapper'))
self.checkWrapper(os.path.join(self.base_dir, 'check_wrapper'))
self.checkWrapper(os.path.join(self.base_dir, 'revoke_wrapper'))
self.checkRegistryWrapper()
......@@ -51,29 +51,29 @@ pycrypto = 2.6.1
pycurl = 7.43.0
slapos.recipe.download = 1.0
slapos.recipe.template = 2.8
slapos.toolbox = 0.65
slapos.toolbox = 0.66
smmap = 0.9.0
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
GitPython = 2.0.8
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
atomize = 0.2.0
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
feedparser = 5.2.1
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
lockfile = 0.12.2
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
paramiko = 2.0.1
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
rpdb = 0.1.5
......@@ -461,6 +461,6 @@ Solution 2 (IPv6 only)
It is also possible to directly allow the service to listen on ports 80 and 443 using the following command:
setcap 'cap_net_bind_service=+ep' /opt/slapgrid/$APACHE_FRONTEND_SOFTWARE_RELEASE_MD5/parts/apache-2.2/bin/httpd
setcap 'cap_net_bind_service=+ep' /opt/slapgrid/$APACHE_FRONTEND_SOFTWARE_RELEASE_MD5/parts/apache/bin/httpd
Then set the instance parameters "port" and "plain_http_port" to 443 and 80, respectively, for example as shown below.
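For example (illustrative only; the exact request mechanism depends on how the instance is requested), the two parameters would read:
port = 443
plain_http_port = 80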
# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginnings, copied verbatim
# - lines containing an "=" sign which must fit one of the following categories.
# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
# But avoid directories, as they are not portable.
# Copied verbatim.
# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
# by the re-generation script.
# Re-generated.
# - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[template]
filename = instance.cfg
md5sum = f686f765e55d1dce2e55a400f0714b3e
[template-apache-frontend]
filename = instance-apache-frontend.cfg
md5sum = e2f60121bc1116319b4d20b4c4badc1c
[template-apache-replicate]
filename = instance-apache-replicate.cfg.in
md5sum = 9b17c835bcd927269cf510bf612f5985
[template-slave-list]
filename = templates/apache-custom-slave-list.cfg.in
md5sum = 8a8db11499f0e3b00c0bd7d1e5afc99b
[template-slave-configuration]
filename = templates/custom-virtualhost.conf.in
md5sum = d103143e5d50682bd5ad43117d82e2fa
[template-replicate-publish-slave-information]
filename = templates/replicate-publish-slave-information.cfg.in
md5sum = 665e83d660c9b779249b2179d7ce4b4e
[template-apache-frontend-configuration]
filename = templates/apache.conf.in
md5sum = 51e0c8957e3cf30e09c7b836143c15c9
[template-custom-slave-list]
filename = templates/apache-custom-slave-list.cfg.in
md5sum = 8a8db11499f0e3b00c0bd7d1e5afc99b
[template-not-found-html]
filename = templates/notfound.html
md5sum = f20d6c3d2d94fb685f8d26dfca1e822b
[template-default-virtualhost]
filename = templates/000.conf.in
md5sum = d98a01182f38868612948c87d5231428
[template-default-slave-virtualhost]
filename = templates/default-virtualhost.conf.in
md5sum = 8fda2e88476ccb3652036302364f3506
[template-cached-slave-virtualhost]
filename = templates/cached-virtualhost.conf.in
md5sum = 1a1a53d9ac4a1591c017d86850a94796
[template-log-access]
filename = templates/template-log-access.conf.in
md5sum = f85005b430978f3bd24ee7ce11b0e304
[template-empty]
filename = templates/empty.in
md5sum = c2314c3a9c3412a38d14b312d3df83c1
[template-wrapper]
filename = templates/wrapper.in
md5sum = 8cde04bfd0c0e9bd56744b988275cfd8
[template-trafficserver-records-config]
filename = templates/trafficserver/records.config.jinja2
md5sum = 7578b780c6bffdbadb130b796c874d29
[template-trafficserver-storage-config]
filename = templates/trafficserver/storage.config.jinja2
md5sum = 117238225b3fc3c5b5be381815f44c67
[template-nginx-configuration]
filename = templates/nginx.cfg.in
md5sum = 18633ce55e53340efa1ba7693aac4152
[template-nginx-eventsource-slave-virtualhost]
filename = templates/nginx-eventsource-slave.conf.in
md5sum = a5186f666acb2f040ede04c91e60408f
[template-nginx-notebook-slave-virtualhost]
filename = templates/nginx-notebook-slave.conf.in
md5sum = 82d74a7f2aceb2b4a7acc6259291b7f2
[buildout]
extends =
buildout.hash.cfg
../../stack/slapos.cfg
../../component/git/buildout.cfg
../../component/dash/buildout.cfg
......@@ -20,15 +21,12 @@ extends =
# Monitoring stack (keep on bottom)
../../stack/monitor/buildout.cfg
extensions -=
buildout-versions
parts +=
template
template-apache-frontend
template-apache-replicate
apache-2.2
apache-antiloris-apache-2.2
apache
apache-antiloris
stunnel
......@@ -49,102 +47,79 @@ eggs +=
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
md5sum = f686f765e55d1dce2e55a400f0714b3e
output = ${buildout:directory}/template.cfg
mode = 0644
[template-apache-frontend]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-apache-frontend.cfg
md5sum = 6d437f8a3836484d42bf9bf8d747e201
output = ${buildout:directory}/template-apache-frontend.cfg
mode = 0644
[template-apache-replicate]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-apache-replicate.cfg.in
md5sum = 9b17c835bcd927269cf510bf612f5985
mode = 0644
[template-slave-list]
[download-template]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/apache-custom-slave-list.cfg.in
md5sum = 377aa36899c963ac65558b4888f8264b
url = ${:_profile_base_location_}/templates/${:filename}
mode = 640
[template-slave-list]
<=download-template
filename = apache-custom-slave-list.cfg.in
[template-slave-configuration]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/custom-virtualhost.conf.in
md5sum = d103143e5d50682bd5ad43117d82e2fa
mode = 640
<=download-template
filename = custom-virtualhost.conf.in
[template-replicate-publish-slave-information]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/replicate-publish-slave-information.cfg.in
md5sum = 665e83d660c9b779249b2179d7ce4b4e
mode = 640
<=download-template
filename = replicate-publish-slave-information.cfg.in
[template-apache-frontend-configuration]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/apache.conf.in
md5sum = 7d5f0f38e4dd81ff26f2499890b13315
mode = 640
<=download-template
filename = apache.conf.in
[template-custom-slave-list]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/apache-default-slave-list.cfg.in
md5sum = 5252c0db72b57ce6debb9d4fb4e706a1
mode = 640
<=download-template
filename = apache-default-slave-list.cfg.in
[template-not-found-html]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/notfound.html
<=download-template
filename = notfound.html
md5sum = f20d6c3d2d94fb685f8d26dfca1e822b
mode = 640
[template-default-virtualhost]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/000.conf.in
md5sum = d98a01182f38868612948c87d5231428
mode = 640
<=download-template
filename = 000.conf.in
[template-default-slave-virtualhost]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/default-virtualhost.conf.in
md5sum = e5ed71c5e22ab91e33a71bd09879e23c
mode = 640
<=download-template
filename = default-virtualhost.conf.in
[template-cached-slave-virtualhost]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/cached-virtualhost.conf.in
md5sum = 432e55df3b42243a98b564cca57e2396
mode = 640
<=download-template
filename = cached-virtualhost.conf.in
[template-log-access]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/template-log-access.conf.in
md5sum = f85005b430978f3bd24ee7ce11b0e304
mode = 640
<=download-template
filename = template-log-access.conf.in
[template-empty]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/empty.in
md5sum = c2314c3a9c3412a38d14b312d3df83c1
mode = 640
<=download-template
filename = empty.in
[template-wrapper]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/templates/wrapper.in
output = ${buildout:directory}/template-wrapper.cfg
mode = 0644
md5sum = 8cde04bfd0c0e9bd56744b988275cfd8
[template-trafficserver-records-config]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/templates/trafficserver/${:filename}
md5sum = 59287d2de3d948b135619edd211a5e84
location = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = records.config.jinja2
download-only = true
......@@ -154,7 +129,6 @@ mode = 0644
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/templates/trafficserver/${:filename}
md5sum = 23c83efaf2fb0f9f0b3096bbfdda05c1
location = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = storage.config.jinja2
download-only = true
......@@ -164,21 +138,16 @@ mode = 0644
[template-nginx-configuration]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/templates/nginx.cfg.in
md5sum = 18633ce55e53340efa1ba7693aac4152
output = ${buildout:directory}/template-nginx.cfg.in
mode = 0644
[template-nginx-eventsource-slave-virtualhost]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/nginx-eventsource-slave.conf.in
md5sum = a5186f666acb2f040ede04c91e60408f
mode = 0644
<=download-template
filename = nginx-eventsource-slave.conf.in
[template-nginx-notebook-slave-virtualhost]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/nginx-notebook-slave.conf.in
md5sum = 82d74a7f2aceb2b4a7acc6259291b7f2
mode = 0644
<=download-template
filename = nginx-notebook-slave.conf.in
# Migrated from KVM recipe
[http-proxy]
......
......@@ -8,25 +8,12 @@
[buildout]
extends =
# Extend in this order, otherwise "parts" will be taken from git profile
../../component/git/buildout.cfg
common.cfg
parts +=
slapos.cookbook-repository
slapos.toolbox-dev
develop =
${:parts-directory}/slapos.cookbook-repository
[slapos.cookbook-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.git
branch = master
git-executable = ${git:location}/bin/git
[check-recipe]
recipe = plone.recipe.command
stop-on-error = true
update-command = ${:command}
command =
grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link &&
[slapos.toolbox-dev]
recipe = zc.recipe.egg:develop
egg = slapos.toolbox
setup = ${slapos.toolbox-repository:location}
......@@ -22,12 +22,14 @@ parts =
promise-apache-frontend-v6-http
promise-apache-frontend-cached
promise-apache-frontend-ssl-cached
promise-apache-is-process-older-than-dependency-set
promise-nginx-frontend-v4-https
promise-nginx-frontend-v4-http
promise-nginx-frontend-v6-https
promise-nginx-frontend-v6-http
promise-nginx-configuration
promise-nginx-is-process-older-than-dependency-set
trafficserver-launcher
trafficserver-reload
......@@ -37,6 +39,7 @@ parts =
trafficserver-plugin-config
trafficserver-storage-config
trafficserver-promise-listen-port
trafficserver-promise-cache-availability
## Nginx
nginx-frontend
......@@ -45,6 +48,8 @@ parts =
## Monitor for apache
monitor-base
monitor-ats-cache-stats-wrapper
monitor-traffic-summary-last-stats-wrapper
monitor-apache-server-status-wrapper
monitor-verify-re6st-connectivity
extends = ${monitor-template:output}
......@@ -114,6 +119,7 @@ configuration.apache-ca-certificate =
configuration.open-port = 80 443
configuration.extra_slave_instance_list =
configuration.disk-cache-size = 8G
configuration.ram-cache-size = 1G
configuration.trafficserver-autoconf-port = 8083
configuration.trafficserver-mgmt-port = 8084
configuration.re6st-verification-url = http://[2001:67c:1254:4::1]/index.html
......@@ -121,7 +127,7 @@ configuration.re6st-verification-url = http://[2001:67c:1254:4::1]/index.html
[frontend-configuration]
template-log-access = ${template-log-access:target}
log-access-configuration = $${directory:etc}/apache-log-access.conf
apache-directory = ${apache-2.2:location}
apache-directory = ${apache:location}
apache-ipv6 = $${instance-parameter:ipv6-random}
apache-https-port = $${instance-parameter:configuration.port}
......@@ -147,6 +153,15 @@ extra-context =
key http_port instance-parameter:configuration.plain_http_port
key https_port instance-parameter:configuration.port
[software-release-path]
template-empty = ${template-empty:target}
template-slave-configuration = ${template-slave-configuration:target}
template-default-slave-virtualhost = ${template-default-slave-virtualhost:target}
template-cached-slave-virtualhost = ${template-cached-slave-virtualhost:target}
template-nginx-eventsource-slave-virtualhost = ${template-nginx-eventsource-slave-virtualhost:target}
template-nginx-notebook-slave-virtualhost = ${template-nginx-notebook-slave-virtualhost:target}
apache-location = ${apache:location}
[dynamic-custom-personal-template-slave-list]
< = jinja2-template-base
template = ${template-slave-list:target}
......@@ -172,18 +187,21 @@ extra-context =
key global_ipv6 slap-network-information:global-ipv6
key cache_port apache-configuration:cache-port
key varnginx directory:varnginx
raw empty_template ${template-empty:target}
raw template_custom_slave_configuration ${template-slave-configuration:target}
raw template_default_slave_configuration ${template-default-slave-virtualhost:target}
raw template_cached_slave_configuration ${template-cached-slave-virtualhost:target}
raw template_eventsource_slave_configuration ${template-nginx-eventsource-slave-virtualhost:target}
raw template_notebook_slave_configuration ${template-nginx-notebook-slave-virtualhost:target}
key empty_template software-release-path:template-empty
key template_custom_slave_configuration software-release-path:template-slave-configuration
key template_default_slave_configuration software-release-path:template-default-slave-virtualhost
key template_cached_slave_configuration software-release-path:template-cached-slave-virtualhost
key template_eventsource_slave_configuration software-release-path:template-nginx-eventsource-slave-virtualhost
key template_notebook_slave_configuration software-release-path:template-nginx-notebook-slave-virtualhost
raw software_type single-custom-personal
section logrotate_dict logrotate
section frontend_configuration frontend-configuration
section apache_configuration apache-configuration
section nginx_configuration nginx-configuration
key monitor_base_url monitor-instance-parameter:monitor-base-url
key promise_directory monitor-directory:promises
key report_directory monitor-directory:reports
raw bin_directory ${buildout:bin-directory}
[dynamic-virtualhost-template-slave]
<= jinja2-template-base
......@@ -202,7 +220,7 @@ extra-context =
template = ${template-apache-frontend-configuration:target}
rendered = $${apache-configuration:frontend-configuration}
extra-context =
raw httpd_home ${apache-2.2:location}
key httpd_home software-release-path:apache-location
key httpd_mod_ssl_cache_directory apache-directory:mod-ssl
key domain instance-parameter:configuration.domain
key document_root apache-directory:document-root
......@@ -230,7 +248,7 @@ extra-context =
[apache-frontend]
recipe = slapos.cookbook:wrapper
command-line = ${apache-2.2:location}/bin/httpd -f $${dynamic-apache-frontend-template:rendered} -DFOREGROUND
command-line = ${apache:location}/bin/httpd -f $${dynamic-apache-frontend-template:rendered} -DFOREGROUND
wrapper-path = $${directory:service}/frontend_apache
wait-for-files =
$${ca-frontend:cert-file}
......@@ -260,7 +278,7 @@ error-log = $${directory:log}/frontend-apache-error.log
pid-file = $${directory:run}/httpd.pid
protected-path = /
access-control-string = none
frontend-configuration-verification = ${apache-2.2:location}/bin/httpd -Sf $${:frontend-configuration} > /dev/null
frontend-configuration-verification = ${apache:location}/bin/httpd -Sf $${:frontend-configuration} > /dev/null
frontend-graceful-command = $${:frontend-configuration-verification}; if [ $? -eq 0 ]; then kill -USR1 $(cat $${:pid-file}); fi
# Communication with ATS
......@@ -271,7 +289,7 @@ ssl-cache-through-port = 26012
# Create wrapper for "apachectl conftest" in bin
[configtest]
recipe = slapos.cookbook:wrapper
command-line = ${apache-2.2:location}/bin/httpd -f $${directory:etc}/apache_frontend.conf -t
command-line = ${apache:location}/bin/httpd -f $${directory:etc}/apache_frontend.conf -t
wrapper-path = $${directory:bin}/apache-configtest
[certificate-authority]
......@@ -337,7 +355,7 @@ command = $${logrotate:wrapper}
[logrotate]
recipe = slapos.cookbook:logrotate
# Binaries
logrotate-binary = ${logrotate:location}/usr/sbin/logrotate
logrotate-binary = ${logrotate:location}/sbin/logrotate
gzip-binary = ${gzip:location}/bin/gzip
gunzip-binary = ${gzip:location}/bin/gunzip
# Directories
......@@ -396,6 +414,7 @@ cache-path = $${trafficserver-directory:cache-path}
disk-cache-size = $${instance-parameter:configuration.disk-cache-size}
autoconf-port = $${instance-parameter:configuration.trafficserver-autoconf-port}
mgmt-port = $${instance-parameter:configuration.trafficserver-mgmt-port}
ram-cache-size = $${instance-parameter:configuration.ram-cache-size}
[trafficserver-configuration-directory]
recipe = plone.recipe.command
......@@ -414,37 +433,40 @@ command-line = ${trafficserver:location}/bin/traffic_line -x
wrapper-path = $${trafficserver-variable:reload-path}
environment = TS_ROOT=$${buildout:directory}
# XXX Dedicated Jinja Section without slapparameter
[trafficserver-jinja2-template-base]
recipe = slapos.recipe.template:jinja2
rendered = $${trafficserver-directory:configuration}/$${:filename}
extra-context =
mode = 600
context =
section ats_directory trafficserver-directory
section ats_configuration trafficserver-variable
$${:extra-context}
[trafficserver-records-config]
< = jinja2-template-base
< = trafficserver-jinja2-template-base
template = ${template-trafficserver-records-config:location}/${template-trafficserver-records-config:filename}
rendered = $${trafficserver-directory:configuration}/records.config
mode = 700
filename = records.config
extra-context =
import os_module os
section ats_directory trafficserver-directory
section ats_configuration trafficserver-variable
[trafficserver-storage-config]
< = jinja2-template-base
< = trafficserver-jinja2-template-base
template = ${template-trafficserver-storage-config:location}/${template-trafficserver-storage-config:filename}
rendered = $${trafficserver-directory:configuration}/storage.config
mode = 700
extra-context =
section trafficserver trafficserver-variable
filename = storage.config
[trafficserver-remap-config]
< = jinja2-template-base
< = trafficserver-jinja2-template-base
template = ${template-empty:target}
rendered = $${trafficserver-configuration-directory:target}/remap.config
mode = 700
filename = remap.config
context =
key content trafficserver-variable:remap
[trafficserver-plugin-config]
< = jinja2-template-base
< = trafficserver-jinja2-template-base
template = ${template-empty:target}
rendered = $${trafficserver-configuration-directory:target}/plugin.config
mode = 700
filename = plugin.config
context =
key content trafficserver-variable:plugin-config
......@@ -454,6 +476,27 @@ path = $${directory:promise}/trafficserver-port-listening
hostname = $${trafficserver-variable:local-ip}
port = $${trafficserver-variable:input-port}
[trafficserver-line]
recipe = slapos.cookbook:wrapper
command-line = ${trafficserver:location}/bin/traffic_line
wrapper-path = $${directory:bin}/traffic_line
environment = TS_ROOT=$${buildout:directory}
parameters-extra = true
[trafficserver-promise-cache-availability]
recipe = collective.recipe.template
input =
inline:#!${buildout:executable}
import subprocess
import sys
traffic_line = "$${trafficserver-line:wrapper-path}"
result = float(subprocess.check_output([traffic_line, '-r', 'proxy.node.cache.percent_free' ]))
if result != 0: sys.exit(0)
sys.stderr.write("Cache not available, availability: %s" % result)
sys.exit(127)
output = $${directory:promise}/trafficserver-cache-availability
mode = 700
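For context, SlapOS promise scripts follow a simple convention: they exit 0 when the check passes and non-zero otherwise. A minimal standalone sketch of the check inlined above (illustrative only, assuming a traffic_line wrapper on PATH with TS_ROOT already exported):

#!/usr/bin/env python
# Illustrative sketch of the inline cache-availability promise above.
# SlapOS promise convention: exit 0 when the check passes, non-zero otherwise.
import subprocess
import sys

TRAFFIC_LINE = 'traffic_line'  # assumption: the wrapper already exported TS_ROOT

def main():
    # proxy.node.cache.percent_free is 0 when no cache volume is usable
    output = subprocess.check_output(
        [TRAFFIC_LINE, '-r', 'proxy.node.cache.percent_free'])
    percent_free = float(output)
    if percent_free != 0:
        return 0
    sys.stderr.write("Cache not available, availability: %s\n" % percent_free)
    return 127

if __name__ == '__main__':
    sys.exit(main())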
### End of ATS sections
### Apache graceful restart and promises
......@@ -510,6 +553,14 @@ path = $${directory:promise}/apache_ssl_cached
hostname = $${instance-parameter:ipv4-random}
port = $${apache-configuration:ssl-cache-through-port}
[promise-apache-is-process-older-than-dependency-set]
recipe = slapos.cookbook:wrapper
command-line = ${buildout:bin-directory}/is-process-older-than-dependency-set $${apache-configuration:pid-file}
wrapper-path = $${directory:promise}/apache-frontend-is-running-actual-software-release
parameters-extra = true
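is-process-older-than-dependency-set is shipped by slapos.toolbox; roughly, the promise fails when the process behind the pid file was started before its software-release dependencies were last updated, so that the service gets restarted. A rough, hypothetical illustration of that idea (not the real implementation):

# Hypothetical illustration only -- the real check comes from slapos.toolbox.
# Idea: if the running process predates its dependencies (the software release
# files), the promise fails and the watchdog restarts the service.
import os
import sys

def process_start_time(pid_file):
    # Linux-only approximation: /proc/<pid> is created when the process starts.
    with open(pid_file) as f:
        pid = int(f.read().strip())
    return os.stat('/proc/%d' % pid).st_mtime

def newest_dependency_mtime(dependency_path_list):
    return max(os.stat(path).st_mtime for path in dependency_path_list)

def is_older_than_dependencies(pid_file, dependency_path_list):
    return process_start_time(pid_file) < newest_dependency_mtime(dependency_path_list)

if __name__ == '__main__':
    # Hypothetical invocation: first argument is the pid file, remaining
    # arguments are dependency paths (the real tool derives them itself).
    sys.exit(1 if is_older_than_dependencies(sys.argv[1], sys.argv[2:]) else 0)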
[slap_connection]
# Kept for backward compatibility
computer_id = $${slap-connection:computer-id}
......@@ -555,16 +606,35 @@ password = $${slap-parameter:monitor-password}
private-path-list +=
$${directory:logrotate-backup}
[monitor-traffic-summary-last-stats-wrapper]
< = jinja2-template-base
template = ${template-wrapper:output}
rendered = $${monitor-directory:reports}/traffic-summary-last-stats_every_1_hour
mode = 0700
command = export TS_ROOT=$${buildout:directory} && echo "<pre>$(${trafficserver:location}/bin/traffic_logstats -f $${trafficserver-directory:log}/squid.blog)</pre>"
extra-context =
key content monitor-traffic-summary-last-stats-wrapper:command
# Produce ATS Cache stats
[monitor-ats-cache-stats-wrapper]
< = jinja2-template-base
template = ${template-wrapper:output}
rendered = $${monitor-directory:reports}/ats-cache-stats
rendered = $${monitor-directory:reports}/ats-cache-stats_every_1_hour
mode = 0700
command = export TS_ROOT=$${buildout:directory} && echo "<pre>$(${trafficserver:location}/bin/traffic_shell $${monitor-ats-cache-stats-config:rendered})</pre>"
extra-context =
key content monitor-ats-cache-stats-wrapper:command
[monitor-apache-server-status-wrapper]
< = jinja2-template-base
template = ${template-wrapper:output}
rendered = $${monitor-directory:reports}/monitor-apache-server-status-wrapper
mode = 0700
command = ${curl:location}/bin/curl -s http://$${instance-parameter:ipv4-random}:$${instance-parameter:configuration.plain_http_port}/server-status -u $${monitor-htpasswd:username}:$${monitor-htpasswd:passwd} 2>&1
extra-context =
key content monitor-apache-server-status-wrapper:command
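The wrapper above simply dumps the output of a curl call into the monitor report. A hypothetical Python equivalent (placeholder URL and credentials, assuming the requests library is available):

# Hypothetical Python equivalent of the curl call above; values are placeholders.
import requests

def fetch_server_status(ip, port, username, password):
    url = 'http://%s:%s/server-status' % (ip, port)
    response = requests.get(url, auth=(username, password), timeout=10)
    response.raise_for_status()  # treat HTTP errors as failures
    return response.text

if __name__ == '__main__':
    print(fetch_server_status('127.0.0.1', '8080', 'admin', 'secret'))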
[monitor-ats-cache-stats-config]
< = jinja2-template-base
template = ${template-empty:target}
......@@ -646,5 +716,8 @@ path = $${directory:promise}/nginx_frontend_ipv6_http
hostname = $${instance-parameter:ipv6-random}
port = $${instance-parameter:configuration.plain_nginx_port}
[promise-nginx-is-process-older-than-dependency-set]
recipe = slapos.cookbook:wrapper
command-line = ${buildout:bin-directory}/is-process-older-than-dependency-set $${nginx-configuration:pid-file}
wrapper-path = $${directory:promise}/promise-nginx-is-process-older-than-dependency-set
parameters-extra = true
......@@ -96,6 +96,27 @@
"default": ""
},
"monitor-ipv6-test": {
"title": "IPv6 Address to Monitor Packet Lost",
"description": "IPv6 Address for the frontend keep monitoring with ping6 (without brackets)",
"type": "string",
"default": ""
},
"monitor-ipv4-test": {
"title": "IPv4 Address to Monitor Packet Lost",
"description": "IPv4 Address for the frontend keep monitoring with ping",
"type": "string",
"default": ""
},
"re6st-optimal-test": {
"title": "IPv6 and IPv4 Address to test Re6st",
"description": "IPv6 and IPv6 Address for the frontend test if re6st is on the optimal status (use ipv6,ipv4)",
"type": "string",
"default": ""
},
"enable_cache": {
"title": "Enable Cache",
"description": "If set to true, http caching server (Apache Traffic Server) will be used between frontend apache and backend",
......
......@@ -2,10 +2,6 @@
extends = common.cfg
[versions]
setuptools = 28.8.0
zc.buildout = 2.5.2+slapos005
zc.recipe.egg = 2.0.3+slapos002
PyRSS2Gen = 1.1
apache-libcloud = 0.19.0
cns.recipe.symlink = 0.2.3
......@@ -15,9 +11,10 @@ plone.recipe.command = 1.1
pycrypto = 2.6.1
rdiff-backup = 1.0.5+SlapOSPatched001
slapos.recipe.template = 2.8
slapos.toolbox = 0.65
slapos.toolbox = 0.66
smmap = 0.9.0
numpy = 1.11.2
pyasn1 = 0.2.3
websockify = 0.8.0
......@@ -34,29 +31,29 @@ erp5.util = 0.4.46
passlib = 1.6.5
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
GitPython = 2.0.8
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
atomize = 0.2.0
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
feedparser = 5.2.1
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
lockfile = 0.12.2
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
paramiko = 2.0.1
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
pycurl = 7.43.0
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
rpdb = 0.1.5
......@@ -27,6 +27,13 @@ context =
{{ key }} = {{ value }}
{% endfor %}
post = {{ apache_configuration.get('frontend-graceful-command') }}
frequency = daily
rotate-num = 30
sharedscripts = true
notifempty = true
create = true
[cadirectory]
recipe = slapos.cookbook:mkdirectory
requests = {{ custom_ssl_directory }}/requests/
......@@ -98,7 +105,7 @@ crl = {{ custom_ssl_directory }}/crl/
{% do slave_publish_dict.__setitem__('secure_access', 'https://%s' % slave_instance.get('custom_domain')) -%}
{% endif -%}
[slave-log-directory]
[slave-log-directories]
{{slave_reference}}-log-folder = {{ slave_log_folder }}
{# Set slave logrotate entry #}
......@@ -108,12 +115,6 @@ recipe = slapos.cookbook:logrotate.d
name = ${:_buildout_section_name_}
log = {{slave_parameter_dict.get('access_log')}} {{slave_parameter_dict.get('error_log')}}
backup = {{ slave_log_folder }}
frequency = daily
rotatep-num = 30
post = {{ apache_configuration.get('frontend-graceful-command') }}
sharedscripts = true
notifempty = true
create = true
{# integrate current logs inside #}
[{{slave_ln_section}}]
......@@ -227,6 +228,58 @@ extra-context =
section slave_parameter {{ slave_configuration_section_name }}
{{ '\n' }}
{% set check_error_log_section_title = 'check-%s-error-log-last-hour' % slave_instance.get('slave_reference') -%}
{% do part_list.append(check_error_log_section_title) -%}
[{{ check_error_log_section_title }}]
recipe = slapos.cookbook:wrapper
command-line = {{ bin_directory }}/check-error-on-apache-log -l {{ slave_instance.get('error_log') }} -d 3600
filename = {{ check_error_log_section_title }}
wrapper-path = {{ promise_directory }}/${:filename}
{% set check_error_log_section_title = 'check-%s-error-log-last-day' % slave_instance.get('slave_reference') -%}
{% do part_list.append(check_error_log_section_title) -%}
[{{ check_error_log_section_title }}]
recipe = slapos.cookbook:wrapper
command-line = {{ bin_directory }}/check-error-on-apache-log -l {{ slave_instance.get('error_log') }} -d 86400
filename = {{ check_error_log_section_title }}
wrapper-path = {{ promise_directory }}/${:filename}
{% set monitor_ipv6_test = slave_instance.get('monitor-ipv6-test', '') %}
{% if monitor_ipv6_test %}
{% set monitor_ipv6_section_title = 'check-%s-ipv6-packet-list-test' % slave_instance.get('slave_reference') %}
{% do part_list.append(monitor_ipv6_section_title) -%}
[{{ monitor_ipv6_section_title }}]
recipe = slapos.cookbook:wrapper
command-line = {{ bin_directory }}/is-icmp-packet-lost -a {{monitor_ipv6_test}}
filename = {{ monitor_ipv6_section_title }}
wrapper-path = {{ promise_directory }}/${:filename}
{% endif %}
{% set monitor_ipv4_test = slave_instance.get('monitor-ipv4-test', '') %}
{% if monitor_ipv4_test %}
{% set monitor_ipv4_section_title = 'check-%s-ipv4-packet-list-test' % slave_instance.get('slave_reference') %}
{% do part_list.append(monitor_ipv4_section_title) -%}
[{{ monitor_ipv4_section_title }}]
recipe = slapos.cookbook:wrapper
command-line = {{ bin_directory }}/is-icmp-packet-lost -4 -a {{monitor_ipv4_test}}
filename = {{ monitor_ipv4_section_title }}
wrapper-path = {{ promise_directory }}/${:filename}
{% endif %}
{% set re6st_optimal_test = slave_instance.get('re6st-optimal-test', '') %}
{% if re6st_optimal_test %}
{% set re6st_ipv6, re6st_ipv4 = re6st_optimal_test.split(",") %}
{% set re6st_optimal_test_section_title = 'check-%s-re6st-optimal-test' % slave_instance.get('slave_reference') %}
{% do part_list.append(re6st_optimal_test_section_title) -%}
[{{ re6st_optimal_test_section_title }}]
recipe = slapos.cookbook:wrapper
command-line = {{ bin_directory }}/check-re6st-optimal-status -4 {{re6st_ipv4}} -6 {{re6st_ipv6}}
filename = {{ re6st_optimal_test_section_title }}
wrapper-path = {{ promise_directory }}/${:filename}
{% endif %}
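The blocks above generate one promise wrapper per slave around slapos.toolbox scripts (check-error-on-apache-log, is-icmp-packet-lost, check-re6st-optimal-status). As a rough, hypothetical illustration of what such a packet-loss promise boils down to (not the slapos.toolbox implementation):

# Rough, hypothetical illustration of an ICMP packet-loss promise; the real
# check is the is-icmp-packet-lost script shipped by slapos.toolbox.
import re
import subprocess
import sys

def packet_loss_percent(address, count=5, ipv6=True):
    ping = 'ping6' if ipv6 else 'ping'
    # ping exits non-zero when no reply was received, so read its output directly
    process = subprocess.Popen([ping, '-c', str(count), address],
                               stdout=subprocess.PIPE)
    output = process.communicate()[0].decode('utf-8', 'replace')
    match = re.search(r'([\d.]+)% packet loss', output)
    return float(match.group(1)) if match else 100.0

if __name__ == '__main__':
    loss = packet_loss_percent(sys.argv[1])
    if loss >= 100.0:
        sys.stderr.write('All packets lost to %s\n' % sys.argv[1])
        sys.exit(1)
    sys.exit(0)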
{# ############################### #}
{# Publish Slave Information #}
{% if not extra_slave_instance_list -%}
......@@ -240,6 +293,8 @@ recipe = slapos.cookbook:publish
{% else -%}
{% do slave_instance_information_list.append(slave_publish_dict) -%}
{% endif -%}
{# End of the main for loop #}
{% endfor -%}
###############################################
......@@ -263,9 +318,6 @@ extra-context =
[slave-log-directories]
recipe = slapos.cookbook:mkdirectory
{% for key, value in slave_log_dict.iteritems() -%}
{{ key }} = {{ value }}
{% endfor %}
{# Define log access #}
[apache-log-access]
......@@ -278,9 +330,7 @@ extra-context =
raw apache_configuration_directory {{apache_configuration_directory}}
{# Publish information for the instance #}
{% set publish_section_title = 'publish-apache-information' -%}
{% do part_list.append(publish_section_title) -%}
[{{ publish_section_title }}]
[publish-apache-information]
recipe = slapos.cookbook:publish
public-ipv4 = {{ public_ipv4 }}
private-ipv4 = {{ local_ipv4 }}
......@@ -295,6 +345,7 @@ parts +=
{% for part in part_list -%}
{{ ' %s' % part }}
{% endfor %}
publish-apache-information
apache-log-access
eggs-directory = {{ eggs_directory }}
......
......@@ -44,8 +44,7 @@ CustomLog "{{ access_log }}" combined
</Directory>
<Directory {{ document_root }}>
Order Allow,Deny
Allow from All
Require all granted
Options -Indexes
ErrorDocument 404 /notfound.html
RewriteEngine on
......@@ -53,10 +52,11 @@ CustomLog "{{ access_log }}" combined
</Directory>
# List of modules
#LoadModule unixd_module modules/mod_unixd.so
#LoadModule access_compat_module modules/mod_access_compat.so
#LoadModule authz_core_module modules/mod_authz_core.so
LoadModule unixd_module {{ httpd_home }}/modules/mod_unixd.so
LoadModule access_compat_module {{ httpd_home }}/modules/mod_access_compat.so
LoadModule authz_core_module {{ httpd_home }}/modules/mod_authz_core.so
LoadModule authz_host_module {{ httpd_home }}/modules/mod_authz_host.so
LoadModule authn_core_module {{ httpd_home }}/modules/mod_authn_core.so
LoadModule log_config_module {{ httpd_home }}/modules/mod_log_config.so
LoadModule deflate_module {{ httpd_home }}/modules/mod_deflate.so
LoadModule setenvif_module {{ httpd_home }}/modules/mod_setenvif.so
......@@ -71,13 +71,18 @@ LoadModule negotiation_module {{ httpd_home }}/modules/mod_negotiation.so
LoadModule rewrite_module {{ httpd_home }}/modules/mod_rewrite.so
LoadModule headers_module {{ httpd_home }}/modules/mod_headers.so
LoadModule cache_module {{ httpd_home }}/modules/mod_cache.so
LoadModule mem_cache_module {{ httpd_home }}/modules/mod_mem_cache.so
LoadModule cache_socache_module {{ httpd_home }}/modules/mod_cache_socache.so
LoadModule socache_shmcb_module {{ httpd_home }}/modules/mod_socache_shmcb.so
LoadModule antiloris_module {{ httpd_home }}/modules/mod_antiloris.so
LoadModule alias_module {{ httpd_home }}/modules/mod_alias.so
LoadModule autoindex_module {{ httpd_home }}/modules/mod_autoindex.so
LoadModule auth_basic_module {{ httpd_home }}/modules/mod_auth_basic.so
LoadModule authz_user_module {{ httpd_home }}/modules/mod_authz_user.so
LoadModule authn_file_module {{ httpd_home }}/modules/mod_authn_file.so
LoadModule filter_module {{ httpd_home }}/modules/mod_filter.so
LoadModule http2_module {{ httpd_home }}/modules/mod_http2.so
LoadModule info_module {{ httpd_home }}/modules/mod_info.so
LoadModule status_module {{ httpd_home }}/modules/mod_status.so
# The following directives modify normal HTTP response behavior to
# handle known problems with browser implementations.
......@@ -106,6 +111,22 @@ BrowserMatch "^Dreamweaver-WebDAV-SCM1" redirect-carefully
IPReadLimit {{ slapparameter_dict.get('ip-read-limit', '10') }}
</IfModule>
ExtendedStatus On
<Location /server-status>
SetHandler server-status
Order Deny,Allow
Deny from all
Allow from All
AuthType basic
AuthName "Apache Server Status"
AuthBasicProvider file
AuthUserFile {{ instance_home }}/etc/monitor-htpasswd
Require valid-user
</Location>
# Deflate
AddOutputFilterByType DEFLATE text/html text/plain text/xml text/css text/javascript application/x-javascript application/javascript
BrowserMatch ^Mozilla/4 gzip-only-text/html
......
{% set TRUE_VALUES = ['y', 'yes', '1', 'true'] -%}
{% set server_alias_list = slave_parameter.get('server-alias', '').split() %}
{% set ssl_proxy_verify = ('' ~ slave_parameter.get('ssl-proxy-verify', '')).lower() in TRUE_VALUES -%}
# Only accept generic (i.e not Zope) backends on http
<VirtualHost *:{{ cached_port }}>
ServerName {{ slave_parameter.get('custom_domain') }}
{%- if 'server-alias' in slave_parameter -%}
{% set server_alias_list = slave_parameter.get('server-alias', '').split() %}
{%- for server_alias in server_alias_list %}
ServerAlias {{ server_alias }}
{% endfor %}
{%- endif %}
SSLProxyEngine on
{% set ssl_proxy_verify = ('' ~ slave_parameter.get('ssl-proxy-verify', '')).lower() in TRUE_VALUES -%}
{% if ssl_proxy_verify -%}
{% if 'ssl_proxy_ca_crt' in slave_parameter -%}
SSLProxyCACertificateFile {{ slave_parameter.get('path_to_ssl_proxy_ca_crt', '') }}
......@@ -25,20 +24,15 @@
RewriteEngine On
RewriteRule ^/(.*)$ {{ slave_parameter.get('backend_url', '') }}/$1 [L,P]
</VirtualHost>
<VirtualHost *:{{ ssl_cached_port }}>
ServerName {{ slave_parameter.get('custom_domain') }}
{%- if 'server-alias' in slave_parameter -%}
{% set server_alias_list = slave_parameter.get('server-alias', '').split() %}
{%- for server_alias in server_alias_list %}
ServerAlias {{ server_alias }}
{% endfor %}
{%- endif %}
SSLProxyEngine on
{% set ssl_proxy_verify = ('' ~ slave_parameter.get('ssl-proxy-verify', '')).lower() in TRUE_VALUES -%}
{% if ssl_proxy_verify -%}
{% if 'ssl_proxy_ca_crt' in slave_parameter -%}
SSLProxyCACertificateFile {{ slave_parameter.get('path_to_ssl_proxy_ca_crt', '') }}
......
......@@ -33,6 +33,8 @@
SSLProtocol all -SSLv2 -SSLv3
SSLCipherSuite ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:HIGH:!aNULL:!MD5
SSLHonorCipherOrder on
Protocols h2 http/1.1
{% for key, value in ssl_configuration_list -%}
{% if value in slave_parameter -%}
......@@ -92,7 +94,7 @@
{%- for server_alias in server_alias_list %}
ServerAlias {{ server_alias }}
{% endfor -%}
{% endfor %}
SSLProxyEngine on
{% if ssl_proxy_verify -%}
......@@ -106,6 +108,7 @@
# Rewrite part
ProxyPreserveHost On
ProxyTimeout 600
{% if disable_via_header %}
Header unset Via
{% endif -%}
......@@ -120,6 +123,8 @@
# Remove "Secure" from cookies, as backend may be https
Header edit Set-Cookie "(?i)^(.+);secure$" "$1"
Protocols h2 http/1.1
{% if disable_no_cache_header %}
RequestHeader unset Cache-Control
RequestHeader unset Pragma
......
......@@ -331,7 +331,7 @@ CONFIG proxy.config.cache.permit.pinning INT 0
# default the ram cache size to AUTO_SIZE (-1) based on cache size
# (approximately 10 MB of RAM cache per GB of disk cache)
# alternatively, set to a fixed value such as 21474836480 (20GB)
CONFIG proxy.config.cache.ram_cache.size INT {{ slapparameter_dict.get('ram-cache-size', '1G') }}
CONFIG proxy.config.cache.ram_cache.size INT {{ ats_configuration.get('ram-cache-size', '1G') }}
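To make the rule of thumb above concrete: with the default 8G disk cache configured in this profile, AUTO_SIZE (-1) would allocate roughly 80 MB of RAM cache, whereas the profile now pins the value explicitly (1G by default). A quick sanity check of that arithmetic:

# Rule of thumb above: AUTO_SIZE gives about 10 MB of RAM cache per GB of disk cache.
disk_cache_gb = 8                       # configuration.disk-cache-size default (8G)
auto_ram_cache_mb = disk_cache_gb * 10  # ~80 MB if AUTO_SIZE (-1) were kept
print("AUTO_SIZE would give about %d MB of RAM cache" % auto_ram_cache_mb)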
# Replacement algorithm
# 0 : Clocked Least Frequently Used by Size (CLFUS) w/optional compression
# 1 : LRU w/o optional compression - trivially simple
......
{{ trafficserver.get("cache-path") }} {{ trafficserver.get("disk-cache-size") }} volume={{ trafficserver.get("hostname") }}
\ No newline at end of file
{{ ats_configuration.get("cache-path") }} {{ ats_configuration.get("disk-cache-size") }} volume={{ ats_configuration.get("hostname") }}
\ No newline at end of file
......@@ -8,10 +8,6 @@ parts =
template
download-cache = ${:directory}/download-cache
# Required for recent buildout.
extensions -=
buildout-versions
[template]
recipe = slapos.recipe.template:jinja2
# XXX: "template.cfg" is hardcoded in instanciation recipe
......@@ -79,8 +75,3 @@ packages +=
dh-autoreconf pkg-config doxygen maven xmlto
# hellorina (shouldn't parts like lxml-python depend on the python of the SR?)
python-dev
[versions]
setuptools = 30.1.0
zc.buildout = 2.5.2+slapos005
zc.recipe.egg = 2.0.3+slapos002
......@@ -48,7 +48,7 @@ scipy = 0.13.3
simpy = 3.0.5
zope.dottedname = 4.1.0
tablib = 0.10.0
mysqlclient = 1.3.9
mysqlclient = 1.3.10
# indirect dependancies
cp.recipe.cmd = 0.5
......
......@@ -161,6 +161,13 @@
},
"type": "object"
},
"smtp": {
"description": "Mail queuing and relay service",
"additionalProperties": {
"$ref": "./instance-smtp-schema.json#/properties"
},
"type": "object"
},
"mariadb": {
"description": "Relational database service",
"additionalProperties": {
......@@ -251,6 +258,11 @@
}
},
"type": "object"
},
"wendelin-core-zblk-fmt": {
"description": "In wendelin.core there are 2 formats for storing data, so called ZBlk0 and ZBlk1. See https://lab.nexedi.com/nexedi/wendelin.core/blob/2e5e1d3d/bigfile/file_zodb.py#L19 for more details.",
"default": "",
"type": "string"
}
}
}
......@@ -94,7 +94,7 @@ port = 8080
shell = $${shell:wrapper}
wrapper = $${rootdirectory:bin}/shellinaboxd
shellinabox-binary = ${shellinabox:location}/bin/shellinaboxd
password = $${pwgen:passwd}
password-file = $${pwgen:storage-path}
directory = $${buildout:directory}/
login-shell = $${rootdirectory:bin}/login
certificate-directory = $${directory:shellinabox}
......
......@@ -10,11 +10,6 @@ extends =
../../component/apache/buildout.cfg
../../stack/monitor/buildout.cfg
extensions -=
buildout-versions
show-picked-versions = true
parts =
slapos-cookbook
template
......@@ -61,12 +56,9 @@ recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-default.cfg
output = ${buildout:directory}/template-default.cfg
mode = 0644
md5sum = 8e171816b6caef52ac75c2f8f6a69fc3
md5sum = 05519f3887a309d3ec069e0aa9f52ebc
[versions]
PyXML = 0.8.5
erp5.util = 0.4.46
erp5.util = 0.4.47
slapos.recipe.template = 2.7
setuptools = 28.8.0
zc.buildout = 2.5.2+slapos005
zc.recipe.egg = 2.0.3+slapos002
......@@ -4,7 +4,6 @@ extends =
../../../../stack/slapos.cfg
parts =
slapos-cookbook
instance-template
[instance-template]
......
......@@ -5,10 +5,9 @@ develop-eggs-directory = {{ develop_eggs_directory }}
offline = true
parts =
request-resilient-instance
deploy-unit-test
deploy-scalability-test
request-resilient-kvm
deploy-standalone-resiliency-test
[directory]
recipe = slapos.cookbook:mkdirectory
......@@ -27,37 +26,30 @@ recipe = slapos.cookbook:wrapper
# XXX: put it in etc/run in case of scalability test so that it runs automatically.
wrapper-path = ${directory:bin}/runKVMResiliencyTestSuite
testnode-parameters = --test-result-path={{ slapparameter_dict.get('test-result-path') }} --revision={{ slapparameter_dict.get('test-suite-revision') }} --node-title={{ slapparameter_dict.get('scalability-launcher-title') }} --test-suite={{ slapparameter_dict.get('test-suite') }} --test-suite-master-url={{ slapparameter_dict.get('test-suite-master-url') }} --log-path=${directory:log}
kvm-test-parameters = server_url=${slap-connection:server-url} key_file=${slap-connection:key-file} cert_file=${slap-connection:cert-file} computer_id=${slap-connection:computer-id} partition_id=${slap-connection:partition-id} software=${slap-connection:software-release-url} namebase=kvm root_instance_name='${request-resilient-kvm:name}'
command-line = {{ bin_directory }}/runResiliencyScalabilityTestNode ${:testnode-parameters} ${:kvm-test-parameters}
test-parameters = server_url=${slap-connection:server-url} key_file=${slap-connection:key-file} cert_file=${slap-connection:cert-file} computer_id=${slap-connection:computer-id} partition_id=${slap-connection:partition-id} software=${slap-connection:software-release-url} namebase=kvm root_instance_name='${request-resilient-instance:name}'
command-line = {{ bin_directory }}/runResiliencyScalabilityTestNode ${:testnode-parameters} ${:test-parameters}
[deploy-unit-test]
recipe = collective.recipe.template
#testnode-parameters = --test-result-path={{ slapparameter_dict.get('test-result-path') }} --revision={{ slapparameter_dict.get('test-suite-revision') }} --node-title={{ slapparameter_dict.get('scalability-launcher-title') }} --test-suite={{ slapparameter_dict.get('test-suite') }} --test-suite-master-url={{ slapparameter_dict.get('test-suite-master-url') }} --log-path=${directory:log}
kvm-test-parameters = server_url=${slap-connection:server-url} key_file=${slap-connection:key-file} cert_file=${slap-connection:cert-file} computer_id=${slap-connection:computer-id} partition_id=${slap-connection:partition-id} software=${slap-connection:software-release-url} namebase=kvm root_instance_name='${request-resilient-kvm:name}'
test-parameters = server_url=${slap-connection:server-url} key_file=${slap-connection:key-file} cert_file=${slap-connection:cert-file} computer_id=${slap-connection:computer-id} partition_id=${slap-connection:partition-id} software=${slap-connection:software-release-url} namebase=kvm root_instance_name='${request-resilient-instance:name}'
input = inline:
#!/bin/sh
exec {{ bin_directory }}/runResiliencyUnitTestTestNode $@ ${:kvm-test-parameters}
exec {{ bin_directory }}/runResiliencyUnitTestTestNode $@ ${:test-parameters}
output = ${directory:bin}/runTestSuite
mode = 755
[deploy-standalone-resiliency-test]
# Used to manually run the KVM test if we don't have a running testnode.
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:bin}/runStandaloneResiliencyTestSuite
command-line = {{ bin_directory }}/runStandaloneResiliencyTest --test-suite-title=kvm ${:kvm-test-parameters}
kvm-test-parameters = server_url=${slap-connection:server-url} key_file=${slap-connection:key-file} cert_file=${slap-connection:cert-file} computer_id=${slap-connection:computer-id} partition_id=${slap-connection:partition-id} software=${slap-connection:software-release-url} namebase=kvm root_instance_name='${request-resilient-kvm:name}'
[request-resilient-kvm]
[request-resilient-instance]
<= slap-connection
recipe = slapos.cookbook:request
software-url = ${slap-connection:software-release-url}
software-type = kvm-resilient
name = Resilient KVM (Root Instance)
name = Resilient Instance (Root Instance)
{% for key, value in slapparameter_dict.get('cluster', {}).iteritems() -%}
config-{{ key }} = {{ dumps(value) }}
{% endfor -%}
config-virtual-hard-drive-url = ${slap-parameter:virtual-hard-drive-url}
config-virtual-hard-drive-md5sum = ${slap-parameter:virtual-hard-drive-md5sum}
config-virtual-hard-drive-gzipped = ${slap-parameter:virtual-hard-drive-gzipped}
config-resiliency-backup-periodicity = */5 * * * *
config-resilient-clone-number = 1
config-ignore-known-hosts-file = false
......@@ -66,5 +58,6 @@ return = ipv6
sla-computer_guid = ${slap-connection:computer-id}
[slap-parameter]
virtual-hard-drive-url = http://www.nexedi.org/static/slapos/kvm_resiliency_test/virtual.qcow2
virtual-hard-drive-md5sum = 465e1024447997e7b86ee2e5151e031b
virtual-hard-drive-url = {{ default_test_image_url }}
virtual-hard-drive-md5sum = {{ default_test_image_md5sum }}
virtual-hard-drive-gzipped = true
......@@ -8,7 +8,6 @@ parts =
request-resilient-instance
deploy-unit-test
deploy-scalability-test
deploy-standalone-resiliency-test
[directory]
recipe = slapos.cookbook:mkdirectory
......@@ -38,13 +37,6 @@ input = inline:
output = ${directory:bin}/runTestSuite
mode = 755
[deploy-standalone-resiliency-test]
# Used to manually run the resilient test if we don't have a running testnode.
recipe = slapos.cookbook:wrapper
test-suite-title = slaprunner
command-line = {{ bin_directory }}/runStandaloneResiliencyTest --test-suite-title=${:test-suite-title} ${deploy-scalability-test:test-parameters}
wrapper-path = ${directory:bin}/runStandaloneResiliencyTestSuite
[request-resilient-instance]
<= slap-connection
recipe = slapos.cookbook:request
......@@ -57,6 +49,7 @@ config-{{ key }} = {{ dumps(value) }}
config-resiliency-backup-periodicity = */10 * * * *
config-resilient-clone-number = 1
config-ignore-known-hosts-file = false
config-cpu-usage-ratio = 1
# XXX hardcoded
#config-frontend-domain = google.com
# XXX Hack to deploy Root Instance on the same computer as the type-test Instance
......
[buildout]
extends = ${template:output}
[switch_softwaretype]
default = $${:test}
RootSoftwareInstance = $${:test}
# Used for the test of resiliency. The system wants a "test" software_type.
test = $${dynamic-template-resilient-test:rendered}
[dynamic-template-resilient-test]
recipe = slapos.recipe.template:jinja2
template = ${template-resilient-test:location}/${template-resilient-test:filename}
rendered = $${buildout:directory}/template-resilient-test.cfg
bin-directory = ${buildout:bin-directory}
context =
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key slapparameter_dict slap-configuration:configuration
raw bin_directory ${buildout:bin-directory}
${template-resilient-test:extra-context}
mode = 0644
[buildout]
extends =
../../kvm/development.cfg
testsuite.cfg
parts += template-erp5testnode
[default-test-image]
recipe = hexagonit.recipe.download
ignore-existing = true
filename = ${:_buildout_section_name_}
url = http://www.nexedi.org/static/slapos/kvm_resiliency_test/virtual.qcow.gz
md5sum = dd82c771f6f7738fb4b0fc1330ed8236
download-only = true
mode = 0644
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[template-resilient-test]
filename = instance-kvm-resilient-test.cfg.jinja2
md5sum = 71ddbdeb8769bcb0ebb3c9407ef7e36c
# Ingest extra-context into the final template-resilient-test rendering;
# always ingest raw values.
extra-context =
raw default_test_image_url file://${default-test-image:location}/${default-test-image:filename}
raw default_test_image_md5sum ${default-test-image:md5sum}
[buildout]
extends =
../../slaprunner/development.cfg
testsuite.cfg
parts += template-erp5testnode
[template-resilient-test]
filename = instance-resilient-test.cfg.jinja2
md5sum = 268d07ed48199d34259aff1ba544d852
[buildout]
extends = ../development.cfg
parts += template-erp5testnode
# Change default software-type to be "test", so that it can be run using erp5testnode.
[template-erp5testnode]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg.in
md5sum = 59a13145d3df30f38442ce172330dfb3
md5sum = e5adcd511bca060bfeccec48b57c635c
output = ${buildout:directory}/template.cfg
mode = 0644
[template]
output = ${buildout:directory}/template-original.cfg
[template-resilient-test]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/${:filename}
mode = 0644
download-only = true
on-update = true
extra-context =
......@@ -7,6 +7,6 @@
"description": "Fluentd configuration. You can write it entirely by yourself. See fluentd-agent.conf.jinja2.in",
"default": "",
"type": "string"
},
}
}
}
......@@ -5,6 +5,8 @@ extends = {{ instance_gitlab_cfg }}
parts +=
cron-entry-gitlab-backup
resiliency-exclude-file
gitlab-resiliency-restore-script
# -export specific instance parameters
[instance-parameter]
......@@ -29,10 +31,49 @@ environment =
# XXX: `/usr/bin` has to be in the PATH environment variable to be able to use
# `which` command in gitlab-backup, `chrt` in xnice, ...
# and `/bin` for `sed` command in gitlab-backup restore
PATH=/bin:/usr/bin:${buildout:directory}/bin:{{ coreutils_location }}/bin:{{ grep_location }}/bin:{{ tar_location }}/bin:{{ gzip_location }}/bin:{{ gopath_bin }}:{{ git_location }}/bin
PATH=${buildout:directory}/bin:{{ coreutils_location }}/bin:{{ grep_location }}/bin:{{ tar_location }}/bin:{{ gzip_location }}/bin:{{ gopath_bin }}:{{ git_location }}/bin:/bin:/usr/bin
[cron-entry-gitlab-backup]
<= cron-entry
# run backup script on a regular basis (given as instance parameter)
frequency = ${instance-parameter:configuration.backup_frequency}
command = ${exporter:wrapper-path}
[resiliency-exclude-file]
# Generate rdiff exclude file in case of resiliency
recipe = collective.recipe.template
input = inline: gitlab-shell-work*
gitlab-work*
var/backup/**
var/repositories*
var/repositories/**
srv/postgresql/**
srv/postgresql
etc/service/postgres-start
srv/redis/**
srv/unicorn/unicorn.socket
output = ${directory:srv}/exporter.exclude
[gitlab-resiliency-restore-script]
# Script run by the resilient stack to restore a gitlab instance.
# This section should be added only on the runner import instance.
recipe = slapos.recipe.template:jinja2
mode = 0700
template= {{ gitlab_restore_sh_in }}
rendered= ${directory:srv}/runner-import-restore
context =
raw bash_bin {{ bash_bin }}
raw go_work_bin {{ gopath_bin }}
raw git_location {{ git_location }}
raw bin_directory ${directory:bin}
raw etc_directory ${directory:etc}
raw run_directory ${directory:run}
raw postgress_script ${service-postgresql:services}/postgres-start
raw redis_script ${service-redis:wrapper}
raw unicorn_script ${service-unicorn:wrapper-path}
raw sidekiq_script ${service-sidekiq:wrapper-path}
raw gitlab_backup_dir ${gitlab-backup-directory:backup-gitlab.git}
raw redis_pid_file ${service-redis:pid_file}
raw postgres_pid_file ${service-postgresql:pgdata-directory}/postmaster.pid
raw gitlab_work_location ${gitlab-work:location}
raw promise_lab_location ${directory:promise.slow}
[buildout]
extends =
{{ instance_gitlab_cfg }}
{{ instance_gitlab_export_cfg }}
parts +=
install-demo-backup
[root-password]
passwd = root1234
[service-unicorn]
environment =
GITLAB_ROOT_PASSWORD=${root-password:passwd}
[backend-info]
# host = ${instance-parameter:ipv4-random}
[publish-instance-info]
password = ${root-password:passwd}
# token for default.user user in gitlab demo backup
# Edit this token if needed
private-token = SLurtnxPscPsU-SDm4oN
# raw URL for latest commit on setup.py in gitlab demo backup.
latest-file-uri = ${:backend_url}/open/slapos/raw/94c96d42c22e16836dadddac7c8061f4a8c6ca7a/setup.py
[instance-parameter]
# backup more often; 10 minutes seems to be the minimum
configuration.backup_frequency = */10 * * * *
[install-demo-backup]
recipe = plone.recipe.command
stop-on-error = false
backup-done = ${directory:var}/backup.ready
command =
if [ -f "${:backup-done}" ]; then
echo "Demo backup installed."
else
rm -rf ${secrets:secrets} ${directory:var}/backup/* &&
mkdir -p ${secrets:secrets} ${directory:var}/tmp &&
cp -r {{ gitlab_demo_backup_path }}/secrets/* ${secrets:secrets} &&
cp -rf {{ gitlab_demo_backup_path }}/backup-gitlab.git/ ${directory:var}/tmp &&
cd ${directory:var}/tmp/backup-gitlab.git/
PATH=${directory:bin}:{{ gopath_bin }}:{{ git_location }}/bin:$PATH
gitlab-backup restore -vupok -go HEAD &&
touch ${:backup-done}
fi
update-command = ${:command}
......@@ -12,6 +12,7 @@ offline = true
recipe = slapos.cookbook:softwaretype
gitlab = $${instance-gitlab.cfg:rendered}
gitlab-export = $${instance-gitlab-export.cfg:rendered}
gitlab-test = $${instance-gitlab-test.cfg:rendered}
default = $${:gitlab}
# TODO -import, -pull-backup
......@@ -71,6 +72,7 @@ context =
raw rack_attack_rb_in ${rack_attack.rb.in:target}
raw resque_yml_in ${resque.yml.in:target}
raw smtp_settings_rb_in ${smtp_settings.rb.in:target}
raw gitlab_restore_sh_in ${template-gitlab-resiliency-restore.sh.in:target}
raw unicorn_rb_in ${unicorn.rb.in:target}
$${:context-extra}
......@@ -85,3 +87,11 @@ template= ${instance-gitlab.cfg.in:target}
template= ${instance-gitlab-export.cfg.in:target}
context-extra =
raw instance_gitlab_cfg $${instance-gitlab.cfg:rendered}
[instance-gitlab-test.cfg]
<= instance-cfg
template= ${instance-gitlab-test.cfg.in:target}
context-extra =
raw instance_gitlab_cfg $${instance-gitlab.cfg:rendered}
raw instance_gitlab_export_cfg $${instance-gitlab-export.cfg:rendered}
raw gitlab_demo_backup_path ${gitlab-demo-backup.git:location}
......@@ -252,13 +252,14 @@ recipe = zc.recipe.egg
eggs =
plone.recipe.command
cns.recipe.symlink
collective.recipe.template
[instance.cfg]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg.in
output = ${buildout:directory}/instance.cfg
md5sum = 2329ddc4934e900785aa669adc214c23
md5sum = a01b42efe47811cadd70daf47850de93
# macro: download a shell script and put it rendered into <software>/bin/
[binsh]
......@@ -325,7 +326,11 @@ md5sum = b05fad928ffbb689b4415837525c62d1
[instance-gitlab-export.cfg.in]
<= download-file
md5sum = 00bd4ddf75a40a9d18d8243289f68ee9
md5sum = 319d7dbe3ad9b260c1e292cfc0d13b11
[instance-gitlab-test.cfg.in]
<= download-file
md5sum = cc8065104458af311c2ffa9ae20235a6
[macrolib.cfg.in]
<= download-file
......@@ -351,10 +356,19 @@ md5sum = 7c89a730889e3224548d9abe51a2d719
<= download-template
md5sum = c67ea492e17f774d0e18f1217338a55f
[template-gitlab-resiliency-restore.sh.in]
<= download-template
md5sum = 420d999daae050351a51487b9d2f8fe0
[unicorn.rb.in]
<= download-template
md5sum = 83921db1835d9e81cbbe808631cc40a9
[gitlab-demo-backup.git]
recipe = hexagonit.recipe.download
url = https://lab.nexedi.com/alain.takoudjou/labdemo.backup/repository/archive.tar.gz?ref=master
md5sum = d40e5e211dc9a4e5ada9c0250377c639
strip-top-level-dir = true
[versions]
cns.recipe.symlink = 0.2.3
......
#!{{ bash_bin }}
# DO NOT RUN THIS SCRIPT ON PRODUCTION INSTANCE
# DATA WILL BE ERASED
set -e
echo "###################################################################################"
echo "# #"
echo "# Warning: DO NOT RUN THIS SCRIPT ON PRODUCTION INSTANCE DaTA WILL BE ERASED !!! #"
echo "# #"
echo "###################################################################################"
echo -e "\nWill start in 10 seconds, cancel execution if you didn't want to run this script."
sleep 10
postgres_executable="{{ postgress_script }}"
redis_executable="{{ redis_script }}"
git_backup_directory="{{ gitlab_backup_dir }}"
redis_pid_file="{{ redis_pid_file }}"
postgres_pid_file="{{ postgres_pid_file }}"
bin_location="{{ bin_directory }}"
run_location="{{ run_directory }}"
git_location="{{ git_location }}"
go_work_bin="{{ go_work_bin }}"
etc_location="{{ etc_directory }}"
gitlab_work="{{ gitlab_work_location }}"
promise_check="{{ promise_lab_location }}"
unicorn_script="{{ unicorn_script }}"
sidekiq_script="{{ sidekiq_script }}"
# export GIT_EXEC_PATH=$git_location/libexec/git-core/
check_process () {
pid_file=$1
pname=$2
if [ -e "$pid_file" ]; then
pid=$(head -n 1 "$pid_file" 2> /dev/null)
if kill -0 "$pid"; then
echo "$pname is already running with pid $pid. Aborting."
exit 1
fi
fi
}
kill_process () {
pid=$1
R=0
kill -0 "$pid" > /dev/null 2>&1 || R=$?
if [ $R -eq 0 ]; then
kill -TERM $pid
fi
}
check_process $postgres_pid_file "Postgres"
check_process $redis_pid_file "Redis"
check_process $run_location/unicorn.pid "Unicorn"
if [ -f "$postgres_pid_file" ]; then
rm $postgres_pid_file
fi
echo "Starting Postgres..."
$postgres_executable &
postgres_pid=$!
trap "echo 'kill $postgres_pid" EXIT TERM INT
echo "Starting Redis server..."
$redis_executable &
redis_pid=$!
trap "kill $redis_pid" EXIT TERM INT
echo "[OK]"
echo "Restoring gitlab data..."
# XXX - workaround until this problem is fixed on runner1
sed -ie "s/connection.execute('TRUNCATE schema_migrations')\s*$/connection.execute('TRUNCATE schema_migrations') if connection.table_exists? 'schema_migrations'/g" $gitlab_work/lib/tasks/gitlab/db.rake
cd $git_backup_directory
PATH=$bin_location:$go_work_bin:$git_location/bin:$PATH gitlab-backup restore -vupok -go HEAD
echo "Checking gitlab promises..."
echo "[info] Not all promises are checked!"
$promise_check/gitlab-app
echo "Starting Unicorn to check gitlab-shell promise..."
$unicorn_script &
unicorn_pid=$!
trap "kill $unicorn_pid" EXIT TERM INT
sleep 60
if [ -s "$run_location/unicorn.pid" ]; then
unicorn_ppid=$(head -n 1 "$run_location/unicorn.pid" 2> /dev/null)
trap "kill $unicorn_ppid" EXIT TERM INT
fi
$promise_check/gitlab-shell
#echo "starting Sidekiq to check sidekiq promise..."
#$sidekiq_script &
#sidekiq_pid=$!
#trap "kill $sidekiq_pid" EXIT TERM INT
#$promise_check/sidekiq
kill_process $postgres_pid
kill_process $redis_pid
kill_process $unicorn_pid
RESTORE_EXIT_CODE=$?
if [ $RESTORE_EXIT_CODE -eq 0 ]; then
echo 'Backup restoration successfully completed.'
else
echo 'Backup restoration failed.'
fi
exit $RESTORE_EXIT_CODE
\ No newline at end of file
......@@ -6,10 +6,6 @@ parts =
slapos-cookbook
template
# Required for recent buildout.
extensions -=
buildout-versions
[template]
recipe = slapos.recipe.template:jinja2
template = ${:_profile_base_location_}/instance.cfg.in
......@@ -44,8 +40,3 @@ recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_buildout_section_name_}
mode = 755
md5sum = 78b77a6bda9958f547f7d89b747731e3
[versions]
setuptools = 30.1.0
zc.buildout = 2.5.2+slapos005
zc.recipe.egg = 2.0.3+slapos002
[buildout]
parts =
instance
ipython_notebook
read-knowledge0
publish-connection-parameter
erp5-kernel
kernel-json
custom-js
monitor-base
extends =
{{ monitor_template_rendered }}/template-monitor.cfg
eggs-directory = {{ eggs_directory }}
......@@ -94,6 +99,26 @@ ipython_dir = ${:home}/ipython
ipython_kernel_dir = ${:ipython_dir}/kernels
erp5_kernel_dir = ${:ipython_kernel_dir}/ERP5
[ipython_notebook]
# This part is called like this because knowledge0.write uses the part name for
# the section name in the config file.
recipe = slapos.cookbook:zero-knowledge.write
password =
filename = knowledge0.cfg
[read-knowledge0]
recipe = slapos.cookbook:zero-knowledge.read
filename = knowledge0.cfg
[monitor-instance-parameter]
monitor-base-url = ${monitor-frontend-promise:url}
# If you are using a development instance, you should edit these as follows:
# monitor-base-url = ${monitor-httpd-conf-parameter:url}
# cors-domains = softinstXXXXX.host.vifib.net (or equivalent)
# interface-url = https://softinstXXXXX.host.vifib.net/erp5/web_site_module/monitoring_rjs_unsafe
instance-configuration =
raw jupyter-password ${read-knowledge0:password}
[publish-connection-parameter]
recipe = slapos.cookbook:publish.serialised
url = https://[${instance-parameter:host}]:${instance-parameter:port}
......
......@@ -8,6 +8,7 @@ extends =
../../component/pandas/buildout.cfg
../../component/openssl/buildout.cfg
../../component/seaborn/buildout.cfg
../../stack/monitor/buildout.cfg
parts =
slapos-cookbook
ipython-notebook
......@@ -28,17 +29,15 @@ eggs +=
patsy
[download-file-base]
recipe = hexagonit.recipe.download
ignore-existing = true
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/template/${:filename}
download-only = true
destination = ${buildout:parts-directory}/${:_buildout_section_name_}
mode = 0644
[ipython-notebook-config]
<= download-file-base
filename = ipython_notebook_config.py.jinja
md5sum = a5bc4ee8539109d1de7ab33b4c2c97ea
md5sum = 95a76a80718e2a933dd4854dcf9a4a65
[ipython-notebook-set-password]
<= download-file-base
......@@ -48,7 +47,7 @@ md5sum = d7d4a7e19d55bf14007819258bf42100
[erp5-kernel]
<= download-file-base
filename = ERP5kernel.py.jinja
md5sum = cbd35bbe54b66e9b2f73487ecdbc6976
md5sum = 5e7e0acef2f32e75f8f17bc851414aa1
[kernel-json]
<= download-file-base
......@@ -65,7 +64,7 @@ recipe = slapos.recipe.template:jinja2
template = ${:_profile_base_location_}/instance.cfg.in
rendered = ${buildout:directory}/template.cfg
mode = 0644
md5sum = 26a28b74c8b18c92108c751b87abe80c
md5sum = b6555d25fd89bf92f47eda60a29c2b44
context =
key bin_directory buildout:bin-directory
key develop_eggs_directory buildout:develop-eggs-directory
......@@ -82,105 +81,89 @@ context =
key kernel_json_filename kernel-json:filename
key custom_js_location custom-js:location
key custom_js_filename custom-js:filename
key monitor_template_rendered buildout:directory
[versions]
PyRSS2Gen = 1.1
Pygments = 2.0.2
cns.recipe.symlink = 0.2.3
ipython = 4.0.0
matplotlib = 1.4.3
# Required by:
# ipython==4.0.0
mistune = 0.7.1
nose = 1.3.7
numpy = 1.9.2
pandas = 0.16.2
Pygments = 2.2.0
astor = 0.5
backports-abc = 0.5
backports.shutil-get-terminal-size = 1.0.0
cycler = 0.10.0
ipykernel = 4.5.2
ipython = 5.3.0
ipython-genutils = 0.1.0
ipywidgets = 6.0.0
jupyter-client = 5.0.0
jupyter-core = 4.3.0
matplotlib = 2.0.0
mistune = 0.7.3
nbformat = 4.3.0
notebook = 4.4.1
pandas = 0.19.2
plone.recipe.command = 1.1
pyzmq = 14.7.0
scikit-learn = 0.16.1
scipy = 0.15.1
slapos.recipe.template = 2.8
terminado = 0.5
tornado = 4.2
requests = 2.7.0
# Required for erp5_kernel
ipykernel = 4.2.1
# Required by:
# tornado==4.2
backports.ssl-match-hostname = 3.4.0.2
# Required by:
# tornado==4.2
certifi = 2015.4.28
prompt-toolkit = 1.0.13
ptyprocess = 0.5.1
pyzmq = 16.0.2
scikit-learn = 0.18.1
seaborn = 0.7.1
simplegeneric = 0.8.1
slapos.recipe.template = 2.10
statsmodels = 0.8.0
terminado = 0.6
tornado = 4.4.2
traitlets = 4.3.2
widgetsnbextension = 2.0.0
# Required by:
# mock==1.1.3
funcsigs = 0.4
# tornado==4.4.2
certifi = 2017.1.23
# Required by:
# matplotlib==1.4.3
mock = 1.1.3
# notebook==4.3.2
# nbconvert 4.2.0 depends on entrypoints egg that is not available as tar/zip source.
nbconvert = 4.1.0
# Required by:
# terminado==0.5
ptyprocess = 0.5
# patsy==0.4.1
numpy = 1.12.0
# Required by:
# matplotlib==1.4.3
# pandas==0.16.2
python-dateutil = 2.4.2
# ipython==5.3.0
pathlib2 = 2.2.1
# Required by:
# ipython==4.0.0
notebook = 4.0.6
simplegeneric = 0.8.1
# statsmodels==0.8.0
patsy = 0.4.1
# Required by:
# nbformat==4.0.1
# notebook==4.0.6
# traitlets==4.1.0b1
ipython-genutils = 0.1.0
# ipython==5.3.0
pexpect = 4.2.1
# Required by:
# ipykernel==4.2.1
# notebook==4.0.6
jupyter-client = 4.1.1
# ipython==5.3.0
pickleshare = 0.7.4
# Required by:
# nbformat==4.0.1
# notebook==4.0.6
# jupyter-client==4.1.1
# nbconvert==4.1.0
jupyter-core = 4.0.6
# matplotlib==2.0.0
# pandas==0.19.2
python-dateutil = 2.6.0
# Required by:
# notebook==4.0.6
nbconvert = 4.1.0
# pathlib2==2.2.1
scandir = 1.5
# Required by:
# notebook==4.0.6
# nbconvert==4.1.0
nbformat = 4.0.1
# statsmodels==0.8.0
scipy = 0.19.0
# Required by:
# pickleshare==0.5
path.py = 8.1.2
# tornado==4.4.2
singledispatch = 3.4.0.3
# Required by:
# ipython==4.0.0
pexpect = 4.0.1
# matplotlib==2.0.0
subprocess32 = 3.2.7
# Required by:
# ipython==4.0.0
pickleshare = 0.5
# Required by:
# ipython==4.0.0
# notebook==4.0.6
traitlets = 4.1.0b1
astor = 0.5
# prompt-toolkit==1.0.13
wcwidth = 0.1.7
......@@ -177,6 +177,7 @@ class ERP5Kernel(Kernel):
'reference': self.reference,
'title': self.title,
'request_reference': request_reference,
'store_history': kwargs.get('store_history')
})
# Set value for status_code for self object which would later be used to
......@@ -256,7 +257,7 @@ class ERP5Kernel(Kernel):
# Check for status_code before making request to erp5 and make request in
# only if the status_code is in the allowed_HTTP_request_code_list
if self.status_code in self.allowed_HTTP_request_code_list:
self.make_erp5_request(code=code)
self.make_erp5_request(code=code, store_history=store_history)
# For 200 status_code, Kernel will receive predefined format for data
# from erp5 which is either json of result or simple result string
......
'''
This script initializes Jupyter's configuration, such as passwords and other
settings. It is run by IPython, which is why it can use functions like get_config().
'''
import ConfigParser
import random
from notebook.auth import passwd
import os
def random_password(length = 10):
result = ""
for i in range(0, length):
result = result + chr(random.randint(0, 25) + ord('a'))
return result
knowledge_0 = '{{ config_cfg }}'
if not os.path.exists(knowledge_0):
print "Your software does <b>not</b> embed 0-knowledge. \
This interface is useless in this case</body></html>"
exit(0)
c = get_config()
parser = ConfigParser.ConfigParser()
parser.read(knowledge_0)
if parser.has_option("ipython_notebook", "password"):
c.NotebookApp.password = parser.get("ipython_notebook", "password")
if not parser.has_section("ipython_notebook"):
parser.add_section("ipython_notebook")
if not parser.has_option("ipython_notebook", "password") or \
parser.get("ipython_notebook", "password") == "":
parser.set("ipython_notebook", "password", random_password())
c.NotebookApp.password = passwd(parser.get("ipython_notebook", "password"))
with open(knowledge_0, 'w') as file:
parser.write(file)
\ No newline at end of file
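Note that the generated password is written back to knowledge0.cfg in clear text; only the value handed to NotebookApp is the salted hash produced by notebook.auth.passwd. Reading it back (which is what the [read-knowledge0] part of the instance profile does through slapos.cookbook) can be sketched as:

# Read back the clear-text password stored in knowledge0.cfg (illustrative path).
import ConfigParser  # Python 2, matching the script above

parser = ConfigParser.ConfigParser()
parser.read('knowledge0.cfg')  # example path; the instance points to its own copy
print parser.get("ipython_notebook", "password")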
......@@ -4,24 +4,18 @@ extends =
../../component/6tunnel/buildout.cfg
../../component/curl/buildout.cfg
../../component/dash/buildout.cfg
../../component/dcron/buildout.cfg
../../component/gzip/buildout.cfg
../../component/qemu-kvm/buildout.cfg
../../component/logrotate/buildout.cfg
../../component/noVNC/buildout.cfg
../../component/openssl/buildout.cfg
../../component/dcron/buildout.cfg
../../component/netcat/buildout.cfg
../../component/pycurl/buildout.cfg
../../stack/slapos.cfg
../../stack/nodejs.cfg
../../stack/resilient/buildout.cfg
../../stack/monitor/buildout.cfg
# stacks are listed from most generic to most specific,
# to avoid versioning issues
parts =
common-parts =
template
eggs
......@@ -29,6 +23,9 @@ parts =
rdiff-backup
pbs-recipe-egg
parts = ${:common-parts}
#XXX-Cedric : Currently, one can only access KVM using noVNC.
# Ideally one should be able to access KVM using either noVNC or VNC.
# Problem is: no native crypto support in web browsers. So we have to disable ssl
......@@ -92,7 +89,7 @@ command =
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg.in
md5sum = 061604d32cc626352dc3d221bdeaf804
md5sum = 5c159864ce41394486867c27d65c2f50
output = ${buildout:directory}/template.cfg
mode = 0644
......@@ -101,7 +98,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm.cfg.jinja2
mode = 644
md5sum = d09d37dc3bcf34da8d31b11215866b27
md5sum = b7b06fa8586198640ca0bb185ab48161
download-only = true
on-update = true
......@@ -123,20 +120,11 @@ md5sum = 700676dff4a3835a9d6cde015d91922e
download-only = true
on-update = true
[template-kvm-resilient-test]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm-resilient-test.cfg.jinja2
md5sum = 6b40e280201aaf9258c4cb5de7c1f5b4
mode = 0644
download-only = true
on-update = true
[template-kvm-import]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm-import.cfg.jinja2.in
md5sum = d5d3b7debf3141913eac926830ea166d
md5sum = 5e21ad759c148d26134ecefbb4d3d9f1
mode = 0644
download-only = true
on-update = true
......@@ -146,7 +134,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/template/kvm-import.sh.jinja2
filename = kvm-import.sh.jinja2
md5sum = 926a11421921c29f91fae8240bbcf585
md5sum = cd0008f1689dfca9b77370bc4d275b70
download-only = true
mode = 0755
......@@ -155,7 +143,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm-export.cfg.jinja2
mode = 644
md5sum = 13276ead8cf5f9eda28e8dbda35a9bcf
md5sum = c2ab8a5ff9d8cd351682382ea06b3328
download-only = true
on-update = true
......@@ -164,7 +152,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/template/kvm-export.sh.jinja2
filename = kvm-export.sh.jinja2
md5sum = 22bd2e0c8fdb39a764a14c403a3bd752
md5sum = c18e958bf374d5fa35dbb952686595fb
download-only = true
mode = 0755
......@@ -251,17 +239,6 @@ md5sum = 599dbbbd438fe7801e3f8642ae9e9a78
download-only = true
on-update = true
[template-logrotate-base]
recipe = slapos.recipe.template:jinja2
filename = instance-logrotate-base.cfg
template = ${:_profile_base_location_}/instance-logrotate-base.cfg.in
rendered = ${buildout:parts-directory}/${:_buildout_section_name_}/instance-logrotate-base.cfg
md5sum = f28fbd310944f321ccb34b2a34c82005
context =
key dcron_location dcron:location
key gzip_location gzip:location
key logrotate_location logrotate:location
[template-httpd]
recipe = slapos.recipe.template:jinja2
filename = template-httpd.cfg
......
# Development profile of slaprunner.
# Exactly the same as software.cfg, but fetches slapos.cookbook and
# slapos.toolbox from their git repositories instead of the stable versions,
# allowing to play with a bleeding-edge environment.
[buildout]
extends =
../../component/git/buildout.cfg
common.cfg
extends = common.cfg
../../stack/slapos-dev.cfg
parts +=
slapos.cookbook-repository
slapos.toolbox-repository
slapos.core-repository
erp5.util-repository
check-recipe
develop =
${:parts-directory}/slapos.cookbook-repository
${:parts-directory}/slapos.core-repository
${:parts-directory}/slapos.toolbox-repository
${:parts-directory}/erp5.util-repository
[slapos.cookbook-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.git
branch = master
git-executable = ${git:location}/bin/git
[slapos.core-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.core.git
branch = master
git-executable = ${git:location}/bin/git
[slapos.toolbox-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.toolbox.git
branch = master
git-executable = ${git:location}/bin/git
[erp5.util-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/erp5.git
branch = master
git-executable = ${git:location}/bin/git
[check-recipe]
recipe = plone.recipe.command
stop-on-error = true
update-command = ${:command}
command =
grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link &&
grep parts ${buildout:develop-eggs-directory}/slapos.core.egg-link &&
grep parts ${buildout:develop-eggs-directory}/slapos.toolbox.egg-link &&
grep parts ${buildout:develop-eggs-directory}/erp5.util.egg-link
[versions]
slapos.cookbook =
slapos.core =
slapos.toolbox =
erp5.util =
lockfile =
# Development eggs
${:common-parts}
[buildout]
parts =
switch-softwaretype
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
extends = ${template-resilient-templates:output}
[switch-softwaretype]
recipe = slapos.cookbook:softwaretype
default = $${:test}
kvm = $${dynamic-template-kvm:rendered}
nbd = ${template-nbd:output}
frontend = ${template-frontend:output}
kvm-resilient = $${dynamic-template-kvm-resilient:rendered}
kvm-import = $${dynamic-template-kvm-import:rendered}
kvm-export = $${dynamic-template-kvm-export:rendered}
# Used for the test of resiliency. The system wants a "test" software_type.
test = $${dynamic-template-kvm-resilient-test:rendered}
frozen = ${instance-frozen:output}
pull-backup = ${template-pull-backup:output}
# XXX - If this configuration is not generated by slapgrid, use empty values
[storage-configuration]
storage-home =
[network-information]
global-ipv4-network =
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = $${slap-connection:computer-id}
partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
storage-home = $${storage-configuration:storage-home}
[dynamic-template-kvm]
recipe = slapos.recipe.template:jinja2
template = ${template-kvm:location}/instance-kvm.cfg.jinja2
rendered = $${buildout:directory}/template-kvm.cfg
extensions = jinja2.ext.do
context =
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key global_ipv4_prefix network-information:global-ipv4-network
key slapparameter_dict slap-configuration:configuration
key storage_dict slap-configuration:storage-dict
key tap_network_dict slap-configuration:tap-network-information-dict
raw ansible_promise_tpl ${template-ansible-promise:location}/${template-ansible-promise:filename}
raw curl_executable_location ${curl:location}/bin/curl
raw dash_executable_location ${dash:location}/bin/dash
raw dcron_executable_location ${dcron:location}/sbin/crond
raw debian_amd64_netinst_location ${debian-amd64-netinst.iso:location}/${debian-amd64-netinst.iso:filename}
raw file_download_script ${file-download-script:location}/${file-download-script:filename}
raw logrotate_cfg ${template-logrotate-base:rendered}
raw novnc_location ${noVNC:location}
raw netcat_bin ${netcat:location}/bin/netcat
raw openssl_executable_location ${openssl:location}/bin/openssl
raw python_executable ${buildout:executable}
raw qemu_executable_location ${kvm:location}/bin/qemu-system-x86_64
raw qemu_img_executable_location ${kvm:location}/bin/qemu-img
raw qemu_start_promise_tpl ${template-qemu-ready:location}/${template-qemu-ready:filename}
raw sixtunnel_executable_location ${6tunnel:location}/bin/6tunnel
raw template_httpd_cfg ${template-httpd:rendered}
raw template_content ${template-content:location}/${template-content:filename}
raw template_kvm_controller_run ${template-kvm-controller:location}/${template-kvm-controller:filename}
raw template_kvm_run ${template-kvm-run:location}/${template-kvm-run:filename}
raw template_monitor ${monitor2-template:rendered}
raw websockify_executable_location ${buildout:directory}/bin/websockify
template-parts-destination = ${template-parts:destination}
template-replicated-destination = ${template-replicated:destination}
import-list = file parts :template-parts-destination
file replicated :template-replicated-destination
mode = 0644
[dynamic-template-kvm-resilient]
recipe = slapos.recipe.template:jinja2
template = ${template-kvm-resilient:location}/instance-kvm-resilient.cfg.jinja2
rendered = $${buildout:directory}/template-kvm-resilient.cfg
extensions = jinja2.ext.do
context =
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key slapparameter_dict slap-configuration:configuration
raw curl_executable_location ${curl:location}/bin/curl
raw template_monitor ${monitor2-template:rendered}
template-parts-destination = ${template-parts:destination}
template-replicated-destination = ${template-replicated:destination}
import-list = file parts :template-parts-destination
file replicated :template-replicated-destination
mode = 0644
[dynamic-template-kvm-import]
recipe = slapos.recipe.template:jinja2
template = ${template-kvm-import:location}/instance-kvm-import.cfg.jinja2.in
rendered = $${buildout:directory}/template-kvm-import.cfg
extensions = jinja2.ext.do
context =
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
raw template_kvm_import ${template-kvm-import-script:location}/${template-kvm-import-script:filename}
raw pbsready_import_template ${pbsready-import:output}
raw template_monitor ${monitor2-template:rendered}
key slapparameter_dict slap-configuration:configuration
raw zcat_binary ${gzip:location}/bin/zcat
raw gzip_binary ${gzip:location}/bin/gzip
mode = 0644
[dynamic-template-kvm-export]
recipe = slapos.recipe.template:jinja2
template = ${template-kvm-export:location}/instance-kvm-export.cfg.jinja2
rendered = $${buildout:directory}/template-kvm-export.cfg
extensions = jinja2.ext.do
context =
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
raw kvm_template $${dynamic-template-kvm:rendered}
raw template_kvm_export ${template-kvm-export-script:location}/${template-kvm-export-script:filename}
key pbsready_export_template template-pbsready-export:rendered
raw gzip_binary ${gzip:location}/bin/gzip
key slapparameter_dict slap-configuration:configuration
mode = 0644
[dynamic-template-kvm-resilient-test]
recipe = slapos.recipe.template:jinja2
template = ${template-kvm-resilient-test:location}/instance-kvm-resilient-test.cfg.jinja2
rendered = $${buildout:directory}/template-kvm-resilient-test.cfg
context =
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key slapparameter_dict slap-configuration:configuration
raw bin_directory ${buildout:bin-directory}
mode = 0644
......@@ -214,8 +214,7 @@
"description": "Disk size, in GB.",
"type": "integer",
"default": 10,
"minimum": 1,
"maximum": 1000
"minimum": 1
},
"disk-type": {
"title": "Disk type",
......@@ -332,8 +331,7 @@
"title": "Size of additional disk to create for virtual machine, in Gigabytes",
"description": "Specify the size of additional disk to create for virtual machine in data folder of SlapOS Node. Requires instance_storage_home to be configured on SlapOS Node.",
"type": "integer",
"minimum": 10,
"maximum": 1000,
"minimum": 5,
"default": 20
},
"external-disk-format": {
......@@ -344,13 +342,13 @@
"enum": ["qcow2", "raw", "vdi", "vmdk", "cloop", "qed"]
},
"use-tap": {
"title": "Use QEMU TAP network interface",
"title": "Enable QEMU TAP network interface",
"description": "Use QEMU TAP network interface, might require a bridge on SlapOS Node.",
"type": "boolean",
"default": true
},
"use-nat": {
"title": "Use QEMU USER Mode interface (NAT)",
"title": "Enable QEMU USER Mode interface (NAT)",
"description": "Use QEMU user-mode network stack (NAT).",
"type": "boolean",
"default": true
......
{% set monitor = str(slapparameter_dict.get('enable-monitor', False)).lower() == 'true' -%}
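# 'enable-monitor' may arrive either as a boolean or as the string 'true'/'false';
# the line above normalises it to a Python bool for the monitor blocks below.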
[buildout]
extends =
{{ kvm_template }}
......@@ -17,10 +14,8 @@ parts +=
novnc-promise
cron
frontend-promise
{% if monitor -%}
# monitor parts
monitor-base
{% endif %}
# Create the exporter executable, which is a simple shell script
[exporter]
......
......@@ -8,8 +8,6 @@ offline = true
# even extend the kvm instance profile.
extends =
{{ pbsready_import_template }}
{% if str(slapparameter_dict.get('enable-monitor', True)).lower() == 'true' -%}
{{ ' ' ~ template_monitor }}
[resilient-publish-connection-parameter]
monitor-base-url = ${publish:monitor-base-url}
......@@ -30,7 +28,6 @@ password = {{ slapparameter_dict['monitor-password'] }}
instance-configuration =
raw takeover-url ${resilient-publish-connection-parameter:takeover-url}
raw takeover-password ${resilient-publish-connection-parameter:takeover-password}
{% endif -%}
[directory]
recipe = slapos.cookbook:mkdirectory
......
......@@ -524,24 +524,11 @@ command = {{ python_executable }} {{ file_download_script }} {{ bootstrap_url }}
update-command =
stop-on-error = true
[cron-entry-logrotate]
<= cron
recipe = slapos.cookbook:cron.d
name = logrotate
# Once a day, at midnight
frequency = 0 0 * * *
command = ${logrotate:wrapper}
[logrotate-vm-bootstrap]
< = logrotate-entry-base
name = vm-bootstrap
log = ${directory:public}/ansible/vm-bootstrap.log
[logrotate-entry-base]
recipe = slapos.cookbook:logrotate.d
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
[slap-parameter]
# Default values if not specified
frontend-software-type = frontend
......
[buildout]
parts =
cron-entry-logrotate
[cron]
recipe = slapos.cookbook:cron
cron-entries = ${logrotate-directory:cron-entries}
dcrond-binary = {{ dcron_location }}/sbin/crond
crontabs = ${logrotate-directory:crontabs}
cronstamps = ${logrotate-directory:cronstamps}
catcher = ${cron-simplelogger:wrapper}
binary = ${logrotate-directory:services}/crond
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
wrapper = ${logrotate-directory:bin}/cron_simplelogger
log = ${logrotate-directory:log}/cron.log
[logrotate]
recipe = slapos.cookbook:logrotate
logrotate-entries = ${logrotate-directory:logrotate-entries}
backup = ${logrotate-directory:logrotate-backup}
logrotate-binary = {{ logrotate_location }}/usr/sbin/logrotate
gzip-binary = {{ gzip_location }}/bin/gzip
gunzip-binary = {{ gzip_location }}/bin/gunzip
wrapper = ${logrotate-directory:bin}/logrotate
conf = ${logrotate-directory:etc}/logrotate.conf
state-file = ${logrotate-directory:srv}/logrotate.status
[cron-entry-logrotate]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = logrotate
frequency = 0 0 * * *
command = ${logrotate:wrapper}
[logrotate-directory]
recipe = slapos.cookbook:mkdirectory
cron-entries = ${:etc}/cron.d
cronstamps = ${:etc}/cronstamps
crontabs = ${:etc}/crontabs
logrotate-backup = ${:backup}/logrotate
logrotate-entries = ${:etc}/logrotate.d
bin = ${buildout:directory}/bin
srv = ${buildout:directory}/srv
backup = ${:srv}/backup
etc = ${buildout:directory}/etc
services = ${:etc}/service
log = ${buildout:directory}/var/log
[buildout]
parts =
switch-softwaretype
switch_softwaretype
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
extends = ${template-resilient-templates:output}
[switch-softwaretype]
[switch_softwaretype]
recipe = slapos.cookbook:softwaretype
default = $${:kvm}
kvm-cluster = $${dynamic-template-kvm-cluster:rendered}
......@@ -19,9 +19,6 @@ kvm-resilient = $${dynamic-template-kvm-resilient:rendered}
kvm-import = $${dynamic-template-kvm-import:rendered}
kvm-export = $${dynamic-template-kvm-export:rendered}
# Used for the resiliency test. The system expects a "test" software_type.
test = $${dynamic-template-kvm-resilient-test:rendered}
frozen = ${instance-frozen:output}
pull-backup = ${template-pull-backup:output}
......@@ -106,8 +103,8 @@ context =
raw template_kvm_run ${template-kvm-run:location}/${template-kvm-run:filename}
raw template_monitor ${monitor2-template:rendered}
raw websockify_executable_location ${buildout:directory}/bin/websockify
template-parts-destination = ${template-parts:destination}
template-replicated-destination = ${template-replicated:destination}
template-parts-destination = ${template-parts:target}
template-replicated-destination = ${template-replicated:target}
import-list = file parts :template-parts-destination
file replicated :template-replicated-destination
mode = 0644
......@@ -123,8 +120,8 @@ context =
key slapparameter_dict slap-configuration:configuration
raw curl_executable_location ${curl:location}/bin/curl
raw template_monitor ${monitor2-template:rendered}
template-parts-destination = ${template-parts:destination}
template-replicated-destination = ${template-replicated:destination}
template-parts-destination = ${template-parts:target}
template-replicated-destination = ${template-replicated:target}
import-list = file parts :template-parts-destination
file replicated :template-replicated-destination
mode = 0644
......@@ -154,19 +151,8 @@ context =
key eggs_directory buildout:eggs-directory
raw template_kvm_import ${template-kvm-import-script:location}/${template-kvm-import-script:filename}
raw pbsready_import_template ${pbsready-import:output}
raw template_monitor ${monitor2-template:rendered}
key slapparameter_dict slap-configuration:configuration
raw zcat_binary ${gzip:location}/bin/zcat
raw gzip_binary ${gzip:location}/bin/gzip
mode = 0644
[dynamic-template-kvm-resilient-test]
recipe = slapos.recipe.template:jinja2
template = ${template-kvm-resilient-test:location}/instance-kvm-resilient-test.cfg.jinja2
rendered = $${buildout:directory}/template-kvm-resilient-test.cfg
context =
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key slapparameter_dict slap-configuration:configuration
raw bin_directory ${buildout:bin-directory}
mode = 0644
[buildout]
extends = development.cfg
# Change default software-type to be "test", so that it can be run using erp5testnode.
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-for-erp5testnode.cfg.in
md5sum = 9e026495cbb0f5ea43adb2afb8574475
output = ${buildout:directory}/template.cfg
mode = 0644
......@@ -5,7 +5,7 @@ extends = common.cfg
# XXX - use websockify = 0.5.1 for compatibility with kvm frontend
websockify = 0.5.1
slapos.toolbox = 0.65
slapos.toolbox = 0.66
erp5.util = 0.4.46
apache-libcloud = 1.1.0
collective.recipe.environment = 0.2.0
......
......@@ -25,5 +25,5 @@ $QMP_CLIENT --socket {{ socket_path }} --drive-backup $BACKUP_DIR/$BACKUP_FILE
echo "Compressing backup..."
{{ gzip_binary }} --force --rsyncable $BACKUP_DIR/$BACKUP_FILE
cd $BACKUP_DIR && find -type f ! -name backup.signature -print0 | xargs -P4 -0 sha256sum | LC_ALL=C sort -k 66 > backup.signature
cd $BACKUP_DIR && find -type f ! -name backup.signature -print0 | xargs -0 sha256sum | LC_ALL=C sort -k 66 > backup.signature
......@@ -9,7 +9,7 @@ umask 077
write_backup_proof () {
cd {{ directory['backup'] }}
find -type f ! -name backup.signature ! -wholename "./rdiff-backup-data/*" -print0 | xargs -P4 -0 sha256sum | LC_ALL=C sort -k 66 > {{ directory['srv'] }}/proof.signature
find -type f ! -name backup.signature ! -wholename "./rdiff-backup-data/*" -print0 | xargs -0 sha256sum | LC_ALL=C sort -k 66 > {{ directory['srv'] }}/proof.signature
diff -ruw {{ directory['backup'] }} {{ directory['srv'] }}/proof.signature > {{ directory['srv'] }}/backup.diff
}
......
......@@ -10,6 +10,8 @@ parts =
monitor-collect-csv-wrapper
fluentd-wrapper
monitor-base
monitor-check-memory-usage
monitor-check-cpu-usage
publish-connection-information
......@@ -99,11 +101,38 @@ command-line =
wrapper-path = ${monitor-directory:reports}/monitor-collect-csv-dump
parameters-extra = true
[monitor-check-cpu-usage]
recipe = slapos.cookbook:wrapper
command-line = ${monitor-directory:bin}/python {{ monitor_check_system_health }} cpu ${init-monitor-parameters:cpu-load-file}
wrapper-path = ${monitor-directory:promises}/system-CPU-load-check
[monitor-check-memory-usage]
recipe = slapos.cookbook:wrapper
command-line = ${monitor-directory:bin}/python {{ monitor_check_system_health }} mem ${init-monitor-parameters:mem-free-file} ${directory:monitor}
wrapper-path = ${monitor-directory:promises}/system-MEMORY-usage-check
[publish-connection-information]
recipe = slapos.cookbook:publish
monitor-setup-url = https://monitor.app.officejs.com/#page=settings_configurator&url=${publish:monitor-url}&username=${publish:monitor-user}&password=${publish:monitor-password}
server_log_url = ${publish:monitor-base-url}/${slap-configuration:private-hash}/
[monitor-instance-parameter]
instance-configuration =
file max-cpu-load-per-core ${init-monitor-parameters:cpu-load-file}
file min-free-mem-percent ${init-monitor-parameters:mem-free-file}
[init-monitor-parameters]
recipe = plone.recipe.command
cpu-load-file = ${directory:monitor}/cpu-load-tolerance
mem-free-file = ${directory:monitor}/mem-free-limit
command =
if [ ! -s "${:cpu-load-file}" ]; then
echo "1.5" > ${:cpu-load-file}
fi
if [ ! -s "${:mem-free-file}" ]; then
echo "7.0" > ${:mem-free-file}
fi
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = ${slap-connection:computer-id}
......
......@@ -24,6 +24,7 @@ context = key buildout buildout:bin-directory
raw fluentd_location ${fluentd:location}
raw fluent_conf_output ${fluentd-agent-conf:output}
raw monitor_collect_csv_dump ${monitor-collect-csv-dump:output}
raw monitor_check_system_health ${monitor-system-health:output}
mode = 0644
[instance-base-distributor]
......@@ -51,4 +52,3 @@ partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
#!/usr/bin/env python
import subprocess
import os
import re
import json
import sys
cpu_command_list = ['top', '-n', '1', '-b']
mem_command_list = ['free', '-m']
head_command_list = ['head', '-n', '5']
cpu_core_cmd_list = ['nproc']
def cpu_usage(tolerance=1.5):
# tolerance=1.5 => accept a load average of up to 1.5 per core, i.e. 150% CPU load per core
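# e.g. on a 4-core machine with tolerance=1.5, the check complains once the
# 15-minute load average exceeds 6.0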
uptime_result = subprocess.check_output(['uptime'])
line = uptime_result.strip().split(' ')
load, load5, long_load = line[-3:]
core_count = int(subprocess.check_output(cpu_core_cmd_list).strip())
threshold = core_count * tolerance
if float(long_load) > threshold:
# display top statistics
top = subprocess.Popen(cpu_command_list, stdout=subprocess.PIPE)
result = subprocess.check_output(head_command_list, stdin=top.stdout)
message = "CPU load is high: %s %s %s\n\n" % (load, load5, long_load)
message += result
return message
def check_last_result(file, last_value, threshold=7.0, elt_count=5):
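# Keep a space-separated rolling window of the last `elt_count` samples in
# `file`; once the window is full, return the rounded average if it is below
# `threshold`, otherwise return 0.0 (nothing to report).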
mem_average = 0.0
value_list = []
if os.path.exists(file):
with open(file) as f:
values = f.read()
value_list = values.split(' ')
size = len(value_list)
value_list.append(str(last_value))
if size >= elt_count:
while len(value_list) > elt_count:
value_list.pop(0)
# calculate average
average = sum([float(l) for l in value_list])/(size * 1.0)
if average < threshold:
mem_average = round(average, 2)
else:
value_list.append(str(last_value))
with open(file, 'w') as f:
f.write(' '.join(value_list))
return mem_average
def memory_usage(storage_file, threshold=7.0, elt_count=5):
mem_stats = subprocess.check_output(mem_command_list)
result_list = mem_stats.split('\n')
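# Assumes the traditional `free -m` layout: row 1 is "Mem:", row 2 is
# "-/+ buffers/cache:" (whose last column is the memory actually available)
# and row 3 is "Swap:". Newer procps-ng releases drop the buffers/cache row.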
usage = re.sub('\s+', ' ', result_list[1])
usage_real = re.sub('\s+', ' ', result_list[2])
usage_list = usage.split(' ')
mem_total = float(usage_list[1])
mem_free = float(usage_real.split(' ')[-1])
if mem_free == 0.0:
mem_available = 0.0
else:
mem_available = round(mem_free * 100 / (mem_total * 1.0), 2)
average = check_last_result(
storage_file,
mem_available,
threshold=threshold,
elt_count=elt_count)
if average != 0.0 and average < threshold:
# on average, less than `threshold` percent of memory is available
message = "Memory usage is high. %s%% is available (%s%% for last %s minutes).\n\n" % (
mem_available, average, elt_count)
message += mem_stats
return message
swap_usage = re.sub('\s+', ' ', result_list[3])
swap_usage_list = swap_usage.split(' ')
swap_total = float(swap_usage_list[1])
swap_free = float(swap_usage_list[3])
if swap_total > 1:
if swap_free == 0.0:
swap_available = 0.0
else:
swap_available = round(swap_free * 100 / (swap_total * 1.0), 2) * 100
if swap_available < threshold*1.7:
message = "Memory SWAP usage is high. %s%% is available.\n\n" % swap_available
message += mem_stats
return message
if __name__ == '__main__':
if len(sys.argv) < 2:
print "Usage: %s [cpu | mem] CONFIG_FILE [BASE_DIR]" % os.path.basename(sys.argv[0])
exit(2)
check_type = sys.argv[1]
threshold = None
if len(sys.argv) >= 3:
config_file = sys.argv[2]
if os.path.exists(config_file):
with open(config_file) as f:
try:
threshold = float(f.read())
if not threshold > 0:
threshold = None
except ValueError:
pass
if check_type == "cpu":
result = cpu_usage(threshold or 1.5)
if result:
print result
exit(1)
elif check_type == "mem":
directory = ""
if len(sys.argv) >= 4:
directory = sys.argv[3]
if not os.path.exists(directory) or not os.path.isdir(directory):
directory = os.getcwd()
storage_file = os.path.join(directory, 'mem-usage.mo')
result = memory_usage(storage_file, threshold=(threshold or 4.0), elt_count=10)
if result:
print result
exit(1)
else:
exit(3)
exit(0)
\ No newline at end of file
......@@ -25,14 +25,14 @@ parts =
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
output = ${buildout:directory}/template.cfg
md5sum = 0c5e94beede86a91d7b01f61a7290f86
md5sum = 3ff5fb2710bf0ea84632c6d6d3894dd9
mode = 0644
[template-monitor]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-monitor.cfg.jinja2
destination = ${buildout:directory}/template-base-monitor.cfg
md5sum = df37b01b194f9621ce8928fe361db728
md5sum = c513cf70b8671840b2ba049d2826a661
mode = 0644
[template-monitor-distributor]
......@@ -77,6 +77,13 @@ filename = collect_csv_dump.py
output = ${:destination}/${:filename}
md5sum = cad2402bbd21907cfed6bc5af8c5d3ab
[monitor-system-health]
<= monitor-template-script
url = ${:_profile_base_location_}/script/${:filename}
filename = check_system_health.py
output = ${:destination}/${:filename}
md5sum = 7eb74a0be4995c6a1015a9a1eb6874c6
[extra-eggs]
<= monitor-eggs
interpreter = pythonwitheggs
......
......@@ -85,7 +85,7 @@ ssl = {{ dumps(bool(slapparameter_dict['ssl'])) }}
cluster = {{ dumps(slapparameter_dict['cluster']) }}
masters = ${publish:masters}
database-adapter = MySQL
wait-database = 60
wait-database = -1
engine = {{ slapparameter_dict.get('engine', '') }}
{% for i in range(slapparameter_dict.get('storage-count', 1)) -%}
......
......@@ -15,8 +15,7 @@ init_file = {{ parameter_dict['init-file'] }}
log_warnings = 1
disable-log-bin
### Enables TokuDB and RocksDB
plugin-load = ha_tokudb
plugin-load = ha_tokudb;ha_rocksdb
## The following settings come from ERP5 configuration.
......
......@@ -4,7 +4,7 @@
"""
import argparse, os, re, shutil, subprocess, sys, traceback
from erp5.util import taskdistribution
from time import gmtime, strftime
from time import gmtime, sleep, strftime, time
# pattern to get test counts from stdout
SUMMARY_RE = re.compile(
......@@ -82,7 +82,8 @@ def main():
if not test_result_line:
break
temp = os.path.join(TEMP_DIRECTORY, 'tests-' + test_result_line.name)
adapter = test_result_line.name
temp = os.path.join(TEMP_DIRECTORY, 'tests-' + adapter)
if os.path.exists(temp):
shutil.rmtree(temp)
os.mkdir(temp)
......@@ -90,46 +91,48 @@ def main():
args = [RUN_NEO_TESTS_COMMAND, '-ufz']
command = ' '.join(args)
env = {'TEMP': temp,
'NEO_TESTS_ADAPTER': test_result_line.name,
'NEO_TESTS_ADAPTER': adapter,
'NEO_TEST_ZODB_FUNCTIONAL': '1',
'NEO_DB_USER': 'root',
'NEO_DB_SOCKET': NEO_DB_SOCKET}
'NEO_DB_USER': 'root'}
try:
if adapter == 'MySQL':
env['NEO_DB_SOCKET'] = NEO_DB_SOCKET
timeout = time() + 60
while not os.path.exists(NEO_DB_SOCKET):
if timeout < time():
raise RuntimeError("MySQL server not started")
sleep(1)
with open(os.devnull) as stdin:
p = subprocess.Popen(args, stdin=stdin, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, env=env)
except Exception:
# Catch any exception here, to warn the user instead of failing silently,
# by generating a fake error result
result = dict(status_code=-1,
command=command,
stderr=traceback.format_exc(),
stdout='')
# XXX: inform test node master of error
raise EnvironmentError(result)
# parse test stdout / stderr (hint: to speed this up, use files first!)
end = time()
stderr = traceback.format_exc()
status_dict = {}
else:
stdout, stderr = p.communicate()
date = strftime("%Y/%m/%d %H:%M:%S", gmtime())
end = time()
test_count, unexpected_count, expected_count, skip_count, duration = \
parseTestStdOut(stdout)
status_dict = dict(
test_count = test_count,
error_count = unexpected_count, # XXX
failure_count = expected_count, # XXX
skip_count = skip_count,
duration = duration,
stdout= stdout)
# print to stdout so we can see it in the testnode logs
sys.stdout.write(stdout)
sys.stderr.write(stderr)
# report status back to Nexedi ERP5
test_result_line.stop(
test_count = test_count,
error_count = unexpected_count, # XXX
failure_count = expected_count, # XXX
skip_count = skip_count,
duration = duration,
date = date,
command = command,
stdout= stdout,
stderr= stderr,
html_test_result='')
date = strftime("%Y/%m/%d %H:%M:%S", gmtime(end)),
stderr=stderr,
**status_dict)
if __name__ == "__main__":
main()
......@@ -11,6 +11,7 @@ extends =
#LXML
../../component/lxml-python/buildout.cfg
#END LXML
../../component/msgpack-python/buildout.cfg
../../component/patch/buildout.cfg
../../component/python-mysqlclient/buildout.cfg
../../component/python-cryptography/buildout.cfg
......@@ -41,6 +42,7 @@ environment = neoppod-setup-env
[neoppod]
recipe = zc.recipe.egg
eggs = neoppod[admin, ctl, master, storage-mysqldb]
${msgpack-python:egg}
${python-mysqlclient:egg}
psutil
BTrees
......@@ -97,36 +99,58 @@ md5sum = c6e0db7344a26a55580102e3cc33c984
[instance-neo-storage-mysql]
<= download-base-neo
md5sum = 22206396eff084af3e0587494a012174
md5sum = c1cdb4289ff458545de15aca65121b80
[template-neo-my-cnf]
<= download-base-neo
url = ${:_profile_base_location_}/my.cnf.in
md5sum = cfdd59d42ae540563a964ccaf8fec232
md5sum = 87d18c7021e4d43756813a83c9da5e97
[versions]
BTrees = 4.4.1
ZODB = 4.4.5
persistent = 4.2.2
apache-libcloud = 1.5.0
ecdsa = 0.13
gitdb2 = 2.0.0
msgpack-python = 0.4.8
mysqlclient = 1.3.10
persistent = 4.2.3
pycrypto = 2.6.1
pycurl = 7.43.0
slapos.recipe.template = 2.10
slapos.toolbox = 0.66
smmap2 = 2.0.1
transaction = 1.7.0
zdaemon = 4.1.0
zodbpickle = 0.6.0
zodbtools = 0.0.0.dev3
# Required by slapos.toolbox = 0.65
slapos.toolbox = 0.65
GitPython = 2.1.1
# Required by:
# slapos.toolbox==0.66
GitPython = 2.1.3
# Required by:
# slapos.toolbox==0.66
PyRSS2Gen = 1.1
apache-libcloud = 1.5.0
# Required by:
# slapos.toolbox==0.66
atomize = 0.2.0
# Required by:
# slapos.toolbox==0.66
dnspython = 1.14.0
ecdsa = 0.13
# Required by:
# slapos.toolbox==0.66
feedparser = 5.2.1
gitdb2 = 2.0.0
# slapos.toolbox==0.66
lockfile = 0.12.2
mysqlclient = 1.3.9
paramiko = 2.1.1
# Required by:
# slapos.toolbox==0.66
paramiko = 2.1.2
# Required by:
# slapos.toolbox==0.66
passlib = 1.7.1
pycrypto = 2.6.1
pycurl = 7.43.0
smmap2 = 2.0.1
[buildout]
extends =
software.cfg
../../component/mariadb/mariarocks.cfg
......@@ -26,7 +26,7 @@ md5sum = ee8401a4e7d82bf488a57e3399f9ce48
[runTestSuite.in]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_buildout_section_name_}
md5sum = 050593aef62fd4aa241d8ad378111c36
md5sum = 982de37ea52c10938e46037adf2894c1
[runTestSuite_py]
recipe = zc.recipe.egg
......
{% set part_list = [] -%}
{% set crontab_line_list = [] -%}
[buildout]
parts =
nginx-service
htpasswd
htpasswd-runner
publish-connection-information
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[directory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
......@@ -35,23 +44,35 @@ error_log = $${directory:log}/nginx-error.log
ip = $${slap-network-information:global-ipv6}
local_ip = $${slap-network-information:local-ipv4}
port = 9443
publisher_location_prefix = /pub
publisher_push_stream_store_messages = off
publisher_client_max_body_size = 16k
publisher_client_body_buffer_size = 16k
subscriber_allow_origin = '*'
subscriber_location_prefix = /sub
# Do not allow credentials when the allowed origin is the wildcard '*'
subscriber_allow_credential = 'false'
subscriber_allow_methods = 'GET, HEAD, OPTIONS'
subscriber_allow_headers = 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since'
# Add parts generated by template
[buildout]
parts =
nginx-service
{% for part in part_list -%}
{{ ' %s' % part }}
{% endfor -%}
[htpasswd]
recipe = slapos.cookbook:generate.password
storage-path = $${directory:etc}/.pwd
bytes = 8
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[htpasswd-runner]
recipe = plone.recipe.command
stop-on-error = true
htpasswd-path = $${directory:etc}/.htpasswd
command = if [ ! -f "$${:htpasswd-path}" ]; then ${buildout:bin-directory}/htpasswd -cb $${:htpasswd-path} $${:user} $${:password}; fi
update-command = $${:command}
user = admin
password = $${htpasswd:passwd}
[publish-connection-information]
recipe = slapos.cookbook:publish
init-password = $${htpasswd:passwd}
init-user = $${htpasswd-runner:user}
publisher-url = http://$${htpasswd-runner:user}:$${htpasswd:passwd}@[$${nginx-configuration:ip}]:$${nginx-configuration:port}$${nginx-configuration:publisher_location_prefix}
subscriber-url = http://$${htpasswd-runner:user}:$${htpasswd:passwd}@[$${nginx-configuration:ip}]:$${nginx-configuration:port}$${nginx-configuration:subscriber_location_prefix}
[buildout]
extends =
../../stack/slapos.cfg
../../component/dash/buildout.cfg
......@@ -13,6 +12,10 @@ parts =
template-nginx-service
template-nginx
[slapos-cookbook]
eggs +=
slapos.toolbox
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg.in
......@@ -30,17 +33,17 @@ mode = 0644
[template-nginx-configuration]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/template-nginx.cfg.in
md5sum = 58cd6a59089ab93bb3ec0f5d12888500
md5sum = f5658154b82282bc1871f18ddf4529d8
output = ${buildout:directory}/template-nginx.cfg.in
mode = 0644
[template-nginx]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-nginx.cfg.in
md5sum = 478085d0abb0ffc0b259c038bddae205
md5sum = 936fea88f5548c4f14e287f1b27dc127
output = ${buildout:directory}/instance-nginx.cfg.in
mode = 0644
[versions]
plone.recipe.command = 1.1
slapos.recipe.template = 2.7
slapos.toolbox = 0.66
......@@ -65,12 +65,15 @@ server {
client_body_temp_path $${directory:varnginx} 1 2;
proxy_temp_path $${directory:varnginx} 1 2;
auth_basic "Nginx Access";
auth_basic_user_file $${htpasswd-runner:htpasswd-path};
## Serve an error 204 (No Content) for favicon.ico
location = /favicon.ico {
return 204;
}
location /pub {
location $${nginx-configuration:publisher_location_prefix} {
push_stream_publisher;
push_stream_channels_path $arg_id;
......@@ -85,7 +88,7 @@ server {
}
location ~ /sub/(.*) {
location ~ $${nginx-configuration:subscriber_location_prefix}/(.*) {
# activate subscriber mode for this location
add_header "Access-Control-Allow-Origin" $${nginx-configuration:subscriber_allow_origin};
add_header 'Access-Control-Allow-Credentials' $${nginx-configuration:subscriber_allow_credential};
......
......@@ -132,15 +132,19 @@ template = {{ parameter_dict['template-re6st-registry-conf'] }}
rendered = ${directory:etc}/re6st-registry.conf
context = section parameter_dict re6st-registry-conf-dict
[re6st-registry-wrapper]
recipe = slapos.recipe.template:jinja2
template = {{ parameter_dict['template-registry-run'] }}
rendered = ${directory:services}/re6st-registry
pid-file = ${directory:run}/registry.pid
context =
key pid_file :pid-file
raw re6st_command {{ re6st_registry }}
key re6st_conf re6st-registry-conf:rendered
[re6st-registry]
recipe = slapos.cookbook:re6stnet.registry
command = {{ re6st_registry }}
wrapper = ${directory:services}/re6st-registry
pid-file = ${directory:run}/registry.pid
manager-wrapper = ${directory:bin}/re6stManageToken
check-service-wrapper = ${directory:bin}/re6stCheckService
drop-service-wrapper = ${directory:bin}/re6stManageDeleteToken
revoke-service-wrapper = ${directory:bin}/re6stRevokeCertificate
openssl-bin = {{ openssl_bin }}/openssl
python-bin = {{ python_bin }}
ipv6-prefix = {{ slapparameter_dict.get('ipv6-prefix', '2001:db8:24::/48') }}
......@@ -167,32 +171,18 @@ recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:script}/re6st-token-manager
command-line = "{{ python_bin }}" ${re6st-registry:manager-wrapper}
[cron-entry-re6st-check]
[cron-entry-re6st-manage]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = re6stnet-check-token
frequency = */5 * * * *
command = {{ python_bin }} ${re6st-registry:check-service-wrapper}
[cron-entry-re6st-revoke]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = re6stnet-revoke-cert
frequency = */5 * * * *
command = {{ python_bin }} ${re6st-registry:revoke-service-wrapper}
[cron-entry-re6st-drop]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = re6stnet-drop-token
frequency = */5 * * * *
command = {{ python_bin }} ${re6st-registry:drop-service-wrapper}
command = {{ python_bin }} ${re6st-registry:manager-wrapper}
[logrotate-entry-re6stnet]
< = logrotate-entry-base
name = re6stnet
log = ${re6st-registry-conf-dict:logfile}
post = test ! -s ${re6st-registry:pid-file} || {{ parameter_dict['bin-directory'] }}/slapos-kill --pidfile ${re6st-registry:pid-file} -s USR1
post = test ! -s ${re6st-registry-wrapper:pid-file} || {{ parameter_dict['bin-directory'] }}/slapos-kill --pidfile ${re6st-registry-wrapper:pid-file} -s USR1
[re6st-registry-promise]
recipe = slapos.cookbook:check_port_listening
......@@ -227,9 +217,7 @@ parts =
logrotate-entry-re6stnet
re6stnet-manage
cron-entry-logrotate
cron-entry-re6st-check
cron-entry-re6st-drop
cron-entry-re6st-revoke
cron-entry-re6st-manage
apache-httpd
apache-httpd-graceful
publish
......
......@@ -36,6 +36,7 @@ template-apache-conf = {{ template_apache_conf }}
template-wrapper = {{ template_wrapper }}
apache-location = {{ apache_location }}
template-re6st-registry-conf = {{ template_re6st_registry_conf }}
template-registry-run = {{ template_registry_run }}
[dynamic-template-re6stnet]
< = jinja2-template-base
......
#!/bin/bash
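# Record our PID before exec'ing the registry (exec keeps the same PID), so the
# logrotate entry can signal the running registry via slapos-kill --pidfile ... -s USR1.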
echo $$ > {{ pid_file }}
exec {{ re6st_command }} @{{ re6st_conf }}
[buildout]
extends =
../../component/re6stnet/buildout.cfg
../../component/dash/buildout.cfg
../../component/dcron/buildout.cfg
../../component/gzip/buildout.cfg
......@@ -15,16 +14,11 @@ extends =
# Monitoring stack (keep on bottom)
../../stack/monitor/buildout.cfg
extensions -=
buildout-versions
parts +=
slapos-cookbook-develop
slapos-cookbook
eggs
dash
babeld
re6stnet
template
[eggs]
......@@ -45,7 +39,6 @@ eggs =
${python-cffi:egg}
${python-cryptography:egg}
pyOpenSSL
miniupnpc
re6stnet
[download-base]
......@@ -71,23 +64,25 @@ context =
< = template-jinja2-base
filename = template.cfg
template = ${:_profile_base_location_}/instance.cfg.in
md5sum = 84387d5af13561447c1170704a2076a6
md5sum = 610fc6fd0444d3bab3fca4478572749a
extra-context =
key apache_location apache:location
key dash_location dash:location
key logrotate_location logrotate:location
key openssl_location openssl:location
key template_apache_conf template-apache-conf:target
key template_re6stnet template-re6stnet:target
key template_re6st_registry_conf template-re6st-registry-conf:target
key template_logrotate_base template-logrotate-base:rendered
key template_wrapper template-wrapper:target
key template_registry_run template-registry-run:target
key monitor2_template_rendered monitor2-template:rendered
raw python_with_eggs ${buildout:directory}/bin/${extra-eggs:interpreter}
[template-re6stnet]
< = download-base
filename = instance-re6stnet.cfg.in
md5sum = 6e9452d283e82e2f512a9f9edb17fe3a
md5sum = 9e5e6cf010ec2a329e6bc8def7f95ba7
[template-apache-conf]
< = download-base
......@@ -104,10 +99,12 @@ md5sum = 5dc218f887faeffc466e41c7d6191e49
filename = wrapper.in
md5sum = 69e63cb58267335e21da772bd867657e
[template-registry-run]
< = download-base
filename = registry-run.in
md5sum = 0bf4f2c03e06b55c6c6cc55fa33e65d6
[versions]
setuptools = 28.8.0
zc.buildout = 2.5.2+slapos005
zc.recipe.egg = 2.0.3+slapos002
apache-libcloud = 0.17.0
ecdsa = 0.13
gitdb = 0.6.4
......@@ -115,15 +112,15 @@ plone.recipe.command = 1.1
pycrypto = 2.6.1
pycurl = 7.43.0
slapos.recipe.template = 2.7
slapos.toolbox = 0.65
slapos.toolbox = 0.66
smmap = 0.9.0
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
GitPython = 2.0.8
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
atomize = 0.2.0
# Required by:
......@@ -131,11 +128,11 @@ atomize = 0.2.0
backports.ssl-match-hostname = 3.4.0.2
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
feedparser = 5.1.3
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
lockfile = 0.12.2
# Required by:
......@@ -143,10 +140,10 @@ lockfile = 0.12.2
miniupnpc = 1.9
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
paramiko = 2.0.1
# Required by:
# slapos.toolbox = 0.65
# slapos.toolbox = 0.66
rpdb = 0.1.5
# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginings, copied verbatim
# - lines containing an "=" sign which must fit in the following categorie.
# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
# But avoid directories, they are not portable.
# Copied verbatim.
# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
# by the re-generation script.
# Re-generated.
# - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
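# For illustration only, a hypothetical entry following the rules above (not a
# real section of this file):
#   [some-template]
#   filename = some-template.cfg.in
#   md5sum = <value recomputed by the re-generation script>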
[template]
filename = instance.cfg
md5sum = 02755403c9f0b52d717160b0b2d0cfb7
[template-runner]
filename = instance-runner.cfg
md5sum = 07905ec872e84bc42c6ab089f5270ac2
[template-runner-import-script]
filename = template/runner-import.sh.jinja2
md5sum = 5db39d7f56fd1f96ce6466e9055841a1
[instance-runner-import]
filename = instance-runner-import.cfg.in
md5sum = 9db9957f452bda370cb2d5cc2e833e85
[template-runner-export-script]
filename = template/runner-export.sh.jinja2
md5sum = 86a429492dba25364f6ec2b318ba1f85
[instance-runner-export]
filename = instance-runner-export.cfg.in
md5sum = 852a2ed99af566d27e5e4403334a3376
[template-resilient]
filename = instance-resilient.cfg.jinja2
md5sum = a902b84ac7d1e29a7fdb06cbc7dec150
[template_nginx_conf]
filename = nginx_conf.in
md5sum = 2b06f7eb9a1d45d250d4b92a944db925
[template_httpd_conf]
filename = httpd_conf.in
md5sum = 40825ef2d259ae3b86dfd2f28f597d3a
[template_launcher]
filename = launcher.in
md5sum = 525e37ea8b2acf6209869999b15071a6
[template-slapos-cfg]
filename = template/slapos.cfg.in
md5sum = ef16446d432e1397182b1654fe920ffb
[template-parameters]
filename = parameters.xml.in
md5sum = f8446fcf254b4929eb828a9a1d7e5f62
[template-bash-profile]
filename = template/bash_profile.in
md5sum = 1c88cbca0c1e705eeb4b544ef4616097
[template-supervisord]
filename = template/supervisord.conf.in
md5sum = d294d0dafd265048399de6da8c96345f
[template-listener-slapgrid]
filename = template/listener_slapgrid.py.in
md5sum = 49d50410cf7467175a841eb7cd0d93d4
[monitor-check-webrunner-internal-instance]
filename = template/monitor-check-webrunner-internal-instances.py
md5sum = acaac32cf1bd45714272468a89f4f119
[template-resilient-software-release-information]
filename = template/resilient_software_release_information.py.in
md5sum = 2451072826a9ad9425d62c9e9c7f6284
[buildout]
extends =
buildout.hash.cfg
../../component/bash/buildout.cfg
../../component/busybox/buildout.cfg
../../component/curl/buildout.cfg
......@@ -37,11 +38,6 @@ common-parts =
rdiff-backup
pbs-recipe-egg
extensions -=
buildout-versions
show-picked-versions = true
parts =
${:common-parts}
......@@ -49,162 +45,97 @@ parts =
[shellinabox]
<= shellinabox-github
[template]
[template-base]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
output = ${buildout:directory}/template.cfg
md5sum = 06107f93ebe78905c957a4c4fc4edf16
url = ${:_profile_base_location_}/${:filename}
mode = 0644
[template-runner]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-runner.cfg
output = ${buildout:directory}/template-runner.cfg.in
md5sum = 07905ec872e84bc42c6ab089f5270ac2
[download-base]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename}
mode = 0644
[template-runner-import-script]
recipe = hexagonit.recipe.download
[download-only-base]
< = download-base
ignore-existing = true
url = ${:_profile_base_location_}/template/runner-import.sh.jinja2
download-only = true
md5sum = 5db39d7f56fd1f96ce6466e9055841a1
[template-download-base]
# Downloads from template directory into current directory
< = download-only-base
url = ${:_profile_base_location_}/template/${:filename}
[template]
< = template-base
output = ${buildout:directory}/template.cfg
[template-runner]
< = template-base
output = ${buildout:directory}/template-runner.cfg.in
[template-runner-import-script]
< = template-download-base
filename = runner-import.sh.jinja2
mode = 0644
[instance-runner-import]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-runner-import.cfg.in
< = template-base
output = ${buildout:directory}/instance-runner-import.cfg
md5sum = 9db9957f452bda370cb2d5cc2e833e85
mode = 0644
[template-runner-export-script]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/template/runner-export.sh.jinja2
download-only = true
md5sum = 2e96b9daa5cd20fc77e9573a9ce3fe52
< = template-download-base
filename = runner-export.sh.jinja2
mode = 0644
[instance-runner-export]
< = download-base
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-runner-export.cfg.in
filename = instance-runner-export.cfg.in
md5sum = 852a2ed99af566d27e5e4403334a3376
mode = 0644
[template-resilient]
< = download-base
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-resilient.cfg.jinja2
md5sum = a902b84ac7d1e29a7fdb06cbc7dec150
filename = instance-resilient.cfg.jinja2
mode = 0644
[template-resilient-test]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-resilient-test.cfg.jinja2
download-only = true
md5sum = 09a955bde79fb11565d689b0415e135c
filename = instance-resilient-test.cfg.jinja2
mode = 0644
[template_nginx_conf]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/nginx_conf.in
download-only = true
md5sum = 2b06f7eb9a1d45d250d4b92a944db925
filename = nginx_conf.in
mode = 0644
< = download-only-base
[template_httpd_conf]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/httpd_conf.in
download-only = true
md5sum = 40825ef2d259ae3b86dfd2f28f597d3a
filename = httpd_conf.in
mode = 0644
< = download-only-base
[template_launcher]
< = download-base
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/launcher.in
md5sum = 525e37ea8b2acf6209869999b15071a6
filename = launcher.in
mode = 0644
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[template-slapos-cfg]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/template/${:filename}
md5sum = ef16446d432e1397182b1654fe920ffb
< = template-download-base
location = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = slapos.cfg.in
download-only = true
mode = 0644
[template-parameters]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/${:filename}
md5sum = f8446fcf254b4929eb828a9a1d7e5f62
< = download-only-base
location = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = parameters.xml.in
download-only = true
mode = 0644
[template-bash-profile]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/template/${:filename}
md5sum = 1c88cbca0c1e705eeb4b544ef4616097
< = template-download-base
location = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = bash_profile.in
download-only = true
mode = 0644
[template-supervisord]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/template/${:filename}
md5sum = d294d0dafd265048399de6da8c96345f
< = template-download-base
location = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = supervisord.conf.in
download-only = true
mode = 0644
[template-listener-slapgrid]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/template/${:filename}
md5sum = 49d50410cf7467175a841eb7cd0d93d4
< = template-download-base
location = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = listener_slapgrid.py.in
download-only = true
mode = 0644
[monitor-check-webrunner-internal-instance]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/template/${:filename}
download-only = true
#md5sum = 4c44d617d5bfd1db8695200e896480c0
< = template-download-base
destination = ${buildout:parts-directory}/${:filename}
filename = monitor-check-webrunner-internal-instances.py
mode = 0644
[template-resilient-software-release-information]
recipe = hexagonit.recipe.download
ignore-existing = true
download-only = true
md5sum = 2451072826a9ad9425d62c9e9c7f6284
url = ${:_profile_base_location_}/template/${:filename}
< = template-download-base
filename = resilient_software_release_information.py.in
mode = 0644
[eggs]
recipe = zc.recipe.egg
......@@ -231,7 +162,3 @@ interpreter = pythonwitheggs
eggs +=
supervisor
[versions]
setuptools = 28.8.0
zc.buildout = 2.5.2+slapos005
zc.recipe.egg = 2.0.3+slapos002
......@@ -5,65 +5,12 @@
[buildout]
extends = common.cfg
../../stack/slapos-dev.cfg
parts =
parts +=
# Development eggs
slapos.toolbox-dev
slapos.cookbook-dev
slapos.core-dev
erp5.util-dev
slapos-cookbook
${:common-parts}
[slapos.toolbox-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.toolbox.git
branch = master
git-executable = ${git:location}/bin/git
develop = true
[slapos.toolbox-dev]
recipe = zc.recipe.egg:develop
egg = slapos.toolbox
setup = ${slapos.toolbox-repository:location}
[slapos.cookbook-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.git
branch = master
git-executable = ${git:location}/bin/git
develop = true
[slapos.cookbook-dev]
recipe = zc.recipe.egg:develop
egg = slapos.cookbook
setup = ${slapos.cookbook-repository:location}
# Used for resiliency tests only
[erp5.util-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/erp5.git
branch = master
git-executable = ${git:location}/bin/git
develop = true
[erp5.util-dev]
recipe = zc.recipe.egg:develop
egg = erp5.util
setup = ${erp5.util-repository:location}
[slapos.core-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.core.git
branch = master
git-executable = ${git:location}/bin/git
develop = true
[slapos.core-dev]
recipe = zc.recipe.egg:develop
egg = slapos.core
setup = ${slapos.core-repository:location}
[versions]
slapos.cookbook =
slapos.core =
......
......@@ -129,12 +129,6 @@
"type": "string",
"default": "monitor.app.officejs.com"
},
"cpu-usage-ratio": {
"title": "CPU Usage Ratio",
"description": "Ratio of the CPU use for compilation, if value is set to n, compilation will use number-of-cpu/n of cpus (need instance restart)",
"type": "integer",
"default" : 4
},
"resilient-clone-number": {
"title": "Number of Resilient Clones",
"description": "Number of clones to be deployed when a resilient runner, if missing a single clone is provided. Its value can be 1 or 2",
......
......@@ -11,7 +11,6 @@ extends = ${template-resilient-templates:output}
recipe = slapos.cookbook:softwaretype
default = $${instance-base-runner:rendered}
resilient = $${instance-resilient:rendered}
test = $${instance-resilient-test:rendered}
runner = $${instance-base-runner:rendered}
runner-import = ${instance-runner-import:output}
runner-export = $${template-runner-export:rendered}
......@@ -42,24 +41,12 @@ context = key buildout buildout:bin-directory
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key slapparameter_dict slap-configuration:configuration
template-parts-destination = ${template-parts:destination}
template-replicated-destination = ${template-replicated:destination}
template-parts-destination = ${template-parts:target}
template-replicated-destination = ${template-replicated:target}
import-list = file parts :template-parts-destination
file replicated :template-replicated-destination
mode = 0644
[instance-resilient-test]
recipe = slapos.recipe.template:jinja2
template = ${template-resilient-test:location}/instance-resilient-test.cfg.jinja2
rendered = $${buildout:directory}/template-resilient-test.cfg
bin-directory = ${buildout:bin-directory}
context =
key bin_directory instance-resilient-test:bin-directory
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key slapparameter_dict slap-configuration:configuration
mode = 0644
[template-runner-export]
recipe = slapos.recipe.template:jinja2
template = ${instance-runner-export:target}
......
......@@ -17,7 +17,7 @@ gunicorn = 19.4.5
prettytable = 0.7.2
pycurl = 7.43.0
slapos.recipe.template = 2.9
slapos.toolbox = 0.65
slapos.toolbox = 0.66
smmap = 0.9.0
# Required by:
......
......@@ -60,7 +60,7 @@ if [ -d {{ directory['backup'] }}/runner/software ]; then
rm {{ directory['backup'] }}/runner/software/*
fi
cd {{ directory['backup'] }} && find -type f ! -name backup.signature -print0 | xargs -P4 -0 sha256sum | LC_ALL=C sort -k 66 > backup.signature
cd {{ directory['backup'] }} && find -type f ! -name backup.signature -print0 | xargs -0 sha256sum | LC_ALL=C sort -k 66 > backup.signature
# Check that export didn't happen during backup of instances
tmp_backup_sum=$(mktemp -p {{ directory['tmp'] }})
......
[buildout]
extends = ${template:output}
[switch_softwaretype]
default = $${:test}
......@@ -66,10 +66,6 @@ slapos.recipe.template = 2.7
# Explicitly disable download-cache
download-cache =
# Generate a list of automatically chosen egg versions
extensions +=
buildout-versions
# Separate from site eggs
allowed-eggs-from-site-packages =
include-site-packages = false
......
[buildout]
extensions += buildout-versions
versions = versions
parts =
......
......@@ -18,3 +18,8 @@ eggs += ${wendelin.core-dev:egg}
# to installing non-dev egg
[versions]
wendelin.core =
scikit-learn =
# install scikit-learn git clone
[scikit-learn]
<= scikit-learn-develop
[buildout]
extends =
../../component/keras/buildout.cfg
../../component/chainer/buildout.cfg
software.cfg
parts +=
keras-egg
chainer-egg
[eggs]
eggs +=
......@@ -11,16 +13,12 @@ eggs +=
${protobuf-python:egg}
${h5py:egg}
${pillow-python:egg}
Keras
[bazel]
# For Debian 8 users
# Add jessie-backports to /etc/apt/sources.list
# deb http://ftp.jp.debian.org/debian jessie-backports main
# Then install jdk
# apt-get update
# apt-get -t jessie-backports install openjdk-8-jdk
java_home = /usr/lib/jvm/java-8-openjdk-amd64
${python-PyYAML:egg}
six
keras
filelock
nose
${chainer:egg}
[cuda]
# If you use cuda, please adjust parameters for your environment
......@@ -28,7 +26,7 @@ java_home = /usr/lib/jvm/java-8-openjdk-amd64
# https://developer.nvidia.com/cuda-gpus
tf_need_cuda = 1
tf_cuda_version = 8.0
tf_cudnn_version = 6.5
tf_cudnn_version = 5.1.10
tf_cuda_compute_capabilities = 5.2,6.1
cuda_toolkit_path = /usr/local/cuda
cudnn_install_path = /usr/local/cuda
......
......@@ -17,7 +17,7 @@ parts +=
[eggs]
initialization =
import scipy.stats # load our own libstdc++ explicitly at the very beginning
import scipy.spatial.ckdtree # load our own libstdc++ explicitly at the very beginning
extra-paths +=
${wendelin:location}
eggs +=
......@@ -36,7 +36,7 @@ initialization =
import struct, os
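# struct.calcsize('P') is the pointer size in bytes, so arch is 32 or 64;
# re-exec once with the buildout gcc's libstdc++ preloaded so that compiled
# eggs load it instead of the system one.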
arch = 8 * struct.calcsize("P")
rerun = not os.getenv('LD_PRELOAD')
if rerun: os.environ['LD_PRELOAD'] = '''${gcc-fortran:location}/lib%s/libstdc++.so''' % arch
if rerun: os.environ['LD_PRELOAD'] = '''${gcc:location}/lib%s/libstdc++.so''' % arch
if rerun: os.execve(os.path.realpath(__file__), sys.argv, os.environ)
[wendelin_test_suite_runner]
......@@ -77,4 +77,4 @@ revision = e8154cf06f01155f3f97d06954993f98bc1c9132
[versions]
msgpack-python = 0.4.8
wendelin.core = 0.9
wendelin.core = 0.11
......@@ -69,7 +69,7 @@ pycrypto = 2.6
apache-libcloud = 0.12.4
async = 0.6.1
gitdb = 0.5.4
mysqlclient = 1.3.9
mysqlclient = 1.3.10
plone.recipe.command = 1.1
slapos.recipe.template = 2.4.2
slapos.toolbox = 0.40.4
......
......@@ -91,8 +91,6 @@ parts +=
w3-validator
tesseract
hookbox
perl-DBD-mariadb
perl-DBI
percona-toolkit
zabbix-agent
dash
......@@ -102,6 +100,7 @@ parts +=
# Buildoutish
eggs
eggs-all-scripts
testrunner
test_suite_runner
......@@ -269,7 +268,6 @@ context =
key root_common root-common:target
key site_zcml site-zcml:target
key sixtunnel_location 6tunnel:location
key slapos_core_version versions:slapos.core
key stunnel_location stunnel:location
key template_apache_conf template-apache-backend-conf:target
key template_balancer template-balancer:target
......@@ -488,6 +486,7 @@ eggs = ${neoppod:eggs}
pycountry
xfw
jsonschema
selenium
# Needed for checking ZODB Components source code
pylint
pytracemalloc
......@@ -562,6 +561,12 @@ Products.DCWorkflow-patch-options = -p1
python-magic-patches = ${:_profile_base_location_}/../../component/egg-patch/python_magic/magic.patch#de0839bffac17801e39b60873a6c2068
python-magic-patch-options = -p1
[eggs-all-scripts]
recipe = zc.recipe.egg
eggs =
munnel
patch-binary = ${eggs:patch-binary}
[zodbanalyze]
recipe = zc.recipe.egg
eggs = erp5.util
......@@ -595,20 +600,16 @@ scripts +=
Acquisition = 2.13.9+SlapOSPatched001
Products.DCWorkflow = 2.2.4+SlapOSPatched001
pysvn = 1.7.10+SlapOSPatched002
python-ldap = 2.4.29+SlapOSPatched001
python-ldap = 2.4.32+SlapOSPatched001
python-magic = 0.4.12+SlapOSPatched001
# specify dev version to be sure that an old released version is not used
cloudooo = 1.2.5-dev
uritemplate = 3.0.0
google-api-python-client = 1.6.1
# use newer version than specified in ZTK
PasteDeploy = 1.5.2
Pygments = 2.1.3
argparse = 1.4.0
coverage = 4.3.1
coverage = 4.3.4
zope.dottedname = 4.1.0
# test_UserManagerInterfaces in testERP5Security fails with 1.10.0.
......@@ -660,83 +661,59 @@ WSGIUtils = 0.7
# ZEO 5 requires transaction >= 2
ZEO = 4.3.1
ZODB3 = 3.11.0
Zope2 = 2.13.25
astor = 0.5
# astroid 1.4.1 breaks testDynamicClassGeneration
astroid = 1.3.8
backports-abc = 0.5
backports.shutil-get-terminal-size = 1.0.0
chardet = 2.3.0
csp-eventlet = 0.7.0
cycler = 0.10.0
erp5diff = 0.8.1.7
eventlet = 0.20.1
five.formlib = 1.0.4
five.localsitemanager = 2.0.5
greenlet = 0.4.11
google-api-python-client = 1.6.1
greenlet = 0.4.12
http-parser = 0.8.3
httplib2 = 0.9.2
httplib2 = 0.10.3
huBarcode = 1.0.0
interval = 1.0.0
ipdb = 0.10.2
ipykernel = 4.5.2
ipython = 5.2.1
ipython-genutils = 0.1.0
ipywidgets = 5.2.2
logilab-common = 1.3.0
matplotlib = 2.0.0
mistune = 0.7.3
msgpack-python = 0.4.8
notebook = 4.3.2
munnel = 0.3
nt-svcutils = 2.13.0
numpy = 1.12.0
oauth2client = 4.0.0
objgraph = 3.1.0
pandas = 0.19.2
ply = 3.10
polib = 1.0.8
pprofile = 1.10.0
prompt-toolkit = 1.0.10
ptyprocess = 0.5.1
pyasn1 = 0.2.3
pyasn1-modules = 0.0.8
pycountry = 17.1.8
pycrypto = 2.6.1
pycurl = 7.43.0
pyflakes = 1.5.0
# pylint 1.5.1 breaks testDynamicClassGeneration
pylint = 1.4.4
python-memcached = 1.58
pytracemalloc = 1.2
pyzmq = 16.0.2
qrcode = 5.3
restkit = 4.2.2
rsa = 3.4.2
rtjp-eventlet = 0.3.2
scikit-learn = 0.18.1
scipy = 0.18.1
simplegeneric = 0.8.1
socketpool = 0.5.3
spyne = 2.12.14
suds = 0.4
terminado = 0.6
threadframe = 0.2
timerserver = 2.0.2
tornado = 4.4.2
traitlets = 4.3.1
urlnorm = 1.1.4
uuid = 1.30
validictory = 1.1.0
widgetsnbextension = 1.2.6
xfw = 0.10
xupdate-processor = 0.4
selenium = 2.53.1
# Required by:
# Products.CMFCore==2.2.10
Products.ZSQLMethods = 2.13.5
# Required by:
# tornado==4.4.2
backports.ssl-match-hostname = 3.5.0.1
# Required by:
# tornado==4.4.2
certifi = 2017.1.23
# Required by:
# eventlet==0.20.1
enum-compat = 0.0.2
......@@ -749,52 +726,13 @@ fpconst = 0.7.2
# objgraph==3.1.0
graphviz = 0.5.2
# Required by:
# notebook==4.3.2
# nbconvert 4.2.0 depends on the entrypoints egg, which is not available as a tar/zip source.
nbconvert = 4.1.0
# Required by:
# nbconvert==4.1.0
# notebook==4.3.2
nbformat = 4.2.0
# Required by:
# Pillow==4.0.0
olefile = 0.44
# Required by:
# ipython==5.2.1
pathlib2 = 2.2.1
# Required by:
# ipython==5.2.1
pexpect = 4.2.1
# Required by:
# ipython==5.2.1
pickleshare = 0.7.4
# Required by:
# matplotlib==2.0.0
# pandas==0.19.2
python-dateutil = 2.6.0
# Required by:
# pathlib2==2.2.1
scandir = 1.4
# Required by:
# tornado==4.4.2
singledispatch = 3.4.0.3
# Required by:
# matplotlib==2.0.0
subprocess32 = 3.2.7
# Required by:
# prompt-toolkit==1.0.10
wcwidth = 0.1.7
# munnel==0.3
python-libmilter = 1.0.3
# Required by:
# zope.app.testing==3.8.1
......
......@@ -15,7 +15,7 @@
# not need these here).
[mariadb-resiliency-after-import-script]
filename = instance-mariadb-resiliency-after-import-script.sh.in
md5sum = 394109d9373f4375d5e1599d5d1500ac
md5sum = 1e89de954d816b93f76d9b75820d192c
[template-mariadb]
filename = instance-mariadb.cfg.in
......@@ -55,7 +55,7 @@ md5sum = 79f789360e71146486c82a7a10834bae
[template-postfix]
filename = instance-postfix.cfg.in
md5sum = 2fd9ec619456fd00e7482cebc5c41f76
md5sum = e217f8b55f22d99ee2352eba6807d6f6
[template-postfix-master-cf]
filename = postfix_master.cf.in
......@@ -63,7 +63,7 @@ md5sum = 9ac81647368068a1a98a785d08074b43
[template-postfix-main-cf]
filename = postfix_main.cf.in
md5sum = 66b273861888c8bd59e46de399d2c1d8
md5sum = 4fe9bb5225b9d7650d2916a0945821bd
[template-postfix-aliases]
filename = postfix_aliases.in
......@@ -71,7 +71,7 @@ md5sum = 0969fbb25b05c02ef3c2d437b2f4e1a0
[template]
filename = instance.cfg.in
md5sum = 9d6c7b1a17cf2a5987e8c9f4c9239736
md5sum = 8ab417cf1ca98d2840c80a266f0e2be7
[monitor-template-dummy]
filename = dummy.cfg
......@@ -79,7 +79,7 @@ md5sum = d41d8cd98f00b204e9800998ecf8427e
[template-erp5]
filename = instance-erp5.cfg.in
md5sum = b5f49c90017684aa3389ef3f97ece509
md5sum = 07f45a5d3a9fd0fe227af82c7ede7631
[template-zeo]
filename = instance-zeo.cfg.in
......@@ -87,7 +87,7 @@ md5sum = 7610bafda245c008ccf0b6ea58ce21c2
[template-zope]
filename = instance-zope.cfg.in
md5sum = 5e7145e8fa17046581b66e9690bf5186
md5sum = b7e92234825f9d72ccb9b6c4745b6ce7
[template-balancer]
filename = instance-balancer.cfg.in
......
......@@ -146,6 +146,7 @@ config-mysql-url-list = ${request-mariadb:connection-database-list}
config-site-id = {{ dumps(site_id) }}
config-smtp-url = ${request-smtp:connection-url}
config-timezone = {{ dumps(slapparameter_dict.get('timezone', 'UTC')) }}
config-wendelin-core-zblk-fmt = {{ dumps(slapparameter_dict.get('wendelin-core-zblk-fmt', '')) }}
config-zodb-dict = {{ dumps(zodb_dict) }}
{% for server_type, server_dict in storage_dict.iteritems() -%}
{% if server_type == 'neo' -%}
......
......@@ -23,7 +23,11 @@ update_executable='${update-mysql:output}'
# Make sure mariadb is not already running
if [ -e "$pid_file" ]; then
pid=$(cat "$pid_file") > /dev/null 2>&1
pid=$(cat "$pid_file")
if [ $? -ne 0 ]; then
echo "Cannot read Mariadb pidfile, assuming running. Aborting."
exit 1
fi
if kill -0 "$pid"; then
echo "Mariadb is already running with pid $pid. Aborting."
exit 1
......@@ -45,8 +49,7 @@ if [ -n "$binlog_path" ]; then
fi
echo "Starting mariadb..."
# XXX hardcoded
"$server_executable" --innodb-flush-method=nosync --skip-innodb-doublewrite &
"$server_executable" --innodb-flush-method=nosync --skip-innodb-doublewrite --innodb-flush-log-at-trx-commit=0 --sync-frm=0 --slow-query-log=0 --skip-log-bin &
mysqld_pid=$!
trap "kill $mysqld_pid" EXIT TERM INT
sleep 30
......
......@@ -12,6 +12,7 @@
{% do alias_dict.setdefault('postmaster', [slapparameter_dict['postmaster']]) -%}
{% set smtpd_sasl_user = slapparameter_dict['smtpd-sasl-user'] -%}
{% set smtpd_sasl_password = slapparameter_dict['smtpd-sasl-password'] -%}
{% set milter_list = [] %}
[jinja2-template-base]
recipe = slapos.recipe.template:jinja2
......@@ -55,14 +56,34 @@ var-spool-postfix-public = ${:var-spool-postfix}/public
var-spool-postfix-saved = ${:var-spool-postfix}/saved
var-spool-postfix-trace = ${:var-spool-postfix}/trace
{% if divert -%}
{% set milter_port = tcpv4_port + 1 -%}
{% set socket = 'inet:' ~ ip ~ ':' ~ milter_port -%}
[{{ section('divert-milter') }}]
recipe = slapos.cookbook:wrapper
parameters-extra = true
command-line =
'{{ parameter_dict['buildout-bin-directory'] }}/munnel'
--listen '{{ socket }}'
-- {{ ' '.join(divert) }}
wrapper-path = ${directory:run}/munnel
{% do milter_list.append(socket) -%}
[{{ section('munnel-promise') }}]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promise}/munnel
hostname = {{ ip }}
port = {{ milter_port }}
{% endif -%}
[configuration]
smtp = {{ dumps(tcpv4_port) }}
inet-interfaces = {{ dumps(ip) }}
alias-dict = {{ dumps(alias_dict) }}
relayhost = {{ dumps(relay.get('host')) }}
relay-sasl-credential = {{ dumps(relay.get('sasl-credential')) }}
divert = {{ dumps(divert) }}
cyrus-sasldb = ${directory:etc-cyrus}/postfix.gdbm
milter-list = {{ dumps(milter_list) }}
[userinfo]
recipe = slapos.cookbook:userinfo
......@@ -98,17 +119,6 @@ command =
echo '{{ smtpd_sasl_password }}' | '${wrapper-postfix-saslpasswd2:wrapper-path}' -pc '{{ smtpd_sasl_user }}'
update-command = ${:command}
[divert]
< = jinja2-template-base
rendered = ${directory:etc-postfix}/divert
{% if divert -%}
template = inline:{{ "/.*/ {{ ', '.join(divert) }}" }}
{%- else -%}
template = inline:
{%- endif %}
context =
key divert configuration:divert
[smtpd-ssl]
recipe = plone.recipe.command
stop-on-error = true
......@@ -140,7 +150,7 @@ context =
key relayhost configuration:relayhost
key sasl_passwd typed-paths:smtp-sasl-passwd
key aliases typed-paths:aliases
key divert typed-paths:divert
key milter_list configuration:milter-list
key cyrus_directory directory:etc-cyrus
key cert smtpd-ssl:cert
key key smtpd-ssl:key
......@@ -164,7 +174,6 @@ context =
# Postfix-friendly rendering of file paths, prefixed with database type.
aliases = hash:${aliases:rendered}
smtp-sasl-passwd = hash:${smtp-sasl-passwd:rendered}
divert = pcre:${divert:rendered}
[{{ section('postalias-db') }}]
recipe = plone.recipe.command
......
......@@ -201,6 +201,9 @@ environment =
TZ={{ slapparameter_dict['timezone'] }}
INSTANCE_HOME=${directory:instance}
MATPLOTLIBRC={{ parameter_dict['matplotlibrc'] }}
{% if slapparameter_dict.get('wendelin-core-zblk-fmt') %}
WENDELIN_CORE_ZBLK_FMT={{ slapparameter_dict['wendelin-core-zblk-fmt'] }}
{% endif %}
parameters-extra = true
command-line = '{{ parameter_dict['userhosts'] }}' '{{ bin_directory }}/runzope' -C '${:configuration-file}'
......@@ -400,7 +403,3 @@ parts +=
erp5-promise
{{ part_list | join('\n ') }}
publish
versions = versions
[versions]
slapos.core = {{ slapos_core_version }}
......@@ -2,8 +2,6 @@
extends = {{ instance_common_cfg }}
[jinja2-template-base]
context +=
raw slapos_core_version {{ slapos_core_version }}
mode = 644
[dynamic-template-cloudooo-parameters]
......@@ -54,6 +52,7 @@ extra-context =
[dynamic-template-postfix-parameters]
bin-directory = {{ bin_directory }}
buildout-bin-directory = {{ buildout_bin_directory }}
cyrus-sasl-location = {{ cyrus_sasl_location }}
openssl = {{ openssl_location }}
postfix-location = {{ postfix_location }}
......
......@@ -19,7 +19,6 @@ readme_directory =
inet_interfaces = {{ inet_interfaces }}
smtp_bind_address = 0.0.0.0
smtp_bind_address6 = ::
virtual_alias_maps = {{ divert }}
# Compared to default:
# - remove X-related variables, irrelevant for slapos, to be concise
......@@ -56,6 +55,8 @@ smtpd_relay_restrictions =
# Disable local delivery
local_transport = error
smtpd_milters ={{ '\n '.join(milter_list) }}
{% if relayhost -%}
relayhost = {{ relayhost }}
smtp_tls_security_level = encrypt
......
[versions]
AccessControl = 2.13.14
AccessControl = 2.13.15
Acquisition = 2.13.11
DateTime = 2.12.8
DocumentTemplate = 2.13.3
DocumentTemplate = 2.13.4
ExtensionClass = 2.13.2
Jinja2 = 2.8.1
Missing = 2.13.1
......@@ -22,18 +22,19 @@ Products.StandardCacheManagers = 2.13.1
Products.TemporaryFolder = 3.0
Products.ZCTextIndex = 2.13.5
Products.ZCatalog = 2.13.27
Pygments = 2.1.3
Pygments = 2.2.0
Record = 2.13.0
RestrictedPython = 3.6.0
Sphinx = 1.0.8
ZConfig = 2.9.3
ZODB3 = 3.10.7
Zope2 = 2.13.26
ZServer = 3.0
ZopeUndo = 2.12.0
docutils = 0.12
initgroups = 2.13.0
mechanize = 0.2.5
mr.developer = 1.34
mr.developer = 1.35
repoze.retry = 1.2
repoze.tm2 = 1.0
repoze.who = 2.0
......@@ -93,63 +94,7 @@ MarkupSafe = 0.23
# Required by:
# DateTime==2.12.8
# Zope2==2.13.25
# Zope2==2.13.26
# zope.i18n==3.7.4
# zope.testbrowser==3.11.1
pytz = 2016.10
# Required by:
# Products.BTreeFolder2==2.13.5
# Products.ExternalMethod==2.13.1
# Products.MIMETools==2.13.0
# Products.MailHost==2.13.2
# Products.OFSP==2.13.2
# Products.PythonScripts==2.13.2
# Products.Sessions==3.0
# Products.StandardCacheManagers==2.13.1
# Products.TemporaryFolder==3.0
# Products.ZCTextIndex==2.13.5
# Products.ZCatalog==2.13.27
# RestrictedPython==3.6.0
# ZServer==3.0
# Zope2==2.13.25
# mr.developer==1.34
# tempstorage==2.12.2
# z3c.checkversions==0.5
# zExceptions==2.13.0
# zope.browser==1.3
# zope.browsermenu==3.9.1
# zope.browserpage==3.12.2
# zope.browserresource==3.10.3
# zope.component==3.9.5
# zope.configuration==3.7.4
# zope.container==3.11.2
# zope.contentprovider==3.7.2
# zope.contenttype==3.5.5
# zope.deferredimport==3.5.3
# zope.event==3.5.2
# zope.exceptions==3.6.2
# zope.i18n==3.7.4
# zope.i18nmessageid==3.5.3
# zope.interface==3.6.7
# zope.lifecycleevent==3.6.2
# zope.location==3.9.1
# zope.pagetemplate==3.5.2
# zope.processlifetime==1.0
# zope.proxy==3.6.1
# zope.ptresource==3.9.0
# zope.publisher==3.12.6
# zope.schema==3.7.1
# zope.security==3.7.4
# zope.sendmail==3.7.5
# zope.sequencesort==3.4.0
# zope.site==3.9.2
# zope.size==3.4.1
# zope.structuredtext==3.5.1
# zope.tal==3.5.2
# zope.tales==3.5.3
# zope.testbrowser==3.11.1
# zope.testing==3.9.7
# zope.traversing==3.13.2
# zope.viewlet==3.7.2
setuptools = 32.3.1
......@@ -42,7 +42,7 @@ keep-compile-dir = false
apache-libcloud = 0.9.1
async = 0.6.1
gitdb = 0.5.4
mysqlclient = 1.3.9
mysqlclient = 1.3.10
plone.recipe.command = 1.1
slapos.recipe.template = 2.3
slapos.toolbox = 0.40.4
......
......@@ -185,7 +185,7 @@ eggs =
apache-libcloud = 0.12.3
async = 0.6.1
gitdb = 0.5.4
mysqlclient = 1.3.9
mysqlclient = 1.3.10
pycrypto = 2.6
rdiff-backup = 1.0.5+SlapOSPatched001
slapos.recipe.template = 2.4.2
......
......@@ -19,8 +19,8 @@ md5sum = 67954c82a8030e5dcac3a8c5d115f624
[logrotate-entry-template]
filename = logrotate_entry.in
md5sum = 7a5ec8f141c61826a14db8619acba314
md5sum = 71462fcd0a97b3ccc7b6ac6af6b2ca1e
[template-logrotate-base]
filename = instance-logrotate-base.cfg.in
md5sum = 3e3e572c5537f77e83b1ee157d1c9852
md5sum = c786f73c2a99390a5fad8aa3e01de627
......@@ -44,13 +44,19 @@ context =
key log :log
key post :post
key pre :pre
key frequency :frequency
key rotate_num :rotate-num
key nocompress :nocompress
post =
pre =
frequency = daily
rotate-num = 3650
nocompress =
[logrotate]
recipe = slapos.cookbook:wrapper
parameters-extra = true
command-line = {{ logrotate_location }}/usr/sbin/logrotate -s "${logrotate-directory:srv}/logrotate.status" "${logrotate-conf:rendered}"
command-line = {{ logrotate_location }}/sbin/logrotate -s "${logrotate-directory:srv}/logrotate.status" "${logrotate-conf:rendered}"
wrapper-path = ${logrotate-directory:bin}/logrotate
[cron-entry-logrotate]
......@@ -60,11 +66,6 @@ name = logrotate
frequency = 0 0 * * *
command = ${logrotate:wrapper-path}
[logrotate-entry-base]
recipe = slapos.cookbook:logrotate.d
logrotate-entries = ${logrotate-directory:logrotate-entries}
backup = ${logrotate-directory:backup}
[logrotate-entry-cron]
<= logrotate-entry-base
name = crond
......
{{ log }} {
daily
{{ frequency }}
dateext
rotate 3650
compress
delaycompress
rotate {{ rotate_num }}
{% if nocompress %}nocompress
{% else %}compress
delaycompress{% endif %}
notifempty
sharedscripts
create
......
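For illustration, a section extending the reworked logrotate-entry-base could override just the new context keys, in the same way the notifier entry does later in this changeset (the section name and log path below are hypothetical; escape $ as $$ where your template type requires it):
  [logrotate-entry-example]
  <= logrotate-entry-base
  name = example
  log = ${directory:log}/example.log
  frequency = weekly
  rotate-num = 5
  nocompress = 1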
......@@ -12,13 +12,21 @@ Implementation :
[buildout]
extends =
../../stack/monitor/buildout.cfg
...
../../stack/monitor/buildout.cfg
2/ In your instance.cfg file or instance template (instance-xxx.cfg.jinja2, ...),
Extend the monitor template and add monitor-base to parts:
[buildout]
extends =
${monitor-template:rendered}
parts =
...
monitor-base
2/ In your instance.cfg file or instance template, override monitor configuration section to define your custom parameters.
Override monitor configuration section to define your custom parameters.
[monitor-instance-parameter]
monitor-title = ${slap-configuration:instance-title}
......@@ -57,6 +65,9 @@ You don't need to define all parameters, you can only set what is required to be
- configuration-file-path: path of knowledge0 cfg file where instance configuration will be written.
- interface-url: The URL of monitor web interface. This URL will be present in generated JSON files.
NB: You should use double $ (ex: $${monitor-template:rendered}) instead of one $ in your instance template file if it's not a jinja template. See:
- Jinja template file example, use one $: https://lab.nexedi.com/nexedi/slapos/blob/master/software/slaprunner/instance-resilient-test.cfg.jinja2
- Non Jinja template file, use $$: https://lab.nexedi.com/nexedi/slapos/blob/master/software/slaprunner/instance.cfg
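For illustration, a minimal sketch of the two notations described in this note (only the escaping differs; the section name is the monitor-template one used above):
  # jinja2 instance template (e.g. instance-xxx.cfg.jinja2): one $
  [buildout]
  extends = ${monitor-template:rendered}
  # plain buildout template (e.g. instance.cfg.in): double $$, as described above
  [buildout]
  extends = $${monitor-template:rendered}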
Add a promise
-------------
......
[buildout]
extends =
buildout.hash.cfg
../../component/apache/buildout.cfg
../../component/curl/buildout.cfg
../../component/dash/buildout.cfg
../../component/dcron/buildout.cfg
../../component/openssl/buildout.cfg
../../component/logrotate/buildout.cfg
../../component/gzip/buildout.cfg
../../component/lxml-python/buildout.cfg
../../component/pycurl/buildout.cfg
../../component/python-cryptography/buildout.cfg
......@@ -102,13 +100,13 @@ recipe = slapos.recipe.template:jinja2
filename = template-monitor.cfg
template = ${:_profile_base_location_}/instance-monitor.cfg.jinja2.in
rendered = ${buildout:directory}/template-monitor.cfg
md5sum = bf0adf565d7cde55abc94bd223ec3162
context =
key apache_location apache:location
key gzip_location gzip:location
key template_logrotate_base template-logrotate-base:rendered
raw monitor_bin ${buildout:directory}/bin/monitor.bootstrap
raw monitor_collect ${buildout:directory}/bin/monitor.collect
raw monitor_statistic ${buildout:directory}/bin/monitor.statistic
raw monitor_runpromise ${buildout:directory}/bin/monitor.runpromise
raw monitor_genstatus ${buildout:directory}/bin/monitor.genstatus
raw monitor_configwrite ${buildout:directory}/bin/monitor.configwrite
......@@ -125,6 +123,7 @@ context =
raw python_with_eggs ${buildout:directory}/bin/${extra-eggs:interpreter}
raw template_wrapper ${monitor-template-wrapper:location}/${monitor-template-wrapper:filename}
raw template_check_disk_space ${monitor-check-free-disk-space:location}/${monitor-check-free-disk-space:filename}
raw bin_directory ${buildout:directory}/bin
depends =
${monitor-eggs:eggs}
......@@ -132,5 +131,5 @@ depends =
PyRSS2Gen = 1.1
cns.recipe.symlink = 0.2.3
pycurl = 7.43.0
slapos.toolbox = 0.65
slapos.toolbox = 0.66
# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginnings, copied verbatim
# - lines containing an "=" sign which must fit in one of the following categories.
# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
# But avoid directories, they are not portable.
# Copied verbatim.
# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
# by the re-generation script.
# Re-generated.
# - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[monitor2-template]
filename = instance-monitor.cfg.jinja2.in
md5sum = c963c43189a9b50e48ba27f2dd4e7b02
......@@ -108,6 +108,7 @@ parameter-list =
collector-db = ${monitor-instance-parameter:collector-db}
collect-script = {{ monitor_collect }}
statistic-script = {{ monitor_statistic }}
python = {{ python_with_eggs }}
nice-cmd = ${xnice-bin:output}
......@@ -161,7 +162,7 @@ environment =
[monitor-htpasswd]
recipe = slapos.cookbook:generate.password
storage-path = ${directory:etc}/.monitor_user
storage-path = ${directory:etc}/.monitor_pwd
bytes = 8
username = admin
......@@ -177,7 +178,11 @@ update-command = ${:command}
recipe = plone.recipe.command
stop-on-error = true
htpasswd-path = ${monitor-directory:etc}/monitor-htpasswd
command = if [ ! -s "${:htpasswd-path}" ]; then {{ apache_location }}/bin/htpasswd -cb ${:htpasswd-path} ${:user} ${:password}; fi
# Keep multiple lines as password can end with newline char.
command =
if [ ! -s "${:htpasswd-path}" ]; then
{{ apache_location }}/bin/htpasswd -cb ${:htpasswd-path} ${:user} ${:password}
fi
update-command = ${:command}
user = ${monitor-instance-parameter:username}
password = ${monitor-instance-parameter:password}
......@@ -239,6 +244,12 @@ input = inline:#!/bin/sh
output = ${directory:bin}/xnice
mode = 700
[promise-monitor-httpd-is-process-older-than-dependency-set]
recipe = slapos.cookbook:wrapper
command-line = {{ bin_directory }}/is-process-older-than-dependency-set ${monitor-httpd-conf-parameter:pid-file}
wrapper-path = ${directory:promises}/promise-monitor-httpd-is-process-older-than-dependency-set
parameters-extra = true
[monitor-globalstate-wrapper]
recipe = slapos.cookbook:wrapper
command-line = ${xnice-bin:output} {{ monitor_genstatus }} '${monitor-conf:rendered}' '${monitor-instance-info:rendered}'
......@@ -385,10 +396,11 @@ depends =
${monitor-httpd-promise:filename}
${monitor-bootstrap-promise:file}
${promise-check-slapgrid:output}
${promise-monitor-httpd-is-process-older-than-dependency-set:wrapper-path}
[monitor-publish]
monitor-base-url = ${publish:monitor-base-url}
monitor-setup-url = ${monitor-instance-parameter:interface-url}/#page=settings_configurator&url=${publish:monitor-url}&username=$${publish:monitor-user}&password=${publish:monitor-password}
monitor-setup-url = ${monitor-instance-parameter:interface-url}/#page=settings_configurator&url=${publish:monitor-url}&username=${publish:monitor-user}&password=${publish:monitor-password}
[buildout]
......
[buildout]
extends =
../../component/apache/buildout.cfg
buildout.hash.cfg
../../component/bash/buildout.cfg
../../component/dropbear/buildout.cfg
../../component/openssh/buildout.cfg
../../component/gzip/buildout.cfg
../../component/rdiff-backup/buildout.cfg
../../component/rsync/buildout.cfg
../monitor/buildout.cfg
......@@ -42,7 +41,6 @@ eggs =
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/pbsready.cfg.in
output = ${buildout:directory}/pbsready.cfg
md5sum = 0df8fe9b69f7943c3d5a2d30d4640557
mode = 0644
[pbsready-import]
......@@ -51,7 +49,6 @@ mode = 0644
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/pbsready-import.cfg.in
output = ${buildout:directory}/pbsready-import.cfg
md5sum = 5d5e4ad35c1a97ea5f7a15a4f5f766a8
mode = 0644
[pbsready-export]
......@@ -60,35 +57,28 @@ mode = 0644
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/pbsready-export.cfg.in
filename = pbsready-export.cfg.in
md5sum = 1b38292c42702f91f620cb99d1a88952
mode = 0644
[template-pull-backup]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-pull-backup.cfg.in
output = ${buildout:directory}/instance-pull-backup.cfg
md5sum = 7b4f8ac1a62680d624ac632f9601dab5
mode = 0644
[template-replicated]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/template-replicated.cfg.in
md5sum = 7a6234465ae845cb262d4f94c158764e
mode = 0644
destination = ${buildout:directory}/template-replicated.cfg.in
[template-parts]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/template-parts.cfg.in
md5sum = 071b1034ee8f5cc14f79b16fdeba2813
mode = 0644
destination = ${buildout:directory}/template-parts.cfg.in
[template-resilient-templates]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}.in
output = ${buildout:directory}/${:filename}
md5sum = 41e82859dc6b65e94a300a006d51536e
mode = 0644
filename = template-resilient-templates.cfg
......@@ -98,15 +88,13 @@ filename = template-resilient-templates.cfg
# which will run without removing any content because it raises an error.
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-frozen.cfg.in
md5sum = d21472f0e58f928fb827f2cbf22c4d4a
output = ${buildout:directory}/instance-frozen.cfg
[resilient-web-takeover-cgi-script-download]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/resilient-web-takeover-cgi-script.py.in
md5sum = 9d258d41eeef66f44f361adaa15cbd71
filename = resilient-web-takeover-cgi-script.py.in
mode = 0644
destination = ${buildout:directory}/resilient-web-takeover-cgi-script.py.in
# Provide an empty wrapper
[template-wrapper]
......@@ -114,12 +102,11 @@ recipe = slapos.recipe.template
url = ${:_profile_base_location_}/templates/wrapper.in
output = ${buildout:directory}/template-wrapper.cfg
mode = 0644
md5sum = 8cde04bfd0c0e9bd56744b988275cfd8
[notifier-feed-promise-template]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/notifier-feed-promise.py.in
md5sum = d75346911dbc4cfcdb39a21e56cd5016
filename = notifier-feed-promise.py.in
mode = 0644
##################
......@@ -131,7 +118,6 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/templates/monitor-check-resilient-feed.in
download-only = true
md5sum = 19ee9055de961acf402e2dfe5b9581d2
filename = monitor-check-resilient-feed.in
mode = 0644
......
# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginnings, copied verbatim
# - lines containing an "=" sign which must fit in one of the following categories.
# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
# But avoid directories, they are not portable.
# Copied verbatim.
# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
# by the re-generation script.
# Re-generated.
# - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[pbsready]
filename = pbsready.cfg.in
md5sum = 2adb49f45d27cbcb7430c98c2224d76a
[pbsready-import]
filename = pbsready-import.cfg.in
md5sum = a8f77b42782ec5e1fb49b5ae9891c757
[pbsready-export]
filename = pbsready-export.cfg.in
md5sum = bb463da1461d02d575717f1d1572bf97
[template-pull-backup]
filename = instance-pull-backup.cfg.in
md5sum = 7b4f8ac1a62680d624ac632f9601dab5
[template-replicated]
filename = template-replicated.cfg.in
md5sum = 75686abd9cb8d6e1f4e15931e280fa56
[template-parts]
filename = template-parts.cfg.in
md5sum = 071b1034ee8f5cc14f79b16fdeba2813
[template-resilient-templates]
filename = template-resilient-templates.cfg.in
md5sum = 41e82859dc6b65e94a300a006d51536e
[instance-frozen]
filename = instance-frozen.cfg.in
md5sum = d21472f0e58f928fb827f2cbf22c4d4a
[resilient-web-takeover-cgi-script-download]
filename = resilient-web-takeover-cgi-script.py.in
md5sum = 9d258d41eeef66f44f361adaa15cbd71
[template-wrapper]
filename = templates/wrapper.in
md5sum = 8cde04bfd0c0e9bd56744b988275cfd8
[notifier-feed-promise-template]
filename = templates/notifier-feed-promise.py.in
md5sum = d75346911dbc4cfcdb39a21e56cd5016
[template-monitor-check-resilient-feed]
filename = templates/monitor-check-resilient-feed.in
md5sum = 19ee9055de961acf402e2dfe5b9581d2
[buildout]
extends = {{ pbsready_template_path }}
# Explicitly define extended parts from pbsready
# then add local parts
parts =
logrotate
logrotate-entry-cron
logrotate-entry-equeue
logrotate-entry-resilient
cron
cron-entry-logrotate
cron-entry-notifier-status-feed
notifier-feed-status-promise
notifier-stalled-promise
resilient-sshkeys-authority
sshd-raw-server
sshd-graceful
sshkeys-sshd
sshd-promise
resilient-sshkeys-sshd-promise
sshd-pbs-authorized-key
notifier
parts +=
notifier-exporter-promise
cron-entry-backup
[resilient-directory]
......
[buildout]
extends = ${pbsready:output}
# Explicitly define extended parts from pbsready
# then add local parts
parts =
logrotate
logrotate-entry-cron
logrotate-entry-equeue
logrotate-entry-notifier
logrotate-entry-resilient
cron
cron-entry-logrotate
cron-entry-notifier-status-feed
notifier-feed-status-promise
notifier-stalled-promise
resilient-sshkeys-authority
sshd-raw-server
sshd-graceful
sshkeys-sshd
sshd-promise
resilient-sshkeys-sshd-promise
sshd-pbs-authorized-key
notifier
parts +=
resiliency-takeover-script
resilient-web-takeover-cgi-script
resilient-web-takeover-httpd-wrapper
......@@ -72,7 +50,7 @@ proof-signature-file = $${basedirectory:backup}/proof.signature
input = inline:
#!/${bash:location}/bin/bash
cd $${directory:backup}
find -type f ! -name backup.signature ! -wholename "./rdiff-backup-data/*" -print0 | xargs -P4 -0 sha256sum | LC_ALL=C sort -k 66 > $${:proof-signature-file}
find -type f ! -name backup.signature ! -wholename "./rdiff-backup-data/*" -print0 | xargs -0 sha256sum | LC_ALL=C sort -k 66 > $${:proof-signature-file}
diff -ruw backup.signature $${:proof-signature-file} > $${:diff-file}
output = $${rootdirectory:bin}/post-notification-run
mode = 0700
......@@ -125,7 +103,7 @@ bytes = 8
[resilient-web-takeover-cgi-script]
recipe = collective.recipe.template
input = ${resilient-web-takeover-cgi-script-download:destination}
input = ${resilient-web-takeover-cgi-script-download:target}
output = $${directory:cgi-bin}/web-takeover.cgi
password = $${resilient-web-takeover-password:passwd}
mode = 700
......
[buildout]
parts =
logrotate
parts +=
logrotate-entry-cron
logrotate-entry-equeue
logrotate-entry-notifier
logrotate-entry-resilient
cron
cron-entry-logrotate
cron-entry-notifier-status-feed
notifier-feed-status-promise
notifier-stalled-promise
resilient-sshkeys-authority
sshd-raw-server
sshd-graceful
sshkeys-sshd
sshd-promise
......@@ -20,6 +19,9 @@ parts =
notifier
extends =
${monitor2-template:rendered}
#----------------
#--
#-- Creation of all needed directories.
......@@ -62,92 +64,29 @@ monitor-resilient = $${monitor-directory:private}/resilient
#----------------
#--
#-- Deploy cron.
[cron]
recipe = slapos.cookbook:cron
dcrond-binary = ${dcron:location}/sbin/crond
cron-entries = $${directory:cron-entries}
crontabs = $${directory:crontabs}
cronstamps = $${directory:cronstamps}
catcher = $${cron-simplelogger:wrapper}
binary = $${basedirectory:services}/crond
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
wrapper = $${rootdirectory:bin}/cron_simplelogger
log = $${basedirectory:log}/crond.log
# cron and cron-simplelogger are deployed by logrotate.
#----------------
#--
#-- Deploy logrotate.
[cron-entry-logrotate]
<= cron
recipe = slapos.cookbook:cron.d
name = logrotate
frequency = 0 0 * * *
command = $${logrotate:wrapper}
[logrotate]
recipe = slapos.cookbook:logrotate
# Binaries
logrotate-binary = ${logrotate:location}/usr/sbin/logrotate
gzip-binary = ${gzip:location}/bin/gzip
gunzip-binary = ${gzip:location}/bin/gunzip
# Directories
wrapper = $${rootdirectory:bin}/logrotate
conf = $${rootdirectory:etc}/logrotate.conf
logrotate-entries = $${directory:logrotate-entries}
backup = $${directory:logrotate-backup}
state-file = $${rootdirectory:srv}/logrotate.status
[logrotate-entry-mariadb]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = mariadb
log = $${mariadb:error-log}
frequency = daily
rotate-num = 30
post = $${mariadb:logrotate-post}
sharedscripts = true
notifempty = true
create = true
[logrotate-entry-cron]
<= logrotate
recipe =slapos.cookbook:logrotate.d
name = crond
log = $${cron-simplelogger:log}
frequency = daily
rotate-num = 30
notifempty = true
create = true
[logrotate-entry-equeue]
<= logrotate
recipe = slapos.cookbook:logrotate.d
<= logrotate-entry-base
name = equeue
log = $${equeue:log} $${sshd-server:log}
frequency = daily
rotate-num = 30
[logrotate-entry-notifier]
recipe = collective.recipe.template
mode = 600
input = inline:
$${notifier:feeds}/* {
rotate 5
weekly
nocompress
missingok
olddir $${directory:logrotate-backup}
}
output = $${logrotate:logrotate-entries}/notifier
<= logrotate-entry-base
name = notifier
log = $${notifier:feeds}/*
rotate-num = 5
frequency = weekly
nocompress = 1
[logrotate-entry-resilient]
<= logrotate
recipe = slapos.cookbook:logrotate.d
<= logrotate-entry-base
name = resilient_log
log = $${basedirectory:log}/resilient.log
frequency = weekly
......
......@@ -41,7 +41,12 @@ config-notify = {% for id in range(1,nbbackup|int) %} ${request-pbs-{{namebase}}
config-name = {{namebase}}0
# Bubble up all the instance parameters to the requested export instance.
{% if slapparameter_dict is defined %}
{% for parameter_name, parameter_value in slapparameter_dict.items() %}config-{{parameter_name}} = {{parameter_value}}
{% for parameter_name, parameter_value in slapparameter_dict.items() %}
{% if parameter_value is string %}
config-{{parameter_name}} = {{ parameter_value.split('\n') | join('\n ') }}
{% else %}
config-{{parameter_name}} = {{ parameter_value }}
{% endif %}
{% endfor %}
{% endif %}
{% for key, value in monitor_dict.iteritems() -%}
......
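As a worked example of the re-indentation above (parameter name and value are hypothetical): a string parameter foo whose value contains a newline, such as "first line\nsecond line", is rendered as
  config-foo = first line
   second line
i.e. the continuation line is indented, so buildout reads it as part of the same config-foo option instead of rejecting the generated profile.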
[buildout]
extends = slapos.cfg
parts =
# Development eggs
slapos.toolbox-dev
slapos-cookbook-develop
slapos.core-dev
erp5.util-dev
slapos-cookbook
[slapos.toolbox-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.toolbox.git
branch = master
git-executable = ${git:location}/bin/git
develop = true
[slapos.toolbox-dev]
recipe = zc.recipe.egg:develop
egg = slapos.toolbox
setup = ${slapos.toolbox-repository:location}
[slapos.cookbook-repository]
develop = true
# Used for resiliency tests only
[erp5.util-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/erp5.git
branch = master
git-executable = ${git:location}/bin/git
develop = true
[erp5.util-dev]
recipe = zc.recipe.egg:develop
egg = erp5.util
setup = ${erp5.util-repository:location}
[slapos.core-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.core.git
branch = master
git-executable = ${git:location}/bin/git
develop = true
[slapos.core-dev]
recipe = zc.recipe.egg:develop
egg = slapos.core
setup = ${slapos.core-repository:location}
[versions]
slapos.cookbook =
slapos.core =
slapos.toolbox =
erp5-util =
lockfile =
......@@ -9,11 +9,12 @@ python = python2.7
# Note that if you want a develop version of slapos-cookbook from a git
# checkout, you must add both slapos-cookbook-develop and slapos-cookbook in parts
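For example, a sketch of what this note describes (the part names are the ones referenced in the note):
  [buildout]
  parts +=
    slapos-cookbook-develop
    slapos-cookbook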
# Generate list of automatically chosen eggs version
extensions +=
buildout-versions
slapos.rebootstrap
slapos.extension.strip
show-picked-versions = true
# Use shacache and lxml
extends =
../component/git/buildout.cfg
......@@ -99,49 +100,49 @@ eggs =
[versions]
# Use SlapOS patched zc.buildout
zc.buildout = 1.7.1.post14
zc.buildout = 2.5.2+slapos007
# Use SlapOS patched zc.recipe.egg (zc.recipe.egg 2.x is for Buildout 2)
zc.recipe.egg = 1.3.2.post5
zc.recipe.egg = 2.0.3+slapos003
# Use own version of h.r.download to be able to open .xz and .lz archives
hexagonit.recipe.download = 1.7.post4
Jinja2 = 2.9.5
MarkupSafe = 0.23
MarkupSafe = 1.0
PyYAML = 3.12
Werkzeug = 0.11.15
buildout-versions = 1.7
Werkzeug = 0.12
asn1crypto = 0.21.1
cffi = 1.9.1
click = 6.7
cliff = 2.4.0
cmd2 = 0.6.9
cmd2 = 0.7.0
collective.recipe.template = 2.0
cryptography = 1.7.2
cryptography = 1.8.1
decorator = 4.0.11
idna = 2.2
inotifyx = 0.2.2
itsdangerous = 0.24
lock-file = 2.0
lxml = 3.7.2
lxml = 3.7.3
meld3 = 1.0.2
netaddr = 0.7.19
pbr = 1.10.0
pbr = 2.0.0
plone.recipe.command = 1.1
prettytable = 0.7.2
psutil = 5.1.2
psutil = 5.2.0
pyOpenSSL = 16.2.0
pyasn1 = 0.2.1
pyparsing = 2.1.10
pyparsing = 2.2.0
pytz = 2016.10
requests = 2.13.0
setuptools = 19.6.2
setuptools = 33.1.1
six = 1.10.0
slapos.cookbook = 1.0.48
slapos.core = 1.3.18
slapos.extension.strip = 0.2
slapos.libnetworkcache = 0.14.5
slapos.recipe.build = 0.28
slapos.recipe.cmmi = 0.2
stevedore = 1.20.0
slapos.rebootstrap = 3.10
slapos.recipe.build = 0.33
slapos.recipe.cmmi = 0.5
stevedore = 1.21.0
unicodecsv = 0.14.1
xml-marshaller = 0.9.7
......@@ -150,7 +151,7 @@ xml-marshaller = 0.9.7
Flask = 0.12
# Required by:
# cryptography==1.7.2
# cryptography==1.8.1
enum34 = 1.1.6
# Required by:
......@@ -158,7 +159,7 @@ enum34 = 1.1.6
functools32 = 3.2.3.post2
# Required by:
# cryptography==1.7.2
# cryptography==1.8.1
ipaddress = 1.0.18
# Required by:
......@@ -170,6 +171,10 @@ jsonschema = 2.6.0
# XXX 'slapos node format' raises an exception with netifaces 0.10.5.
netifaces = 0.10.4
# Required by:
# cryptography==1.8.1
packaging = 16.8
# Required by:
# cffi==1.9.1
pycparser = 2.17
......
#!/usr/bin/env python
"""
Suggested installation:
$ cp update-hash .git/hooks/
$ $EDITOR .git/hooks/pre-commit
#!/bin/bash
set -e
touch "$(git rev-parse --git-dir)/hooks/.need-post-commit"
$ $EDITOR .git/hooks/post-commit
#!/bin/bash
set -e
MARKER="$(git rev-parse --git-dir)/hooks/.need-post-commit"
UPDATE_HASH="$(git rev-parse --git-dir)/hooks/update-hash"
if [ -e "$MARKER" -a -x "$UPDATE_HASH" ]; then
rm "$MARKER"
if git diff-index --quiet HEAD -- ; then
# nothing
true
else
git stash save --keep-index --quiet "update-hash pre-commit hook"
trap "git stash pop" EXIT
fi
find "$(git rev-parse --show-toplevel)" -name "buildout.hash.cfg" | while IFS= read -r HASHFILE; do
"$UPDATE_HASH" "$HASHFILE"
git add "$HASHFILE"
done
git commit --amend --no-verify -C HEAD
fi
BEWARE: rebasing does not trigger this hook, so you have to commit each
change explicitly. Improvements welcome.
"""
import hashlib
import os
import shutil
import sys
import tempfile

# Note: this is an intentionally very restrictive and primitive
# ConfigParser-ish parser.
# buildout.hash.cfg files are ConfigParser-compatible, but they are *not*
# ConfigParser syntax in order to be strictly validated, to prevent misuse
# and allow easy extension (ex: to other hashes).
FILENAME_KEY = 'filename'
HASH_MAP = {
  'md5sum': hashlib.md5,
}

def main():
  for infile_path in sys.argv[1:] or ['buildout.hash.cfg']:
    eol = None
    hash_file_path = None
    hash_name = None
    current_section = None
    infile_dirname = os.path.dirname(infile_path)
    outfile_path = infile_path + '.tmp'
    infile = open(infile_path, 'r')
    outfile_fd = os.open(outfile_path, os.O_EXCL | os.O_CREAT | os.O_WRONLY)
    try:
      outfile = os.fdopen(outfile_fd, 'w')
      write = outfile.write
      nextLine = iter(infile).next
      while True:
        try:
          line = nextLine()
        except StopIteration:
          line = None
        if line is None or not line.startswith('#'):
          if line is None or line.startswith('['):
            # End of a section (or of the file): flush the buffered section,
            # re-generating its hash line.
            if hash_file_path is not None:
              current_section.insert(
                len([x for x in current_section if x.strip()]),
                '%s = %s%s' % (
                  hash_name,
                  HASH_MAP[hash_name](
                    open(
                      os.path.join(
                        infile_dirname,
                        hash_file_path,
                      )
                    ).read()
                  ).hexdigest(),
                  eol,
                ),
              )
              outfile.writelines(current_section)
              hash_file_path = hash_name = None
            if line is None:
              break
            hash_file_path, _ = line[1:].split(']', 1)
            current_section = []
          elif '=' in line:
            assert current_section is not None, line
            name, value = line.split('=', 1)
            name = name.strip()
            value = value.strip()
            if name == FILENAME_KEY:
              hash_file_path = value
              current_section.append(line)
            else:
              for hash_name in HASH_MAP:
                if name == hash_name:
                  break
              else:
                raise ValueError('Unknown key: %r' % (name, ))
              # NOT appending this line, it will be re-generated from
              # scratch.
              # Remember the end-of-line style so the regenerated line matches.
              eol = ''.join(x for x in line if x in ('\r', '\n'))
            continue
        # Comment lines and section headers: copy verbatim, either straight
        # to the output file or into the section buffer.
        if current_section is None:
          write(line)
        else:
          current_section.append(line)
      outfile.close()
      shutil.copymode(infile_path, outfile_path)
      shutil.move(outfile_path, infile_path)
    except Exception:
      os.unlink(outfile_path)
      raise

if __name__ == '__main__':
  main()