Commit 8ca30103 authored by Alain Takoudjou's avatar Alain Takoudjou

Merge branch 'master' into 1.0

parents 7811a11d c91e5e88
diff -N -u -r bazel.orig/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java bazel/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java
--- bazel.orig/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java 1980-01-01 00:00:00.000000000 +0900
+++ bazel/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java 2017-01-02 22:19:09.326924214 +0900
@@ -172,7 +172,7 @@
@Param(
name = "use_default_shell_env",
type = Boolean.class,
- defaultValue = "False",
+ defaultValue = "True",
named = true,
positional = false,
doc = "whether the action should use the built in shell environment or not"
major_version: "local"
minor_version: ""
default_target_cpu: "same_as_host"
default_toolchain {
cpu: "k8"
toolchain_identifier: "local_linux"
}
default_toolchain {
cpu: "piii"
toolchain_identifier: "local_linux"
}
default_toolchain {
cpu: "darwin"
toolchain_identifier: "local_darwin"
}
default_toolchain {
cpu: "freebsd"
toolchain_identifier: "local_freebsd"
}
default_toolchain {
cpu: "armeabi-v7a"
toolchain_identifier: "stub_armeabi-v7a"
}
default_toolchain {
cpu: "arm"
toolchain_identifier: "local_linux"
}
default_toolchain {
cpu: "x64_windows"
toolchain_identifier: "local_windows_msys64"
}
default_toolchain {
cpu: "x64_windows_msvc"
toolchain_identifier: "vc_14_0_x64"
}
default_toolchain {
cpu: "s390x"
toolchain_identifier: "local_linux"
}
toolchain {
abi_version: "armeabi-v7a"
abi_libc_version: "armeabi-v7a"
builtin_sysroot: ""
compiler: "compiler"
host_system_name: "armeabi-v7a"
needsPic: true
supports_gold_linker: false
supports_incremental_linker: false
supports_fission: false
supports_interface_shared_objects: false
supports_normalizing_ar: false
supports_start_end_lib: false
target_libc: "armeabi-v7a"
target_cpu: "armeabi-v7a"
target_system_name: "armeabi-v7a"
toolchain_identifier: "stub_armeabi-v7a"
tool_path { name: "ar" path: "/bin/false" }
tool_path { name: "compat-ld" path: "/bin/false" }
tool_path { name: "cpp" path: "/bin/false" }
tool_path { name: "dwp" path: "/bin/false" }
tool_path { name: "gcc" path: "/bin/false" }
tool_path { name: "gcov" path: "/bin/false" }
tool_path { name: "ld" path: "/bin/false" }
tool_path { name: "nm" path: "/bin/false" }
tool_path { name: "objcopy" path: "/bin/false" }
tool_path { name: "objdump" path: "/bin/false" }
tool_path { name: "strip" path: "/bin/false" }
linking_mode_flags { mode: DYNAMIC }
}
toolchain {
abi_version: "local"
abi_libc_version: "local"
builtin_sysroot: ""
compiler: "compiler"
host_system_name: "local"
needsPic: true
supports_gold_linker: false
supports_incremental_linker: false
supports_fission: false
supports_interface_shared_objects: false
supports_normalizing_ar: false
supports_start_end_lib: false
target_libc: "local"
target_cpu: "local"
target_system_name: "local"
toolchain_identifier: "local_linux"
tool_path { name: "ar" path: "/usr/bin/ar" }
tool_path { name: "compat-ld" path: "/usr/bin/ld" }
tool_path { name: "cpp" path: "{{ cpp_path }}" }
tool_path { name: "dwp" path: "/usr/bin/dwp" }
tool_path { name: "gcc" path: "{{ gcc_path }}" }
cxx_flag: "-std=c++0x"
linker_flag: "-lstdc++"
linker_flag: "-B/usr/bin/"
linker_flag: "-Wl,rpath={{ gcc_lib64_path }}"
# TODO(bazel-team): In theory, the path here ought to exactly match the path
# used by gcc. That works because bazel currently doesn't track files at
# absolute locations and has no remote execution, yet. However, this will need
# to be fixed, maybe with auto-detection?
cxx_builtin_include_directory: "/usr/lib/gcc/"
cxx_builtin_include_directory: "/usr/local/include"
cxx_builtin_include_directory: "/usr/include"
cxx_builtin_include_directory: "{{ include_path }}"
tool_path { name: "gcov" path: "{{ gconv_path }}" }
# C(++) compiles invoke the compiler (as that is the one knowing where
# to find libraries), but we provide LD so other rules can invoke the linker.
tool_path { name: "ld" path: "/usr/bin/ld" }
tool_path { name: "nm" path: "/usr/bin/nm" }
tool_path { name: "objcopy" path: "/usr/bin/objcopy" }
objcopy_embed_flag: "-I"
objcopy_embed_flag: "binary"
tool_path { name: "objdump" path: "/usr/bin/objdump" }
tool_path { name: "strip" path: "/usr/bin/strip" }
# Anticipated future default.
unfiltered_cxx_flag: "-no-canonical-prefixes"
unfiltered_cxx_flag: "-fno-canonical-system-headers"
# Make C++ compilation deterministic. Use linkstamping instead of these
# compiler symbols.
unfiltered_cxx_flag: "-Wno-builtin-macro-redefined"
unfiltered_cxx_flag: "-D__DATE__=\"redacted\""
unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\""
unfiltered_cxx_flag: "-D__TIME__=\"redacted\""
# Security hardening on by default.
# Conservative choice; -D_FORTIFY_SOURCE=2 may be unsafe in some cases.
# We need to undef it before redefining it as some distributions now have
# it enabled by default.
compiler_flag: "-U_FORTIFY_SOURCE"
compiler_flag: "-D_FORTIFY_SOURCE=1"
compiler_flag: "-fstack-protector"
linker_flag: "-Wl,-z,relro,-z,now"
# Enable coloring even if there's no attached terminal. Bazel removes the
# escape sequences if --nocolor is specified. This isn't supported by gcc
# on Ubuntu 14.04.
# compiler_flag: "-fcolor-diagnostics"
# All warnings are enabled. Maybe enable -Werror as well?
compiler_flag: "-Wall"
# Enable a few more warnings that aren't part of -Wall.
compiler_flag: "-Wunused-but-set-parameter"
# But disable some that are problematic.
compiler_flag: "-Wno-free-nonheap-object" # has false positives
# Keep stack frames for debugging, even in opt mode.
compiler_flag: "-fno-omit-frame-pointer"
# Anticipated future default.
linker_flag: "-no-canonical-prefixes"
# Have gcc return the exit code from ld.
linker_flag: "-pass-exit-codes"
# Stamp the binary with a unique identifier.
linker_flag: "-Wl,--build-id=md5"
linker_flag: "-Wl,--hash-style=gnu"
# Gold linker only? Can we enable this by default?
# linker_flag: "-Wl,--warn-execstack"
# linker_flag: "-Wl,--detect-odr-violations"
compilation_mode_flags {
mode: DBG
# Enable debug symbols.
compiler_flag: "-g"
}
compilation_mode_flags {
mode: OPT
# No debug symbols.
# Maybe we should enable https://gcc.gnu.org/wiki/DebugFission for opt or
# even generally? However, that can't happen here, as it requires special
# handling in Bazel.
compiler_flag: "-g0"
# Conservative choice for -O
# -O3 can increase binary size and even slow down the resulting binaries.
# Profile first and / or use FDO if you need better performance than this.
compiler_flag: "-O2"
# Disable assertions
compiler_flag: "-DNDEBUG"
# Removal of unused code and data at link time (can this increase binary size in some cases?).
compiler_flag: "-ffunction-sections"
compiler_flag: "-fdata-sections"
linker_flag: "-Wl,--gc-sections"
}
linking_mode_flags { mode: DYNAMIC }
}
toolchain {
abi_version: "local"
abi_libc_version: "local"
builtin_sysroot: ""
compiler: "compiler"
host_system_name: "local"
needsPic: true
target_libc: "macosx"
target_cpu: "darwin"
target_system_name: "local"
toolchain_identifier: "local_darwin"
tool_path { name: "ar" path: "/usr/bin/libtool" }
tool_path { name: "compat-ld" path: "/usr/bin/ld" }
tool_path { name: "cpp" path: "/usr/bin/cpp" }
tool_path { name: "dwp" path: "/usr/bin/dwp" }
tool_path { name: "gcc" path: "osx_cc_wrapper.sh" }
cxx_flag: "-std=c++0x"
ar_flag: "-static"
ar_flag: "-s"
ar_flag: "-o"
linker_flag: "-lstdc++"
linker_flag: "-undefined"
linker_flag: "dynamic_lookup"
linker_flag: "-headerpad_max_install_names"
# TODO(ulfjack): This is wrong on so many levels. Figure out a way to auto-detect the proper
# setting from the local compiler, and also how to make incremental builds correct.
cxx_builtin_include_directory: "/"
tool_path { name: "gcov" path: "/usr/bin/gcov" }
tool_path { name: "ld" path: "/usr/bin/ld" }
tool_path { name: "nm" path: "/usr/bin/nm" }
tool_path { name: "objcopy" path: "/usr/bin/objcopy" }
objcopy_embed_flag: "-I"
objcopy_embed_flag: "binary"
tool_path { name: "objdump" path: "/usr/bin/objdump" }
tool_path { name: "strip" path: "/usr/bin/strip" }
# Anticipated future default.
unfiltered_cxx_flag: "-no-canonical-prefixes"
# Make C++ compilation deterministic. Use linkstamping instead of these
# compiler symbols.
unfiltered_cxx_flag: "-Wno-builtin-macro-redefined"
unfiltered_cxx_flag: "-D__DATE__=\"redacted\""
unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\""
unfiltered_cxx_flag: "-D__TIME__=\"redacted\""
# Security hardening on by default.
# Conservative choice; -D_FORTIFY_SOURCE=2 may be unsafe in some cases.
compiler_flag: "-D_FORTIFY_SOURCE=1"
compiler_flag: "-fstack-protector"
# Enable coloring even if there's no attached terminal. Bazel removes the
# escape sequences if --nocolor is specified.
compiler_flag: "-fcolor-diagnostics"
# All warnings are enabled. Maybe enable -Werror as well?
compiler_flag: "-Wall"
# Enable a few more warnings that aren't part of -Wall.
compiler_flag: "-Wthread-safety"
compiler_flag: "-Wself-assign"
# Keep stack frames for debugging, even in opt mode.
compiler_flag: "-fno-omit-frame-pointer"
# Anticipated future default.
linker_flag: "-no-canonical-prefixes"
compilation_mode_flags {
mode: DBG
# Enable debug symbols.
compiler_flag: "-g"
}
compilation_mode_flags {
mode: OPT
# No debug symbols.
# Maybe we should enable https://gcc.gnu.org/wiki/DebugFission for opt or even generally?
# However, that can't happen here, as it requires special handling in Bazel.
compiler_flag: "-g0"
# Conservative choice for -O
# -O3 can increase binary size and even slow down the resulting binaries.
# Profile first and / or use FDO if you need better performance than this.
compiler_flag: "-O2"
# Disable assertions
compiler_flag: "-DNDEBUG"
# Removal of unused code and data at link time (can this increase binary size in some cases?).
compiler_flag: "-ffunction-sections"
compiler_flag: "-fdata-sections"
}
linking_mode_flags { mode: DYNAMIC }
}
toolchain {
abi_version: "local"
abi_libc_version: "local"
builtin_sysroot: ""
compiler: "compiler"
host_system_name: "local"
needsPic: true
supports_gold_linker: false
supports_incremental_linker: false
supports_fission: false
supports_interface_shared_objects: false
supports_normalizing_ar: false
supports_start_end_lib: false
target_libc: "local"
target_cpu: "freebsd"
target_system_name: "local"
toolchain_identifier: "local_freebsd"
tool_path { name: "ar" path: "/usr/bin/ar" }
tool_path { name: "compat-ld" path: "/usr/bin/ld" }
tool_path { name: "cpp" path: "/usr/bin/cpp" }
tool_path { name: "dwp" path: "/usr/bin/dwp" }
tool_path { name: "gcc" path: "/usr/bin/clang" }
cxx_flag: "-std=c++0x"
linker_flag: "-lstdc++"
linker_flag: "-B/usr/bin/"
# TODO(bazel-team): In theory, the path here ought to exactly match the path
# used by gcc. That works because bazel currently doesn't track files at
# absolute locations and has no remote execution, yet. However, this will need
# to be fixed, maybe with auto-detection?
cxx_builtin_include_directory: "/usr/lib/clang"
cxx_builtin_include_directory: "/usr/local/include"
cxx_builtin_include_directory: "/usr/include"
tool_path { name: "gcov" path: "/usr/bin/gcov" }
# C(++) compiles invoke the compiler (as that is the one knowing where
# to find libraries), but we provide LD so other rules can invoke the linker.
tool_path { name: "ld" path: "/usr/bin/ld" }
tool_path { name: "nm" path: "/usr/bin/nm" }
tool_path { name: "objcopy" path: "/usr/bin/objcopy" }
objcopy_embed_flag: "-I"
objcopy_embed_flag: "binary"
tool_path { name: "objdump" path: "/usr/bin/objdump" }
tool_path { name: "strip" path: "/usr/bin/strip" }
# Anticipated future default.
unfiltered_cxx_flag: "-no-canonical-prefixes"
# Make C++ compilation deterministic. Use linkstamping instead of these
# compiler symbols.
unfiltered_cxx_flag: "-Wno-builtin-macro-redefined"
unfiltered_cxx_flag: "-D__DATE__=\"redacted\""
unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\""
unfiltered_cxx_flag: "-D__TIME__=\"redacted\""
# Security hardening on by default.
# Conservative choice; -D_FORTIFY_SOURCE=2 may be unsafe in some cases.
# We need to undef it before redefining it as some distributions now have
# it enabled by default.
compiler_flag: "-U_FORTIFY_SOURCE"
compiler_flag: "-D_FORTIFY_SOURCE=1"
compiler_flag: "-fstack-protector"
linker_flag: "-Wl,-z,relro,-z,now"
# Enable coloring even if there's no attached terminal. Bazel removes the
# escape sequences if --nocolor is specified. This isn't supported by gcc
# on Ubuntu 14.04.
# compiler_flag: "-fcolor-diagnostics"
# All warnings are enabled. Maybe enable -Werror as well?
compiler_flag: "-Wall"
# Enable a few more warnings that aren't part of -Wall.
#compiler_flag: "-Wunused-but-set-parameter"
# But disable some that are problematic.
#compiler_flag: "-Wno-free-nonheap-object" # has false positives
# Keep stack frames for debugging, even in opt mode.
compiler_flag: "-fno-omit-frame-pointer"
# Anticipated future default.
linker_flag: "-no-canonical-prefixes"
# Have gcc return the exit code from ld.
#linker_flag: "-pass-exit-codes"
# Stamp the binary with a unique identifier.
#linker_flag: "-Wl,--build-id=md5"
linker_flag: "-Wl,--hash-style=gnu"
# Gold linker only? Can we enable this by default?
# linker_flag: "-Wl,--warn-execstack"
# linker_flag: "-Wl,--detect-odr-violations"
compilation_mode_flags {
mode: DBG
# Enable debug symbols.
compiler_flag: "-g"
}
compilation_mode_flags {
mode: OPT
# No debug symbols.
# Maybe we should enable https://gcc.gnu.org/wiki/DebugFission for opt or
# even generally? However, that can't happen here, as it requires special
# handling in Bazel.
compiler_flag: "-g0"
# Conservative choice for -O
# -O3 can increase binary size and even slow down the resulting binaries.
# Profile first and / or use FDO if you need better performance than this.
compiler_flag: "-O2"
# Disable assertions
compiler_flag: "-DNDEBUG"
# Removal of unused code and data at link time (can this increase binary size in some cases?).
compiler_flag: "-ffunction-sections"
compiler_flag: "-fdata-sections"
linker_flag: "-Wl,--gc-sections"
}
linking_mode_flags { mode: DYNAMIC }
}
toolchain {
abi_version: "local"
abi_libc_version: "local"
builtin_sysroot: ""
compiler: "windows_mingw"
host_system_name: "local"
needsPic: false
target_libc: "local"
target_cpu: "x64_windows"
target_system_name: "local"
toolchain_identifier: "local_windows_mingw"
tool_path { name: "ar" path: "C:/mingw/bin/ar" }
tool_path { name: "compat-ld" path: "C:/mingw/bin/ld" }
tool_path { name: "cpp" path: "C:/mingw/bin/cpp" }
tool_path { name: "dwp" path: "C:/mingw/bin/dwp" }
tool_path { name: "gcc" path: "C:/mingw/bin/gcc" }
cxx_flag: "-std=c++0x"
# TODO(bazel-team): In theory, the path here ought to exactly match the path
# used by gcc. That works because bazel currently doesn't track files at
# absolute locations and has no remote execution, yet. However, this will need
# to be fixed, maybe with auto-detection?
cxx_builtin_include_directory: "C:/mingw/include"
cxx_builtin_include_directory: "C:/mingw/lib/gcc"
tool_path { name: "gcov" path: "C:/mingw/bin/gcov" }
tool_path { name: "ld" path: "C:/mingw/bin/ld" }
tool_path { name: "nm" path: "C:/mingw/bin/nm" }
tool_path { name: "objcopy" path: "C:/mingw/bin/objcopy" }
objcopy_embed_flag: "-I"
objcopy_embed_flag: "binary"
tool_path { name: "objdump" path: "C:/mingw/bin/objdump" }
tool_path { name: "strip" path: "C:/mingw/bin/strip" }
linking_mode_flags { mode: DYNAMIC }
}
toolchain {
abi_version: "local"
abi_libc_version: "local"
builtin_sysroot: ""
compiler: "windows_msys64_mingw64"
host_system_name: "local"
needsPic: false
target_libc: "local"
target_cpu: "x64_windows"
target_system_name: "local"
toolchain_identifier: "local_windows_msys64_mingw64"
tool_path { name: "ar" path: "C:/tools/msys64/mingw64/bin/ar" }
tool_path { name: "compat-ld" path: "C:/tools/msys64/mingw64/bin/ld" }
tool_path { name: "cpp" path: "C:/tools/msys64/mingw64/bin/cpp" }
tool_path { name: "dwp" path: "C:/tools/msys64/mingw64/bin/dwp" }
tool_path { name: "gcc" path: "C:/tools/msys64/mingw64/bin/gcc" }
cxx_flag: "-std=c++0x"
# TODO(bazel-team): In theory, the path here ought to exactly match the path
# used by gcc. That works because bazel currently doesn't track files at
# absolute locations and has no remote execution, yet. However, this will need
# to be fixed, maybe with auto-detection?
cxx_builtin_include_directory: "C:/tools/msys64/mingw64/x86_64-w64-mingw32/include"
tool_path { name: "gcov" path: "C:/tools/msys64/mingw64/bin/gcov" }
tool_path { name: "ld" path: "C:/tools/msys64/mingw64/bin/ld" }
tool_path { name: "nm" path: "C:/tools/msys64/mingw64/bin/nm" }
tool_path { name: "objcopy" path: "C:/tools/msys64/mingw64/bin/objcopy" }
objcopy_embed_flag: "-I"
objcopy_embed_flag: "binary"
tool_path { name: "objdump" path: "C:/tools/msys64/mingw64/bin/objdump" }
tool_path { name: "strip" path: "C:/tools/msys64/mingw64/bin/strip" }
linking_mode_flags { mode: DYNAMIC }
}
toolchain {
abi_version: "local"
abi_libc_version: "local"
builtin_sysroot: ""
compiler: "windows_clang"
host_system_name: "local"
needsPic: false
target_libc: "local"
target_cpu: "x64_windows"
target_system_name: "local"
toolchain_identifier: "local_windows_clang"
tool_path { name: "ar" path: "C:/mingw/bin/ar" }
tool_path { name: "compat-ld" path: "C:/Program Files (x86)/LLVM/bin/ld" }
tool_path { name: "cpp" path: "C:/Program Files (x86)/LLVM/bin/cpp" }
tool_path { name: "dwp" path: "C:/Program Files (x86)/LLVM/bin/dwp" }
tool_path { name: "gcc" path: "C:/Program Files (x86)/LLVM/bin/clang" }
cxx_flag: "-std=c++0x"
# TODO(bazel-team): In theory, the path here ought to exactly match the path
# used by gcc. That works because bazel currently doesn't track files at
# absolute locations and has no remote execution, yet. However, this will need
# to be fixed, maybe with auto-detection?
cxx_builtin_include_directory: "/usr/lib/gcc/"
cxx_builtin_include_directory: "/usr/local/include"
cxx_builtin_include_directory: "/usr/include"
tool_path { name: "gcov" path: "C:/Program Files (x86)/LLVM/bin/gcov" }
tool_path { name: "ld" path: "C:/Program Files (x86)/LLVM/bin/ld" }
tool_path { name: "nm" path: "C:/Program Files (x86)/LLVM/bin/nm" }
tool_path { name: "objcopy" path: "C:/Program Files (x86)/LLVM/bin/objcopy" }
objcopy_embed_flag: "-I"
objcopy_embed_flag: "binary"
tool_path { name: "objdump" path: "C:/Program Files (x86)/LLVM/bin/objdump" }
tool_path { name: "strip" path: "C:/Program Files (x86)/LLVM/bin/strip" }
linking_mode_flags { mode: DYNAMIC }
}
toolchain {
abi_version: "local"
abi_libc_version: "local"
builtin_sysroot: ""
compiler: "windows_msys64"
host_system_name: "local"
needsPic: false
target_libc: "local"
target_cpu: "x64_windows"
target_system_name: "local"
toolchain_identifier: "local_windows_msys64"
tool_path { name: "ar" path: "C:/tools/msys64/usr/bin/ar" }
tool_path { name: "compat-ld" path: "C:/tools/msys64/usr/bin/ld" }
tool_path { name: "cpp" path: "C:/tools/msys64/usr/bin/cpp" }
tool_path { name: "dwp" path: "C:/tools/msys64/usr/bin/dwp" }
# Use gcc instead of g++ so that C will compile correctly.
tool_path { name: "gcc" path: "C:/tools/msys64/usr/bin/gcc" }
cxx_flag: "-std=gnu++0x"
linker_flag: "-lstdc++"
# TODO(bazel-team): In theory, the path here ought to exactly match the path
# used by gcc. That works because bazel currently doesn't track files at
# absolute locations and has no remote execution, yet. However, this will need
# to be fixed, maybe with auto-detection?
cxx_builtin_include_directory: "C:/tools/msys64/"
cxx_builtin_include_directory: "/usr/"
tool_path { name: "gcov" path: "C:/tools/msys64/usr/bin/gcov" }
tool_path { name: "ld" path: "C:/tools/msys64/usr/bin/ld" }
tool_path { name: "nm" path: "C:/tools/msys64/usr/bin/nm" }
tool_path { name: "objcopy" path: "C:/tools/msys64/usr/bin/objcopy" }
objcopy_embed_flag: "-I"
objcopy_embed_flag: "binary"
tool_path { name: "objdump" path: "C:/tools/msys64/usr/bin/objdump" }
tool_path { name: "strip" path: "C:/tools/msys64/usr/bin/strip" }
linking_mode_flags { mode: DYNAMIC }
}
toolchain {
toolchain_identifier: "vc_14_0_x64"
host_system_name: "local"
target_system_name: "local"
abi_version: "local"
abi_libc_version: "local"
target_cpu: "x64_windows_msvc"
compiler: "cl"
target_libc: "msvcrt140"
default_python_version: "python2.7"
cxx_builtin_include_directory: "C:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/INCLUDE"
cxx_builtin_include_directory: "C:/Program Files (x86)/Windows Kits/10/include/"
cxx_builtin_include_directory: "C:/Program Files (x86)/Windows Kits/8.1/include/"
cxx_builtin_include_directory: "C:/Program Files (x86)/GnuWin32/include/"
cxx_builtin_include_directory: "C:/python_27_amd64/files/include"
tool_path {
name: "ar"
path: "wrapper/bin/msvc_link.bat"
}
tool_path {
name: "cpp"
path: "wrapper/bin/msvc_cl.bat"
}
tool_path {
name: "gcc"
path: "wrapper/bin/msvc_cl.bat"
}
tool_path {
name: "gcov"
path: "wrapper/bin/msvc_nop.bat"
}
tool_path {
name: "ld"
path: "wrapper/bin/msvc_link.bat"
}
tool_path {
name: "nm"
path: "wrapper/bin/msvc_nop.bat"
}
tool_path {
name: "objcopy"
path: "wrapper/bin/msvc_nop.bat"
}
tool_path {
name: "objdump"
path: "wrapper/bin/msvc_nop.bat"
}
tool_path {
name: "strip"
path: "wrapper/bin/msvc_nop.bat"
}
supports_gold_linker: false
supports_start_end_lib: false
supports_interface_shared_objects: false
supports_incremental_linker: false
supports_normalizing_ar: true
needsPic: false
compiler_flag: "-m64"
compiler_flag: "/D__inline__=__inline"
# TODO(pcloudy): Review those flags below, they should be defined by cl.exe
compiler_flag: "/DOS_WINDOWS=OS_WINDOWS"
compiler_flag: "/DCOMPILER_MSVC"
# Don't pollute with GDI macros in windows.h.
compiler_flag: "/DNOGDI"
# Don't define min/max macros in windows.h.
compiler_flag: "/DNOMINMAX"
compiler_flag: "/DPRAGMA_SUPPORTED"
# Platform defines.
compiler_flag: "/D_WIN32_WINNT=0x0600"
# Turn off warning messages.
compiler_flag: "/D_CRT_SECURE_NO_DEPRECATE"
compiler_flag: "/D_CRT_SECURE_NO_WARNINGS"
compiler_flag: "/D_SILENCE_STDEXT_HASH_DEPRECATION_WARNINGS"
# Use math constants (M_PI, etc.) from the math library
compiler_flag: "/D_USE_MATH_DEFINES"
# Useful options to have on for compilation.
# Suppress startup banner.
compiler_flag: "/nologo"
# Increase the capacity of object files to 2^32 sections.
compiler_flag: "/bigobj"
# Allocate 500MB for precompiled headers.
compiler_flag: "/Zm500"
# Use unsigned char by default.
compiler_flag: "/J"
# Use function level linking.
compiler_flag: "/Gy"
# Use string pooling.
compiler_flag: "/GF"
# Warning level 3 (could possibly go to 4 in the future).
compiler_flag: "/W3"
# Catch both asynchronous (structured) and synchronous (C++) exceptions.
compiler_flag: "/EHsc"
# Globally disabled warnings.
# Don't warn about elements of array being default initialized.
compiler_flag: "/wd4351"
# Don't warn about no matching delete found.
compiler_flag: "/wd4291"
# Don't warn about diamond inheritance patterns.
compiler_flag: "/wd4250"
# Don't warn about insecure functions (e.g. non _s functions).
compiler_flag: "/wd4996"
linker_flag: "-m64"
feature {
name: 'include_paths'
flag_set {
action: 'preprocess-assemble'
action: 'c-compile'
action: 'c++-compile'
action: 'c++-header-parsing'
action: 'c++-header-preprocessing'
action: 'c++-module-compile'
flag_group {
flag: '/I%{quote_include_paths}'
}
flag_group {
flag: '/I%{include_paths}'
}
flag_group {
flag: '/I%{system_include_paths}'
}
}
}
feature {
name: 'dependency_file'
flag_set {
action: 'assemble'
action: 'preprocess-assemble'
action: 'c-compile'
action: 'c++-compile'
action: 'c++-module-compile'
action: 'c++-header-preprocessing'
action: 'c++-header-parsing'
expand_if_all_available: 'dependency_file'
flag_group {
flag: '/DEPENDENCY_FILE'
flag: '%{dependency_file}'
}
}
}
# Stop passing -frandom-seed option
feature {
name: 'random_seed'
}
# This feature is just for enabling flag_set in action_config for -c and -o options during the transitional period
feature {
name: 'compile_action_flags_in_flag_set'
}
action_config {
config_name: 'c-compile'
action_name: 'c-compile'
tool {
tool_path: 'wrapper/bin/msvc_cl.bat'
}
flag_set {
flag_group {
flag: '/c'
flag: '%{source_file}'
}
}
flag_set {
expand_if_all_available: 'output_object_file'
flag_group {
flag: '/Fo%{output_object_file}'
}
}
flag_set {
expand_if_all_available: 'output_assembly_file'
flag_group {
flag: '/Fa%{output_assembly_file}'
}
}
flag_set {
expand_if_all_available: 'output_preprocess_file'
flag_group {
flag: '/P'
flag: '/Fi%{output_preprocess_file}'
}
}
}
action_config {
config_name: 'c++-compile'
action_name: 'c++-compile'
tool {
tool_path: 'wrapper/bin/msvc_cl.bat'
}
flag_set {
flag_group {
flag: '/c'
flag: '%{source_file}'
}
}
flag_set {
expand_if_all_available: 'output_object_file'
flag_group {
flag: '/Fo%{output_object_file}'
}
}
flag_set {
expand_if_all_available: 'output_assembly_file'
flag_group {
flag: '/Fa%{output_assembly_file}'
}
}
flag_set {
expand_if_all_available: 'output_preprocess_file'
flag_group {
flag: '/P'
flag: '/Fi%{output_preprocess_file}'
}
}
}
compilation_mode_flags {
mode: DBG
compiler_flag: "/DDEBUG=1"
# This will signal the wrapper that we are doing a debug build, which sets
# some internal state of the toolchain wrapper. It is intentionally a "-"
# flag to make this very obvious.
compiler_flag: "-g"
compiler_flag: "/Od"
compiler_flag: "-Xcompilation-mode=dbg"
}
compilation_mode_flags {
mode: FASTBUILD
compiler_flag: "/DNDEBUG"
compiler_flag: "/Od"
compiler_flag: "-Xcompilation-mode=fastbuild"
}
compilation_mode_flags {
mode: OPT
compiler_flag: "/DNDEBUG"
compiler_flag: "/O2"
compiler_flag: "-Xcompilation-mode=opt"
}
}
[buildout]
extends =
../gcc/buildout.cfg
../unzip/buildout.cfg
parts =
bazel
# The bazel binary contains a zip file. It must not be stripped.
do-not-strip-path = ${buildout:parts-directory}/bazel/bin/bazel
# [jdk]
# recipe = slapos.recipe.build
# url = http://download.oracle.com/otn-pub/java/jdk/8u112-b15/jdk-8u112-linux-x64.tar.gz
# md5sum = de9b7a90f0f5a13cfcaa3b01451d0337
# location = ${buildout:parts-directory}/${:_buildout_section_name_}
# java_home = ${:location}/java_home
# script =
# from zc.buildout.download import check_md5sum, ChecksumError
# download_dir = tempfile.mkdtemp()
# download_path = os.path.join(download_dir, 'jdk.tar.gz')
# self.cleanup_list.append(download_dir)
# call(['wget', '-q', '-O', download_path, '--header', 'Cookie: oraclelicense=accept-securebackup-cookie', self.options['url']])
# if not check_md5sum(download_path, self.options['md5sum']):raise ChecksumError()
# extract_dir = self.extract(download_path)
# java_home = '%(location)s/java_home'
# self.copyTree(os.path.join(extract_dir, 'jdk1.8.0_112'), java_home)
[template-bazel-crosstool]
recipe = slapos.recipe.template:jinja2
location = ${buildout:parts-directory}/${:_buildout_section_name_}
mode = 640
filename = bazel_tools_cpp_CROSSTOOL
template = ${:_profile_base_location_}/${:filename}.in
rendered = ${:location}/${:filename}
cpp_path = ${gcc:location}/bin/cpp
gcc_path = ${gcc:location}/bin/gcc
gconv_path = ${gcc:location}/bin/gconv
include_path = ${gcc:location}/include
gcc_lib64_path = ${gcc:location}/lib64
context =
key cpp_path template-bazel-crosstool:cpp_path
key gcc_path template-bazel-crosstool:gcc_path
key gconv_path template-bazel-crosstool:gconv_path
key include_path template-bazel-crosstool:include_path
key gcc_lib64_path template-bazel-crosstool:gcc_lib64_path
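The {{ cpp_path }}, {{ gcc_path }}, {{ gconv_path }}, {{ include_path }} and {{ gcc_lib64_path }} placeholders in the CROSSTOOL above are plain Jinja2 variables filled from the context keys of this section. Roughly, the recipe does the equivalent of the following sketch (the paths are illustrative stand-ins for the resolved ${gcc:location} values, not real part locations):
# Rough sketch of what slapos.recipe.template:jinja2 does for this section.
# The paths below are placeholders for the resolved ${gcc:location} values.
import jinja2
context = {
    'cpp_path': '/opt/slapos/parts/gcc/bin/cpp',
    'gcc_path': '/opt/slapos/parts/gcc/bin/gcc',
    'gconv_path': '/opt/slapos/parts/gcc/bin/gconv',
    'include_path': '/opt/slapos/parts/gcc/include',
    'gcc_lib64_path': '/opt/slapos/parts/gcc/lib64',
}
with open('bazel_tools_cpp_CROSSTOOL.in') as src:
    rendered = jinja2.Template(src.read()).render(**context)
with open('bazel_tools_cpp_CROSSTOOL', 'w') as dst:
    dst.write(rendered)
The [bazel] section below then copies the rendered file over tools/cpp/CROSSTOOL in the extracted bazel source tree.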
[python2.7]
# link to gcc libraries to solve GLIBCXX undefined reference errors.
environment =
PATH=${patch:location}/bin:${xz-utils:location}/bin:%(PATH)s
CPPFLAGS=-I${zlib:location}/include -I${readline:location}/include -I${libexpat:location}/include -I${libffi:location}/include -I${ncurses:location}/include -I${ncurses:location}/include/ncursesw -I${bzip2:location}/include -I${gdbm:location}/include -I${openssl:location}/include -I${sqlite3:location}/include -I${gettext:location}/include
LDFLAGS=-L${zlib:location}/lib -L${readline:location}/lib -L${libexpat:location}/lib -L${libffi:location}/lib -L${ncurses:location}/lib -L${bzip2:location}/lib -L${gdbm:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${sqlite3:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${file:location}/lib -Wl,-rpath=${gcc:location}/lib64
[bazel]
recipe = slapos.recipe.build
url = https://github.com/bazelbuild/bazel/releases/download/0.4.3/bazel-0.4.3-dist.zip
md5sum = cbd53f6f59915506da8998dab2098921
patch-binary = ${patch:location}/bin/patch
patch-file-path = ${:_profile_base_location_}/bazel-0.4.3.patch
bazel-crosstool-modified-file-path = ${template-bazel-crosstool:rendered}
unzip-bin = ${unzip:location}/bin
zip-bin = ${zip:location}/bin
gcc-bin = ${gcc:location}/bin
gcc-lib = ${gcc:location}/lib
gcc-lib64 = ${gcc:location}/lib64
java_home = PLEASE_INSTALL_JDK8_BY_YOURSELF_AND_SET_THE_PATH
script =
extract_dir = self.extract(self.download(self.options['url'], self.options['md5sum']))
crosstool_path = os.path.join(extract_dir, 'tools', 'cpp', 'CROSSTOOL')
os.chmod(crosstool_path, 0644)
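# replace bazel's stock tools/cpp/CROSSTOOL with the rendered [template-bazel-crosstool] output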
shutil.copy(self.options['bazel-crosstool-modified-file-path'],
crosstool_path)
target_path = extract_dir+'/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java'
os.chmod(target_path, 0644)
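# apply the profile's bazel-0.4.3.patch (presumably the SkylarkRuleImplementationFunctions.java hunk shown at the top of this change)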
call([self.buildout['bazel']['patch-binary'], '-p1', '-d', extract_dir, '-i', self.buildout['bazel']['patch-file-path']])
path = ':'.join((
self.options['unzip-bin'],
self.options['zip-bin'],
self.options['gcc-bin'],
os.environ['PATH']
))
env = {'JAVA_HOME':self.options['java_home'],
'PATH':path,
'LD_LIBRARY_PATH':':'.join((
self.options['gcc-lib'],
self.options['gcc-lib64'],
os.environ.get('LD_LIBRARY_PATH', '')
)),
'LDFLAGS':'-Wl,-rpath='+self.options['gcc-lib64'],
'CC':self.options['gcc-bin']+'/gcc',
'CXX':self.options['gcc-bin']+'/g++',
}
bin_dir = os.path.join(self.options['location'], 'bin')
os.makedirs(bin_dir)
call(['bash', 'compile.sh', 'compile'], cwd=extract_dir, env=env)
shutil.copy(os.path.join(extract_dir, 'output', 'bazel'),
os.path.join(bin_dir, 'bazel'))
[buildout]
parts = binutils
[binutils]
recipe = slapos.recipe.cmmi
url = http://ftp.gnu.org/gnu/binutils/binutils-2.27.tar.gz
md5sum = 41b053ed4fb2c6a8173ef421460fbb28
\ No newline at end of file
[buildout]
+ # chromium binary is linking to libudev, that can be provided by
+ # component/systemd. but current component/systemd can be built only
+ # with Linux kernel >= 3.7. so libudev is not included in
+ # LD_LIBRARY_PATH below intentionally and libudev should be installed
+ # in the system.
extends =
+ ../coreutils/buildout.cfg
+ ../cups/buildout.cfg
../dbus/buildout.cfg
+ ../findutils/buildout.cfg
../fontconfig/buildout.cfg
+ ../gettext/buildout.cfg
../glib/buildout.cfg
../gtk-2/buildout.cfg
+ ../libexpat/buildout.cfg
+ ../libffi/buildout.cfg
../libpng/buildout.cfg
+ ../libxml2/buildout.cfg
+ ../mesa/buildout.cfg
+ ../nspr/buildout.cfg
+ ../nss/buildout.cfg
+ ../pcre/buildout.cfg
+ ../sqlite3/buildout.cfg
../xorg/buildout.cfg
+ ../zlib/buildout.cfg
parts =
chromium
[chromium]
recipe = slapos.recipe.build
slapos_promise =
@@ -17,17 +36,20 @@ slapos_promise =
file:chrome-wrapper
file:chrome-slapos
- #chromium zip files for linux seem to be corrupted : rights are not correctly
- #set (+x) when unzipping using python, but it works when doing "unzip chromium.zip"
- #AND it works when unzipping any other archive with python.
- #Conclusion : Google, please, learn how to make zip files.
- linux_x86 = http://commondatastorage.googleapis.com/chromium-browser-continuous/Linux/109696/chrome-linux.zip 8ba6c022849b2a882b6e65163c147eb9
- linux_x86-64 = http://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/109696/chrome-linux.zip a3ed3feb285ecfe7c722576db80d5099
- mac_x86-64 = http://commondatastorage.googleapis.com/chromium-browser-continuous/Mac/100142/chrome-mac.zip cb3a76b8a1a93be94df2f500fb621131
+ # How to get the revision :
+ # stable : https://www.googleapis.com/download/storage/v1/b/chromium-browser-continuous/o/Linux_x64%2FLAST_CHANGE?alt=media
+ # snapshot : https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/o/Linux_x64%2FLAST_CHANGE?alt=media
+ revision_x86 = 382014
+ revision_x86-64 = 382014
+ revision_mac = 381909
+ linux_x86 = https://www.googleapis.com/download/storage/v1/b/chromium-browser-continuous/o/Linux%2F${:revision_x86}%2Fchrome-linux.zip?alt=media
+ linux_x86-64 = https://www.googleapis.com/download/storage/v1/b/chromium-browser-continuous/o/Linux_x64%2F${:revision_x86-64}%2Fchrome-linux.zip?alt=media ef2c476b1f059e9aa026bbac1872368d
+ mac_x86-64 = https://www.googleapis.com/download/storage/v1/b/chromium-browser-continuous/o/Mac%2F${:revision_mac}%2Fchrome-linux.zip?alt=media
script =
#If part directory already exist, will just throw an error.
- import sys
+ import os
platform = '%%s_%%s' %% (guessOperatingSystem(), guessPlatform())
if not self.options.get('url'): self.options['url'], self.options['md5sum'] = self.options[platform].split(' ')
extract_dir = self.extract(self.download(self.options['url'], self.options.get('md5sum')))
@@ -36,12 +58,46 @@ script =
wrapper_location = os.path.join("%(location)s", "chrome-slapos")
wrapper = open(wrapper_location, 'w')
wrapper.write("""#!/bin/sh
- export LD_LIBRARY_PATH=${libXrender:location}/lib/:${fontconfig:location}/lib/:${dbus:location}/lib/:${dbus-glib:location}/lib/:${pango:location}/lib:${cairo:location}/lib:${glib:location}/lib:${gtk-2:location}/lib:${atk:location}/lib:${gdk-pixbuf:location}/lib:${libXt:location}/lib:${gtk-2:location}/lib:${libpng15:location}/lib:%(location)s
- %(location)s/chrome""")
+ export LD_LIBRARY_PATH="%(location)s"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${atk:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${cairo:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${cups:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${dbus:location}/lib/"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${dbus-glib:location}/lib/"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${fontconfig:location}/lib/"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${gdk-pixbuf:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${gettext:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${glib:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${gtk-2:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${harfbuzz:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libX11:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXau:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXcomposite:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXcursor:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXext:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXi:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXrender:location}/lib/"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXtst:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libexpat:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libffi:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libpng:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libpng12:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libxcb:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libxml2:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${mesa:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${nspr:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${nss:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${pango:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${pcre:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${pixman:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${sqlite3:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${xdamage:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${xfixes:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${zlib:location}/lib"
+ exec %(location)s/chrome --disable-setuid-sandbox --disable-gpu $*""")
wrapper.flush()
wrapper.close()
- os.chmod(wrapper_location, 0766)
- os.chmod(os.path.join("%(location)s", 'chrome'), 0766)
- os.chmod(os.path.join("%(location)s", 'chrome-wrapper'), 0766)
+ os.system('"${findutils:location}/bin/find" "%(location)s" -type d -exec "${coreutils:location}/bin/chmod" a+rx {} \;')
+ os.system('"${findutils:location}/bin/find" "%(location)s" -type f -executable -exec "${coreutils:location}/bin/chmod" a+rx {} \;')
+ os.system('"${findutils:location}/bin/find" "%(location)s" -type f -exec "${coreutils:location}/bin/chmod" a+r {} \;')
+ os.chmod(wrapper_location, 0755)
+ # requirements : libXrender1 libxss1 x11-common
\ No newline at end of file
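The revision pinned in the [chromium] section can be refreshed by fetching the LAST_CHANGE object mentioned in its comments; a small stand-alone helper along these lines (not part of the recipe) prints the current continuous-build revision for Linux_x64:
# Hypothetical helper, separate from the buildout recipe: query the
# LAST_CHANGE URL referenced in the [chromium] comments above.
import urllib2  # the recipes in this profile still target Python 2
URL = ('https://www.googleapis.com/download/storage/v1/b/'
       'chromium-browser-continuous/o/Linux_x64%2FLAST_CHANGE?alt=media')
print(urllib2.urlopen(URL).read().strip())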
From 6e2dd54b4988a40766db557906b1d544916a6859 Mon Sep 17 00:00:00 2001
From: Julien Muchembled <jm@nexedi.com>
Date: Fri, 13 Jan 2017 23:45:54 +0100
Subject: [PATCH] Fix deadlock with storages that "sync" on a new transaction
This backports a change from commit 227953b977a9e195c4ce9bbb9acd9c5ee60c333a.
NEO, as well as ZEO+server_sync (ERP5 backports this feature with a
monkey-patch), pings the server (primary master node in the case of NEO) on
new transactions. However, this round-trip is actually performed by the thread
that also does tasks requiring to lock the DB, like processing of invalidations.
Since transaction 1.6.1 (more precisely commit e581a120a6), IStorage.sync()
is called indirectly by DB.open() when a transaction has already begun,
and the DB must not be locked when this happens.
---
src/ZODB/DB.py | 9 +++------
src/ZODB/tests/testDB.py | 2 +-
2 files changed, 4 insertions(+), 7 deletions(-)
diff --git a/src/ZODB/DB.py b/src/ZODB/DB.py
index 95f1ab4..ffd10e9 100644
--- a/src/ZODB/DB.py
+++ b/src/ZODB/DB.py
@@ -753,19 +753,16 @@ def open(self, transaction_manager=None, at=None, before=None):
result = self.pool.pop()
assert result is not None
- # open the connection.
- result.open(transaction_manager)
-
# A good time to do some cache cleanup.
# (note we already have the lock)
self.pool.availableGC()
self.historical_pool.availableGC()
-
- return result
-
finally:
self._r()
+ result.open(transaction_manager)
+ return result
+
def connectionDebugInfo(self):
result = []
t = time.time()
diff --git a/src/ZODB/tests/testDB.py b/src/ZODB/tests/testDB.py
index 59b6ccf..e6286a4 100644
--- a/src/ZODB/tests/testDB.py
+++ b/src/ZODB/tests/testDB.py
@@ -151,7 +151,7 @@ def connectionDebugInfo():
>>> before
[None, '\x03zY\xd8\xc0m9\xdd', None]
>>> opened
- ['2008-12-04T20:40:44Z (1.40s)', '2008-12-04T20:40:45Z (0.30s)', None]
+ ['2008-12-04T20:40:44Z (1.30s)', '2008-12-04T20:40:46Z (0.10s)', None]
>>> infos
['test info (2)', ' (0)', ' (0)']
--
2.10.2.2.g19ca937.dirty
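To restate the commit message above: before the patch, DB.open() performed Connection.open() (and therefore a possible IStorage.sync() network round-trip) while still holding the DB lock, and with NEO or ZEO+server_sync that round-trip is answered by the same thread that needs the lock to process invalidations. A condensed illustration of the two orderings (a simplified sketch, not the real ZODB code):
# Simplified sketch of the lock ordering changed by the patch above;
# names and structure are illustrative, not the actual ZODB.DB class.
import threading

class DBSketch(object):
    def __init__(self):
        self._lock = threading.Lock()

    def _pool_gc(self):
        # stands in for pool.availableGC() / historical_pool.availableGC()
        pass

    def open_before_patch(self, result, transaction_manager):
        with self._lock:
            result.open(transaction_manager)  # may call storage.sync(): a network round-trip
            self._pool_gc()                   # lock held while waiting -> deadlock risk
            return result

    def open_after_patch(self, result, transaction_manager):
        with self._lock:
            self._pool_gc()
        result.open(transaction_manager)      # sync() now runs without the DB lock
        return result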
@@ -5,10 +5,14 @@ extends =
../dbus/buildout.cfg
../fontconfig/buildout.cfg
../gtk-2/buildout.cfg
+ ../gtk-3/buildout.cfg
../libffi/buildout.cfg
../libpng/buildout.cfg
+ ../mesa/buildout.cfg
../p11-kit/buildout.cfg
+ ../pcre/buildout.cfg
../xorg/buildout.cfg
+ ../xz-utils/buildout.cfg
parts =
firefox
@@ -26,23 +30,66 @@ depends =
${liberation-fonts:location}
${ipaex-fonts:location}
- version = 45.0.1
- # MD5SUMs are available at :
- # https://ftp.mozilla.org/pub/mozilla.org/firefox/releases/${:version}/MD5SUMS
- x86 = http://download-installer.cdn.mozilla.net/pub/firefox/releases/${:version}/linux-i686/en-US/firefox-${:version}.tar.bz2 3756c8d06d6f915a3dff1dae643ee74b
- x86-64 = http://download-installer.cdn.mozilla.net/pub/firefox/releases/${:version}/linux-x86_64/en-US/firefox-${:version}.tar.bz2 0409177ef649ec90ffe7a421a19bc156
+ version = 51.0.1
+ x86 = http://download-installer.cdn.mozilla.net/pub/firefox/releases/${:version}/linux-i686/en-US/firefox-${:version}.tar.bz2 9a5b67e9d759a1e4df004294a24b2b43
+ x86-64 = http://download-installer.cdn.mozilla.net/pub/firefox/releases/${:version}/linux-x86_64/en-US/firefox-${:version}.tar.bz2 bd93f2652d1d90d59ae462439a93c85f
+ geckodriver_x86 = https://github.com/mozilla/geckodriver/releases/download/v0.14.0/geckodriver-v0.14.0-linux32.tar.gz b5836f5a944fe9f3ed1a67c7b342c6a7
+ geckodriver_x86-64 = https://github.com/mozilla/geckodriver/releases/download/v0.14.0/geckodriver-v0.14.0-linux64.tar.gz 4a185d3179862a35104603b9274452e7
script =
if not self.options.get('url'): self.options['url'], self.options['md5sum'] = self.options[guessPlatform()].split(' ')
extract_dir = self.extract(self.download(self.options['url'], self.options.get('md5sum')))
workdir = guessworkdir(extract_dir)
self.copyTree(workdir, "%(location)s")
+ geckodriver_url, geckodriver_md5sum = self.options['geckodriver_' + guessPlatform()].split(' ')
+ extract_dir = self.extract(self.download(geckodriver_url, geckodriver_md5sum))
+ shutil.copy(extract_dir + '/geckodriver', "%(location)s")
wrapper_location = os.path.join("%(location)s", "firefox-slapos")
wrapper = open(wrapper_location, 'w')
wrapper.write("""#!${dash:location}/bin/dash
cd %(location)s
- export LD_LIBRARY_PATH=%(location)s:${alsa:location}/lib:${atk:location}/lib:${bzip2:location}/lib:${cairo:location}/lib:${dbus:location}/lib:${dbus-glib:location}/lib:${fontconfig:location}/lib:${freetype:location}/lib:${gdk-pixbuf:location}/lib:${gettext:location}/lib:${glib:location}/lib:${gtk-2:location}/lib:${harfbuzz:location}/lib:${libICE:location}/lib:${libSM:location}/lib:${libX11:location}/lib:${libXau:location}/lib:${libXcomposite:location}/lib:${libXcursor:location}/lib:${libXext:location}/lib:${libXrender:location}/lib:${libXt:location}/lib:${libffi:location}/lib:${libpng:location}/lib:${libtool:location}/lib:${libuuid:location}/lib:${libxcb:location}/lib:${libxml2:location}/lib:${p11-kit:location}/lib:${pango:location}/lib:${pixman:location}/lib:${xdamage:location}/lib:${xfixes:location}/lib:${zlib:location}/lib
+ export LD_LIBRARY_PATH="%(location)s"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${alsa:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${atk:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${at-spi2-atk:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${at-spi2-core:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${bzip2:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${cairo:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${dbus:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${dbus-glib:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${fontconfig:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${freetype:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${gdk-pixbuf:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${gettext:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${glib:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${gtk-3:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${harfbuzz:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libepoxy:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libffi:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libICE:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libpng:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libSM:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libtool:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libuuid:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libX11:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXau:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libxcb:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXcomposite:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXcursor:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXext:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXi:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libxml2:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXrender:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${libXt:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${mesa:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${pango:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${pcre:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${pixman:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${xdamage:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${xfixes:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${xz-utils:location}/lib"
+ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${zlib:location}/lib"
export PATH=${fontconfig:location}/bin:$PATH
exec %(location)s/firefox $*""")
wrapper.close()
...
@@ -6,6 +6,7 @@ extends =
../perl/buildout.cfg
../tar/buildout.cfg
../xz-utils/buildout.cfg
+ ../binutils/buildout.cfg
parts =
gcc
@@ -59,6 +60,9 @@ configure-options =
--with-mpc=${mpc:location}
--enable-languages="c,c++"
--with-isl=${isl:location}
+ --with-ld=${binutils:location}/bin/ld
+ --with-nm=${binutils:location}/bin/nm
+ --with-as=${binutils:location}/bin/as
environment =
LDFLAGS=-Wl,-rpath=${gmp:location}/lib -Wl,-rpath=${isl:location}/lib -Wl,-rpath=${mpc:location}/lib -Wl,-rpath=${mpfr:location}/lib
PATH=${perl:location}/bin:${tar:location}/bin:%(PATH)s
...
@@ -3,7 +3,7 @@
parts = golang
[golang]
- <= golang16
+ <= golang18
[golang-common]
@@ -19,8 +19,8 @@ environment =
[golang14]
<= golang-common
- url = https://storage.googleapis.com/golang/go1.4.3.src.tar.gz
- md5sum = dfb604511115dd402a77a553a5923a04
+ url = https://storage.googleapis.com/golang/go1.4-bootstrap-20161024.tar.gz
+ md5sum = 76e42c8152e8560ded880a6d1d1f53cb
environment-extra =
@@ -42,3 +42,12 @@ md5sum = bf3fce6ccaadd310159c9e874220e2a2
# go1.6 needs go1.4 to bootstrap
environment-extra =
GOROOT_BOOTSTRAP=${golang14:location}
+ [golang18]
+ <= golang-common
+ url = https://storage.googleapis.com/golang/go1.8.src.tar.gz
+ md5sum = 7743960c968760437b6e39093cfe6f67
+ # go1.8 needs go1.4 to bootstrap
+ environment-extra =
+ GOROOT_BOOTSTRAP=${golang14:location}
[buildout]
[gperf]
recipe = slapos.recipe.cmmi
url = ftp://ftp.gnu.org/pub/gnu/gperf/gperf-3.0.4.tar.gz
md5sum = c1f1db32fb6598d6a93e6e88796a8632
[buildout]
extends =
../dbus/buildout.cfg
../gettext/buildout.cfg
../glib/buildout.cfg
../gtk-2/buildout.cfg
../intltool/buildout.cfg
../libepoxy/buildout.cfg
../pcre/buildout.cfg
../perl/buildout.cfg
../perl-XML-Parser/buildout.cfg
../xorg/buildout.cfg
[at-spi2-core]
recipe = slapos.recipe.cmmi
url = http://ftp.gnome.org/pub/gnome/core/3.20/3.20.2/sources/at-spi2-core-2.20.1.tar.xz
md5sum = cd11cba463e8f5e1f39ba69555a7382f
depends =
${perl-XML-Parser:location}
configure-options =
--disable-gtk-doc-html
environment =
PATH=${dbus:location}/bin:${gettext:location}/bin:${glib:location}/bin:${intltool:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${dbus:location}/lib/pkgconfig:${glib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
CPPFLAGS=-I${inputproto:location}/include -I${kbproto:location}/include -I${libX11:location}/include -I${libXi:location}/include -I${libXtst:location}/include -I${xextproto:location}/include -I${xproto:location}/include
LDFLAGS=-L${libX11:location}/lib -Wl,-rpath=${libX11:location}/lib -L${libXi:location}/lib -Wl,-rpath=${libXi:location}/lib -L${libXtst:location}/lib -Wl,-rpath=${libXtst:location}/lib
[at-spi2-atk]
recipe = slapos.recipe.cmmi
url = http://ftp.gnome.org/pub/gnome/core/3.20/3.20.2/sources/at-spi2-atk-2.20.1.tar.xz
md5sum = 23309b6f8e1623871ace6347fb734dce
environment =
PATH=${intltool:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${atk:location}/lib/pkgconfig:${at-spi2-core:location}/lib/pkgconfig:${dbus:location}/lib/pkgconfig:${glib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
[gtk-3]
recipe = slapos.recipe.cmmi
url = http://ftp.gnome.org/pub/gnome/core/3.20/3.20.2/sources/gtk+-3.20.4.tar.xz
md5sum = 0cceee599f2910c25bf4b9dde4ab2fb6
pkg_config_depends = ${at-spi2-atk:location}/lib/pkgconfig:${at-spi2-core:location}/lib/pkgconfig:${dbus:location}/lib/pkgconfig:${glib:location}/lib/pkgconfig:${libepoxy:location}/lib/pkgconfig:${pango:location}/lib/pkgconfig:${pango:pkg_config_depends}:${atk:location}/lib/pkgconfig:${gdk-pixbuf:location}/lib/pkgconfig:${libXi:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
configure-options =
--disable-static
--disable-glibtest
--disable-cups
--disable-papi
--enable-explicit-deps
--disable-xinerama
--disable-gtk-doc-html
--disable-man
environment =
PATH=${gdk-pixbuf:location}/bin:${gettext:location}/bin:${glib:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${:pkg_config_depends}
# not taken from pkg-config result...
CPPFLAGS=-I${cairo:location}/include -I${inputproto:location}/include -I${libX11:location}/include -I${libXi:location}/include -I${xproto:location}/include -I${kbproto:location}/include -I${libXrender:location}/include -I${render:location}/include -I${libXext:location}/include
LDFLAGS=-L${libX11:location}/lib -L${libXi:location}/lib -L${libXext:location}/lib -L${libXrender:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
[buildout]
extends =
../hdf5/buildout.cfg
../cython/buildout.cfg
parts =
h5py
[hdf5-env]
LDFLAGS = -L${hdf5:location}/lib
CPPFLAGS = -I${hdf5:location}/include
[h5py]
recipe = zc.recipe.egg:custom
egg = h5py
setup-eggs = ${cython:egg}
include-dirs =
${hdf5:location}/include
library-dirs =
${hdf5:location}/lib
rpath =
${hdf5:location}/lib
environment = hdf5-env
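Once the egg is built, it can be smoke-tested against the [hdf5] part it links to; a minimal check run with the buildout python (file path and dataset name are arbitrary):
# Minimal, illustrative smoke test for the h5py egg defined above.
import h5py
import numpy as np

path = '/tmp/h5py-smoke-test.h5'
with h5py.File(path, 'w') as f:
    f.create_dataset('x', data=np.arange(10))
with h5py.File(path, 'r') as f:
    assert list(f['x'][:]) == list(range(10))

# should report the version of the [hdf5] part below (1.10.x)
print(h5py.version.hdf5_version)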
[buildout]
extends =
../zlib/buildout.cfg
parts =
hdf5
[hdf5]
recipe = slapos.recipe.cmmi
url = https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.0-patch1/src/hdf5-1.10.0-patch1.tar.bz2
md5sum = f6d980febe2c35c11670a9b34fa3b487
configure-options =
--with-zlib=${zlib:location}
environment =
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
@@ -89,7 +89,7 @@ input = inline:
recipe = slapos.recipe.cmmi
path = ${helloweb-repository:location}/go/
- go = ${golang16:location}/bin/go
+ go = ${golang18:location}/bin/go
configure-command = :
make-binary =
make-targets= cd ${:path} &&
...
[buildout]
extends =
../tensorflow/buildout.cfg
../h5py/buildout.cfg
../pillow/buildout.cfg
parts =
keras-egg
[keras-egg]
recipe = zc.recipe.egg
eggs =
${scipy:egg}
${numpy:egg}
${tensorflow-build-install-egg:egg}
${protobuf-python:egg}
Keras
${h5py:egg}
${pillow-python:egg}
interpreter = keras-python
scripts = keras-python
[versions]
Keras = 1.2.1
Theano = 0.8.2
tensorflow = 0.12.0
h5py = 2.7.0rc2
Cython = 0.25.2
@@ -13,8 +13,8 @@ parts =
[lcms2]
recipe = slapos.recipe.cmmi
- url = http://downloads.sourceforge.net/project/lcms/lcms/2.6/lcms2-2.6.tar.gz
- md5sum = f4c08d38ceade4a664ebff7228910a33
+ url = http://downloads.sourceforge.net/project/lcms/lcms/2.8/lcms2-2.8.tar.gz
+ md5sum = 87a5913f1a52464190bb655ad230539c
configure-options =
--disable-static
environment =
...
@@ -14,7 +14,7 @@ attr-include = ${attr:location}/include/
attr-lib = ${attr:location}/lib/
slapos_promise =
directory:sbin
- directory:usr/include
+ directory:include
statlib:lib/libcap.a
file:lib/libcap.so
file:sbin/getcap
@@ -26,7 +26,7 @@ script =
workdir = guessworkdir(extract_dir)
cflags = '-I%(attr)s' % {'attr': self.options['attr-include']}
ldflags = '-L%(attr)s -Wl,-rpath=%(attr)s' % {'attr': self.options['attr-lib']}
- call(['make', 'CFLAGS=%s' % cflags, 'LDFLAGS=%s' % ldflags, 'DESTDIR=%s' % self.options['location'], 'RAISE_SETFCAP=no', 'install'],
+ call(['make', 'CFLAGS=%s' % cflags, 'LDFLAGS=%s' % ldflags, 'DESTDIR=%s' % self.options['location'], 'RAISE_SETFCAP=no', 'prefix=', 'install'],
cwd=workdir, env=env)
lib64 = os.path.join(self.options['location'], 'lib64')
lib = os.path.join(self.options['location'], 'lib')
...
[buildout]
extends =
../mesa/buildout.cfg
../pkgconfig/buildout.cfg
../xorg/buildout.cfg
../xz-utils/buildout.cfg
[libepoxy]
recipe = slapos.recipe.cmmi
url = https://github.com/anholt/libepoxy/releases/download/v1.4/libepoxy-1.4.0.tar.xz
md5sum = d8d8cbf2beb64975d424fcc5167a2a38
environment =
PATH=${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${mesa:location}/lib/pkgconfig:${libX11:location}/lib/pkgconfig:${libX11:pkg_config_depends}:${libXext:location}/lib/pkgconfig:${xdamage:location}/lib/pkgconfig:${damageproto:location}/lib/pkgconfig:${xfixes:location}/lib/pkgconfig:${fixesproto:location}/lib/pkgconfig
@@ -18,15 +18,15 @@ environment =
[libpng12]
<= libpng-common
- url = http://download.sourceforge.net/libpng/libpng-1.2.56.tar.xz
- md5sum = 868562bd1c58b76ed8703f135a2e439a
+ url = http://download.sourceforge.net/libpng/libpng-1.2.57.tar.xz
+ md5sum = 307052e5e8af97b82b17b64fb1b3677a
[libpng15]
<= libpng-common
- url = http://download.sourceforge.net/libpng/libpng-1.5.26.tar.xz
- md5sum = 3414d556788e14b4a154369e67eacaa3
+ url = http://download.sourceforge.net/libpng/libpng-1.5.28.tar.xz
+ md5sum = 847aa2a1b231c07466d7f4167537424a
[libpng]
<= libpng-common
- url = http://download.sourceforge.net/libpng/libpng-1.6.21.tar.xz
- md5sum = 3bacb4728f6694a64ad9052769d6a4ce
+ url = http://download.sourceforge.net/libpng/libpng-1.6.28.tar.xz
+ md5sum = 425354f86c392318d31aedca71019372
 [buildout]
 extends =
   ../bzip2/buildout.cfg
+  ../cmake/buildout.cfg
+  ../patch/buildout.cfg
+  ../perl/buildout.cfg
   ../popt/buildout.cfg
   ../zlib/buildout.cfg
 parts =
@@ -8,11 +11,19 @@ parts =
 [librsync]
 recipe = slapos.recipe.cmmi
-url = http://downloads.sourceforge.net/sourceforge/librsync/librsync-0.9.7.tar.gz
-md5sum = 24cdb6b78f45e0e83766903fd4f6bc84
+url = https://github.com/librsync/librsync/archive/v2.0.0.tar.gz
+md5sum = cbda9c3eba21bcf2d56a4080ba7a5dc4
+location = ${buildout:parts-directory}/${:_buildout_section_name_}
+patch-options = -p1
+patches =
+  ${:_profile_base_location_}/librsync-2.0.0-issue50.patch#5bac5363646a2c2ec6d2c4b26ca4cd7f
+configure-command = ${cmake:location}/bin/cmake
 configure-options =
-  --disable-static
-  --enable-shared
+  -DCMAKE_INSTALL_PREFIX=${:location}
+  -DCMAKE_INSTALL_RPATH=${:location}/lib:${bzip2:location}/lib:${popt:location}/lib:${zlib:location}/lib
+make-options =
+  VERBOSE=1
 environment =
-  CPPFLAGS=-I${zlib:location}/include -I${bzip2:location}/include -I${popt:location}/include
-  LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib -L${popt:location}/lib -Wl,-rpath=${popt:location}/lib
+  PATH=${perl:location}/bin:%(PATH)s
+  CMAKE_INCLUDE_PATH=${bzip2:location}/include:${popt:location}/include:${zlib:location}/include
+  CMAKE_LIBRARY_PATH=${bzip2:location}/lib:${popt:location}/lib:${zlib:location}/lib
--- librsync-2.0.0/src/search.c 2017-02-20 13:39:48.012922600 +0100
+++ librsync-2.0.0/src/search.c 2017-02-20 13:41:43.661880014 +0100
@@ -218,7 +218,7 @@
r = m;
}
- if (l == r) {
+ if ((l == r) && (l <= bucket->r)) {
int i = sig->targets[l].i;
rs_block_sig_t *b = &(sig->block_sigs[i]);
if (weak_sum != b->weak_sum)
@@ -46,5 +46,5 @@ patches =
   ${lxc-0.8.0-rc2-lxc-ls-patch:location}/${lxc-0.8.0-rc2-lxc-ls-patch:filename}
 environment =
   PATH=%(PATH)s:${attr:location}/bin/:${libcap:location}/sbin/
-  CFLAGS=-I${libcap:location}/usr/include
+  CFLAGS=-I${libcap:location}/include
   LDFLAGS=-L${libcap:location}/lib/ -Wl,-rpath=${libcap:location}/lib/
@@ -18,12 +18,12 @@ recipe = slapos.recipe.cmmi
 url = ftp://ftp.freedesktop.org/pub/mesa/11.0.3/mesa-11.0.3.tar.xz
 md5sum = bf9118bf0fbf360715cfe60baf7a1db5
 configure-options =
-  --disable-static
+  --enable-static
   --disable-gles1
   --disable-gles2
   --disable-dri
   --disable-dri3
-  --disable-egl
+  --enable-egl
   --disable-gbm
   --disable-xvmc
   --disable-vdpau
@@ -4,28 +4,12 @@ extends =
 parts = nspr
-[nspr-pkgconfig-patch-download]
-recipe = hexagonit.recipe.download
-ignore-existing = true
-filename = nspr-4.8.6-pkgconfig-1.patch
-url = http://www.linuxfromscratch.org/patches/blfs/svn/${:filename}
-md5sum = 7c00beff0475314f59214842509e407f
-download-only = true
 [nspr]
 recipe = slapos.recipe.cmmi
-url = https://ftp.mozilla.org/pub/mozilla.org/nspr/releases/v4.8.7/src/nspr-4.8.7.tar.gz
-md5sum = 97e30989a56ab813453b71261849c200
-patches = ${nspr-pkgconfig-patch-download:location}/${nspr-pkgconfig-patch-download:filename}
-patch-options = -p1
-configure-command =
-  cd mozilla/nsprpub
-  ./configure
+url = https://ftp.mozilla.org/pub/nspr/releases/v4.13.1/src/nspr-4.13.1.tar.gz
+md5sum = 9c44298a6fc478b3c0a4e98f4f9981ed
+configure-command = nspr/configure
 configure-options =
   --prefix=${buildout:parts-directory}/${:_buildout_section_name_}
-  --with-mozilla
   --enable-64bit
-make-options =
-  -C mozilla/nsprpub
-environment =
-  PATH=${patch:location}/bin:%(PATH)s
+  --enable-ipv6
 [buildout]
 extends =
-  https://svn.erp5.org/repos/public/erp5/trunk/buildout/software-profiles/zlib.cfg
-  nspr.cfg
-  sed.cfg
+  ../nspr/buildout.cfg
+  ../sqlite3/buildout.cfg
+  ../zlib/buildout.cfg
 parts =
   nss
-[nss-patch]
-recipe = hexagonit.recipe.download
-ignore-existing = true
-filename = nss-3.12.9-with-nspr-4.8.7-1.patch
-url = http://www.linuxfromscratch.org/patches/blfs/svn/nss-3.12.8-standalone-1.patch
-md5sum = ee7b5966961bef16ca896435e78652d3
-download-only = true
-[nss-download]
-recipe = hexagonit.recipe.download
-ignore-existing = true
-url = ftp://ftp.mozilla.org/pub/mozilla.org/security/nss/releases/NSS_3_12_9_RTM/src/nss-3.12.9.tar.gz
-md5sum = bd32f183ca28440c1744650be31a9ccc
-strip-top-level-dir = true
 [nss]
-recipe = plone.recipe.command
-source = ${nss-download:location}
-destination = ${buildout:parts-directory}/${:_buildout_section_name_}
-location = ${buildout:parts-directory}/${:_buildout_section_name_}
-compile-location = ${buildout:parts-directory}/${:_buildout_section_name_}_compile_
-stop-on-error = true
-command =
-  rm -rf ${:destination} &&
-  mkdir -p ${:destination} &&
-  rm -rf ${:compile-location} &&
-  cp -R ${:source} ${:compile-location} &&
-  cd ${:compile-location} &&
-  patch -Np1 -i ${nss-patch:location}/${nss-patch:filename} &&
-  ${sed:location}/bin/sed -i "s/ZLIB_LIBS = -lz//g" ${:compile-location}/mozilla/security/coreconf/Linux.mk &&
-  ${sed:location}/bin/sed -i "s/# INCLUDES += -I\/usr\/include -Y\/usr\/include\/linux/INCLUDES += \$(ZLIB_INCLUDE_DIR)/g" ${:compile-location}/mozilla/security/coreconf/Linux.mk &&
-  gmake -C mozilla/security/nss \
-    USE_64=1 \
-    BUILD_OPT=1 \
-    ZLIB_INCLUDE_DIR=-I${zlib:location}/include \
-    ZLIB_LIBS="-lz -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib" \
-    NSPR_INCLUDE_DIR=${nspr:location}/include/nspr \
-    NSPR_LIB_DIR="${nspr:location}/lib -Wl,-rpath=${nspr:location}/lib" \
-    NSSUTIL_LIB_DIR="${:location}/lib -Wl,-rpath=${:location}/lib" \
-    nss_build_all &&
-  mkdir -p ${:destination}/bin &&
-  mkdir -p ${:destination}/include/nss3 &&
-  mkdir -p ${:destination}/lib/pkgconfig &&
-  install -v -m755 ${:compile-location}/mozilla/dist/*.OBJ/lib/*.so ${:destination}/lib &&
-  install -v -m644 ${:compile-location}/mozilla/dist/*.OBJ/lib/{*.chk,libcrmf.a} ${:destination}/lib &&
-  install -v -m755 -d ${:destination}/include/nss &&
-  install -v -m755 ${:compile-location}/mozilla/dist/*.OBJ/bin/{certutil,nss-config,pk12util} ${:destination}/bin &&
-  install -v -m644 ${:compile-location}/mozilla/dist/*.OBJ/lib/pkgconfig/nss.pc ${:destination}/lib/pkgconfig &&
-  cp -v -RL ${:compile-location}/mozilla/dist/{public,private}/nss/* ${:destination}/include/nss &&
-  chmod 644 ${:destination}/include/nss/* &&
-  cd ${buildout:parts-directory} &&
-  rm -rf ${:compile-location}
-[nss-cmmi]
 recipe = slapos.recipe.cmmi
-path = ${nss-download:location}/mozilla/security/nss
-configure-command = echo "No need to configure"
-make-binary = gmake
-make-options = USE_64=1
-make-targets = nss_build_all
+url = https://ftp.mozilla.org/pub/security/nss/releases/NSS_3_29_RTM/src/nss-3.29.tar.gz
+md5sum = 253d1ae4cf5a560373bbadb4bf483945
+location = ${buildout:parts-directory}/${:_buildout_section_name_}
+configure-command = make -j1 -C nss/coreconf/nsinstall all
+# XXX How to build nss-config and pkg-config files ?
+make-options =
+  -j1
+  -C nss
+  NSPR_INCLUDE_DIR="${nspr:location}/include/nspr -I${zlib:location}/include"
+  NSPR_LIB_DIR="${nspr:location}/lib -L${zlib:location}/lib"
+  SQLITE_INCLUDE_DIR=${sqlite3:location}/include
+  SQLITE_LIB_DIR=${sqlite3:location}/lib
+  LDFLAGS="-Wl,-rpath=${:location}/lib -Wl,-rpath=${nspr:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib"
+  all
+post-install =
+  cp -aLr dist/*OBJ/* ${:location}/
+  mkdir -p ${:location}/include
+  cp -aLr dist/public/nss ${:location}/include/
+environment =
+  BUILD_OPT=1
+  USE_64=1
+  NSS_USE_SYSTEM_SQLITE=1
[buildout]
extends =
../protobuf/buildout.cfg
parts =
protobuf-python
[protobuf-python-env]
PATH = ${protobuf:location}/bin:%(PATH)s
[protobuf-python]
recipe = zc.recipe.egg:custom
egg = protobuf
environment = protobuf-python-env
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[buildout]
parts = protobuf
[protobuf]
recipe = slapos.recipe.cmmi
url = https://github.com/google/protobuf/releases/download/v3.1.0/protobuf-python-3.1.0.tar.gz
md5sum = 7a227a21379a2ea08cc5d7ba1fb1ba5b
location = ${buildout:parts-directory}/${:_buildout_section_name_}
@@ -13,6 +13,7 @@ extends =
   ../zlib/buildout.cfg
   ../file/buildout.cfg
   ../xz-utils/buildout.cfg
+  ../gcc/buildout.cfg
 parts =
   python2.7
@@ -36,7 +36,7 @@ configure-options =
   --enable-vnc
   --enable-vnc-png
   --disable-vnc-jpeg
-  --extra-cflags="-I${gnutls:location}/include -I${libuuid:location}/include -I${ncurses:location}/include -I${zlib:location}/include -I${libpng:location}/include -I${libaio:location}/include -I${attr:location}/include -I${libcap:location}/usr/include"
+  --extra-cflags="-I${gnutls:location}/include -I${libuuid:location}/include -I${ncurses:location}/include -I${zlib:location}/include -I${libpng:location}/include -I${libaio:location}/include -I${attr:location}/include -I${libcap:location}/include"
   --extra-ldflags="-Wl,-rpath -Wl,${glib:location}/lib -L${glib:location}/lib -Wl,-rpath -Wl,${gnutls:location}/lib -L${gnutls:location}/lib -Wl,-rpath -Wl,${gpg-error:location}/lib -L${gpg-error:location}/lib -L${gettext:location}/lib -Wl,-rpath -Wl,${gettext:location}/lib -Wl,-rpath -Wl,${libpng:location}/lib -L${libpng:location}/lib -L${libuuid:location}/lib -Wl,-rpath -Wl,${libuuid:location}/lib -L${libaio:location}/lib -Wl,-rpath=${libaio:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -lpng -lz -lgnutls -L${attr:location}/lib -Wl,-rpath=${attr:location}/lib -L${libcap:location}/lib -Wl,-rpath=${libcap:location}/lib"
   --disable-werror
 environment =
@@ -8,6 +8,10 @@ parts =
 [rdiff-backup-build]
 recipe = zc.recipe.egg:custom
 egg = rdiff-backup
+patches =
+  ${:_profile_base_location_}/rdiff-backup-1.2.8-librsync-1.0.0.patch#5e54a67845edd6942fcf7359c921e003
+patch-options = -p1
+patch-binary = ${patch:location}/bin/patch
 include-dirs =
   ${librsync:location}/include/
 library-dirs =
Patch by Roman Tereshonkov and Kari Hautio for rdiff-backup <= 1.2.8 to avoid a build failure with
librsync >= 1.0.0 (which is a security bugfix release). The discussion and solution finding can be
found at https://bugs.launchpad.net/duplicity/+bug/1416344 (for duplicity).
--- rdiff-backup-1.2.8/_librsyncmodule.c 2009-03-16 15:36:21.000000000 +0100
+++ rdiff-backup-1.2.8/_librsyncmodule.c.librsync-1.0.0 2015-03-02 00:54:24.000000000 +0100
@@ -59,8 +59,13 @@
if (sm == NULL) return NULL;
sm->x_attr = NULL;
+#ifdef RS_DEFAULT_STRONG_LEN
sm->sig_job = rs_sig_begin((size_t)blocklen,
(size_t)RS_DEFAULT_STRONG_LEN);
+#else
+ sm->sig_job = rs_sig_begin((size_t)blocklen,
+ (size_t)8, RS_MD4_SIG_MAGIC);
+#endif
return (PyObject*)sm;
}
[buildout]
extends =
../pandas/buildout.cfg
../matplotlib/buildout.cfg
../statsmodels/buildout.cfg
parts =
seaborn
[seaborn-env]
<= pandas-env
[seaborn]
recipe = zc.recipe.egg:custom
egg = seaborn
environment = seaborn-env
setup-eggs =
${pandas:egg}
${matplotlib:egg}
${statsmodels:egg}
[buildout]
extends =
../pandas/buildout.cfg
parts =
statsmodels
[statsmodels-env]
<= pandas-env
[statsmodels]
recipe = zc.recipe.egg:custom
egg = statsmodels
environment = statsmodels-env
setup-eggs =
${pandas:egg}
[buildout]
extends =
../coreutils/buildout.cfg
../gettext/buildout.cfg
../gperf/buildout.cfg
../intltool/buildout.cfg
../libcap/buildout.cfg
../perl/buildout.cfg
../perl-XML-Parser/buildout.cfg
../pkgconfig/buildout.cfg
../util-linux/buildout.cfg
../xz-utils/buildout.cfg
[systemd]
recipe = slapos.recipe.cmmi
# XXX This version requires Linux kernel >= 3.7.
url = https://www.freedesktop.org/software/systemd/systemd-221.tar.xz
md5sum = b4d5a253841cf28a98b7ec99c45e3716
depends =
${perl-XML-Parser:location}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
make-options = rootprefix=${:location} SYSTEM_SYSVINIT_PATH=${:location}/etc/init.d
environment =
PATH=${coreutils:location}/bin:${gettext:location}/bin:${gperf:location}/bin:${intltool:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
CPPFLAGS=-I${libcap:location}/include -I${util-linux:location}/include
LDFLAGS=-L${libcap:location}/lib -Wl,-rpath=${libcap:location}/lib -L${util-linux:location}/lib -Wl,-rpath=${util-linux:location}/lib
PKG_CONFIG_PATH=${util-linux:location}/lib/pkgconfig
[buildout]
extends =
../../stack/slapos.cfg
../scipy/buildout.cfg
../zip/buildout.cfg
../bazel/buildout.cfg
../protobuf-python/buildout.cfg
parts =
slapos-cookbook-develop
slapos-cookbook
tensorflow-build-install-egg
[numpy-egg]
recipe = zc.recipe.egg
initialization =
import scipy.stats # load our own libstdc++ explicitly at the very beginning
eggs =
setuptools
${scipy:egg}
${numpy:egg}
interpreter = numpy-python
scripts = numpy-python
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[tensorflow-repository]
recipe = plone.recipe.command
stop-on-error = true
repository = https://github.com/tensorflow/tensorflow
tag = v0.12.0
git-binary = ${git:location}/bin/git
patch-binary = ${patch:location}/bin/patch
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = export HOME=${:location}; (${:git-binary} clone --recurse-submodules --quiet -b ${:tag} ${:repository} ${:location}; cd ${buildout:parts-directory} ; ${:patch-binary} -p1 -d ${:_buildout_section_name_} < ${:_profile_base_location_}/tensorflow-v0.12.0.patch ) || (rm -fr ${:location}; exit 1)
[template-tensorflow-bzl]
recipe = slapos.recipe.template:jinja2
location = ${buildout:parts-directory}/${:_buildout_section_name_}
mode = 640
filename = tensorflow.bzl
template = ${:_profile_base_location_}/${:filename}.in
rendered = ${:location}/${:filename}
tf_extension_linkopts = '-lrt', '-Wl,-rpath=${gcc:location}/lib64'
context =
key tf_extension_linkopts template-tensorflow-bzl:tf_extension_linkopts
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[cuda]
tf_need_cuda = 1
tf_cuda_version = 8.0
tf_cudnn_version = 6.5
tf_cuda_compute_capabilities = 5.2,6.1
cuda_toolkit_path = /usr/local/cuda
cudnn_install_path = /usr/local/cuda
[tensorflow-build]
recipe = slapos.recipe.build
location = ${buildout:parts-directory}/${:_buildout_section_name_}
tensorflow-bzl-file-path = ${template-tensorflow-bzl:rendered}
workdir = ${tensorflow-repository:location}
gcc-bin = ${gcc:location}/bin
gcc-lib = ${gcc:location}/lib
gcc-lib64 = ${gcc:location}/lib64
numpy-python-command = ${buildout:bin-directory}/${numpy-egg:interpreter}
python27-lib = ${python2.7:location}/lib
java_home_bin = ${bazel:java_home}/bin
bazel_bin = ${bazel:location}/bin
need_cuda = ${cuda:cuda_toolkit_path}
script =
os.makedirs(location)
workdir = self.options['workdir']
env = {'PATH':':'.join([self.options['gcc-bin'],
self.options['java_home_bin'],
self.options['bazel_bin'],
os.environ['PATH']]),
'COMPILER_PATH':':'.join([self.options['gcc-bin'],
os.environ.get('COMPILER_PATH') or '']),
'LIBRARY_PATH':':'.join([self.options['gcc-lib'],
self.options['gcc-lib64'],
os.environ.get('LIBRARY_PATH') or '']),
'PYTHON_BIN_PATH':self.options['numpy-python-command'],
'PYTHON_LIB_PATH':self.options['python27-lib'],
'TF_NEED_GCP':'0',
'TF_NEED_HDFS':'0',
'TF_NEED_OPENCL':'0',
'TF_NEED_CUDA':self.buildout['cuda']['tf_need_cuda'],
##### FOR CUDA #####
'GCC_HOST_COMPILER_PATH':os.path.join(self.options['gcc-bin'], 'gcc'),
'TF_CUDA_VERSION':self.buildout['cuda']['tf_cuda_version'],
'CUDA_TOOLKIT_PATH':self.buildout['cuda']['cuda_toolkit_path'],
'TF_CUDNN_VERSION':self.buildout['cuda']['tf_cudnn_version'],
'CUDNN_INSTALL_PATH':self.buildout['cuda']['cudnn_install_path'],
'TF_CUDA_COMPUTE_CAPABILITIES':self.buildout['cuda']['tf_cuda_compute_capabilities'],
####################
}
import os.path
if not os.path.exists(env['CUDA_TOOLKIT_PATH']): env['TF_NEED_CUDA'] = '0'
env['LD_LIBRARY_PATH'] = env['LIBRARY_PATH']
env['LDFLAGS'] = '-Wl,-rpath='+self.options['gcc-lib64']
shutil.copy(self.options['tensorflow-bzl-file-path'],
os.path.join(workdir, 'tensorflow', 'tensorflow.bzl'))
call(['./configure'], cwd=workdir, env=env)
bazel_command = (env['TF_NEED_CUDA'] == '1' and
['bazel', 'build', '-c', 'opt', '--copt', '-march=native', '--config', 'cuda', '-s', '--verbose_failures', '//tensorflow/tools/pip_package:build_pip_package']
or
['bazel', 'build', '-c', 'opt', '--copt', '-march=native', '-s', '--verbose_failures', '//tensorflow/tools/pip_package:build_pip_package'])
call(bazel_command, cwd=workdir, env=env)
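# Reading note for the script above (assuming the usual slapos.recipe.build
# behaviour of executing the "script" option as Python with location,
# self.options and call already in scope): TF_NEED_CUDA is taken from the
# [cuda] section but is reset to '0' whenever ${cuda:cuda_toolkit_path} does
# not exist, so the CUDA environment variables and the "--config cuda" bazel
# invocation are only used on hosts that actually provide the toolkit.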
[tensorflow-build-install-egg]
recipe = slapos.recipe.build
unzip-binary = ${unzip:location}/bin/unzip
tensorflow-repository-path = ${tensorflow-repository:location}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
need-tensorflow-build = ${tensorflow-build:location}
need-protobuf-python = ${protobuf-python:egg}
egg = tensorflow
script =
os.makedirs(location)
workdir = self.options['tensorflow-repository-path']
egg_name = 'tensorflow-0.12.0-py2.7-linux-x86_64.egg'
dist_dir = os.path.join(workdir, 'dist')
dest_dir = os.path.join(self.buildout['buildout']['eggs-directory'], egg_name)
call(['bazel-bin/tensorflow/tools/pip_package/build_pip_package', dist_dir], cwd=workdir)
call([self.options['unzip-binary'], '-o', os.path.join(dist_dir, egg_name), '-d', dest_dir])
[versions]
protobuf = 3.1.0.post1
wheel = 0.30.0a0
diff -N -u -r tensorflow.orig/tensorflow/tools/pip_package/build_pip_package.sh tensorflow/tensorflow/tools/pip_package/build_pip_package.sh
--- tensorflow.orig/tensorflow/tools/pip_package/build_pip_package.sh 2017-01-04 12:16:06.540748399 +0900
+++ tensorflow/tensorflow/tools/pip_package/build_pip_package.sh 2017-01-04 12:16:19.852748187 +0900
@@ -136,7 +136,7 @@
pushd ${TMPDIR}
rm -f MANIFEST
echo $(date) : "=== Building wheel"
- "${PYTHON_BIN_PATH:-python}" setup.py bdist_wheel ${GPU_FLAG} >/dev/null
+ "${PYTHON_BIN_PATH:-python}" setup.py bdist_egg ${GPU_FLAG} >/dev/null
mkdir -p ${DEST}
cp dist/* ${DEST}
popd
diff -N -u -r tensorflow.orig/tensorflow/tools/pip_package/setup.py tensorflow/tensorflow/tools/pip_package/setup.py
--- tensorflow.orig/tensorflow/tools/pip_package/setup.py 2017-01-04 12:16:06.540748399 +0900
+++ tensorflow/tensorflow/tools/pip_package/setup.py 2017-01-04 12:17:29.836747073 +0900
@@ -34,7 +34,7 @@
REQUIRED_PACKAGES = [
'numpy >= 1.11.0',
'six >= 1.10.0',
- 'protobuf == 3.1.0',
+ 'protobuf == 3.1.0.post1',
]
project_name = 'tensorflow'
diff -N -u -r tensorflow.orig/tensorflow/workspace.bzl tensorflow/tensorflow/workspace.bzl
--- tensorflow.orig/tensorflow/workspace.bzl 2017-01-04 12:16:06.568748399 +0900
+++ tensorflow/tensorflow/workspace.bzl 2017-01-04 12:16:19.852748187 +0900
@@ -228,9 +228,9 @@
native.new_http_archive(
name = "zlib_archive",
- url = "http://zlib.net/zlib-1.2.8.tar.gz",
- sha256 = "36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d",
- strip_prefix = "zlib-1.2.8",
+ url = "https://github.com/madler/zlib/archive/v1.2.9.zip",
+ sha256 = "819936ec260b679726f21dd9c1ef9c553ce0281988842c24a9c42652cbca698a",
+ strip_prefix = "zlib-1.2.9",
build_file = str(Label("//:zlib.BUILD")),
)
# -*- Python -*-
# Parse the bazel version string from `native.bazel_version`.
def _parse_bazel_version(bazel_version):
# Remove commit from version.
version = bazel_version.split(" ", 1)[0]
# Split into (release, date) parts and only return the release
# as a tuple of integers.
parts = version.split('-', 1)
# Turn "release" into a tuple of strings
version_tuple = ()
for number in parts[0].split('.'):
version_tuple += (str(number),)
return version_tuple
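# Worked example: _parse_bazel_version("0.4.2") returns ("0", "4", "2");
# check_version("0.2.1") below then compares ("0", "2", "1") against it
# lexicographically, so the minimum-version check passes.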
# Given a source file, generate a test name.
# i.e. "common_runtime/direct_session_test.cc" becomes
# "common_runtime_direct_session_test"
def src_to_test_name(src):
return src.replace("/", "_").split(".")[0]
# Check that a specific bazel version is being used.
def check_version(bazel_version):
if "bazel_version" not in dir(native):
fail("\nCurrent Bazel version is lower than 0.2.1, expected at least %s\n" % bazel_version)
elif not native.bazel_version:
print("\nCurrent Bazel is not a release version, cannot check for compatibility.")
print("Make sure that you are running at least Bazel %s.\n" % bazel_version)
else:
current_bazel_version = _parse_bazel_version(native.bazel_version)
minimum_bazel_version = _parse_bazel_version(bazel_version)
if minimum_bazel_version > current_bazel_version:
fail("\nCurrent Bazel version is {}, expected at least {}\n".format(
native.bazel_version, bazel_version))
pass
# Return the options to use for a C++ library or binary build.
# Uses the ":optmode" config_setting to pick the options.
load(
"//tensorflow/core:platform/default/build_config_root.bzl",
"tf_cuda_tests_tags",
"tf_sycl_tests_tags",
)
load(
"@local_config_cuda//cuda:build_defs.bzl",
"if_cuda",
)
# List of proto files for android builds
def tf_android_core_proto_sources():
return ["//tensorflow/core:" + p
for p in tf_android_core_proto_sources_relative()]
# As tf_android_core_proto_sources, but paths relative to
# //third_party/tensorflow/core.
def tf_android_core_proto_sources_relative():
return [
"example/example.proto",
"example/feature.proto",
"framework/allocation_description.proto",
"framework/attr_value.proto",
"framework/cost_graph.proto",
"framework/device_attributes.proto",
"framework/function.proto",
"framework/graph.proto",
"framework/kernel_def.proto",
"framework/log_memory.proto",
"framework/node_def.proto",
"framework/op_def.proto",
"framework/resource_handle.proto",
"framework/step_stats.proto",
"framework/summary.proto",
"framework/tensor.proto",
"framework/tensor_description.proto",
"framework/tensor_shape.proto",
"framework/tensor_slice.proto",
"framework/types.proto",
"framework/versions.proto",
"lib/core/error_codes.proto",
"protobuf/config.proto",
"protobuf/tensor_bundle.proto",
"protobuf/saver.proto",
"util/memmapped_file_system.proto",
"util/saved_tensor_slice.proto",
]
# Returns the list of pb.h and proto.h headers that are generated for
# tf_android_core_proto_sources().
def tf_android_core_proto_headers():
return (["//tensorflow/core/" + p.replace(".proto", ".pb.h")
for p in tf_android_core_proto_sources_relative()] +
["//tensorflow/core/" + p.replace(".proto", ".proto.h")
for p in tf_android_core_proto_sources_relative()])
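# Worked example: "example/example.proto" from the list above becomes
# "//tensorflow/core/example/example.pb.h" and
# "//tensorflow/core/example/example.proto.h" in the returned list.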
# Returns the list of protos for which proto_text headers should be generated.
def tf_proto_text_protos_relative():
return [p for p in tf_android_core_proto_sources_relative()]
def if_android_arm(a):
return select({
"//tensorflow:android_arm": a,
"//conditions:default": [],
})
def if_android_arm64(a):
return select({
"//tensorflow:android_arm64": a,
"//conditions:default": [],
})
def if_not_android(a):
return select({
"//tensorflow:android": [],
"//conditions:default": a,
})
def if_android(a):
return select({
"//tensorflow:android": a,
"//conditions:default": [],
})
def if_ios(a):
return select({
"//tensorflow:ios": a,
"//conditions:default": [],
})
def if_mobile(a):
return select({
"//tensorflow:android": a,
"//tensorflow:ios": a,
"//conditions:default": [],
})
def if_not_mobile(a):
return select({
"//tensorflow:android": [],
"//tensorflow:ios": [],
"//conditions:default": a,
})
def if_not_windows(a):
return select({
"//tensorflow:windows": [],
"//conditions:default": a,
})
def tf_copts():
return (["-DEIGEN_AVOID_STL_ARRAY",
"-Iexternal/gemmlowp",
"-Wno-sign-compare",
"-fno-exceptions"] +
if_cuda(["-DGOOGLE_CUDA=1"]) +
if_android_arm(["-mfpu=neon"]) +
select({
"//tensorflow:android": [
"-std=c++11",
"-DTF_LEAN_BINARY",
"-O2",
],
"//tensorflow:darwin": [],
"//tensorflow:windows": [
"/DLANG_CXX11",
"/D__VERSION__=\\\"MSVC\\\"",
"/DPLATFORM_WINDOWS",
"/DEIGEN_HAS_C99_MATH",
"/DTENSORFLOW_USE_EIGEN_THREADPOOL",
],
"//tensorflow:ios": ["-std=c++11"],
"//conditions:default": ["-pthread"]}))
def tf_opts_nortti_if_android():
return if_android([
"-fno-rtti",
"-DGOOGLE_PROTOBUF_NO_RTTI",
"-DGOOGLE_PROTOBUF_NO_STATIC_INITIALIZER",
])
# Given a list of "op_lib_names" (a list of files in the ops directory
# without their .cc extensions), generate a library for that file.
def tf_gen_op_libs(op_lib_names, deps=None):
# Make library out of each op so it can also be used to generate wrappers
# for various languages.
if not deps:
deps = []
for n in op_lib_names:
native.cc_library(name=n + "_op_lib",
copts=tf_copts(),
srcs=["ops/" + n + ".cc"],
deps=deps + ["//tensorflow/core:framework"],
visibility=["//visibility:public"],
alwayslink=1,
linkstatic=1,)
def tf_gen_op_wrapper_cc(name, out_ops_file, pkg="",
op_gen="//tensorflow/cc:cc_op_gen_main"):
# Construct an op generator binary for these ops.
tool = out_ops_file + "_gen_cc"
native.cc_binary(
name = tool,
copts = tf_copts(),
linkopts = ["-lm"],
linkstatic = 1, # Faster to link this one-time-use binary dynamically
deps = ([op_gen, pkg + ":" + name + "_op_lib"])
)
# Run the op generator.
if name == "sendrecv_ops" or name == "function_ops":
include_internal = "1"
else:
include_internal = "0"
native.genrule(
name=name + "_genrule",
outs=[out_ops_file + ".h", out_ops_file + ".cc"],
tools=[":" + tool],
cmd=("$(location :" + tool + ") $(location :" + out_ops_file + ".h) " +
"$(location :" + out_ops_file + ".cc) " + include_internal))
# Given a list of "op_lib_names" (a list of files in the ops directory
# without their .cc extensions), generate individual C++ .cc and .h
# files for each of the ops files mentioned, and then generate a
# single cc_library called "name" that combines all the
# generated C++ code.
#
# For example, for:
# tf_gen_op_wrappers_cc("tf_ops_lib", [ "array_ops", "math_ops" ])
#
#
# This will ultimately generate ops/* files and a library like:
#
# cc_library(name = "tf_ops_lib",
# srcs = [ "ops/array_ops.cc",
# "ops/math_ops.cc" ],
# hdrs = [ "ops/array_ops.h",
# "ops/math_ops.h" ],
# deps = [ ... ])
def tf_gen_op_wrappers_cc(name,
op_lib_names=[],
other_srcs=[],
other_hdrs=[],
pkg="",
deps=[
"//tensorflow/cc:ops",
"//tensorflow/cc:scope",
"//tensorflow/cc:const_op",
],
op_gen="//tensorflow/cc:cc_op_gen_main",
visibility=None):
subsrcs = other_srcs
subhdrs = other_hdrs
for n in op_lib_names:
tf_gen_op_wrapper_cc(n, "ops/" + n, pkg=pkg, op_gen=op_gen)
subsrcs += ["ops/" + n + ".cc"]
subhdrs += ["ops/" + n + ".h"]
native.cc_library(name=name,
srcs=subsrcs,
hdrs=subhdrs,
deps=deps + [
"//tensorflow/core:core_cpu",
"//tensorflow/core:framework",
"//tensorflow/core:lib",
"//tensorflow/core:protos_all_cc",
],
copts=tf_copts(),
alwayslink=1,
visibility=visibility)
# Invoke this rule in .../tensorflow/python to build the wrapper library.
def tf_gen_op_wrapper_py(name, out=None, hidden=None, visibility=None, deps=[],
require_shape_functions=False, hidden_file=None,
generated_target_name=None):
# Construct a cc_binary containing the specified ops.
tool_name = "gen_" + name + "_py_wrappers_cc"
if not deps:
deps = ["//tensorflow/core:" + name + "_op_lib"]
native.cc_binary(
name = tool_name,
linkopts = ["-lm"],
copts = tf_copts(),
linkstatic = 1, # Faster to link this one-time-use binary dynamically
deps = (["//tensorflow/core:framework",
"//tensorflow/python:python_op_gen_main"] + deps),
visibility = ["//tensorflow:internal"],
)
# Invoke the previous cc_binary to generate a python file.
if not out:
out = "ops/gen_" + name + ".py"
if hidden:
# `hidden` is a list of op names to be hidden in the generated module.
native.genrule(
name=name + "_pygenrule",
outs=[out],
tools=[tool_name],
cmd=("$(location " + tool_name + ") " + ",".join(hidden)
+ " " + ("1" if require_shape_functions else "0") + " > $@"))
elif hidden_file:
# `hidden_file` is file containing a list of op names to be hidden in the
# generated module.
native.genrule(
name=name + "_pygenrule",
outs=[out],
srcs=[hidden_file],
tools=[tool_name],
cmd=("$(location " + tool_name + ") @$(location "
+ hidden_file + ") " + ("1" if require_shape_functions else "0")
+ " > $@"))
else:
# No ops should be hidden in the generated module.
native.genrule(
name=name + "_pygenrule",
outs=[out],
tools=[tool_name],
cmd=("$(location " + tool_name + ") "
+ ("1" if require_shape_functions else "0") + " > $@"))
# Make a py_library out of the generated python file.
if not generated_target_name:
generated_target_name = name
native.py_library(name=generated_target_name,
srcs=[out],
srcs_version="PY2AND3",
visibility=visibility,
deps=[
"//tensorflow/python:framework_for_generated_wrappers",
],)
# Define a bazel macro that creates cc_test for tensorflow.
# TODO(opensource): we need to enable this to work around the hidden symbol
# __cudaRegisterFatBinary error. Need more investigations.
def tf_cc_test(name, srcs, deps, linkstatic=0, tags=[], data=[], size="medium",
suffix="", args=None, linkopts=[]):
native.cc_test(name="%s%s" % (name, suffix),
srcs=srcs,
size=size,
args=args,
copts=tf_copts(),
data=data,
deps=deps,
linkopts=["-lpthread", "-lm"] + linkopts,
linkstatic=linkstatic,
tags=tags)
# Part of the testing workflow requires a distinguishable name for the build
# rules that involve a GPU, even if otherwise identical to the base rule.
def tf_cc_test_gpu(name, srcs, deps, linkstatic=0, tags=[], data=[],
size="medium", suffix="", args=None):
tf_cc_test(name, srcs, deps, linkstatic=linkstatic, tags=tags, data=data,
size=size, suffix=suffix, args=args)
def tf_cuda_cc_test(name, srcs, deps, tags=[], data=[], size="medium",
linkstatic=0, args=[], linkopts=[]):
tf_cc_test(name=name,
srcs=srcs,
deps=deps,
tags=tags + ["manual"],
data=data,
size=size,
linkstatic=linkstatic,
linkopts=linkopts,
args=args)
tf_cc_test(name=name,
srcs=srcs,
suffix="_gpu",
deps=deps + if_cuda(["//tensorflow/core:gpu_runtime"]),
linkstatic=if_cuda(1, 0),
tags=tags + tf_cuda_tests_tags(),
data=data,
size=size,
linkopts=linkopts,
args=args)
# Create a cc_test for each of the tensorflow tests listed in "tests"
def tf_cc_tests(srcs, deps, linkstatic=0, tags=[], size="medium",
args=None, linkopts=[]):
for src in srcs:
tf_cc_test(
name=src_to_test_name(src),
srcs=[src],
deps=deps,
linkstatic=linkstatic,
tags=tags,
size=size,
args=args,
linkopts=linkopts)
def tf_cc_tests_gpu(srcs, deps, linkstatic=0, tags=[], size="medium",
args=None):
tf_cc_tests(srcs, deps, linkstatic, tags=tags, size=size, args=args)
def tf_cuda_cc_tests(srcs, deps, tags=[], size="medium", linkstatic=0,
args=None, linkopts=[]):
for src in srcs:
tf_cuda_cc_test(
name=src_to_test_name(src),
srcs=[src],
deps=deps,
tags=tags,
size=size,
linkstatic=linkstatic,
args=args,
linkopts=linkopts)
def _cuda_copts():
"""Gets the appropriate set of copts for (maybe) CUDA compilation.
If we're doing CUDA compilation, returns copts for our particular CUDA
compiler. If we're not doing CUDA compilation, returns an empty list.
"""
common_cuda_opts = ["-x", "cuda", "-DGOOGLE_CUDA=1"]
return select({
"//conditions:default": [],
"@local_config_cuda//cuda:using_nvcc": (
common_cuda_opts +
[
"-nvcc_options=expt-relaxed-constexpr",
"-nvcc_options=ftz=true",
]
),
"@local_config_cuda//cuda:using_clang": (
common_cuda_opts +
[
"-fcuda-flush-denormals-to-zero",
"--cuda-path=external/local_config_cuda/cuda",
"--cuda-gpu-arch=sm_35",
]
),
}) + select({
# Pass -O3 when building CUDA code with clang; some important
# optimizations are not enabled at O2.
"@local_config_cuda//cuda:using_clang_opt": ["-O3"],
"//conditions:default": [],
})
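# Worked example: when "@local_config_cuda//cuda:using_nvcc" is active, the
# first select() yields ["-x", "cuda", "-DGOOGLE_CUDA=1",
# "-nvcc_options=expt-relaxed-constexpr", "-nvcc_options=ftz=true"]; the second
# select() only adds "-O3" under ":using_clang_opt", so nvcc builds get no
# extra optimization flag from here.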
# Build defs for TensorFlow kernels
# When this target is built using --config=cuda, a cc_library is built
# that passes -DGOOGLE_CUDA=1 and '-x cuda', linking in additional
# libraries needed by GPU kernels.
def tf_gpu_kernel_library(srcs, copts=[], cuda_copts=[], deps=[], hdrs=[],
**kwargs):
copts = copts + _cuda_copts() + if_cuda(cuda_copts) + tf_copts()
native.cc_library(
srcs = srcs,
hdrs = hdrs,
copts = copts,
deps = deps + if_cuda([
"//tensorflow/core:cuda",
"//tensorflow/core:gpu_lib",
]),
alwayslink=1,
**kwargs)
def tf_cuda_library(deps=None, cuda_deps=None, copts=None, **kwargs):
"""Generate a cc_library with a conditional set of CUDA dependencies.
When the library is built with --config=cuda:
- both deps and cuda_deps are used as dependencies
- the cuda runtime is added as a dependency (if necessary)
- The library additionally passes -DGOOGLE_CUDA=1 to the list of copts
Args:
- cuda_deps: BUILD dependencies which will be linked if and only if:
'--config=cuda' is passed to the bazel command line.
- deps: dependencies which will always be linked.
- copts: copts always passed to the cc_library.
- kwargs: Any other argument to cc_library.
"""
if not deps:
deps = []
if not cuda_deps:
cuda_deps = []
if not copts:
copts = []
native.cc_library(
deps = deps + if_cuda(cuda_deps + ["//tensorflow/core:cuda"]),
copts = copts + if_cuda(["-DGOOGLE_CUDA=1"]),
**kwargs)
def tf_kernel_library(name, prefix=None, srcs=None, gpu_srcs=None, hdrs=None,
deps=None, alwayslink=1, **kwargs):
"""A rule to build a TensorFlow OpKernel.
May either specify srcs/hdrs or prefix. Similar to tf_cuda_library,
but with alwayslink=1 by default. If prefix is specified:
* prefix*.cc (except *.cu.cc) is added to srcs
* prefix*.h (except *.cu.h) is added to hdrs
* prefix*.cu.cc and prefix*.h (including *.cu.h) are added to gpu_srcs.
With the exception that test files are excluded.
For example, with prefix = "cast_op",
* srcs = ["cast_op.cc"]
* hdrs = ["cast_op.h"]
* gpu_srcs = ["cast_op_gpu.cu.cc", "cast_op.h"]
* "cast_op_test.cc" is excluded
With prefix = "cwise_op"
* srcs = ["cwise_op_abs.cc", ..., "cwise_op_tanh.cc"],
* hdrs = ["cwise_ops.h", "cwise_ops_common.h"],
* gpu_srcs = ["cwise_op_gpu_abs.cu.cc", ..., "cwise_op_gpu_tanh.cu.cc",
"cwise_ops.h", "cwise_ops_common.h",
"cwise_ops_gpu_common.cu.h"]
* "cwise_ops_test.cc" is excluded
"""
if not srcs:
srcs = []
if not hdrs:
hdrs = []
if not deps:
deps = []
if prefix:
if native.glob([prefix + "*.cu.cc"], exclude = ["*test*"]):
if not gpu_srcs:
gpu_srcs = []
gpu_srcs = gpu_srcs + native.glob([prefix + "*.cu.cc", prefix + "*.h"],
exclude = ["*test*"])
srcs = srcs + native.glob([prefix + "*.cc"],
exclude = ["*test*", "*.cu.cc"])
hdrs = hdrs + native.glob([prefix + "*.h"], exclude = ["*test*", "*.cu.h"])
cuda_deps = ["//tensorflow/core:gpu_lib"]
if gpu_srcs:
for gpu_src in gpu_srcs:
if gpu_src.endswith(".cc") and not gpu_src.endswith(".cu.cc"):
fail("{} not allowed in gpu_srcs. .cc sources must end with .cu.cc".format(gpu_src))
tf_gpu_kernel_library(
name = name + "_gpu",
srcs = gpu_srcs,
deps = deps,
**kwargs)
cuda_deps.extend([":" + name + "_gpu"])
tf_cuda_library(
name = name,
srcs = srcs,
hdrs = hdrs,
copts = tf_copts(),
cuda_deps = cuda_deps,
linkstatic = 1, # Needed since alwayslink is broken in bazel b/27630669
alwayslink = alwayslink,
deps = deps,
**kwargs)
def tf_kernel_libraries(name, prefixes, deps=None, libs=None, **kwargs):
"""Makes one target per prefix, and one target that includes them all.
Args:
name: The name of the omnibus cc_library target that depends on each
generated tf_kernel_library target.
prefixes: A list of source file name prefixes used to generate individual
libraries. See the definition of tf_kernel_library for details.
deps: The dependencies list associated with each generated target.
libs: Additional tf_kernel_library targets that should be included in the
omnibus cc_library target but not as deps of individual libraries.
This can be used, for example, if a library that was previously
generated by this rule is refactored into a separate definition
in order to specify more or fewer deps for it.
Other attributes are forwarded to each individual target but not to the
omnibus cc_library target.
"""
for p in prefixes:
tf_kernel_library(name=p, prefix=p, deps=deps, **kwargs)
native.cc_library(name=name,
deps=[":" + p for p in prefixes] + (libs or []))
# Bazel rules for building swig files.
def _py_wrap_cc_impl(ctx):
srcs = ctx.files.srcs
if len(srcs) != 1:
fail("Exactly one SWIG source file label must be specified.", "srcs")
module_name = ctx.attr.module_name
src = ctx.files.srcs[0]
inputs = set([src])
inputs += ctx.files.swig_includes
for dep in ctx.attr.deps:
inputs += dep.cc.transitive_headers
inputs += ctx.files._swiglib
swig_include_dirs = set(_get_repository_roots(ctx, inputs))
swig_include_dirs += sorted([f.dirname for f in ctx.files._swiglib])
args = ["-c++",
"-python",
"-module", module_name,
"-o", ctx.outputs.cc_out.path,
"-outdir", ctx.outputs.py_out.dirname]
args += ["-l" + f.path for f in ctx.files.swig_includes]
args += ["-I" + i for i in swig_include_dirs]
args += [src.path]
outputs = [ctx.outputs.cc_out,
ctx.outputs.py_out]
ctx.action(executable=ctx.executable._swig,
arguments=args,
inputs=list(inputs),
outputs=outputs,
mnemonic="PythonSwig",
progress_message="SWIGing " + src.path)
return struct(files=set(outputs))
_py_wrap_cc = rule(
attrs = {
"srcs": attr.label_list(
mandatory = True,
allow_files = True,
),
"swig_includes": attr.label_list(
cfg = "data",
allow_files = True,
),
"deps": attr.label_list(
allow_files = True,
providers = ["cc"],
),
"module_name": attr.string(mandatory = True),
"py_module_name": attr.string(mandatory = True),
"_swig": attr.label(
default = Label("@swig//:swig"),
executable = True,
cfg = "host",
),
"_swiglib": attr.label(
default = Label("@swig//:templates"),
allow_files = True,
),
},
outputs = {
"cc_out": "%{module_name}.cc",
"py_out": "%{py_module_name}.py",
},
implementation = _py_wrap_cc_impl,
)
def _get_repository_roots(ctx, files):
"""Returns abnormal root directories under which files reside.
When running a ctx.action, source files within the main repository are all
relative to the current directory; however, files that are generated or exist
in remote repositories will have their root directory be a subdirectory,
e.g. bazel-out/local-fastbuild/genfiles/external/jpeg_archive. This function
returns the set of these devious directories, ranked and sorted by popularity
in order to hopefully minimize the number of I/O system calls within the
compiler, because includes have quadratic complexity.
"""
result = {}
for f in files:
root = f.root.path
if root:
if root not in result:
result[root] = 0
result[root] -= 1
work = f.owner.workspace_root
if work:
if root:
root += "/"
root += work
if root:
if root not in result:
result[root] = 0
result[root] -= 1
return [k for v, k in sorted([(v, k) for k, v in result.items()])]
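# Worked example: with four generated files whose root is
# "bazel-out/local-fastbuild/genfiles", one of which also carries the
# workspace_root "external/jpeg_archive", the plain root accumulates a count of
# -4 and "bazel-out/local-fastbuild/genfiles/external/jpeg_archive" a count of
# -1, so sorting the (count, root) pairs returns the most used root first.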
# Bazel rule for collecting the header files that a target depends on.
def _transitive_hdrs_impl(ctx):
outputs = set()
for dep in ctx.attr.deps:
outputs += dep.cc.transitive_headers
return struct(files=outputs)
_transitive_hdrs = rule(
attrs = {
"deps": attr.label_list(
allow_files = True,
providers = ["cc"],
),
},
implementation = _transitive_hdrs_impl,
)
def transitive_hdrs(name, deps=[], **kwargs):
_transitive_hdrs(name=name + "_gather",
deps=deps)
native.filegroup(name=name,
srcs=[":" + name + "_gather"])
# Create a header only library that includes all the headers exported by
# the libraries in deps.
def cc_header_only_library(name, deps=[], **kwargs):
_transitive_hdrs(name=name + "_gather",
deps=deps)
native.cc_library(name=name,
hdrs=[":" + name + "_gather"],
**kwargs)
def tf_custom_op_library_additional_deps():
return [
"@protobuf//:protobuf",
"//third_party/eigen3",
"//tensorflow/core:framework_headers_lib",
]
# Traverse the dependency graph along the "deps" attribute of the
# target and return a struct with one field called 'tf_collected_deps'.
# tf_collected_deps will be the union of the deps of the current target
# and the tf_collected_deps of the dependencies of this target.
def _collect_deps_aspect_impl(target, ctx):
alldeps = set()
if hasattr(ctx.rule.attr, "deps"):
for dep in ctx.rule.attr.deps:
alldeps = alldeps | set([dep.label])
if hasattr(dep, "tf_collected_deps"):
alldeps = alldeps | dep.tf_collected_deps
return struct(tf_collected_deps=alldeps)
collect_deps_aspect = aspect(
implementation=_collect_deps_aspect_impl,
attr_aspects=["deps"])
def _dep_label(dep):
label = dep.label
return label.package + ":" + label.name
# This rule checks that the transitive dependencies of targets listed
# in the 'deps' attribute don't depend on the targets listed in
# the 'disallowed_deps' attribute.
def _check_deps_impl(ctx):
disallowed_deps = ctx.attr.disallowed_deps
for input_dep in ctx.attr.deps:
if not hasattr(input_dep, "tf_collected_deps"):
continue
for dep in input_dep.tf_collected_deps:
for disallowed_dep in disallowed_deps:
if dep == disallowed_dep.label:
fail(_dep_label(input_dep) + " cannot depend on " +
_dep_label(disallowed_dep))
return struct()
check_deps = rule(
_check_deps_impl,
attrs = {
"deps": attr.label_list(
aspects=[collect_deps_aspect],
mandatory = True,
allow_files = True
),
"disallowed_deps": attr.label_list(
mandatory = True,
allow_files = True
)},
)
# Helper to build a dynamic library (.so) from the sources containing
# implementations of custom ops and kernels.
def tf_custom_op_library(name, srcs=[], gpu_srcs=[], deps=[]):
cuda_deps = [
"//tensorflow/core:stream_executor_headers_lib",
"@local_config_cuda//cuda:cudart_static",
]
deps = deps + tf_custom_op_library_additional_deps()
if gpu_srcs:
basename = name.split(".")[0]
native.cc_library(
name = basename + "_gpu",
srcs = gpu_srcs,
copts = _cuda_copts(),
deps = deps + if_cuda(cuda_deps))
cuda_deps.extend([":" + basename + "_gpu"])
check_deps(name=name+"_check_deps",
deps=deps + if_cuda(cuda_deps),
disallowed_deps=["//tensorflow/core:framework",
"//tensorflow/core:lib"])
native.cc_binary(name=name,
srcs=srcs,
deps=deps + if_cuda(cuda_deps),
data=[name + "_check_deps"],
copts=tf_copts(),
linkshared=1,
linkopts = select({
"//conditions:default": [
"-lm",
],
"//tensorflow:darwin": [],
}),
)
def tf_extension_linkopts():
return [{{ tf_extension_linkopts }}] # No extension link opts
def tf_extension_copts():
return [] # No extension c opts
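# Note: this file is a jinja2 template; the [template-tensorflow-bzl] section of
# the profile renders the tf_extension_linkopts placeholder above as
# '-lrt', '-Wl,-rpath=${gcc:location}/lib64', so the rendered
# tf_extension_linkopts() returns those two flags rather than an empty list,
# while tf_extension_copts() stays empty.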
def tf_py_wrap_cc(name, srcs, swig_includes=[], deps=[], copts=[], **kwargs):
module_name = name.split("/")[-1]
# Convert a rule name such as foo/bar/baz to foo/bar/_baz.so
# and use that as the name for the rule producing the .so file.
cc_library_name = "/".join(name.split("/")[:-1] + ["_" + module_name + ".so"])
cc_library_pyd_name = "/".join(name.split("/")[:-1] + ["_" + module_name + ".pyd"])
extra_deps = []
_py_wrap_cc(name=name + "_py_wrap",
srcs=srcs,
swig_includes=swig_includes,
deps=deps + extra_deps,
module_name=module_name,
py_module_name=name)
extra_linkopts = select({
"@local_config_cuda//cuda:darwin": [
"-Wl,-exported_symbols_list",
"//tensorflow:tf_exported_symbols.lds"
],
"//tensorflow:windows": [
],
"//conditions:default": [
"-Wl,--version-script",
"//tensorflow:tf_version_script.lds"
]})
extra_deps += select({
"@local_config_cuda//cuda:darwin": [
"//tensorflow:tf_exported_symbols.lds"
],
"//tensorflow:windows": [
],
"//conditions:default": [
"//tensorflow:tf_version_script.lds"
]
})
native.cc_binary(
name=cc_library_name,
srcs=[module_name + ".cc"],
copts=(copts + ["-Wno-self-assign",
"-Wno-sign-compare",
"-Wno-write-strings"]
+ tf_extension_copts()),
linkopts=tf_extension_linkopts() + extra_linkopts,
linkstatic=1,
linkshared=1,
deps=deps + extra_deps)
native.genrule(
name = "gen_" + cc_library_pyd_name,
srcs = [":" + cc_library_name],
outs = [cc_library_pyd_name],
cmd = "cp $< $@",
)
native.py_library(name=name,
srcs=[":" + name + ".py"],
srcs_version="PY2AND3",
data=select({
"//tensorflow:windows": [":" + cc_library_pyd_name],
"//conditions:default": [":" + cc_library_name],
}))
def tf_py_test(name, srcs, size="medium", data=[], main=None, args=[],
tags=[], shard_count=1, additional_deps=[], flaky=0):
native.py_test(
name=name,
size=size,
srcs=srcs,
main=main,
args=args,
tags=tags,
visibility=["//tensorflow:internal"],
shard_count=shard_count,
data=data,
deps=[
"//tensorflow/python:extra_py_tests_deps",
"//tensorflow/python:gradient_checker",
] + additional_deps,
flaky=flaky,
srcs_version="PY2AND3")
def cuda_py_test(name, srcs, size="medium", data=[], main=None, args=[],
shard_count=1, additional_deps=[], tags=[], flaky=0):
test_tags = tags + tf_cuda_tests_tags()
tf_py_test(name=name,
size=size,
srcs=srcs,
data=data,
main=main,
args=args,
tags=test_tags,
shard_count=shard_count,
additional_deps=additional_deps,
flaky=flaky)
def sycl_py_test(name, srcs, size="medium", data=[], main=None, args=[],
shard_count=1, additional_deps=[], tags=[], flaky=0):
test_tags = tags + tf_sycl_tests_tags()
tf_py_test(name=name,
size=size,
srcs=srcs,
data=data,
main=main,
args=args,
tags=test_tags,
shard_count=shard_count,
additional_deps=additional_deps,
flaky=flaky)
def py_tests(name,
srcs,
size="medium",
additional_deps=[],
data=[],
tags=[],
shard_count=1,
prefix=""):
for src in srcs:
test_name = src.split("/")[-1].split(".")[0]
if prefix:
test_name = "%s_%s" % (prefix, test_name)
tf_py_test(name=test_name,
size=size,
srcs=[src],
main=src,
tags=tags,
shard_count=shard_count,
data=data,
additional_deps=additional_deps)
def cuda_py_tests(name, srcs, size="medium", additional_deps=[], data=[],
shard_count=1, tags=[], prefix=""):
test_tags = tags + tf_cuda_tests_tags()
py_tests(name=name, size=size, srcs=srcs, additional_deps=additional_deps,
data=data, tags=test_tags, shard_count=shard_count,prefix=prefix)
# Creates a genrule named <name> for running tools/proto_text's generator to
# make the proto_text functions, for the protos passed in <srcs>.
#
# Return a struct with fields (hdrs, srcs) containing the names of the
# generated files.
def tf_generate_proto_text_sources(name, srcs_relative_dir, srcs):
out_hdrs = ([p.replace(".proto", ".pb_text.h") for p in srcs] +
[p.replace(".proto", ".pb_text-impl.h") for p in srcs])
out_srcs = [p.replace(".proto", ".pb_text.cc") for p in srcs]
native.genrule(
name = name,
srcs = srcs + ["//tensorflow/tools/proto_text:placeholder.txt"],
outs = out_hdrs + out_srcs,
cmd = "$(location //tensorflow/tools/proto_text:gen_proto_text_functions) " +
"$(@D) " + srcs_relative_dir + " $(SRCS)",
tools = ["//tensorflow/tools/proto_text:gen_proto_text_functions"],
)
return struct(hdrs=out_hdrs, srcs=out_srcs)
def tf_genrule_cmd_append_to_srcs(to_append):
return ("cat $(SRCS) > $(@) && " +
"echo >> $(@) && " +
"echo " + to_append + " >> $(@)")
def tf_version_info_genrule():
native.genrule(
name = "version_info_gen",
srcs = [
"//tensorflow/tools/git:gen/spec.json",
"//tensorflow/tools/git:gen/head",
"//tensorflow/tools/git:gen/branch_ref",
],
outs = ["util/version_info.cc"],
cmd = "$(location //tensorflow/tools/git:gen_git_source.py) --generate $(SRCS) \"$@\"",
local = 1,
tools = ["//tensorflow/tools/git:gen_git_source.py"],
)
[buildout]
parts = unzip
[unzip]
recipe = slapos.recipe.cmmi
url = http://downloads.sourceforge.net/project/infozip/UnZip%206.x%20%28latest%29/UnZip%206.0/unzip60.tar.gz
md5sum = 62b490407489521db863b523a7f86375
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-command = true
make-options = prefix=${:location} NO_BZIP2_SUPPORT=1 -f unix/Makefile unzips
[buildout]
extends =
../perl/buildout.cfg
../xz-utils/buildout.cfg
[util-linux]
recipe = slapos.recipe.cmmi
url = http://www.kernel.org/pub/linux/utils/util-linux/v2.29/util-linux-2.29.tar.xz
md5sum = 07b6845f48a421ad5844aa9d58edb837
configure-options =
--disable-static
--enable-libuuid
--disable-agetty
--disable-bash-completion
--disable-cal
--disable-cramfs
--disable-fallocate
--disable-fsck
--enable-libblkid
--disable-libfdisk
--enable-libmount
--disable-makeinstall-chown
--disable-makeinstall-setuid
--disable-more
--disable-mount
--disable-nls
--disable-pivot_root
--disable-pylibmount
--disable-rename
--disable-schedutils
--disable-setterm
--disable-libsmartcols
--disable-switch_root
--disable-tinfo
--disable-tls
--disable-ul
--disable-unshare
--disable-uuidd
--disable-wall
--without-libiconv-prefix
--without-libintl-prefix
--without-ncurses
--without-slang
--without-selinux
--without-audit
environment =
PATH=${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
 [buildout]
 extends =
+  ../freetype/buildout.cfg
   ../fontconfig/buildout.cfg
+  ../libpng/buildout.cfg
   ../tar/buildout.cfg
   ../xorg/buildout.cfg
+  ../xz-utils/buildout.cfg
+  ../zlib/buildout.cfg
 parts =
-  tar
   wkhtmltopdf
 [wkhtmltopdf]
@@ -25,6 +28,7 @@ script =
   location = %(location)r
   self.failIfPathExists(location)
   import sys
+  import os
   ARCH_DIR_MAP = { 'x86': 'x86', 'x86-64': 'x86_64' }
   WK_SUFIX_MAP = { 'x86': 'i386', 'x86-64': 'amd64' }
   platform = guessPlatform()
@@ -35,12 +39,15 @@ script =
   extract_dir = tempfile.mkdtemp(self.name)
   self.cleanup_dir_list.append(extract_dir)
   self.logger.debug('Created working directory ' + repr(extract_dir))
-  call(["${tar:location}/bin/tar", "xJf", path, "-C", extract_dir])
+  env = os.environ.copy()
+  env["PATH"] = "${tar:location}/bin:${xz-utils:location}/bin" + (":" + env["PATH"] if env.get("PATH") else "")
+  env["LD_LIBRARY_PATH"] = "${xz-utils:location}/lib" + (":" + env["LD_LIBRARY_PATH"] if env.get("LD_LIBRARY_PATH") else "")
+  call(["tar", "xJf", path, "-C", extract_dir], env=env)
   shutil.move(os.path.join(extract_dir, "wkhtmltox"), location)
   wrapper_location = os.path.join("%(location)s", "wkhtmltopdf")
   wrapper = open(wrapper_location, 'w')
   wrapper.write("""#!${dash:location}/bin/dash
-  export LD_LIBRARY_PATH=%(location)s/lib/:${libXrender:location}/lib/:${fontconfig:location}/lib/:${libX11:location}/lib/:${libXext:location}/lib/
+  export LD_LIBRARY_PATH=%(location)s/lib:${fontconfig:location}/lib:${freetype:location}/lib:${libX11:location}/lib:${libXext:location}/lib:${libXrender:location}/lib:${libpng12:location}/lib:${zlib:location}/lib
   export PATH=${fontconfig:location}/bin:$PATH
   exec %(location)s/bin/wkhtmltopdf "$@"
   """)
@@ -528,3 +528,32 @@ environment =
   CPPFLAGS=-I${bzip2:location}/include -I${gcrypt:location}/include -I${gpg-error:location}/include -I${zlib:location}/include
   PATH=${patch:location}/bin:${pkgconfig:location}/bin:%(PATH)s
   LDFLAGS=-L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib -L${gcrypt:location}/lib -Wl,-rpath=${gcrypt:location}/lib -L${gpg-error:location}/lib -Wl,-rpath=${gpg-error:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
[libXi]
recipe = slapos.recipe.cmmi
url = http://www.x.org/releases/X11R7.7/src/everything/libXi-1.6.1.tar.bz2
md5sum = 78ee882e1ff3b192cf54070bdb19938e
pkg_config_depends = ${inputproto:location}/lib/pkgconfig:${kbproto:location}/lib/pkgconfig:${libX11:location}/lib/pkgconfig:${libXau:location}/lib/pkgconfig:${libXext:location}/lib/pkgconfig:${libxcb:location}/lib/pkgconfig:${xextproto:location}/lib/pkgconfig:${xorg-libpthread-stubs:location}/lib/pkgconfig:${xproto:location}/lib/pkgconfig
environment =
PKG_CONFIG_PATH=${:pkg_config_depends}
PATH=${pkgconfig:location}/bin:%(PATH)s
configure-options =
--disable-docs
--disable-specs
--disable-static
--without-fop
--without-xmlto
[libXtst]
recipe = slapos.recipe.cmmi
url = http://www.x.org/releases/X11R7.7/src/everything/libXtst-1.2.1.tar.bz2
md5sum = e8abc5c00c666f551cf26aa53819d592
configure-options =
--disable-specs
--disable-static
--without-fop
--without-xmlto
--without-xsltproc
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${libXext:location}/lib/pkgconfig:${libXext:pkg_config_depends}:${libXi:location}/lib/pkgconfig:${recordproto:location}/lib/pkgconfig
@@ -28,7 +28,7 @@ from setuptools import setup, find_packages
 import glob
 import os
-version = '1.0.48'
+version = '1.0.49.dev0'
 name = 'slapos.cookbook'
 long_description = open("README.rst").read() + "\n" + \
     open("CHANGES.rst").read() + "\n"
@@ -51,29 +51,29 @@ pycrypto = 2.6.1
 pycurl = 7.43.0
 slapos.recipe.download = 1.0
 slapos.recipe.template = 2.8
-slapos.toolbox = 0.64
+slapos.toolbox = 0.65
 smmap = 0.9.0
 # Required by:
-# slapos.toolbox = 0.64
+# slapos.toolbox = 0.65
 GitPython = 2.0.8
 # Required by:
-# slapos.toolbox = 0.64
+# slapos.toolbox = 0.65
 atomize = 0.2.0
 # Required by:
-# slapos.toolbox = 0.64
+# slapos.toolbox = 0.65
 feedparser = 5.2.1
 # Required by:
-# slapos.toolbox = 0.64
+# slapos.toolbox = 0.65
 lockfile = 0.12.2
 # Required by:
-# slapos.toolbox = 0.64
+# slapos.toolbox = 0.65
 paramiko = 2.0.1
 # Required by:
-# slapos.toolbox = 0.64
+# slapos.toolbox = 0.65
 rpdb = 0.1.5
@@ -13,9 +13,9 @@ ecdsa = 0.13
 gitdb = 0.6.4
 plone.recipe.command = 1.1
 pycrypto = 2.6.1
-rdiff-backup = 1.0.5
+rdiff-backup = 1.0.5+SlapOSPatched001
 slapos.recipe.template = 2.8
-slapos.toolbox = 0.64
+slapos.toolbox = 0.65
 smmap = 0.9.0
 numpy = 1.11.2
@@ -34,29 +34,29 @@ erp5.util = 0.4.46
 passlib = 1.6.5
 # Required by:
-# slapos.toolbox = 0.64
+# slapos.toolbox = 0.65
 GitPython = 2.0.8
 # Required by:
-# slapos.toolbox = 0.64
+# slapos.toolbox = 0.65
 atomize = 0.2.0
 # Required by:
-# slapos.toolbox = 0.64
+# slapos.toolbox = 0.65
 feedparser = 5.2.1
 # Required by:
-# slapos.toolbox = 0.64
+# slapos.toolbox = 0.65
 lockfile = 0.12.2
 # Required by:
-# slapos.toolbox = 0.64
+# slapos.toolbox = 0.65
 paramiko = 2.0.1
 # Required by:
-# slapos.toolbox = 0.64
+# slapos.toolbox = 0.65
 pycurl = 7.43.0
 # Required by:
-# slapos.toolbox = 0.64
+# slapos.toolbox = 0.65
 rpdb = 0.1.5
@@ -117,7 +117,7 @@ output = ${buildout:directory}/template.cfg
 mode = 0644
 [versions]
-rdiff-backup = 1.0.5
+rdiff-backup = 1.0.5+SlapOSPatched001
 gunicorn = 19.1.1
 plone.recipe.command = 1.1
 slapos.recipe.template = 2.4.2
......
...@@ -150,12 +150,10 @@ crl = ...@@ -150,12 +150,10 @@ crl =
{%- endif %} {%- endif %}
[apache-logrotate] [apache-logrotate]
recipe = slapos.cookbook:logrotate.d < = logrotate-entry-base
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
name = apache name = apache
log = ${apache-conf-parameter-dict:error-log} ${apache-conf-parameter-dict:access-log} log = ${apache-conf-parameter-dict:error-log} ${apache-conf-parameter-dict:access-log}
post = {{ bin_directory }}/slapos-kill --pidfile ${apache-conf-parameter-dict:pid-file} -s USR1 post = test ! -s ${apache-conf-parameter-dict:pid-file} || {{ bin_directory }}/slapos-kill --pidfile ${apache-conf-parameter-dict:pid-file} -s USR1
[publish] [publish]
recipe = slapos.cookbook:publish.serialised recipe = slapos.cookbook:publish.serialised
......
...@@ -83,5 +83,5 @@ template-logrotate-base = ${template-logrotate-base:rendered} ...@@ -83,5 +83,5 @@ template-logrotate-base = ${template-logrotate-base:rendered}
[template-cloudooo-instance] [template-cloudooo-instance]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-cloudooo.cfg.in url = ${:_profile_base_location_}/instance-cloudooo.cfg.in
md5sum = 3de0b4732e5132448027c1387a4de3c9 md5sum = e1a4a73aeb8b978e953a3f4f39b98a4d
mode = 640 mode = 640
...@@ -45,7 +45,7 @@ ...@@ -45,7 +45,7 @@
"type": "array" "type": "array"
}, },
"hostalias-dict": { "hostalias-dict": {
"description": "Hostname-to-hostname mapping", "description": "Hostname-to-domain-name mapping",
"default": {}, "default": {},
"additionalProperties": { "additionalProperties": {
"description": "A hostname to which current entry will resolve", "description": "A hostname to which current entry will resolve",
......
...@@ -70,6 +70,11 @@ ...@@ -70,6 +70,11 @@
"minimum": 0, "minimum": 0,
"type": "integer" "type": "integer"
}, },
"innodb-buffer-pool-instances": {
"description": "See MariaDB documentation on innodb_buffer_pool_instances",
"minimum": 1,
"type": "integer"
},
"innodb-log-file-size": { "innodb-log-file-size": {
"description": "See MariaDB documentation on innodb_log_file_size", "description": "See MariaDB documentation on innodb_log_file_size",
"minimum": 0, "minimum": 0,
......
...@@ -33,7 +33,7 @@ ...@@ -33,7 +33,7 @@
}, },
"properties": { "properties": {
"host": { "host": {
"description": "Host name or address of relay, with optional port (ex: '[example.com]:submissionu'). Enclosing hostname with [] prevents MX lookup.", "description": "Host name or address of relay, with optional port (ex: '[example.com]:submission'). Enclosing hostname with [] prevents MX lookup.",
"type": "string" "type": "string"
}, },
"sasl-credential": { "sasl-credential": {
......
...@@ -12,6 +12,7 @@ extends = ...@@ -12,6 +12,7 @@ extends =
../../component/nodejs/buildout.cfg ../../component/nodejs/buildout.cfg
../../component/openssl/buildout.cfg ../../component/openssl/buildout.cfg
../../component/nginx/buildout.cfg ../../component/nginx/buildout.cfg
../../component/zlib/buildout.cfg
# for instance # for instance
../../component/coreutils/buildout.cfg ../../component/coreutils/buildout.cfg
...@@ -203,6 +204,7 @@ make-targets= cd ${git2go-repository:location} ...@@ -203,6 +204,7 @@ make-targets= cd ${git2go-repository:location}
&& go install ${git-backup-repository:go.importpath} && go install ${git-backup-repository:go.importpath}
&& cp -a ${git-backup-repository:location}/contrib/gitlab-backup ${gopath:bin} && cp -a ${git-backup-repository:location}/contrib/gitlab-backup ${gopath:bin}
environment = environment =
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig
PATH=${cmake:location}/bin:${pkgconfig:location}/bin:${git:location}/bin:${golang16:location}/bin:${buildout:bin-directory}:%(PATH)s PATH=${cmake:location}/bin:${pkgconfig:location}/bin:${git:location}/bin:${golang16:location}/bin:${buildout:bin-directory}:%(PATH)s
GOPATH=${gopath:directory} GOPATH=${gopath:directory}
......
...@@ -71,7 +71,7 @@ apache-libcloud = 0.12.3 ...@@ -71,7 +71,7 @@ apache-libcloud = 0.12.3
async = 0.6.1 async = 0.6.1
gitdb = 0.5.4 gitdb = 0.5.4
pycrypto = 2.6 pycrypto = 2.6
rdiff-backup = 1.0.5 rdiff-backup = 1.0.5+SlapOSPatched001
slapos.recipe.template = 2.4.2 slapos.recipe.template = 2.4.2
slapos.toolbox = 0.40.4 slapos.toolbox = 0.40.4
smmap = 0.8.2 smmap = 0.8.2
......
...@@ -7,6 +7,7 @@ extends = ...@@ -7,6 +7,7 @@ extends =
../../component/scikit-learn/buildout.cfg ../../component/scikit-learn/buildout.cfg
../../component/pandas/buildout.cfg ../../component/pandas/buildout.cfg
../../component/openssl/buildout.cfg ../../component/openssl/buildout.cfg
../../component/seaborn/buildout.cfg
parts = parts =
slapos-cookbook slapos-cookbook
ipython-notebook ipython-notebook
...@@ -22,6 +23,10 @@ eggs += ...@@ -22,6 +23,10 @@ eggs +=
${pandas:egg} ${pandas:egg}
${scikit-learn:egg} ${scikit-learn:egg}
${seaborn:egg}
${statsmodels:egg}
patsy
[download-file-base] [download-file-base]
recipe = hexagonit.recipe.download recipe = hexagonit.recipe.download
ignore-existing = true ignore-existing = true
...@@ -43,7 +48,7 @@ md5sum = d7d4a7e19d55bf14007819258bf42100 ...@@ -43,7 +48,7 @@ md5sum = d7d4a7e19d55bf14007819258bf42100
[erp5-kernel] [erp5-kernel]
<= download-file-base <= download-file-base
filename = ERP5kernel.py.jinja filename = ERP5kernel.py.jinja
md5sum = 24308ca010532863dbcf501f011f9846 md5sum = cbd35bbe54b66e9b2f73487ecdbc6976
[kernel-json] [kernel-json]
<= download-file-base <= download-file-base
......
...@@ -215,6 +215,8 @@ class ERP5Kernel(Kernel): ...@@ -215,6 +215,8 @@ class ERP5Kernel(Kernel):
code = code.strip() code = code.strip()
extra_data_list = [] extra_data_list = []
print_result = {}
displayhook_result = {}
if code.startswith('%'): if code.startswith('%'):
# No need to try-catch here as its already been taken that the code # No need to try-catch here as its already been taken that the code
...@@ -270,12 +272,17 @@ class ERP5Kernel(Kernel): ...@@ -270,12 +272,17 @@ class ERP5Kernel(Kernel):
# "evalue": null, # "evalue": null,
# "traceback": null, # "traceback": null,
# "code_result": "", # "code_result": "",
# "mime_type": "text/plain" # "print_result": {},
# "displayhook_result": {},
# "mime_type": "text/plain",
# "extra_data_list": []
# } # }
# So, we can easily use any of the key to update values as such. # So, we can easily use any of the key to update values as such.
# Getting code_result for successful execution of code # Getting code_result for successful execution of code
code_result = content['code_result'] code_result = content['code_result']
print_result = content['print_result']
displayhook_result = content['displayhook_result']
# Update mime_type with the mime_type from the http response result # Update mime_type with the mime_type from the http response result
# Required in case the mime_type is anything other than 'text/plain' # Required in case the mime_type is anything other than 'text/plain'
...@@ -297,21 +304,26 @@ class ERP5Kernel(Kernel): ...@@ -297,21 +304,26 @@ class ERP5Kernel(Kernel):
except ValueError: except ValueError:
content = self.response content = self.response
code_result = content code_result = content
print_result = {'data':{'text/plain':content}, 'metadata':{}}
# Display basic error message to frontend in case of error on server side # Display basic error message to frontend in case of error on server side
else: else:
self.make_erp5_request(code=code) self.make_erp5_request(code=code)
code_result = "Error at Server Side" code_result = "Error at Server Side"
print_result = {'data':{'text/plain':'Error at Server Side'}, 'metadata':{}}
mime_type = 'text/plain' mime_type = 'text/plain'
# For all status_code except allowed_HTTP_response_code_list show unauthorized message # For all status_code except allowed_HTTP_response_code_list show unauthorized message
else: else:
code_result = 'Unauthorized access' code_result = 'Unauthorized access'
print_result = {'data':{'text/plain':'Unauthorized access'}, 'metadata':{}}
mime_type = 'text/plain' mime_type = 'text/plain'
data = { if print_result.get('data'):
'data': {mime_type: code_result}, self.send_response(self.iopub_socket, 'display_data', print_result)
'metadata': {}}
self.send_response(self.iopub_socket, 'display_data', data) if displayhook_result.get('data'):
displayhook_result['execution_count'] = self.execution_count
self.send_response(self.iopub_socket, 'execute_result', displayhook_result)
for extra_data in extra_data_list: for extra_data in extra_data_list:
self.send_response(self.iopub_socket, 'display_data', extra_data) self.send_response(self.iopub_socket, 'display_data', extra_data)
......
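The hunk above splits the kernel's output handling in two: print-style output from the server is forwarded as a 'display_data' message, while the value produced by the displayhook is forwarded as 'execute_result' together with the current execution_count. The following is a minimal, generic ipykernel sketch of that distinction; it is not the ERP5 kernel itself, and the ToyKernel class and its strings are illustrative only.

# Minimal generic sketch (not the ERP5 kernel): a toy ipykernel-based kernel
# showing the difference between 'display_data' (print-style output, no counter)
# and 'execute_result' (the evaluated value, tagged with execution_count), i.e.
# the split introduced above via print_result / displayhook_result.
from ipykernel.kernelbase import Kernel

class ToyKernel(Kernel):
    implementation = 'toy'
    implementation_version = '0.1'
    language_info = {'name': 'python', 'mimetype': 'text/plain',
                     'file_extension': '.py'}
    banner = 'Toy kernel'

    def do_execute(self, code, silent, store_history=True,
                   user_expressions=None, allow_stdin=False):
        if not silent:
            # Equivalent of print_result: plain output, no In/Out numbering.
            self.send_response(self.iopub_socket, 'display_data',
                               {'data': {'text/plain': 'you typed: ' + code},
                                'metadata': {}})
            # Equivalent of displayhook_result: the "Out[n]" value.
            self.send_response(self.iopub_socket, 'execute_result',
                               {'data': {'text/plain': repr(code.strip())},
                                'metadata': {},
                                'execution_count': self.execution_count})
        return {'status': 'ok', 'execution_count': self.execution_count,
                'payload': [], 'user_expressions': {}}

if __name__ == '__main__':
    from ipykernel.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=ToyKernel)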
...@@ -38,15 +38,6 @@ output = $${directory:bin}/runTestSuite ...@@ -38,15 +38,6 @@ output = $${directory:bin}/runTestSuite
buildout-directory = $${buildout:directory} buildout-directory = $${buildout:directory}
mode = 0700 mode = 0700
[firefox-instance]
recipe = slapos.cookbook:firefox
executable = firefox
runner-path = $${directory:bin}/$${:executable}
firefox-path = ${firefox:location}/firefox-slapos
prefsjs-path = $${directory:etc}/prefs.js
shell-path = ${dash:location}/bin/dash
tmp-path = $${xvfb-instance:tmp-path}
[xvfb-instance] [xvfb-instance]
recipe = slapos.cookbook:xvfb recipe = slapos.cookbook:xvfb
runner-path = $${directory:services}/xvfb runner-path = $${directory:services}/xvfb
......
...@@ -13,11 +13,9 @@ from selenium.webdriver.common.by import By ...@@ -13,11 +13,9 @@ from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.support import expected_conditions as EC
# Put a 'firefox' executable in the path os.environ['TMPDIR'] = '$${xvfb-instance:tmp-path}'
# otherwise, WebDriver refuses to start os.environ['DISPLAY'] = ':0'
os.environ['PATH'] = '$${directory:bin}' + os.pathsep + os.environ['PATH']
FIREFOX_EXECUTABLE = '$${firefox-instance:executable}'
BASE_URL = 'http://[$${nginx-configuration:ip}]:$${nginx-configuration:port}/' BASE_URL = 'http://[$${nginx-configuration:ip}]:$${nginx-configuration:port}/'
def main(): def main():
...@@ -91,8 +89,11 @@ def main(): ...@@ -91,8 +89,11 @@ def main():
is_appium = False is_appium = False
if args.target == 'firefox': if args.target == 'firefox':
firefox_binary = webdriver.firefox.firefox_binary.FirefoxBinary(firefox_path=FIREFOX_EXECUTABLE) firefox_capabilities = webdriver.common.desired_capabilities.DesiredCapabilities.FIREFOX
browser = webdriver.Firefox(firefox_binary=firefox_binary) firefox_capabilities['marionette'] = True
firefox_capabilities['binary'] = '${firefox:location}/firefox-slapos'
browser = webdriver.Firefox(capabilities=firefox_capabilities,
executable_path='${firefox:location}/geckodriver')
elif args.target in ['iOS', 'Android']: elif args.target in ['iOS', 'Android']:
# parameters for mobile emulators have different names than parameters for # parameters for mobile emulators have different names than parameters for
# desktop OSes # desktop OSes
......
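The runTestSuite change above stops wrapping Firefox in a slapos.cookbook:firefox runner and instead drives it through geckodriver with Marionette capabilities. As a standalone illustration of that Selenium 3 call pattern (the Firefox binary, geckodriver path and URL below are placeholders, not the ${firefox:location} and nginx values substituted by buildout):

# Standalone sketch of the Selenium 3 / Marionette pattern used above; the
# binary path, geckodriver path and URL are placeholders.
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities

firefox_capabilities = DesiredCapabilities.FIREFOX.copy()
firefox_capabilities['marionette'] = True
firefox_capabilities['binary'] = '/opt/firefox/firefox-slapos'            # placeholder

browser = webdriver.Firefox(capabilities=firefox_capabilities,
                            executable_path='/opt/firefox/geckodriver')   # placeholder
try:
    browser.get('http://[::1]:9443/')                                     # placeholder
    print(browser.title)
finally:
    browser.quit()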
...@@ -25,7 +25,7 @@ parts = ...@@ -25,7 +25,7 @@ parts =
[instance] [instance]
recipe = slapos.recipe.template recipe = slapos.recipe.template
md5sum = 929a2b6cf6bb16e22e49984563547ca9 md5sum = 7c907db5f803b03a218b49888a3a3799
url = ${:_profile_base_location_}/instance.cfg.in url = ${:_profile_base_location_}/instance.cfg.in
output = ${buildout:directory}/instance.cfg output = ${buildout:directory}/instance.cfg
mode = 0644 mode = 0644
...@@ -107,7 +107,7 @@ mode = 0644 ...@@ -107,7 +107,7 @@ mode = 0644
[template-runTestSuite] [template-runTestSuite]
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/runTestSuite.in url = ${:_profile_base_location_}/runTestSuite.in
md5sum = ef4118cb653838bf5c875c6fcac1677f md5sum = fcf15b2a90340e0afe8f8b9921a4ffae
output = ${buildout:directory}/runTestSuite.in output = ${buildout:directory}/runTestSuite.in
mode = 0644 mode = 0644
......
...@@ -40,5 +40,5 @@ output = ${buildout:directory}/template.cfg ...@@ -40,5 +40,5 @@ output = ${buildout:directory}/template.cfg
mode = 0644 mode = 0644
[versions] [versions]
rdiff-backup = 1.0.5 rdiff-backup = 1.0.5+SlapOSPatched001
slapos.recipe.template = 2.4.2 slapos.recipe.template = 2.4.2
...@@ -5,7 +5,7 @@ extends = common.cfg ...@@ -5,7 +5,7 @@ extends = common.cfg
# XXX - use websockify = 0.5.1 for compatibility with kvm frontend # XXX - use websockify = 0.5.1 for compatibility with kvm frontend
websockify = 0.5.1 websockify = 0.5.1
slapos.toolbox = 0.64 slapos.toolbox = 0.65
erp5.util = 0.4.46 erp5.util = 0.4.46
apache-libcloud = 1.1.0 apache-libcloud = 1.1.0
collective.recipe.environment = 0.2.0 collective.recipe.environment = 0.2.0
......
...@@ -44,5 +44,5 @@ output = ${buildout:directory}/template.cfg ...@@ -44,5 +44,5 @@ output = ${buildout:directory}/template.cfg
mode = 0644 mode = 0644
[versions] [versions]
rdiff-backup = 1.0.5 rdiff-backup = 1.0.5+SlapOSPatched001
slapos.recipe.template = 2.2 slapos.recipe.template = 2.2
...@@ -22,9 +22,7 @@ port = ${neo-admin:port} ...@@ -22,9 +22,7 @@ port = ${neo-admin:port}
path = ${directory:promises}/neo-admin-promise path = ${directory:promises}/neo-admin-promise
[logrotate-admin] [logrotate-admin]
recipe = slapos.cookbook:logrotate.d < = logrotate-entry-base
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
name = neo-admin name = neo-admin
log = ${neo-admin:logfile} log = ${neo-admin:logfile}
post = {{ bin_directory }}/slapos-kill -n neoadmin -s RTMIN+1 ${:log} post = {{ bin_directory }}/slapos-kill -n neoadmin -s RTMIN+1 ${:log}
......
...@@ -27,7 +27,7 @@ ...@@ -27,7 +27,7 @@
"type": "string" "type": "string"
}, },
"sla-dict": { "sla-dict": {
"description": "[NEO SR only] Where to request instances. Each key is a query string for criterions (e.g. \"computer_guid=foo\"), and each value is a list of partition references ('node-0', 'node-1', ...).", "description": "[NEO SR only] Where to request instances. Each key is a query string for criterions (e.g. \"computer_guid=foo\"), and each value is a list of partition references ('node-0', 'node-1', ...). The prefix 'node-' is mandatory and the number must start from 0. The total number of nodes here must be equal to the length of node-list.",
"additionalProperties": { "additionalProperties": {
"type": "array", "type": "array",
"items": { "type": "string" }, "items": { "type": "string" },
......
...@@ -30,9 +30,7 @@ port = ${neo-master:port} ...@@ -30,9 +30,7 @@ port = ${neo-master:port}
path = ${directory:promises}/neo-master-promise path = ${directory:promises}/neo-master-promise
[logrotate-master] [logrotate-master]
recipe = slapos.cookbook:logrotate.d < = logrotate-entry-base
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
name = neo-master name = neo-master
log = ${neo-master:logfile} log = ${neo-master:logfile}
post = {{ bin_directory }}/slapos-kill -n neomaster -s RTMIN+1 ${:log} post = {{ bin_directory }}/slapos-kill -n neomaster -s RTMIN+1 ${:log}
...@@ -88,11 +88,6 @@ database-adapter = MySQL ...@@ -88,11 +88,6 @@ database-adapter = MySQL
wait-database = 60 wait-database = 60
engine = {{ slapparameter_dict.get('engine', '') }} engine = {{ slapparameter_dict.get('engine', '') }}
[logrotate-storage]
recipe = slapos.cookbook:logrotate.d
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
{% for i in range(slapparameter_dict.get('storage-count', 1)) -%} {% for i in range(slapparameter_dict.get('storage-count', 1)) -%}
{% set storage_id = 'neo-storage-' ~ i -%} {% set storage_id = 'neo-storage-' ~ i -%}
[{{ section(storage_id) }}] [{{ section(storage_id) }}]
...@@ -103,7 +98,7 @@ logfile = ${directory:log}/{{ 'neostorage-' ~ i }}.log ...@@ -103,7 +98,7 @@ logfile = ${directory:log}/{{ 'neostorage-' ~ i }}.log
database-parameters = root@neo{{ i }}${my-cnf-parameters:socket} database-parameters = root@neo{{ i }}${my-cnf-parameters:socket}
[{{ section('logrotate-storage-' ~ i) }}] [{{ section('logrotate-storage-' ~ i) }}]
< = logrotate-storage < = logrotate-entry-base
name = {{ storage_id }} name = {{ storage_id }}
log = {{ '${' + storage_id + ':logfile}' }} log = {{ '${' + storage_id + ':logfile}' }}
post = {{ bin_directory }}/slapos-kill -n neostorage -s RTMIN+1 ${:log} post = {{ bin_directory }}/slapos-kill -n neostorage -s RTMIN+1 ${:log}
...@@ -129,9 +124,7 @@ log = ${buildout:directory}/var/log ...@@ -129,9 +124,7 @@ log = ${buildout:directory}/var/log
tmp = ${buildout:directory}/tmp tmp = ${buildout:directory}/tmp
[logrotate-mysql] [logrotate-mysql]
recipe = slapos.cookbook:logrotate.d < = logrotate-entry-base
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
name = mariadb name = mariadb
log = ${my-cnf-parameters:error-log} ${my-cnf-parameters:slow-query-log} log = ${my-cnf-parameters:error-log} ${my-cnf-parameters:slow-query-log}
post = ${mysqld:mysql-base-directory}/bin/mysql --defaults-file="${my-cnf:rendered}" -e "FLUSH LOGS" post = ${mysqld:mysql-base-directory}/bin/mysql --defaults-file="${my-cnf:rendered}" -e "FLUSH LOGS"
......
...@@ -47,10 +47,6 @@ eggs = neoppod[admin, ctl, master, storage-mysqldb] ...@@ -47,10 +47,6 @@ eggs = neoppod[admin, ctl, master, storage-mysqldb]
ZODB ZODB
zope.testing zope.testing
zodbtools zodbtools
patch-binary = ${patch:location}/bin/patch
ZODB-patch-options = -p1
ZODB-patches =
${:_profile_base_location_}/../../component/egg-patch/ZODB4-pr136.patch
[slapos-deps-eggs] [slapos-deps-eggs]
recipe = zc.recipe.egg recipe = zc.recipe.egg
...@@ -93,15 +89,15 @@ md5sum = f3259726bd5d824c569dc7db6b7d26a0 ...@@ -93,15 +89,15 @@ md5sum = f3259726bd5d824c569dc7db6b7d26a0
[instance-neo-admin] [instance-neo-admin]
<= download-base-neo <= download-base-neo
md5sum = ec1e2d9d2a1da092621f1b01ce6af322 md5sum = d5eb14e3d83d7787950de913d7e29aad
[instance-neo-master] [instance-neo-master]
<= download-base-neo <= download-base-neo
md5sum = 82f3f76f54ee9db355966a7ada61f56e md5sum = c6e0db7344a26a55580102e3cc33c984
[instance-neo-storage-mysql] [instance-neo-storage-mysql]
<= download-base-neo <= download-base-neo
md5sum = cd2a978a09c5686205592923866f6584 md5sum = 22206396eff084af3e0587494a012174
[template-neo-my-cnf] [template-neo-my-cnf]
<= download-base-neo <= download-base-neo
...@@ -109,28 +105,28 @@ url = ${:_profile_base_location_}/my.cnf.in ...@@ -109,28 +105,28 @@ url = ${:_profile_base_location_}/my.cnf.in
md5sum = cfdd59d42ae540563a964ccaf8fec232 md5sum = cfdd59d42ae540563a964ccaf8fec232
[versions] [versions]
slapos.recipe.template = 2.10 BTrees = 4.4.1
ZODB = 4.4.4+SlapOSPatched001 ZODB = 4.4.5
BTrees = 4.3.1
persistent = 4.2.2 persistent = 4.2.2
slapos.recipe.template = 2.10
transaction = 1.7.0 transaction = 1.7.0
zdaemon = 4.1.0 zdaemon = 4.1.0
zodbpickle = 0.6.0 zodbpickle = 0.6.0
zodbtools = 0.0.0.dev3 zodbtools = 0.0.0.dev3
# Required by slapos.toolbox = 0.64 # Required by slapos.toolbox = 0.65
slapos.toolbox = 0.64 slapos.toolbox = 0.65
GitPython = 2.1.1
PyRSS2Gen = 1.1 PyRSS2Gen = 1.1
apache-libcloud = 1.5.0 apache-libcloud = 1.5.0
atomize = 0.2.0 atomize = 0.2.0
dnspython = 1.14.0 dnspython = 1.14.0
ecdsa = 0.13 ecdsa = 0.13
feedparser = 5.2.1 feedparser = 5.2.1
GitPython = 2.1.1
gitdb2 = 2.0.0 gitdb2 = 2.0.0
lockfile = 0.12.2 lockfile = 0.12.2
mysqlclient = 1.3.9 mysqlclient = 1.3.9
paramiko = 2.1.1 paramiko = 2.1.1
passlib = 1.6.5 passlib = 1.7.1
pycrypto = 2.6.1 pycrypto = 2.6.1
pycurl = 7.43.0 pycurl = 7.43.0
smmap2 = 2.0.1 smmap2 = 2.0.1
[buildout] [buildout]
extends = software.cfg extends = software.cfg
[neoppod]
ZODB-patches =
[versions] [versions]
ZODB = 5.1.1 ZODB = 5.1.1
transaction = 2.0.3 transaction = 2.0.3
[buildout]
parts =
cron-entry-logrotate
[cron]
recipe = slapos.cookbook:cron
cron-entries = ${logrotate-directory:cron-entries}
dcrond-binary = {{ dcron_location }}/sbin/crond
crontabs = ${logrotate-directory:crontabs}
cronstamps = ${logrotate-directory:cronstamps}
catcher = ${cron-simplelogger:wrapper}
binary = ${logrotate-directory:services}/crond
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
wrapper = ${logrotate-directory:bin}/cron_simplelogger
log = ${logrotate-directory:log}/cron.log
[logrotate]
recipe = slapos.cookbook:logrotate
logrotate-entries = ${logrotate-directory:logrotate-entries}
backup = ${logrotate-directory:logrotate-backup}
logrotate-binary = {{ logrotate_location }}/usr/sbin/logrotate
gzip-binary = {{ gzip_location }}/bin/gzip
gunzip-binary = {{ gzip_location }}/bin/gunzip
wrapper = ${logrotate-directory:bin}/logrotate
conf = ${logrotate-directory:etc}/logrotate.conf
state-file = ${logrotate-directory:srv}/logrotate.status
[cron-entry-logrotate]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = logrotate
frequency = 0 0 * * *
command = ${logrotate:wrapper}
[logrotate-directory]
recipe = slapos.cookbook:mkdirectory
cron-entries = ${:etc}/cron.d
cronstamps = ${:etc}/cronstamps
crontabs = ${:etc}/crontabs
logrotate-backup = ${:backup}/logrotate
logrotate-entries = ${:etc}/logrotate.d
bin = ${buildout:directory}/bin
srv = ${buildout:directory}/srv
backup = ${:srv}/backup
etc = ${buildout:directory}/etc
services = ${:etc}/service
log = ${buildout:directory}/var/log
...@@ -102,12 +102,7 @@ context = ...@@ -102,12 +102,7 @@ context =
< = logrotate-entry-base < = logrotate-entry-base
name = apache name = apache
log = ${apache-conf:error-log} ${apache-conf:access-log} log = ${apache-conf:error-log} ${apache-conf:access-log}
post = {{ parameter_dict['bin-directory'] }}/slapos-kill --pidfile ${apache-conf:pid-file} -s USR1 post = test ! -s ${apache-conf:pid-file} || {{ parameter_dict['bin-directory'] }}/slapos-kill --pidfile ${apache-conf:pid-file} -s USR1
[logrotate-entry-base]
recipe = slapos.cookbook:logrotate.d
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
[re6st-registry-conf-dict] [re6st-registry-conf-dict]
port = 9201 port = 9201
...@@ -197,7 +192,7 @@ command = {{ python_bin }} ${re6st-registry:drop-service-wrapper} ...@@ -197,7 +192,7 @@ command = {{ python_bin }} ${re6st-registry:drop-service-wrapper}
< = logrotate-entry-base < = logrotate-entry-base
name = re6stnet name = re6stnet
log = ${re6st-registry-conf-dict:logfile} log = ${re6st-registry-conf-dict:logfile}
post = {{ parameter_dict['bin-directory'] }}/slapos-kill --pidfile ${re6st-registry:pid-file} -s USR1 post = test ! -s ${re6st-registry:pid-file} || {{ parameter_dict['bin-directory'] }}/slapos-kill --pidfile ${re6st-registry:pid-file} -s USR1
[re6st-registry-promise] [re6st-registry-promise]
recipe = slapos.cookbook:check_port_listening recipe = slapos.cookbook:check_port_listening
......
...@@ -75,7 +75,6 @@ md5sum = 84387d5af13561447c1170704a2076a6 ...@@ -75,7 +75,6 @@ md5sum = 84387d5af13561447c1170704a2076a6
extra-context = extra-context =
key apache_location apache:location key apache_location apache:location
key dash_location dash:location key dash_location dash:location
key logrotate_location logrotate:location
key openssl_location openssl:location key openssl_location openssl:location
key template_apache_conf template-apache-conf:target key template_apache_conf template-apache-conf:target
key template_re6stnet template-re6stnet:target key template_re6stnet template-re6stnet:target
...@@ -88,16 +87,7 @@ extra-context = ...@@ -88,16 +87,7 @@ extra-context =
[template-re6stnet] [template-re6stnet]
< = download-base < = download-base
filename = instance-re6stnet.cfg.in filename = instance-re6stnet.cfg.in
md5sum = 050efe586eb78c0b4f0cc9ba33389675 md5sum = 6e9452d283e82e2f512a9f9edb17fe3a
[template-logrotate-base]
< = template-jinja2-base
filename = instance-logrotate-base.cfg
md5sum = f28fbd310944f321ccb34b2a34c82005
extra-context =
key dcron_location dcron:location
key gzip_location gzip:location
key logrotate_location logrotate:location
[template-apache-conf] [template-apache-conf]
< = download-base < = download-base
...@@ -125,15 +115,15 @@ plone.recipe.command = 1.1 ...@@ -125,15 +115,15 @@ plone.recipe.command = 1.1
pycrypto = 2.6.1 pycrypto = 2.6.1
pycurl = 7.43.0 pycurl = 7.43.0
slapos.recipe.template = 2.7 slapos.recipe.template = 2.7
slapos.toolbox = 0.64 slapos.toolbox = 0.65
smmap = 0.9.0 smmap = 0.9.0
# Required by: # Required by:
# slapos.toolbox = 0.64 # slapos.toolbox = 0.65
GitPython = 2.0.8 GitPython = 2.0.8
# Required by: # Required by:
# slapos.toolbox = 0.64 # slapos.toolbox = 0.65
atomize = 0.2.0 atomize = 0.2.0
# Required by: # Required by:
...@@ -141,11 +131,11 @@ atomize = 0.2.0 ...@@ -141,11 +131,11 @@ atomize = 0.2.0
backports.ssl-match-hostname = 3.4.0.2 backports.ssl-match-hostname = 3.4.0.2
# Required by: # Required by:
# slapos.toolbox = 0.64 # slapos.toolbox = 0.65
feedparser = 5.1.3 feedparser = 5.1.3
# Required by: # Required by:
# slapos.toolbox = 0.64 # slapos.toolbox = 0.65
lockfile = 0.12.2 lockfile = 0.12.2
# Required by: # Required by:
...@@ -153,10 +143,10 @@ lockfile = 0.12.2 ...@@ -153,10 +143,10 @@ lockfile = 0.12.2
miniupnpc = 1.9 miniupnpc = 1.9
# Required by: # Required by:
# slapos.toolbox = 0.64 # slapos.toolbox = 0.65
paramiko = 2.0.1 paramiko = 2.0.1
# Required by: # Required by:
# slapos.toolbox = 0.64 # slapos.toolbox = 0.65
rpdb = 0.1.5 rpdb = 0.1.5
...@@ -46,7 +46,7 @@ context = ...@@ -46,7 +46,7 @@ context =
raw dropbearkey_executable ${dropbear:location}/bin/dropbearkey raw dropbearkey_executable ${dropbear:location}/bin/dropbearkey
raw gzip_executable ${gzip:location}/bin/gzip raw gzip_executable ${gzip:location}/bin/gzip
raw gunzip_executable ${gzip:location}/bin/gunzip raw gunzip_executable ${gzip:location}/bin/gunzip
raw logrotate_executable ${logrotate:location}/sbin/logrotate raw logrotate_executable ${logrotate:location}/usr/sbin/logrotate
raw slapos_configuration_file_template_path ${slapos-configuration-file-template:target} raw slapos_configuration_file_template_path ${slapos-configuration-file-template:target}
raw httpd_configuration_file_template_path ${httpd-configuration-file-template:target} raw httpd_configuration_file_template_path ${httpd-configuration-file-template:target}
......
...@@ -17,7 +17,7 @@ gunicorn = 19.4.5 ...@@ -17,7 +17,7 @@ gunicorn = 19.4.5
prettytable = 0.7.2 prettytable = 0.7.2
pycurl = 7.43.0 pycurl = 7.43.0
slapos.recipe.template = 2.9 slapos.recipe.template = 2.9
slapos.toolbox = 0.64 slapos.toolbox = 0.65
smmap = 0.9.0 smmap = 0.9.0
# Required by: # Required by:
......
{% set tcpv4_port = slapparameter_dict.get('tcpv4_port', 6001) -%} {% set tcpv4_port = slapparameter_dict.get('tcpv4_port', 6001) -%}
{% set ip = (ipv4_set | list)[0] %} {% set ip = (ipv4_set | list)[0] %}
[buildout] [buildout]
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
offline = true
parts = parts =
publish-varnish-connection-information publish-varnish-connection-information
varnish-instance varnish-instance
...@@ -46,10 +49,6 @@ notifempty = true ...@@ -46,10 +49,6 @@ notifempty = true
create = true create = true
{%- endif %} {%- endif %}
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
offline = true
[publish-varnish-connection-information] [publish-varnish-connection-information]
recipe = slapos.cookbook:publish.serialised recipe = slapos.cookbook:publish.serialised
url = http://${varnish-instance:ip}:${varnish-instance:server-port}/ url = http://${varnish-instance:ip}:${varnish-instance:server-port}/
......
[buildout] [buildout]
# Local development
develop =
${:parts-directory}/slapos.cookbook-repository
extends = extends =
../../stack/slapos.cfg ../../stack/slapos.cfg
../../component/dash/buildout.cfg ../../component/dash/buildout.cfg
...@@ -18,32 +14,15 @@ extends = ...@@ -18,32 +14,15 @@ extends =
../../component/wget/buildout.cfg ../../component/wget/buildout.cfg
parts = parts =
slapos-cookbook
dash dash
dcron dcron
gcc-minimal gcc-minimal
slapos-toolbox
stunnel stunnel
varnish-3.0 varnish-3.0
wget
# Local development
slapos.cookbook-repository
check-recipe
# Create instance template # Create instance template
template template
# Local development
[slapos.cookbook-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.git
branch = master
git-executable = ${git:location}/bin/git
[check-recipe]
recipe = plone.recipe.command
stop-on-error = true
update-command = ${:command}
command = grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link
[template-jinja2-base] [template-jinja2-base]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
mode = 640 mode = 640
...@@ -79,29 +58,8 @@ extra-context = ...@@ -79,29 +58,8 @@ extra-context =
[template-varnish] [template-varnish]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-varnish.cfg.in url = ${:_profile_base_location_}/instance-varnish.cfg.in
md5sum = 4334d900f212d170fd0ca35865879bdf md5sum = 0ea12a4ad2d2e3d406476e35b8d3e3fb
mode = 640 mode = 640
[eggs]
recipe = zc.recipe.egg
eggs =
${lxml-python:egg}
erp5.util
pytz
lock_file
inotifyx
scripts =
web_checker_utility = erp5.util.webchecker:web_checker_utility
[slapos-toolbox]
recipe = zc.recipe.egg
eggs =
${lxml-python:egg}
${pycurl:egg}
slapos.toolbox
scripts =
killpidfromfile
[versions] [versions]
erp5.util = 0.4.41 slapos.recipe.template = 2.9
slapos.toolbox = 0.40.4
[buildout]
extends =
../../component/keras/buildout.cfg
software.cfg
parts +=
keras-egg
[eggs]
eggs +=
${tensorflow-build-install-egg:egg}
${protobuf-python:egg}
${h5py:egg}
${pillow-python:egg}
Keras
[bazel]
# For Debian 8 users
# Add jessie-backports to /etc/apt/sources.list
# deb http://ftp.jp.debian.org/debian jessie-backports main
# Then install jdk
# apt-get update
# apt-get -t jessie-backports install openjdk-8-jdk
java_home = /usr/lib/jvm/java-8-openjdk-amd64
[cuda]
# If you use CUDA, please adjust the parameters below for your environment;
# otherwise ignore this part.
# https://developer.nvidia.com/cuda-gpus
tf_need_cuda = 1
tf_cuda_version = 8.0
tf_cudnn_version = 6.5
tf_cuda_compute_capabilities = 5.2,6.1
cuda_toolkit_path = /usr/local/cuda
cudnn_install_path = /usr/local/cuda
[versions]
mock = 2.0.0
funcsigs = 1.0.2
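A quick way to check that the TensorFlow egg built with the [cuda] settings above actually registers a GPU device; this sketch assumes the script runs with that egg importable, and uses TensorFlow's internal device_lib helper.

# Sanity check that the CUDA-enabled TensorFlow build sees the GPU.
# Assumption: run with the tensorflow egg built above on sys.path.
from tensorflow.python.client import device_lib

devices = device_lib.list_local_devices()
for dev in devices:
    print(dev.name, dev.device_type)

if not any(dev.device_type == 'GPU' for dev in devices):
    raise SystemExit('No GPU device visible to TensorFlow; check the [cuda] settings')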
...@@ -88,7 +88,7 @@ Paste = 2.0.2 ...@@ -88,7 +88,7 @@ Paste = 2.0.2
PasteScript = 2.0.2 PasteScript = 2.0.2
WSGIUtils = 0.7 WSGIUtils = 0.7
python-magic = 0.4.6 python-magic = 0.4.6
rdiff-backup = 1.0.5 rdiff-backup = 1.0.5+SlapOSPatched001
slapos.recipe.template = 2.8 slapos.recipe.template = 2.8
# Required by: # Required by:
......
[buildout] [buildout]
extends = extends =
# Exact version of Zope # Exact version of Zope
ztk-versions.cfg ztk-versions.cfg
zope-versions.cfg zope-versions.cfg
buildout.hash.cfg
../../component/cups/buildout.cfg ../../component/cups/buildout.cfg
../../component/dbus/buildout.cfg ../../component/dbus/buildout.cfg
../../component/file/buildout.cfg ../../component/file/buildout.cfg
...@@ -150,15 +152,11 @@ update-command = ${:command} ...@@ -150,15 +152,11 @@ update-command = ${:command}
command = grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link command = grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link
[mariadb-resiliency-after-import-script] [mariadb-resiliency-after-import-script]
recipe = slapos.recipe.build:download <= download-base
url = ${:_profile_base_location_}/template/instance-mariadb-resiliency-after-import-script.sh.in
md5sum = 79109ae720d5471cb2dd15e63fdf4dcf
mode = 755 mode = 755
[template-mariadb] [template-mariadb]
<= download-base <= download-base
filename = instance-mariadb.cfg.in
md5sum = 9312af2f9d9faf06d2f26f073ad60180
link-binary = link-binary =
${coreutils:location}/bin/basename ${coreutils:location}/bin/basename
${coreutils:location}/bin/cat ${coreutils:location}/bin/cat
...@@ -173,70 +171,45 @@ link-binary = ...@@ -173,70 +171,45 @@ link-binary =
[template-kumofs] [template-kumofs]
<= download-base <= download-base
filename = instance-kumofs.cfg.in
md5sum = 763db0c4a94649296e74fe1f53c03940
[template-cloudooo] [template-cloudooo]
<= download-base <= download-base
filename = instance-cloudoo.cfg.in
md5sum = c553bbcdc5f80d893907a05acefc1356
[template-zope-conf] [template-zope-conf]
<= download-base <= download-base
filename = zope.conf.in
md5sum = 77ab4f229a92e02603028a0bd3772edd
[site-zcml] [site-zcml]
<= download-base <= download-base
filename = site.zcml
md5sum = d32417746fcf671d4e86a70379815039
[template-my-cnf] [template-my-cnf]
<= download-base <= download-base
filename = my.cnf.in
md5sum = d50920c942b8ee98402f8551fef38383
[template-mariadb-initial-setup] [template-mariadb-initial-setup]
<= download-base <= download-base
filename = mariadb_initial_setup.sql.in
md5sum = 6465212fdc7fe9076a0c929d9f14da14
[template-create-erp5-site] [template-create-erp5-site]
<= download-base <= download-base
filename = instance-create-erp5-site.cfg.in
md5sum = 71cef1d06065951ab4cf43eb13f311a3
[template-create-erp5-site-real] [template-create-erp5-site-real]
<= download-base <= download-base
filename = instance-create-erp5-site-real.cfg.in
md5sum = 79f789360e71146486c82a7a10834bae
[template-postfix] [template-postfix]
< = download-base < = download-base
filename = instance-postfix.cfg.in
md5sum = 90a017581116f14014a039d38ef36ffd
[template-postfix-master-cf] [template-postfix-master-cf]
< = download-base < = download-base
filename = postfix_master.cf.in
md5sum = 9ac81647368068a1a98a785d08074b43
[template-postfix-main-cf] [template-postfix-main-cf]
< = download-base < = download-base
filename = postfix_main.cf.in
md5sum = d51897728755e14d8005344608098009
[template-postfix-aliases] [template-postfix-aliases]
< = download-base < = download-base
filename = postfix_aliases.in
md5sum = 0969fbb25b05c02ef3c2d437b2f4e1a0
[template] [template]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
# XXX: "template.cfg" is hardcoded in instanciation recipe # XXX: "template.cfg" is hardcoded in instanciation recipe
rendered = ${buildout:directory}/template.cfg rendered = ${buildout:directory}/template.cfg
template = ${:_profile_base_location_}/instance.cfg.in template = ${:_profile_base_location_}/${:filename}
md5sum = ec143441fac42ce6b3a183724298bc6b
mode = 640 mode = 640
context = context =
key mariadb_link_binary template-mariadb:link-binary key mariadb_link_binary template-mariadb:link-binary
...@@ -330,26 +303,18 @@ context = ...@@ -330,26 +303,18 @@ context =
[monitor-template-dummy] [monitor-template-dummy]
<= download-base <= download-base
# This is a placeholder, to be overridden by extending monitor SR # This is a placeholder, to be overridden by extending monitor SR
filename = dummy.cfg
md5sum = d41d8cd98f00b204e9800998ecf8427e
[monitor-template] [monitor-template]
rendered = ${monitor-template-dummy:target} rendered = ${monitor-template-dummy:target}
[template-erp5] [template-erp5]
<= download-base <= download-base
filename = instance-erp5.cfg.in
md5sum = b5f49c90017684aa3389ef3f97ece509
[template-zeo] [template-zeo]
<= download-base <= download-base
filename = instance-zeo.cfg.in
md5sum = 84aba4fa10d3087fbcc1d979df90fab9
[template-zope] [template-zope]
<= download-base <= download-base
filename = instance-zope.cfg.in
md5sum = e65672c423d603e5fa516ad811cc1fa9
link-binary = link-binary =
${aspell:location}/bin/aspell ${aspell:location}/bin/aspell
${dmtx-utils:location}/bin/dmtxwrite ${dmtx-utils:location}/bin/dmtxwrite
...@@ -372,13 +337,9 @@ link-binary = ...@@ -372,13 +337,9 @@ link-binary =
[template-balancer] [template-balancer]
<= download-base <= download-base
filename = instance-balancer.cfg.in
md5sum = 049d4153990374dc130403c04accaf13
[template-haproxy-cfg] [template-haproxy-cfg]
<= download-base <= download-base
filename = haproxy.cfg.in
md5sum = 3defd473e2cea17ae36bba7752494858
[bt5-repository] [bt5-repository]
# Format: # Format:
...@@ -559,7 +520,6 @@ eggs = ${neoppod:eggs} ...@@ -559,7 +520,6 @@ eggs = ${neoppod:eggs}
Products.ExternalEditor Products.ExternalEditor
Products.TIDStorage Products.TIDStorage
Products.LongRequestLogger Products.LongRequestLogger
Products.PloneHotfix20160830
# BBB: Temporarily keep zope.app.testing awaiting we use newer version of CMF # BBB: Temporarily keep zope.app.testing awaiting we use newer version of CMF
# (for tests like testCookieCrumbler). # (for tests like testCookieCrumbler).
...@@ -572,6 +532,8 @@ eggs = ${neoppod:eggs} ...@@ -572,6 +532,8 @@ eggs = ${neoppod:eggs}
# Needed for parsing .po files from our Localizer subset # Needed for parsing .po files from our Localizer subset
polib polib
# Needed for Google OAuth
google-api-python-client
# Used by ERP5 Jupyter backend # Used by ERP5 Jupyter backend
ipykernel ipykernel
...@@ -633,12 +595,15 @@ scripts += ...@@ -633,12 +595,15 @@ scripts +=
Acquisition = 2.13.9+SlapOSPatched001 Acquisition = 2.13.9+SlapOSPatched001
Products.DCWorkflow = 2.2.4+SlapOSPatched001 Products.DCWorkflow = 2.2.4+SlapOSPatched001
pysvn = 1.7.10+SlapOSPatched002 pysvn = 1.7.10+SlapOSPatched002
python-ldap = 2.4.28+SlapOSPatched001 python-ldap = 2.4.29+SlapOSPatched001
python-magic = 0.4.12+SlapOSPatched001 python-magic = 0.4.12+SlapOSPatched001
# specify dev version to be sure that an old released version is not used # specify dev version to be sure that an old released version is not used
cloudooo = 1.2.5-dev cloudooo = 1.2.5-dev
uritemplate = 3.0.0
google-api-python-client = 1.6.1
# use newer version than specified in ZTK # use newer version than specified in ZTK
PasteDeploy = 1.5.2 PasteDeploy = 1.5.2
Pygments = 2.1.3 Pygments = 2.1.3
...@@ -650,7 +615,7 @@ zope.dottedname = 4.1.0 ...@@ -650,7 +615,7 @@ zope.dottedname = 4.1.0
Products.PluggableAuthService = 1.9.0 Products.PluggableAuthService = 1.9.0
# we are still using this old stable version. # we are still using this old stable version.
rdiff-backup = 1.0.5 rdiff-backup = 1.0.5+SlapOSPatched001
# use newest version of slapos.cookbook # use newest version of slapos.cookbook
slapos.cookbook = slapos.cookbook =
...@@ -684,7 +649,6 @@ Products.GenericSetup = 1.8.6 ...@@ -684,7 +649,6 @@ Products.GenericSetup = 1.8.6
Products.LongRequestLogger = 2.0.0 Products.LongRequestLogger = 2.0.0
# Products.MimetypesRegistry 2.1 requires AccessControl>=3.0.0Acquisition. # Products.MimetypesRegistry 2.1 requires AccessControl>=3.0.0Acquisition.
Products.MimetypesRegistry = 2.0.10 Products.MimetypesRegistry = 2.0.10
Products.PloneHotfix20160830 = 1.3
Products.PluginRegistry = 1.4 Products.PluginRegistry = 1.4
Products.TIDStorage = 5.4.9 Products.TIDStorage = 5.4.9
PyPDF2 = 1.26.0 PyPDF2 = 1.26.0
...@@ -696,7 +660,7 @@ WSGIUtils = 0.7 ...@@ -696,7 +660,7 @@ WSGIUtils = 0.7
# ZEO 5 requires transaction >= 2 # ZEO 5 requires transaction >= 2
ZEO = 4.3.1 ZEO = 4.3.1
ZODB3 = 3.11.0 ZODB3 = 3.11.0
Zope2 = 2.13.24 Zope2 = 2.13.25
astor = 0.5 astor = 0.5
# astroid 1.4.1 breaks testDynamicClassGeneration # astroid 1.4.1 breaks testDynamicClassGeneration
astroid = 1.3.8 astroid = 1.3.8
...@@ -714,26 +678,27 @@ http-parser = 0.8.3 ...@@ -714,26 +678,27 @@ http-parser = 0.8.3
httplib2 = 0.9.2 httplib2 = 0.9.2
huBarcode = 1.0.0 huBarcode = 1.0.0
interval = 1.0.0 interval = 1.0.0
ipdb = 0.10.1 ipdb = 0.10.2
ipykernel = 4.5.2 ipykernel = 4.5.2
ipython = 5.1.0 ipython = 5.2.1
ipython-genutils = 0.1.0 ipython-genutils = 0.1.0
ipywidgets = 5.2.2 ipywidgets = 5.2.2
logilab-common = 1.3.0 logilab-common = 1.3.0
matplotlib = 2.0.0 matplotlib = 2.0.0
mistune = 0.7.3 mistune = 0.7.3
msgpack-python = 0.4.8 msgpack-python = 0.4.8
notebook = 4.3.1 notebook = 4.3.2
numpy = 1.11.3 nt-svcutils = 2.13.0
numpy = 1.12.0
objgraph = 3.1.0 objgraph = 3.1.0
pandas = 0.19.2 pandas = 0.19.2
ply = 3.9 ply = 3.10
polib = 1.0.8 polib = 1.0.8
pprofile = 1.10.0 pprofile = 1.10.0
prompt-toolkit = 1.0.9 prompt-toolkit = 1.0.10
ptyprocess = 0.5.1 ptyprocess = 0.5.1
pycountry = 17.1.8 pycountry = 17.1.8
pyflakes = 1.3.0 pyflakes = 1.5.0
# pylint 1.5.1 breaks testDynamicClassGeneration # pylint 1.5.1 breaks testDynamicClassGeneration
pylint = 1.4.4 pylint = 1.4.4
python-memcached = 1.58 python-memcached = 1.58
...@@ -745,8 +710,6 @@ rtjp-eventlet = 0.3.2 ...@@ -745,8 +710,6 @@ rtjp-eventlet = 0.3.2
scikit-learn = 0.18.1 scikit-learn = 0.18.1
scipy = 0.18.1 scipy = 0.18.1
simplegeneric = 0.8.1 simplegeneric = 0.8.1
slapos.recipe.template = 2.9
slapos.toolbox = 0.64
socketpool = 0.5.3 socketpool = 0.5.3
spyne = 2.12.14 spyne = 2.12.14
suds = 0.4 suds = 0.4
...@@ -772,7 +735,7 @@ backports.ssl-match-hostname = 3.5.0.1 ...@@ -772,7 +735,7 @@ backports.ssl-match-hostname = 3.5.0.1
# Required by: # Required by:
# tornado==4.4.2 # tornado==4.4.2
certifi = 2016.9.26 certifi = 2017.1.23
# Required by: # Required by:
# eventlet==0.20.1 # eventlet==0.20.1
...@@ -787,13 +750,13 @@ fpconst = 0.7.2 ...@@ -787,13 +750,13 @@ fpconst = 0.7.2
graphviz = 0.5.2 graphviz = 0.5.2
# Required by: # Required by:
# notebook==4.3.1 # notebook==4.3.2
# nbconvert 4.2.0 depends on entrypoints egg that is not available as tar/zip source. # nbconvert 4.2.0 depends on entrypoints egg that is not available as tar/zip source.
nbconvert = 4.1.0 nbconvert = 4.1.0
# Required by: # Required by:
# nbconvert==4.1.0 # nbconvert==4.1.0
# notebook==4.3.1 # notebook==4.3.2
nbformat = 4.2.0 nbformat = 4.2.0
# Required by: # Required by:
...@@ -801,15 +764,15 @@ nbformat = 4.2.0 ...@@ -801,15 +764,15 @@ nbformat = 4.2.0
olefile = 0.44 olefile = 0.44
# Required by: # Required by:
# ipython==5.1.0 # ipython==5.2.1
pathlib2 = 2.1.0 pathlib2 = 2.2.1
# Required by: # Required by:
# ipython==5.1.0 # ipython==5.2.1
pexpect = 4.2.1 pexpect = 4.2.1
# Required by: # Required by:
# ipython==5.1.0 # ipython==5.2.1
pickleshare = 0.7.4 pickleshare = 0.7.4
# Required by: # Required by:
...@@ -817,6 +780,10 @@ pickleshare = 0.7.4 ...@@ -817,6 +780,10 @@ pickleshare = 0.7.4
# pandas==0.19.2 # pandas==0.19.2
python-dateutil = 2.6.0 python-dateutil = 2.6.0
# Required by:
# pathlib2==2.2.1
scandir = 1.4
# Required by: # Required by:
# tornado==4.4.2 # tornado==4.4.2
singledispatch = 3.4.0.3 singledispatch = 3.4.0.3
...@@ -826,7 +793,7 @@ singledispatch = 3.4.0.3 ...@@ -826,7 +793,7 @@ singledispatch = 3.4.0.3
subprocess32 = 3.2.7 subprocess32 = 3.2.7
# Required by: # Required by:
# prompt-toolkit==1.0.9 # prompt-toolkit==1.0.10
wcwidth = 0.1.7 wcwidth = 0.1.7
# Required by: # Required by:
......
# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginings, copied verbatim
# - lines containing an "=" sign, which must fit into one of the following categories:
# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
# But avoid directories; they are not portable.
# Copied verbatim.
# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
# by the re-generation script.
# Re-generated.
# - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[mariadb-resiliency-after-import-script]
filename = instance-mariadb-resiliency-after-import-script.sh.in
md5sum = 394109d9373f4375d5e1599d5d1500ac
[template-mariadb]
filename = instance-mariadb.cfg.in
md5sum = 11e7faca5233868c1643854e4c898588
[template-kumofs]
filename = instance-kumofs.cfg.in
md5sum = 42d2a2c7cb5bf5122b6cfd8f53a5576f
[template-cloudooo]
filename = instance-cloudoo.cfg.in
md5sum = c553bbcdc5f80d893907a05acefc1356
[template-zope-conf]
filename = zope.conf.in
md5sum = 77ab4f229a92e02603028a0bd3772edd
[site-zcml]
filename = site.zcml
md5sum = d32417746fcf671d4e86a70379815039
[template-my-cnf]
filename = my.cnf.in
md5sum = d814b984abf2dc444af2a0bc6287e7f5
[template-mariadb-initial-setup]
filename = mariadb_initial_setup.sql.in
md5sum = 6465212fdc7fe9076a0c929d9f14da14
[template-create-erp5-site]
filename = instance-create-erp5-site.cfg.in
md5sum = 1186c5804bdc679d8a31ac70cd85d51f
[template-create-erp5-site-real]
filename = instance-create-erp5-site-real.cfg.in
md5sum = 79f789360e71146486c82a7a10834bae
[template-postfix]
filename = instance-postfix.cfg.in
md5sum = 2fd9ec619456fd00e7482cebc5c41f76
[template-postfix-master-cf]
filename = postfix_master.cf.in
md5sum = 9ac81647368068a1a98a785d08074b43
[template-postfix-main-cf]
filename = postfix_main.cf.in
md5sum = 66b273861888c8bd59e46de399d2c1d8
[template-postfix-aliases]
filename = postfix_aliases.in
md5sum = 0969fbb25b05c02ef3c2d437b2f4e1a0
[template]
filename = instance.cfg.in
md5sum = 9d6c7b1a17cf2a5987e8c9f4c9239736
[monitor-template-dummy]
filename = dummy.cfg
md5sum = d41d8cd98f00b204e9800998ecf8427e
[template-erp5]
filename = instance-erp5.cfg.in
md5sum = b5f49c90017684aa3389ef3f97ece509
[template-zeo]
filename = instance-zeo.cfg.in
md5sum = 7610bafda245c008ccf0b6ea58ce21c2
[template-zope]
filename = instance-zope.cfg.in
md5sum = 5e7145e8fa17046581b66e9690bf5186
[template-balancer]
filename = instance-balancer.cfg.in
md5sum = d71c49f91b3455e6866f4b2db591009f
[template-haproxy-cfg]
filename = haproxy.cfg.in
md5sum = 3defd473e2cea17ae36bba7752494858
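The header comment of this file describes a grammar simple enough that a re-generation pass only has to remember the current "filename =" value per section and rewrite the hash lines. The helper below is a hypothetical sketch following only those stated rules; it is not the project's actual re-generation script, and md5sum is assumed to be the only hashtype in use here.

# Hypothetical sketch of a buildout.hash.cfg re-generation pass, following only
# the rules stated in the header comment above: 'filename = ...' gives a path
# relative to the hash file, and hashtype lines are recomputed from that file.
# Comments, section headers and other lines fall through untouched.
import hashlib
import os
import re

HASH_TYPES = ('md5sum',)  # assumption: the only hashtype appearing in this file

def regenerate(hash_cfg_path):
    base_dir = os.path.dirname(os.path.abspath(hash_cfg_path))
    output = []
    filename = None
    with open(hash_cfg_path) as f:
        for line in f:
            stripped = line.strip()
            if stripped.startswith('['):
                filename = None                  # new section: forget previous filename
            match = re.match(r'^\s*filename\s*=\s*(\S+)\s*$', line)
            if match:
                filename = match.group(1)
            elif '=' in stripped and stripped.split('=', 1)[0].strip() in HASH_TYPES and filename:
                with open(os.path.join(base_dir, filename), 'rb') as tracked:
                    digest = hashlib.md5(tracked.read()).hexdigest()
                line = '%s = %s\n' % (stripped.split('=', 1)[0].strip(), digest)
            output.append(line)
    with open(hash_cfg_path, 'w') as f:
        f.writelines(output)

if __name__ == '__main__':
    regenerate('buildout.hash.cfg')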
...@@ -16,8 +16,12 @@ per partition. No more (undefined result), no less (IndexError). ...@@ -16,8 +16,12 @@ per partition. No more (undefined result), no less (IndexError).
{% do apache_ip_list.append('[' ~ ipv6 ~ ']') -%} {% do apache_ip_list.append('[' ~ ipv6 ~ ']') -%}
{% endif -%} {% endif -%}
[simplefile] [jinja2-template-base]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
mode = 644
[simplefile]
< = jinja2-template-base
template = inline:{{ '{{ content }}' }} template = inline:{{ '{{ content }}' }}
{% macro simplefile(section_name, file_path, content, mode='') -%} {% macro simplefile(section_name, file_path, content, mode='') -%}
...@@ -95,7 +99,7 @@ backend-dict = {{ dumps(haproxy_dict) }} ...@@ -95,7 +99,7 @@ backend-dict = {{ dumps(haproxy_dict) }}
ip = {{ ipv4 }} ip = {{ ipv4 }}
[haproxy-cfg] [haproxy-cfg]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-base
template = {{ parameter_dict['template-haproxy-cfg'] }} template = {{ parameter_dict['template-haproxy-cfg'] }}
rendered = ${directory:etc}/haproxy.cfg rendered = ${directory:etc}/haproxy.cfg
context = section parameter_dict haproxy-cfg-parameter-dict context = section parameter_dict haproxy-cfg-parameter-dict
...@@ -132,7 +136,7 @@ ca-cert = ${apache-ssl-client:cert} ...@@ -132,7 +136,7 @@ ca-cert = ${apache-ssl-client:cert}
crl = ${apache-ssl-client:crl} crl = ${apache-ssl-client:crl}
[apache-conf] [apache-conf]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-base
template = {{ parameter_dict['template-apache-conf'] }} template = {{ parameter_dict['template-apache-conf'] }}
rendered = ${directory:apache-conf}/apache.conf rendered = ${directory:apache-conf}/apache.conf
context = section parameter_dict apache-conf-parameter-dict context = section parameter_dict apache-conf-parameter-dict
...@@ -181,12 +185,10 @@ crl = ...@@ -181,12 +185,10 @@ crl =
{%- endif %} {%- endif %}
[logrotate-apache] [logrotate-apache]
recipe = slapos.cookbook:logrotate.d < = logrotate-entry-base
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
name = apache name = apache
log = ${apache-conf-parameter-dict:error-log} ${apache-conf-parameter-dict:access-log} log = ${apache-conf-parameter-dict:error-log} ${apache-conf-parameter-dict:access-log}
post = {{ parameter_dict['bin-directory'] }}/slapos-kill --pidfile ${apache-conf-parameter-dict:pid-file} -s USR1 post = test ! -s ${apache-conf-parameter-dict:pid-file} || {{ parameter_dict['bin-directory'] }}/slapos-kill --pidfile ${apache-conf-parameter-dict:pid-file} -s USR1
[directory] [directory]
recipe = slapos.cookbook:mkdirectory recipe = slapos.cookbook:mkdirectory
......
...@@ -14,6 +14,7 @@ recipe = slapos.recipe.template:jinja2 ...@@ -14,6 +14,7 @@ recipe = slapos.recipe.template:jinja2
template = {{ parameter_dict['template-create-erp5-site-real'] }} template = {{ parameter_dict['template-create-erp5-site-real'] }}
rendered = ${buildout:directory}/instance-create-erp5-site-real.cfg rendered = ${buildout:directory}/instance-create-erp5-site-real.cfg
extensions = jinja2.ext.do extensions = jinja2.ext.do
mode = 644
context = context =
import urlparse urlparse import urlparse urlparse
section publish publish section publish publish
......
...@@ -63,9 +63,7 @@ kumo-server-binary = {{ parameter_dict['kumo-location'] }}/bin/kumo-server ...@@ -63,9 +63,7 @@ kumo-server-binary = {{ parameter_dict['kumo-location'] }}/bin/kumo-server
shell-path = {{ parameter_dict['dash-location'] }}/bin/dash shell-path = {{ parameter_dict['dash-location'] }}/bin/dash
[logrotate-entry-kumofs] [logrotate-entry-kumofs]
recipe = slapos.cookbook:logrotate.d < = logrotate-entry-base
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
name = kumofs name = kumofs
log = ${kumofs-instance:kumo-gateway-log} ${kumofs-instance:kumo-manager-log} ${kumofs-instance:kumo-server-log} log = ${kumofs-instance:kumo-gateway-log} ${kumofs-instance:kumo-manager-log} ${kumofs-instance:kumo-server-log}
......
...@@ -12,17 +12,18 @@ ...@@ -12,17 +12,18 @@
set -e set -e
mysql_executable="${binary-wrap-mysql:wrapper-path}" mysql_executable='${binary-wrap-mysql:wrapper-path}'
mysqldump_executable="${binary-wrap-mysqldump:wrapper-path}" mariadb_data_directory='${directory:mariadb-data}'
mariadb_data_directory="${directory:mariadb-data}" mariadb_backup_directory='${directory:mariadb-backup-full}'
mariadb_backup_directory="${directory:mariadb-backup-full}" instance_directory='${buildout:directory}'
instance_directory="${buildout:directory}" pid_file='${my-cnf-parameters:pid-file}'
pid_file="${my-cnf-parameters:pid-file}" binlog_path='${my-cnf-parameters:binlog-path}'
binlog_path="${my-cnf-parameters:binlog-path}" server_executable='${mysqld:rendered}'
update_executable='${update-mysql:output}'
# Make sure mariadb is not already running # Make sure mariadb is not already running
if [ -e "$pid_file" ]; then if [ -e "$pid_file" ]; then
pid=$(cat $pid_file) > /dev/null 2>&1 pid=$(cat "$pid_file") > /dev/null 2>&1
if kill -0 "$pid"; then if kill -0 "$pid"; then
echo "Mariadb is already running with pid $pid. Aborting." echo "Mariadb is already running with pid $pid. Aborting."
exit 1 exit 1
...@@ -30,11 +31,11 @@ if [ -e "$pid_file" ]; then ...@@ -30,11 +31,11 @@ if [ -e "$pid_file" ]; then
fi fi
echo "Deleting existing database..." echo "Deleting existing database..."
rm -r $mariadb_data_directory/* >/dev/null 2>&1 || true rm -r "$mariadb_data_directory"/* >/dev/null 2>&1 || true
# $binlog_path can be empty if incremental_backup_retention_days <= -1 # $binlog_path can be empty if incremental_backup_retention_days <= -1
if [ -n "$binlog_path" ]; then if [ -n "$binlog_path" ]; then
new_binlog_directory="$(dirname $binlog_path)" new_binlog_directory="$(dirname "$binlog_path")"
binlog_index_file="$new_binlog_directory/binlog.index" binlog_index_file="$new_binlog_directory/binlog.index"
if [ -e "$binlog_index_file" ]; then if [ -e "$binlog_index_file" ]; then
echo "Adapting binlog database to new paths..." echo "Adapting binlog database to new paths..."
...@@ -45,7 +46,7 @@ fi ...@@ -45,7 +46,7 @@ fi
echo "Starting mariadb..." echo "Starting mariadb..."
# XXX hardcoded # XXX hardcoded
$instance_directory/etc/run/mariadb & "$server_executable" --innodb-flush-method=nosync --skip-innodb-doublewrite &
mysqld_pid=$! mysqld_pid=$!
trap "kill $mysqld_pid" EXIT TERM INT trap "kill $mysqld_pid" EXIT TERM INT
sleep 30 sleep 30
...@@ -54,7 +55,7 @@ if ! [ -d /proc/$mysql_pid ]; then ...@@ -54,7 +55,7 @@ if ! [ -d /proc/$mysql_pid ]; then
echo "mysqld exited, aborting." echo "mysqld exited, aborting."
exit 1 exit 1
fi fi
$instance_directory/etc/run/mariadb_update & "$update_executable" &
mariadb_update_pid=$! mariadb_update_pid=$!
sleep 60 sleep 60
# If mariadb_update is still running, abort # If mariadb_update is still running, abort
...@@ -66,7 +67,7 @@ fi ...@@ -66,7 +67,7 @@ fi
echo "Importing data..." echo "Importing data..."
# Use latest dump XXX can contain funny characters # Use latest dump XXX can contain funny characters
dump=$(ls -r $mariadb_backup_directory | head -1) dump=$(ls -r "$mariadb_backup_directory" | head -1)
zcat "$mariadb_backup_directory/$dump" | $mysql_executable -u root --socket="$instance_directory/var/run/mariadb.sock" zcat "$mariadb_backup_directory/$dump" | $mysql_executable -u root --socket="$instance_directory/var/run/mariadb.sock"
RESTORE_EXIT_CODE=$? RESTORE_EXIT_CODE=$?
......
...@@ -33,8 +33,16 @@ recipe = slapos.cookbook:publish.serialised ...@@ -33,8 +33,16 @@ recipe = slapos.cookbook:publish.serialised
database-list = {{ render_database_list(database_list) }} database-list = {{ render_database_list(database_list) }}
test-database-list = {{ render_database_list(test_database_list) }} test-database-list = {{ render_database_list(test_database_list) }}
[simplefile] [jinja2-template-base]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
mode = 644
[jinja2-template-executable]
< = jinja2-template-base
mode = 755
[simplefile]
< = jinja2-template-base
template = inline:{{ '{{ content }}' }} template = inline:{{ '{{ content }}' }}
{% macro simplefile(section_name, file_path, content, mode='') -%} {% macro simplefile(section_name, file_path, content, mode='') -%}
...@@ -108,6 +116,7 @@ error-log = ${directory:log}/mariadb_error.log ...@@ -108,6 +116,7 @@ error-log = ${directory:log}/mariadb_error.log
slow-query-log = ${directory:log}/mariadb_slowquery.log slow-query-log = ${directory:log}/mariadb_slowquery.log
long-query-time = {{ dumps(slapparameter_dict.get('long-query-time', 1)) }} long-query-time = {{ dumps(slapparameter_dict.get('long-query-time', 1)) }}
innodb-buffer-pool-size = {{ dumps(slapparameter_dict.get('innodb-buffer-pool-size', 0)) }} innodb-buffer-pool-size = {{ dumps(slapparameter_dict.get('innodb-buffer-pool-size', 0)) }}
innodb-buffer-pool-instances = {{ dumps(slapparameter_dict.get('innodb-buffer-pool-instances', 0)) }}
innodb-log-file-size = {{ dumps(slapparameter_dict.get('innodb-log-file-size', 0)) }} innodb-log-file-size = {{ dumps(slapparameter_dict.get('innodb-log-file-size', 0)) }}
innodb-log-buffer-size = {{ dumps(slapparameter_dict.get('innodb-log-buffer-size', 0)) }} innodb-log-buffer-size = {{ dumps(slapparameter_dict.get('innodb-log-buffer-size', 0)) }}
relaxed-writes = {{ dumps(slapparameter_dict.get('relaxed-writes', False)) }} relaxed-writes = {{ dumps(slapparameter_dict.get('relaxed-writes', False)) }}
...@@ -124,7 +133,7 @@ ssl-{{ key }} = {{ value }} ...@@ -124,7 +133,7 @@ ssl-{{ key }} = {{ value }}
{% endfor %} {% endfor %}
[my-cnf] [my-cnf]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-base
rendered = ${directory:etc}/mariadb.cnf rendered = ${directory:etc}/mariadb.cnf
template = {{ parameter_dict['template-my-cnf'] }} template = {{ parameter_dict['template-my-cnf'] }}
context = section parameter_dict my-cnf-parameters context = section parameter_dict my-cnf-parameters
...@@ -133,7 +142,7 @@ context = section parameter_dict my-cnf-parameters ...@@ -133,7 +142,7 @@ context = section parameter_dict my-cnf-parameters
database-list = {{ dumps(database_list + test_database_list) }} database-list = {{ dumps(database_list + test_database_list) }}
[init-script] [init-script]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-executable
# XXX: is there a better location ? # XXX: is there a better location ?
rendered = ${directory:etc}/mariadb_initial_setup.sql rendered = ${directory:etc}/mariadb_initial_setup.sql
template = {{ parameter_dict['template-mariadb-initial-setup'] }} template = {{ parameter_dict['template-mariadb-initial-setup'] }}
...@@ -147,19 +156,25 @@ mysql = ${binary-wrap-mysql:wrapper-path} ...@@ -147,19 +156,25 @@ mysql = ${binary-wrap-mysql:wrapper-path}
init-script = ${init-script:rendered} init-script = ${init-script:rendered}
mysql_tzinfo_to_sql = ${binary-wrap-mysql_tzinfo_to_sql:wrapper-path} mysql_tzinfo_to_sql = ${binary-wrap-mysql_tzinfo_to_sql:wrapper-path}
-[mysqld]
-recipe = slapos.cookbook:generic.mysql.wrap_mysqld
-output = ${directory:services}/mariadb
-binary = {{ parameter_dict['mariadb-location'] }}/bin/mysqld
-configuration-file = ${my-cnf:rendered}
-data-directory = ${my-cnf-parameters:data-directory}
-mysql-install-binary = {{ parameter_dict['mariadb-location'] }}/scripts/mysql_install_db
-mysql-base-directory = {{ parameter_dict['mariadb-location'] }}
+[{{ section('mysqld') }}]
+< = jinja2-template-executable
+# Note: all rendering is done when this file is rendered, not when the mysqld
+# section is installed - so I only use jinja2 as a fancy way to write an
+# executable file with partition-dependent but instance-parameters independent
+# content.
+template = inline:#!{{ parameter_dict['dash-location'] }}/bin/dash
+  '{{ parameter_dict['mariadb-location'] }}/scripts/mysql_install_db' \
+    --defaults-file='${my-cnf:rendered}' \
+    --skip-name-resolve \
+    --datadir='${my-cnf-parameters:data-directory}' \
+    --basedir='{{ parameter_dict['mariadb-location'] }}' \
+  && exec '{{ parameter_dict['mariadb-location'] }}/bin/mysqld' \
+    --defaults-file='${my-cnf:rendered}' \
+    "$@"
+rendered = ${directory:services}/mariadb
[logrotate-entry-mariadb] [logrotate-entry-mariadb]
recipe = slapos.cookbook:logrotate.d < = logrotate-entry-base
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
name = mariadb name = mariadb
log = ${my-cnf-parameters:error-log} ${my-cnf-parameters:slow-query-log} log = ${my-cnf-parameters:error-log} ${my-cnf-parameters:slow-query-log}
post = "${binary-wrap-mysql:wrapper-path}" -B -u root -e "FLUSH LOGS" post = "${binary-wrap-mysql:wrapper-path}" -B -u root -e "FLUSH LOGS"
...@@ -249,7 +264,6 @@ parts += ...@@ -249,7 +264,6 @@ parts +=
logrotate-entry-mariadb logrotate-entry-mariadb
binary-link binary-link
update-mysql update-mysql
mysqld
resiliency-exclude-file resiliency-exclude-file
resiliency-after-import-script resiliency-after-import-script
promise promise
......
...@@ -13,6 +13,10 @@ ...@@ -13,6 +13,10 @@
{% set smtpd_sasl_user = slapparameter_dict['smtpd-sasl-user'] -%} {% set smtpd_sasl_user = slapparameter_dict['smtpd-sasl-user'] -%}
{% set smtpd_sasl_password = slapparameter_dict['smtpd-sasl-password'] -%} {% set smtpd_sasl_password = slapparameter_dict['smtpd-sasl-password'] -%}
[jinja2-template-base]
recipe = slapos.recipe.template:jinja2
mode = 644
[smtpd-password] [smtpd-password]
recipe = slapos.cookbook:generate.password recipe = slapos.cookbook:generate.password
storage-path = storage-path =
...@@ -64,7 +68,7 @@ cyrus-sasldb = ${directory:etc-cyrus}/postfix.gdbm ...@@ -64,7 +68,7 @@ cyrus-sasldb = ${directory:etc-cyrus}/postfix.gdbm
recipe = slapos.cookbook:userinfo recipe = slapos.cookbook:userinfo
[smtp-sasl-passwd] [smtp-sasl-passwd]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-base
rendered = ${directory:etc-postfix}/sasl_passwd rendered = ${directory:etc-postfix}/sasl_passwd
{% if relay -%} {% if relay -%}
template = inline:{{ "{{ host }} {{ sasl_credential }}" }} template = inline:{{ "{{ host }} {{ sasl_credential }}" }}
...@@ -77,7 +81,7 @@ context = ...@@ -77,7 +81,7 @@ context =
mode = 600 mode = 600
[{{ section('cyrus-smtpd-conf') }}] [{{ section('cyrus-smtpd-conf') }}]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-base
rendered = ${directory:etc-cyrus}/smtpd.conf rendered = ${directory:etc-cyrus}/smtpd.conf
template = inline: template = inline:
pwcheck_method: auxprop pwcheck_method: auxprop
...@@ -95,7 +99,7 @@ command = ...@@ -95,7 +99,7 @@ command =
update-command = ${:command} update-command = ${:command}
[divert] [divert]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-base
rendered = ${directory:etc-postfix}/divert rendered = ${directory:etc-postfix}/divert
{% if divert -%} {% if divert -%}
template = inline:{{ "/.*/ {{ ', '.join(divert) }}" }} template = inline:{{ "/.*/ {{ ', '.join(divert) }}" }}
...@@ -121,7 +125,7 @@ update = ...@@ -121,7 +125,7 @@ update =
${:openssl} req -newkey rsa -batch -new -x509 -days 3650 -nodes -keyout '${:key}' -out '${:cert}' ${:openssl} req -newkey rsa -batch -new -x509 -days 3650 -nodes -keyout '${:key}' -out '${:cert}'
[{{ section('postfix-main-cf') }}] [{{ section('postfix-main-cf') }}]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-base
rendered = ${directory:etc-postfix}/main.cf rendered = ${directory:etc-postfix}/main.cf
template = {{ parameter_dict['template-postfix-main-cf'] }} template = {{ parameter_dict['template-postfix-main-cf'] }}
context = context =
...@@ -144,13 +148,13 @@ context = ...@@ -144,13 +148,13 @@ context =
key dh_2048 smtpd-ssl:dh-2048 key dh_2048 smtpd-ssl:dh-2048
[{{ section('postfix-master-cf') }}] [{{ section('postfix-master-cf') }}]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-base
rendered = ${directory:etc-postfix}/master.cf rendered = ${directory:etc-postfix}/master.cf
template = {{ parameter_dict['template-postfix-master-cf'] }} template = {{ parameter_dict['template-postfix-master-cf'] }}
context = key smtp configuration:smtp context = key smtp configuration:smtp
[aliases] [aliases]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-base
template = {{ parameter_dict['template-postfix-aliases'] }} template = {{ parameter_dict['template-postfix-aliases'] }}
rendered = ${directory:etc-postfix}/aliases rendered = ${directory:etc-postfix}/aliases
context = context =
......
...@@ -8,11 +8,6 @@ ...@@ -8,11 +8,6 @@
{% set default_backup_path = buildout_directory ~ '/srv/backup/zodb' -%} {% set default_backup_path = buildout_directory ~ '/srv/backup/zodb' -%}
{% set bin_directory = parameter_dict['buildout-bin-directory'] -%} {% set bin_directory = parameter_dict['buildout-bin-directory'] -%}
[logrotate-base]
recipe = slapos.cookbook:logrotate.d
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
[zeo-base] [zeo-base]
recipe = slapos.cookbook:zeo recipe = slapos.cookbook:zeo
log-path = ${directory:log}/${:base-name}.log log-path = ${directory:log}/${:base-name}.log
...@@ -53,10 +48,10 @@ port = {{ current_port }} ...@@ -53,10 +48,10 @@ port = {{ current_port }}
storage = {{ dumps(storage_list) }} storage = {{ dumps(storage_list) }}
[{{ section("logrotate-" ~ zeo_section_name) }}] [{{ section("logrotate-" ~ zeo_section_name) }}]
< = logrotate-base < = logrotate-entry-base
name = {{ "${" ~ zeo_section_name ~ ":base-name}" }} name = {{ "${" ~ zeo_section_name ~ ":base-name}" }}
log = {{ "${" ~ zeo_section_name ~ ":log-path}" }} log = {{ "${" ~ zeo_section_name ~ ":log-path}" }}
post = {{ bin_directory }}/slapos-kill --pidfile {{ "${" ~ zeo_section_name ~ ":pid-path}" }} -s USR2 post = test ! -s {{ "${" ~ zeo_section_name ~":pid-path}" }} || {{ bin_directory }}/slapos-kill --pidfile {{ "${" ~ zeo_section_name ~ ":pid-path}" }} -s USR2
[{{ section(zeo_section_name ~ "-promise") }}] [{{ section(zeo_section_name ~ "-promise") }}]
recipe = slapos.cookbook:check_port_listening recipe = slapos.cookbook:check_port_listening
......
...@@ -2,7 +2,9 @@ ...@@ -2,7 +2,9 @@
{% set next_port = itertools.count(slapparameter_dict['port-base']).next -%} {% set next_port = itertools.count(slapparameter_dict['port-base']).next -%}
{% set site_id = slapparameter_dict['site-id'] -%} {% set site_id = slapparameter_dict['site-id'] -%}
{% set zodb_dict = slapparameter_dict['zodb-dict'] -%} {% set zodb_dict = slapparameter_dict['zodb-dict'] -%}
{% set instance_index_list = range(slapparameter_dict['instance-count']) -%}
{% set node_id_base = slapparameter_dict['name'] -%} {% set node_id_base = slapparameter_dict['name'] -%}
{% set node_id_index_format = '-%%0%ii' % (len(str(instance_index_list[-1])), ) -%}
{% set part_list = [] -%} {% set part_list = [] -%}
{% set publish_list = [] -%} {% set publish_list = [] -%}
{% set longrequest_logger_base_path = buildout_directory ~ '/var/log/longrequest_logger_' -%} {% set longrequest_logger_base_path = buildout_directory ~ '/var/log/longrequest_logger_' -%}
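Worked example for the instance_index_list and node_id_index_format lines added a few lines above, with hypothetical values (instance-count = 12, name = zope); the format is used further down when node-id is rendered:

  instance_index_list  = range(12)                 -> last element 11, len('11') == 2
  node_id_index_format = '-%%0%ii' % (2,)          -> '-%02i'
  node_id_base ~ (node_id_index_format % 7)        -> 'zope-07'

so node ids come out as zope-00, zope-01, ... zope-11 instead of zope-0 ... zope-11.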
...@@ -14,6 +16,10 @@ partition. No more (undefined result), no less (IndexError). ...@@ -14,6 +16,10 @@ partition. No more (undefined result), no less (IndexError).
-#} -#}
{% set ipv4 = (ipv4_set | list)[0] -%} {% set ipv4 = (ipv4_set | list)[0] -%}
[jinja2-template-base]
recipe = slapos.recipe.template:jinja2
mode = 644
{% if slapparameter_dict['mysql-test-url-list'] -%} {% if slapparameter_dict['mysql-test-url-list'] -%}
[{{ section('test-runner') }}] [{{ section('test-runner') }}]
recipe = slapos.cookbook:erp5.test recipe = slapos.cookbook:erp5.test
...@@ -143,7 +149,7 @@ hostalias-dict = {{ dumps(slapparameter_dict['hostalias-dict']) }} ...@@ -143,7 +149,7 @@ hostalias-dict = {{ dumps(slapparameter_dict['hostalias-dict']) }}
# ...so it's not possible to merge these templates (not a big deal anyway). # ...so it's not possible to merge these templates (not a big deal anyway).
[hostaliases] [hostaliases]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-base
template = inline: {{ ' template = inline: {{ '
{% for alias, aliased in host_dict.items() -%} {% for alias, aliased in host_dict.items() -%}
{{ alias }} {{ aliased }} {{ alias }} {{ aliased }}
...@@ -153,7 +159,7 @@ rendered = ${directory:etc}/hostaliases ...@@ -153,7 +159,7 @@ rendered = ${directory:etc}/hostaliases
context = key host_dict hosts-parameter:hostalias-dict context = key host_dict hosts-parameter:hostalias-dict
[hosts] [hosts]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-base
template = inline: {{ ' template = inline: {{ '
{% for alias, aliased in host_dict.items() -%} {% for alias, aliased in host_dict.items() -%}
{{ aliased }} {{ alias }} {{ aliased }} {{ alias }}
...@@ -171,7 +177,7 @@ context = key host_dict hosts-parameter:host-dict ...@@ -171,7 +177,7 @@ context = key host_dict hosts-parameter:host-dict
('_key', 'neo.key')) -%} ('_key', 'neo.key')) -%}
{% if k in storage_dict -%} {% if k in storage_dict -%}
[{{ section('neo-ssl-' + k[1:]) }}] [{{ section('neo-ssl-' + k[1:]) }}]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-base
rendered = ${directory:etc}/{{v}} rendered = ${directory:etc}/{{v}}
template = inline:{{'{{'}}pem}} template = inline:{{'{{'}}pem}}
context = key pem :pem context = key pem :pem
...@@ -204,7 +210,7 @@ target-directory = ${directory:instance-etc} ...@@ -204,7 +210,7 @@ target-directory = ${directory:instance-etc}
file-list = {{ parameter_dict['site-zcml'] }} file-list = {{ parameter_dict['site-zcml'] }}
[{{ section('zope-inituser') }}] [{{ section('zope-inituser') }}]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-base
rendered = ${directory:instance}/inituser rendered = ${directory:instance}/inituser
template = inline:{{ slapparameter_dict['inituser-login'] }}:{SHA}{{ hashlib.sha1(slapparameter_dict['inituser-password']).digest().encode('base64').rstrip() }} template = inline:{{ slapparameter_dict['inituser-login'] }}:{SHA}{{ hashlib.sha1(slapparameter_dict['inituser-password']).digest().encode('base64').rstrip() }}
mode = 600 mode = 600
...@@ -253,14 +259,9 @@ webdav = {{ dumps(webdav) }} ...@@ -253,14 +259,9 @@ webdav = {{ dumps(webdav) }}
timerserver-interval = {{ dumps(timerserver_interval) }} timerserver-interval = {{ dumps(timerserver_interval) }}
[zope-conf-base] [zope-conf-base]
recipe = slapos.recipe.template:jinja2 < = jinja2-template-base
template = {{ parameter_dict['zope-conf-template'] }} template = {{ parameter_dict['zope-conf-template'] }}
[logrotate-entry-base]
recipe = slapos.cookbook:logrotate.d
logrotate-entries = ${logrotate:logrotate-entries}
backup = ${logrotate:backup}
{% macro zope( {% macro zope(
index, index,
port, port,
...@@ -279,7 +280,7 @@ lock-file = ${directory:run}/{{ name }}.lock ...@@ -279,7 +280,7 @@ lock-file = ${directory:run}/{{ name }}.lock
port = {{ port }} port = {{ port }}
event-log = ${directory:log}/{{ name }}-event.log event-log = ${directory:log}/{{ name }}-event.log
z2-log = ${directory:log}/{{ name }}-Z2.log z2-log = ${directory:log}/{{ name }}-Z2.log
node-id = {{ dumps(node_id_base ~ '-' ~ index) }} node-id = {{ dumps(node_id_base ~ (node_id_index_format % index)) }}
{% set log_list = [] -%} {% set log_list = [] -%}
{% set import_set = set() -%} {% set import_set = set() -%}
{% for db_name, zodb in zodb_dict.iteritems() -%} {% for db_name, zodb in zodb_dict.iteritems() -%}
...@@ -352,10 +353,10 @@ path = ${directory:promises}/{{ zope_tunnel_base_name }} ...@@ -352,10 +353,10 @@ path = ${directory:promises}/{{ zope_tunnel_base_name }}
< = logrotate-entry-base < = logrotate-entry-base
name = {{ name }} name = {{ name }}
log = {{ '${' ~ conf_parameter_name ~ ':event-log}' }} {{ '${' ~ conf_parameter_name ~ ':z2-log}' }} {{ '${' ~ conf_parameter_name ~ ':longrequest-logger-file}' }} {{ ' '.join(log_list) }} log = {{ '${' ~ conf_parameter_name ~ ':event-log}' }} {{ '${' ~ conf_parameter_name ~ ':z2-log}' }} {{ '${' ~ conf_parameter_name ~ ':longrequest-logger-file}' }} {{ ' '.join(log_list) }}
post = {{ bin_directory }}/slapos-kill --pidfile {{ '${' ~ conf_parameter_name ~ ':pid-file}' }} -s USR2 post = test ! -s {{ '${' ~ conf_parameter_name ~ ':pid-file}' }} || {{ bin_directory }}/slapos-kill --pidfile {{ '${' ~ conf_parameter_name ~ ':pid-file}' }} -s USR2
{% endmacro -%} {% endmacro -%}
{% for i in range(slapparameter_dict['instance-count']) -%} {% for i in instance_index_list -%}
{{ zope( {{ zope(
i, i,
next_port(), next_port(),
......
...@@ -4,6 +4,7 @@ extends = {{ instance_common_cfg }} ...@@ -4,6 +4,7 @@ extends = {{ instance_common_cfg }}
[jinja2-template-base] [jinja2-template-base]
context += context +=
raw slapos_core_version {{ slapos_core_version }} raw slapos_core_version {{ slapos_core_version }}
mode = 644
[dynamic-template-cloudooo-parameters] [dynamic-template-cloudooo-parameters]
cairo = {{ cairo_location }} cairo = {{ cairo_location }}
......
...@@ -37,6 +37,8 @@ max_connections = 1000 ...@@ -37,6 +37,8 @@ max_connections = 1000
{% set innodb_buffer_pool_size = parameter_dict['innodb-buffer-pool-size'] -%} {% set innodb_buffer_pool_size = parameter_dict['innodb-buffer-pool-size'] -%}
{% if innodb_buffer_pool_size %}innodb_buffer_pool_size = {{ innodb_buffer_pool_size }}{% endif %} {% if innodb_buffer_pool_size %}innodb_buffer_pool_size = {{ innodb_buffer_pool_size }}{% endif %}
{% set innodb_buffer_pool_instances = parameter_dict['innodb-buffer-pool-instances'] -%}
{% if innodb_buffer_pool_instances %}innodb_buffer_pool_instances = {{ innodb_buffer_pool_instances }}{% endif %}
{% set innodb_log_file_size = parameter_dict['innodb-log-file-size'] -%} {% set innodb_log_file_size = parameter_dict['innodb-log-file-size'] -%}
{% if innodb_log_file_size %} innodb_log_file_size = {{ innodb_log_file_size }}{% endif %} {% if innodb_log_file_size %} innodb_log_file_size = {{ innodb_log_file_size }}{% endif %}
{% set innodb_log_buffer_size = parameter_dict['innodb-log-buffer-size'] -%} {% set innodb_log_buffer_size = parameter_dict['innodb-log-buffer-size'] -%}
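For illustration, if the instance parameters set innodb-buffer-pool-instances = 8 and innodb-buffer-pool-size = 17179869184 (16 GiB, hypothetical values), the rendered my.cnf gains the corresponding lines; when they are left at their default of 0, the {% if %} guards above omit the lines entirely and MariaDB's built-in defaults apply:

  innodb_buffer_pool_size = 17179869184
  innodb_buffer_pool_instances = 8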
......
...@@ -17,6 +17,8 @@ manpage_directory = ...@@ -17,6 +17,8 @@ manpage_directory =
sample_directory = sample_directory =
readme_directory = readme_directory =
inet_interfaces = {{ inet_interfaces }} inet_interfaces = {{ inet_interfaces }}
smtp_bind_address = 0.0.0.0
smtp_bind_address6 = ::
virtual_alias_maps = {{ divert }} virtual_alias_maps = {{ divert }}
# Compared to default: # Compared to default:
...@@ -46,6 +48,10 @@ smtpd_recipient_restrictions = ...@@ -46,6 +48,10 @@ smtpd_recipient_restrictions =
reject_unknown_recipient_domain reject_unknown_recipient_domain
permit_sasl_authenticated permit_sasl_authenticated
reject reject
# Do not allow mynetworks to send mails, only authenticated clients.
smtpd_relay_restrictions =
permit_sasl_authenticated
defer_unauth_destination
# Disable local delivery # Disable local delivery
local_transport = error local_transport = error
......
[versions] [versions]
AccessControl = 2.13.14 AccessControl = 2.13.14
Acquisition = 2.13.9 Acquisition = 2.13.11
DateTime = 2.12.8 DateTime = 2.12.8
DocumentTemplate = 2.13.2 DocumentTemplate = 2.13.3
ExtensionClass = 2.13.2 ExtensionClass = 2.13.2
Jinja2 = 2.8 Jinja2 = 2.8.1
MarkupSafe = 0.23
Missing = 2.13.1 Missing = 2.13.1
MultiMapping = 2.13.0 MultiMapping = 2.13.0
Paste = 1.7.5.1 Paste = 1.7.5.1
...@@ -18,25 +17,26 @@ Products.MIMETools = 2.13.0 ...@@ -18,25 +17,26 @@ Products.MIMETools = 2.13.0
Products.MailHost = 2.13.2 Products.MailHost = 2.13.2
Products.OFSP = 2.13.2 Products.OFSP = 2.13.2
Products.PythonScripts = 2.13.2 Products.PythonScripts = 2.13.2
Products.Sessions = 3.0
Products.StandardCacheManagers = 2.13.1 Products.StandardCacheManagers = 2.13.1
Products.TemporaryFolder = 3.0
Products.ZCTextIndex = 2.13.5 Products.ZCTextIndex = 2.13.5
Products.ZCatalog = 2.13.27 Products.ZCatalog = 2.13.27
Pygments = 2.1.2 Pygments = 2.1.3
Record = 2.13.0 Record = 2.13.0
RestrictedPython = 3.6.0 RestrictedPython = 3.6.0
Sphinx = 1.0.8 Sphinx = 1.0.8
ZConfig = 2.9.3 ZConfig = 2.9.3
ZODB3 = 3.10.5 ZODB3 = 3.10.7
ZServer = 3.0
ZopeUndo = 2.12.0 ZopeUndo = 2.12.0
docutils = 0.12 docutils = 0.12
initgroups = 2.13.0 initgroups = 2.13.0
mechanize = 0.2.5 mechanize = 0.2.5
mr.developer = 1.34 mr.developer = 1.34
pytz = 2015.7
repoze.retry = 1.2 repoze.retry = 1.2
repoze.tm2 = 1.0 repoze.tm2 = 1.0
repoze.who = 2.0 repoze.who = 2.0
setuptools = 20.2.2
tempstorage = 2.12.2 tempstorage = 2.12.2
transaction = 1.1.1 transaction = 1.1.1
z3c.checkversions = 0.5 z3c.checkversions = 0.5
...@@ -86,3 +86,70 @@ zope.testbrowser = 3.11.1 ...@@ -86,3 +86,70 @@ zope.testbrowser = 3.11.1
zope.testing = 3.9.7 zope.testing = 3.9.7
zope.traversing = 3.13.2 zope.traversing = 3.13.2
zope.viewlet = 3.7.2 zope.viewlet = 3.7.2
# Required by:
# Jinja2==2.8.1
MarkupSafe = 0.23
# Required by:
# DateTime==2.12.8
# Zope2==2.13.25
# zope.i18n==3.7.4
# zope.testbrowser==3.11.1
pytz = 2016.10
# Required by:
# Products.BTreeFolder2==2.13.5
# Products.ExternalMethod==2.13.1
# Products.MIMETools==2.13.0
# Products.MailHost==2.13.2
# Products.OFSP==2.13.2
# Products.PythonScripts==2.13.2
# Products.Sessions==3.0
# Products.StandardCacheManagers==2.13.1
# Products.TemporaryFolder==3.0
# Products.ZCTextIndex==2.13.5
# Products.ZCatalog==2.13.27
# RestrictedPython==3.6.0
# ZServer==3.0
# Zope2==2.13.25
# mr.developer==1.34
# tempstorage==2.12.2
# z3c.checkversions==0.5
# zExceptions==2.13.0
# zope.browser==1.3
# zope.browsermenu==3.9.1
# zope.browserpage==3.12.2
# zope.browserresource==3.10.3
# zope.component==3.9.5
# zope.configuration==3.7.4
# zope.container==3.11.2
# zope.contentprovider==3.7.2
# zope.contenttype==3.5.5
# zope.deferredimport==3.5.3
# zope.event==3.5.2
# zope.exceptions==3.6.2
# zope.i18n==3.7.4
# zope.i18nmessageid==3.5.3
# zope.interface==3.6.7
# zope.lifecycleevent==3.6.2
# zope.location==3.9.1
# zope.pagetemplate==3.5.2
# zope.processlifetime==1.0
# zope.proxy==3.6.1
# zope.ptresource==3.9.0
# zope.publisher==3.12.6
# zope.schema==3.7.1
# zope.security==3.7.4
# zope.sendmail==3.7.5
# zope.sequencesort==3.4.0
# zope.site==3.9.2
# zope.size==3.4.1
# zope.structuredtext==3.5.1
# zope.tal==3.5.2
# zope.tales==3.5.3
# zope.testbrowser==3.11.1
# zope.testing==3.9.7
# zope.traversing==3.13.2
# zope.viewlet==3.7.2
setuptools = 32.3.1
...@@ -187,7 +187,7 @@ async = 0.6.1 ...@@ -187,7 +187,7 @@ async = 0.6.1
gitdb = 0.5.4 gitdb = 0.5.4
mysqlclient = 1.3.9 mysqlclient = 1.3.9
pycrypto = 2.6 pycrypto = 2.6
rdiff-backup = 1.0.5 rdiff-backup = 1.0.5+SlapOSPatched001
slapos.recipe.template = 2.4.2 slapos.recipe.template = 2.4.2
slapos.toolbox = 0.40.4 slapos.toolbox = 0.40.4
smmap = 0.8.2 smmap = 0.8.2
......
[buildout] [buildout]
extends = extends =
buildout.hash.cfg
../../component/dcron/buildout.cfg ../../component/dcron/buildout.cfg
../../component/gzip/buildout.cfg ../../component/gzip/buildout.cfg
../../component/logrotate/buildout.cfg ../../component/logrotate/buildout.cfg
parts = parts =
template-logrotate-base template-logrotate-base
[logrotate-download-base]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:filename}
# XXX: following mode should be the default
mode = 644
[logrotate-conf-template]
< = logrotate-download-base
[logrotate-entry-template]
< = logrotate-download-base
[template-logrotate-base] [template-logrotate-base]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
template = ${:_profile_base_location_}/${:filename}.in template = ${:_profile_base_location_}/${:filename}
rendered = ${buildout:directory}/${:filename} rendered = ${buildout:directory}/instance-logrotate-base.cfg
filename = instance-logrotate-base.cfg
md5sum = 4f940a5b5a6abac039738b45633c709c
context = context =
key dcron_location dcron:location key dcron_location dcron:location
key gzip_location gzip:location key gzip_location gzip:location
key logrotate_location logrotate:location key logrotate_location logrotate:location
key logrotate_conf_template logrotate-conf-template:target
key logrotate_entry_template logrotate-entry-template:target
# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginnings, copied verbatim
# - lines containing an "=" sign which must fit in the following categories.
# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
# But avoid directories, they are not portable.
# Copied verbatim.
# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
# by the re-generation script.
# Re-generated.
# - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[logrotate-conf-template]
filename = logrotate.conf.in
md5sum = 67954c82a8030e5dcac3a8c5d115f624
[logrotate-entry-template]
filename = logrotate_entry.in
md5sum = 7a5ec8f141c61826a14db8619acba314
[template-logrotate-base]
filename = instance-logrotate-base.cfg.in
md5sum = 3e3e572c5537f77e83b1ee157d1c9852
...@@ -17,27 +17,52 @@ recipe = slapos.cookbook:simplelogger ...@@ -17,27 +17,52 @@ recipe = slapos.cookbook:simplelogger
wrapper = ${logrotate-directory:bin}/cron_simplelogger wrapper = ${logrotate-directory:bin}/cron_simplelogger
log = ${logrotate-directory:log}/cron.log log = ${logrotate-directory:log}/cron.log
-[logrotate]
-recipe = slapos.cookbook:logrotate
-logrotate-entries = ${logrotate-directory:logrotate-entries}
-backup = ${logrotate-directory:logrotate-backup}
-logrotate-binary = {{ logrotate_location }}/usr/sbin/logrotate
-gzip-binary = {{ gzip_location }}/bin/gzip
-gunzip-binary = {{ gzip_location }}/bin/gunzip
-wrapper = ${logrotate-directory:bin}/logrotate
-conf = ${logrotate-directory:etc}/logrotate.conf
-state-file = ${logrotate-directory:srv}/logrotate.status
+[logrotate-conf-parameter]
+gzip-binary = {{ gzip_location }}/bin/gzip
+gunzip-binary = {{ gzip_location }}/bin/gunzip
+logrotate-entries = ${logrotate-directory:logrotate-entries}
+
+[logrotate-conf]
+recipe = slapos.recipe.template:jinja2
+template = {{ logrotate_conf_template }}
+rendered = ${logrotate-directory:etc}/logrotate.conf
+context =
+  section parameter_dict logrotate-conf-parameter
[logrotate-entry-base]
# User must define keys:
# - "name" to generate configuration path (must be unique in partition)
# - "log" space-separated list of file paths to be rotated
# User may override keys:
# - "post" with commands to execute after rotation
# - "pre" with commands to execute before rotation
recipe = slapos.recipe.template:jinja2
template = {{ logrotate_entry_template }}
rendered = ${logrotate-conf-parameter:logrotate-entries}/${:name}
context =
key backup logrotate-directory:logrotate-backup
key log :log
key post :post
key pre :pre
post =
pre =
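A minimal consumer of this new [logrotate-entry-base] only has to set "name" and "log"; "post" and "pre" default to empty. The section name and log path below are made up for illustration, but the real consumers touched by this commit, such as [logrotate-entry-mariadb], follow the same pattern:

  [logrotate-entry-example]
  < = logrotate-entry-base
  name = example
  log = ${directory:log}/example.log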
[logrotate]
recipe = slapos.cookbook:wrapper
parameters-extra = true
command-line = {{ logrotate_location }}/usr/sbin/logrotate -s "${logrotate-directory:srv}/logrotate.status" "${logrotate-conf:rendered}"
wrapper-path = ${logrotate-directory:bin}/logrotate
[cron-entry-logrotate] [cron-entry-logrotate]
recipe = slapos.cookbook:cron.d recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries} cron-entries = ${cron:cron-entries}
name = logrotate name = logrotate
frequency = 0 0 * * * frequency = 0 0 * * *
command = ${logrotate:wrapper} command = ${logrotate:wrapper-path}
[logrotate-entry-base] [logrotate-entry-base]
recipe = slapos.cookbook:logrotate.d recipe = slapos.cookbook:logrotate.d
logrotate-entries = ${logrotate:logrotate-entries} logrotate-entries = ${logrotate-directory:logrotate-entries}
backup = ${logrotate-directory:backup} backup = ${logrotate-directory:backup}
[logrotate-entry-cron] [logrotate-entry-cron]
...@@ -58,4 +83,3 @@ backup = ${:srv}/backup ...@@ -58,4 +83,3 @@ backup = ${:srv}/backup
etc = ${buildout:directory}/etc etc = ${buildout:directory}/etc
services = ${:etc}/run services = ${:etc}/run
log = ${buildout:directory}/var/log log = ${buildout:directory}/var/log
compresscmd {{ parameter_dict['gzip-binary'] }}
compressoptions -9
uncompresscmd {{ parameter_dict['gunzip-binary'] }}
include {{ parameter_dict['logrotate-entries'] }}
{{ log }} {
daily
dateext
rotate 3650
compress
delaycompress
notifempty
sharedscripts
create
olddir {{ backup }}
missingok
{% if pre %}prerotate {
{{ pre }}
}{% endif %}
{% if post %}postrotate {
{{ post }}
}{% endif %}
}
...@@ -14,6 +14,9 @@ Implementation : ...@@ -14,6 +14,9 @@ Implementation :
extends = extends =
../../stack/monitor/buildout.cfg ../../stack/monitor/buildout.cfg
... ...
parts =
...
monitor-base
2/ In your instance.cfg file or instance template, override monitor configuration section to define your custom parameters. 2/ In your instance.cfg file or instance template, override monitor configuration section to define your custom parameters.
...@@ -23,13 +26,14 @@ Implementation : ...@@ -23,13 +26,14 @@ Implementation :
monitor-httpd-port = ... monitor-httpd-port = ...
monitor-base-url = ${monitor-frontend-promise:url} monitor-base-url = ${monitor-frontend-promise:url}
root-instance-title = ${slap-configuration:root-instance-title} root-instance-title = ${slap-configuration:root-instance-title}
opml-url-list = monitor-url-list =
cors-domains = monitor.app.officejs.com cors-domains = monitor.app.officejs.com
collector-db = ... collector-db = ...
password = ${monitor-htpasswd:passwd} password = ${monitor-htpasswd:passwd}
username = ${monitor-htpasswd:username} username = ${monitor-htpasswd:username}
instance-configuration = ... instance-configuration = ...
configuration-file-path = ... configuration-file-path = ...
interface-url = ...
You don't need to define all parameters, you can only set what is required to be changed. ie: You don't need to define all parameters, you can only set what is required to be changed. ie:
...@@ -38,7 +42,7 @@ You don't need to define all parameters, you can only set what is required to be ...@@ -38,7 +42,7 @@ You don't need to define all parameters, you can only set what is required to be
- monitor-title: is the title of the current software instance. - monitor-title: is the title of the current software instance.
- root-instance-title: it the title of the hosting subscription. - root-instance-title: is the title of the hosting subscription.
- monitor-httpd-ipv6: is the ipv6 of the computer partition. - monitor-httpd-ipv6: is the ipv6 of the computer partition.
- monitor-httpd-port: the port to bind monitor httpd server on. - monitor-httpd-port: the port to bind monitor httpd server on.
- monitor-base-url: this url that will be used/showed in monitor interface. This url is present in some monitor generated output files. There can be two value, the default: ${monitor-frontend-promise:url} which access monitor httpd server through the frontend and ${monitor-httpd-conf-parameter:url} which is the url with ipv6 (https://[IPv6]:port/). - monitor-base-url: this url that will be used/showed in monitor interface. This url is present in some monitor generated output files. There can be two value, the default: ${monitor-frontend-promise:url} which access monitor httpd server through the frontend and ${monitor-httpd-conf-parameter:url} which is the url with ipv6 (https://[IPv6]:port/).
...@@ -51,37 +55,45 @@ You don't need to define all parameters, you can only set what is required to be ...@@ -51,37 +55,45 @@ You don't need to define all parameters, you can only set what is required to be
file CONFIG_KEY PATH_TO_RESULT_FILE => editable configuration. file CONFIG_KEY PATH_TO_RESULT_FILE => editable configuration.
httpdcors CONFIG_KEY PATH_TO_HTTP_CORS_CFG_FILE PATH_HTTPD_GRACEFUL_WRAPPER => show/edit cors domain in monitor httpdcors CONFIG_KEY PATH_TO_HTTP_CORS_CFG_FILE PATH_HTTPD_GRACEFUL_WRAPPER => show/edit cors domain in monitor
- configuration-file-path: path of knowledge0 cfg file where instance configuration will be written. - configuration-file-path: path of knowledge0 cfg file where instance configuration will be written.
- interface-url: The URL of monitor web interface. This URL will be present in generated JSON files.
Example of custom monitor-instance-parameter: https://lab.nexedi.com/nexedi/slapos/blob/master/software/slaprunner/instance-runner.cfg#L726
Add a promise
-------------
-Add a monitor promise
----------------------
-By default, monitor stack will include slapos promise directory etc/promise to promise folder. All files in that directory will be considered as a promise.
+Monitor stack will include the slapos promise directory etc/promise in the promise folder. All files in that directory will be considered as promises.
+This means that all slapos promises will be checked frequently by monitor.

-  [monitor-conf-parameters]
-  promise-folder-list =
-    ${directory:promises}
-    ${directory:monitor-promise}
+  [monitor-conf-parameters]
+  ...
+  promise-folder = ${directory:promises}
+  ...

Monitor will run each promise every minute and save the result in a json file. Here is an example of a promise result:

  {"status": "ERROR", "change-time": 1466415901.53, "hosting_subscription": "XXXX", "title": "vnc_promise", "start-date": "2016-06-21 10:47:01", "instance": "XXXX-title", "_links": {"monitor": {"href": "MONITOR_PRIVATE_URL"}}, "message": "PROMISE_OUTPUT_MESSAGE", "type": "status"}

+A promise will be run during a short time and report the status ERROR or OK, plus an output message which says what was good or bad.
+The promise should not run for more than 20 seconds, else it will be interrupted because of a timeout. However this value can be modified from the monitoring web interface, see the parameter "promise-timeout" of your hosting subscription.
+On slapos, the default timeout value is also 20 seconds; if the value is modified on monitor (ex: to 50 seconds), it will still fail when slapgrid processes the instance if the promise execution exceeds 20 seconds.

-Add log directory to monitor
-----------------------------
-Log or others files can be added in monitor public or private directory:
-
-  [monitor-conf-parameters]
-  public-path-list =
-    ...
-  private-path-list =
-    ${directory:log}
-
-files in public directory are accessible at MONITOR_BASE_URL/public, and for private directory: MONITOR_BASE_URL/private.
+Promise results are published in the web public folder; the access URL is MONITOR_BASE_URL/private/PROMISE_NAME.status.json
+Every time monitor runs a promise, a history of results is also updated. The promise history is updated during one day, after which a new history is created.
+To access the promise history file as JSON, use the URL MONITOR_BASE_URL/private/PROMISE_NAME.history.json
+
+Add a promise: monitor promise
+------------------------------
+
+A monitor promise is also a promise, like a normal promise script, but it is placed in the folder ${monitor-directory:promises}:
+
+  [monitor-promise-xxxxx]
+  recipe = slapos.recipe.template:jinja2
+  rendered = ${monitor-directory:promises}/my-custom-monitor-promise
+
+These promises are executed only by monitor (not slapos) every minute and report the same info as default promises. This is another way to
+add more custom promises to check if the server is overloaded, or if the network starts to be slow, etc...
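A complete sketch of such a section, assuming a hypothetical service pid file and check; only the recipe, the target promises folder and the mode convention are taken from this stack, the rest is made up:

  [monitor-promise-check-something]
  recipe = slapos.recipe.template:jinja2
  rendered = ${monitor-directory:promises}/check-something
  mode = 0700
  template = inline:#!/bin/sh
    # exit 0 when healthy; any other exit code is reported as ERROR by monitor
    [ -s ${directory:run}/something.pid ] || { echo "something is not running"; exit 1; }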
Add custom scripts to monitor Add custom scripts to monitor
...@@ -98,7 +110,7 @@ Custom script will be automatically run by the monitor and result will be report ...@@ -98,7 +110,7 @@ Custom script will be automatically run by the monitor and result will be report
The script will be executed every minutes by default. To change, put periodicity in script name: The script will be executed every minutes by default. To change, put periodicity in script name:
- monitor-check-webrunner-internal-instance_every_1_minute - monitor-check-webrunner-internal-instance_every_1_minute
- monitor-check-webrunner-internal-instance_every_5_minute - monitor-check-webrunner-internal-instance_every_25_minute
- monitor-check-webrunner-internal-instance_every_1_hour - monitor-check-webrunner-internal-instance_every_1_hour
- monitor-check-webrunner-internal-instance_every_3_hour - monitor-check-webrunner-internal-instance_every_3_hour
- ... - ...
...@@ -109,6 +121,58 @@ the script name should end with _every_XX_hour or _every_XX_minute. With this, w ...@@ -109,6 +121,58 @@ the script name should end with _every_XX_hour or _every_XX_minute. With this, w
You can get custom script results files at MONITOR_BASE_URL/private/FILE_NAME. You can get custom script results files at MONITOR_BASE_URL/private/FILE_NAME.
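As an illustration (the section name, the target directory key and the script body are hypothetical, not taken from this commit), such a custom script can be installed with the same inline-template pattern, with the periodicity encoded in the file name:

  [monitor-check-webrunner-internal-instance]
  recipe = slapos.recipe.template:jinja2
  rendered = ${monitor-directory:reports}/monitor-check-webrunner-internal-instance_every_5_minute
  mode = 0700
  template = inline:#!/bin/sh
    # whatever is printed here ends up in MONITOR_BASE_URL/private/<script name>
    echo "internal instance looks fine"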
Add custom file or directory to monitor
---------------------------------------
Log or other files can be added to the monitor public or private directory:
[monitor-conf-parameters]
public-path-list =
/path/to/my/public/directory/
...
private-path-list =
${directory:log}
...
Files in the public directory are accessible at MONITOR_BASE_URL/public, and files in the private directory at MONITOR_BASE_URL/private.
Monitor RSS and OPML Feed
-------------------------
Monitor generates an RSS feed containing the latest result for all promises; this feed is updated every minute. The RSS feed URL is
MONITOR_BASE_URL/public/feed
An OPML feed is used to aggregate many feed URLs; this is used on monitor to link many single monitor instances. For example, a resilient
webrunner has at least 5 instances, and each instance has a monitor, which leads to 5 monitor instances too. One main instance (usually the root instance)
will collect the RSS feeds of all the other monitors in a single OPML file. This file is published and used to configure a monitor backend for the web interface.
The URL of the OPML feed is: MONITOR_BASE_URL/public/feeds
Monitor Base web directory tree
-------------------------------
  MONITOR_BASE_URL
  |
  +-- share (webdav)
  |   |
  |   +-- public                       -> X
  |   +-- private                      -> Y
  |   +-- jio_public/.jio_documents    -> X
  |   +-- jio_private/.jio_documents   -> Y
  |
  +-- public    -> X
  +-- private   -> Y
MONITOR_BASE_URL/public or private is for normal HTTPS.
MONITOR_BASE_URL/share is the webdav URL. public/ and private/ are linked to public and private directories.
webdav also has jio_public/.jio_documents and jio_private/.jio_documents, which are linked to the public and private directories and work with the jio webdav plugin.
Access to Monitor Access to Monitor
----------------- -----------------
...@@ -120,9 +184,53 @@ Usefull information are monitor-base-url, monitor-url, monitor-user and monitor- ...@@ -120,9 +184,53 @@ Usefull information are monitor-base-url, monitor-url, monitor-user and monitor-
- ${publish:monitor-base-url}/public/feeds is the OPML URL of this monitor instance. To setup monitor instance in your monitoring interface, use OPML URL of the root instance. It should contain URL to others monitor instances. - ${publish:monitor-base-url}/public/feeds is the OPML URL of this monitor instance. To setup monitor instance in your monitoring interface, use OPML URL of the root instance. It should contain URL to others monitor instances.
- ${publish:monitor-base-url}/private is the monitor private directory. Username and password are reqired to connect. - ${publish:monitor-base-url}/private is the monitor private directory. Username and password are reqired to connect.
The section [monitor-publish] contains the parameters to publish with your instance connection information. It will publish "monitor-base-url" and
"monitor-setup-url", which is used to configure your instance in the monitor interface in one click.
To publish configuration URL in your instance.cfg, you can do like this: To publish configuration URL in your instance.cfg, you can do like this:
[publish-connection-information] [publish-connection-information]
<= monitor-publish
... ...
monitor-setup-url = https://monitor.app.officejs.com/#page=settings_configurator&url=${publish:monitor-url}&username=${publish:monitor-user}&password=${publish:monitor-password} custom-parameter-one = xxxxx
custom-parameter-two = yyyyy
Send parameters to monitor interface
------------------------------------
Monitor has a parameter called "instance-configuration" in the section [monitor-instance-parameter]
that can be updated to specify which parameters will be displayed on the monitor web interface.
Parameters can be edited (except raw parameters) directly from the monitor interface; the change is written back to the related file. Here are some examples:
[monitor-instance-parameter]
instance-configuration =
raw init-user ${publish-connection-information:init-user}
htpasswd monitor-password ${monitor-htpassword-file:password-file} ${monitor-instance-parameter:username} ${httpd-monitor-htpasswd:htpasswd-path}
file promise-timeout ${monitor-promise-timeout-file:file}
The user will see parameters:
- init-user (non editable)
- monitor-password (editable)
- promise-timeout (editable)
htpasswd: is used to change the apache htpasswd directly from the monitor interface. The syntax is:
  htpasswd PARAMETER_ID PASSWORD_TEXT_FILE HTPASSWD_USER_NAME HTPASSWD_FILE
PASSWORD_TEXT_FILE contains the password which is shown to the user.
file: is used to edit a parameter directly in a file. The parameter is read from and written to the provided file:
  file PARAMETER_ID PATH_TO_THE_FILE
raw: is a non-editable parameter:
  raw PARAMETER_ID TEXT_VALUE
httpdcors: is used to edit an apache http_cors.conf file; this file should be included in the main apache configuration file:
  httpdcors PARAMETER_ID PATH_TO_HTTP_CORS_CFG_FILE PATH_HTTPD_GRACEFUL_WRAPPER
PATH_HTTPD_GRACEFUL_WRAPPER will be executed to reload the apache configuration after the modification is done.
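Putting the four kinds together, a hypothetical instance-configuration (all parameter ids and paths below are made up for illustration) could look like:

  [monitor-instance-parameter]
  instance-configuration =
    raw init-user admin
    file promise-timeout ${monitor-directory:etc}/promise-timeout
    htpasswd monitor-password ${directory:etc}/.monitor_pwd admin ${monitor-directory:etc}/monitor-htpasswd
    httpdcors cors-domain ${monitor-directory:etc}/httpd-cors.cfg ${directory:bin}/monitor-httpd-graceful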
...@@ -85,6 +85,11 @@ md5sum = 1bdb4e05c6be04f4e5766c64467fbcec ...@@ -85,6 +85,11 @@ md5sum = 1bdb4e05c6be04f4e5766c64467fbcec
<= monitor-template-base <= monitor-template-base
filename = httpd-cors.cfg.in filename = httpd-cors.cfg.in
md5sum = 683ea85fc054094248baf5752dd089bf md5sum = 683ea85fc054094248baf5752dd089bf
[monitor-check-free-disk-space]
<= monitor-template-base
filename = check_free_disk.in
md5sum = bab457ac4d139ed31d0b343a7d14d996
# End templates files # End templates files
# XXX keep compatibility (with software/ipython_notebook/software.cfg ) # XXX keep compatibility (with software/ipython_notebook/software.cfg )
...@@ -97,7 +102,7 @@ recipe = slapos.recipe.template:jinja2 ...@@ -97,7 +102,7 @@ recipe = slapos.recipe.template:jinja2
filename = template-monitor.cfg filename = template-monitor.cfg
template = ${:_profile_base_location_}/instance-monitor.cfg.jinja2.in template = ${:_profile_base_location_}/instance-monitor.cfg.jinja2.in
rendered = ${buildout:directory}/template-monitor.cfg rendered = ${buildout:directory}/template-monitor.cfg
md5sum = 3a0417a9a3c2710c31be37e7a66f8a82 md5sum = bf0adf565d7cde55abc94bd223ec3162
context = context =
key apache_location apache:location key apache_location apache:location
key gzip_location gzip:location key gzip_location gzip:location
...@@ -119,6 +124,7 @@ context = ...@@ -119,6 +124,7 @@ context =
raw python_executable ${buildout:executable} raw python_executable ${buildout:executable}
raw python_with_eggs ${buildout:directory}/bin/${extra-eggs:interpreter} raw python_with_eggs ${buildout:directory}/bin/${extra-eggs:interpreter}
raw template_wrapper ${monitor-template-wrapper:location}/${monitor-template-wrapper:filename} raw template_wrapper ${monitor-template-wrapper:location}/${monitor-template-wrapper:filename}
raw template_check_disk_space ${monitor-check-free-disk-space:location}/${monitor-check-free-disk-space:filename}
depends = depends =
${monitor-eggs:eggs} ${monitor-eggs:eggs}
...@@ -126,5 +132,5 @@ depends = ...@@ -126,5 +132,5 @@ depends =
PyRSS2Gen = 1.1 PyRSS2Gen = 1.1
cns.recipe.symlink = 0.2.3 cns.recipe.symlink = 0.2.3
pycurl = 7.43.0 pycurl = 7.43.0
slapos.toolbox = 0.64 slapos.toolbox = 0.65
...@@ -82,7 +82,7 @@ base-url = ${monitor-instance-parameter:monitor-base-url} ...@@ -82,7 +82,7 @@ base-url = ${monitor-instance-parameter:monitor-base-url}
monitor-hal-json = ${monitor-directory:public}/monitor.hal.json monitor-hal-json = ${monitor-directory:public}/monitor.hal.json
service-pid-folder = ${monitor-directory:pids} service-pid-folder = ${monitor-directory:pids}
crond-folder = ${logrotate-directory:cron-entries} crond-folder = ${logrotate-directory:cron-entries}
logrotate-folder = ${logrotate:logrotate-entries} logrotate-folder = ${logrotate-directory:logrotate-entries}
promise-runner = {{ monitor_runpromise }} promise-runner = {{ monitor_runpromise }}
promise-folder = ${directory:promises} promise-folder = ${directory:promises}
monitor-promise-folder = ${monitor-directory:promises} monitor-promise-folder = ${monitor-directory:promises}
...@@ -102,12 +102,14 @@ parameter-list = ...@@ -102,12 +102,14 @@ parameter-list =
raw monitor-user ${monitor-instance-parameter:username} raw monitor-user ${monitor-instance-parameter:username}
htpasswd monitor-password ${monitor-htpassword-file:password-file} ${monitor-instance-parameter:username} ${httpd-monitor-htpasswd:htpasswd-path} htpasswd monitor-password ${monitor-htpassword-file:password-file} ${monitor-instance-parameter:username} ${httpd-monitor-htpasswd:htpasswd-path}
file promise-timeout ${monitor-promise-timeout-file:file} file promise-timeout ${monitor-promise-timeout-file:file}
file min-free-disk-MB ${promise-check-free-disk-space:config-file}
${monitor-instance-parameter:instance-configuration} ${monitor-instance-parameter:instance-configuration}
# htpasswd entry: htpasswd key password-file username htpasswd-file # htpasswd entry: htpasswd key password-file username htpasswd-file
collector-db = ${monitor-instance-parameter:collector-db} collector-db = ${monitor-instance-parameter:collector-db}
collect-script = {{ monitor_collect }} collect-script = {{ monitor_collect }}
python = {{ python_with_eggs }} python = {{ python_with_eggs }}
nice-cmd = ${xnice-bin:output}
promise-output-file = ${directory:monitor}/monitor-bootstrap-status promise-output-file = ${directory:monitor}/monitor-bootstrap-status
...@@ -168,14 +170,14 @@ recipe = plone.recipe.command ...@@ -168,14 +170,14 @@ recipe = plone.recipe.command
stop-on-error = true stop-on-error = true
password-file = ${directory:etc}/.monitor_pwd password-file = ${directory:etc}/.monitor_pwd
command = command =
if [ ! -f "${:password-file}" ]; then echo "${monitor-instance-parameter:password}" > ${:password-file}; fi if [ ! -s "${:password-file}" ]; then echo "${monitor-instance-parameter:password}" > ${:password-file}; fi
update-command = ${:command} update-command = ${:command}
[httpd-monitor-htpasswd] [httpd-monitor-htpasswd]
recipe = plone.recipe.command recipe = plone.recipe.command
stop-on-error = true stop-on-error = true
htpasswd-path = ${monitor-directory:etc}/monitor-htpasswd htpasswd-path = ${monitor-directory:etc}/monitor-htpasswd
command = if [ ! -f "${:htpasswd-path}" ]; then {{ apache_location }}/bin/htpasswd -cb ${:htpasswd-path} ${:user} ${:password}; fi command = if [ ! -s "${:htpasswd-path}" ]; then {{ apache_location }}/bin/htpasswd -cb ${:htpasswd-path} ${:user} ${:password}; fi
update-command = ${:command} update-command = ${:command}
user = ${monitor-instance-parameter:username} user = ${monitor-instance-parameter:username}
password = ${monitor-instance-parameter:password} password = ${monitor-instance-parameter:password}
...@@ -229,9 +231,17 @@ context = ...@@ -229,9 +231,17 @@ context =
raw dash_binary {{ dash_executable_location }} raw dash_binary {{ dash_executable_location }}
command = kill -USR1 $(cat ${monitor-httpd-conf-parameter:pid-file}) command = kill -USR1 $(cat ${monitor-httpd-conf-parameter:pid-file})
[xnice-bin]
recipe = collective.recipe.template
input = inline:#!/bin/sh
# run something at lowest possible priority
exec nice -19 chrt --idle 0 ionice -c3 "$@"
output = ${directory:bin}/xnice
mode = 700
[monitor-globalstate-wrapper] [monitor-globalstate-wrapper]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
command-line = {{ monitor_genstatus }} '${monitor-conf:rendered}' '${monitor-instance-info:rendered}' command-line = ${xnice-bin:output} {{ monitor_genstatus }} '${monitor-conf:rendered}' '${monitor-instance-info:rendered}'
wrapper-path = ${directory:bin}/monitor-globalstate wrapper-path = ${directory:bin}/monitor-globalstate
[monitor-configurator-wrapper] [monitor-configurator-wrapper]
...@@ -345,6 +355,18 @@ input = inline:#!/bin/sh ...@@ -345,6 +355,18 @@ input = inline:#!/bin/sh
output = ${monitor-directory:promises}/buildout-${slap-connection:partition-id}-status output = ${monitor-directory:promises}/buildout-${slap-connection:partition-id}-status
mode = 700 mode = 700
[promise-check-free-disk-space]
recipe = slapos.recipe.template:jinja2
template = {{ template_check_disk_space }}
rendered = ${monitor-directory:promises}/check-free-disk-space
mode = 0700
context =
key config_file :config-file
raw home_path ${buildout:directory}
raw python_bin {{ python_with_eggs }}
config-file = ${directory:etc}/min-free-disk-size
[monitor-base] [monitor-base]
# create dependencies between required monitor parts # create dependencies between required monitor parts
recipe = plone.recipe.command recipe = plone.recipe.command
......
#!{{ python_bin }}

import os
import sys

def free_space(path, fn):
  while True:
    try:
      disk = os.statvfs(path)
      return fn(disk)
    except OSError:
      pass
    if os.sep not in path:
      break
    path = os.path.split(path)[0]

def user_free_space(path):
  return free_space(path, lambda d: d.f_bsize * d.f_bavail)

def check_inode_usage(path):
  max_inode_usage = 97.99 # < 98% usage
  st = os.statvfs(path)
  usage_output = ""
  total_inode = st.f_files
  free_inode = st.f_ffree
  usage = round((float(total_inode - free_inode) / total_inode), 4) * 100
  if usage > max_inode_usage:
    return "Disk Inodes are widely used: %s%%" % usage
  elif os.path.exists('/tmp'):
    # check if /tmp is mounted on another disk than path
    tmp_st = os.statvfs('/tmp')
    if tmp_st.f_blocks != st.f_blocks:
      tmp_usage = round((float(tmp_st.f_files - tmp_st.f_ffree) / tmp_st.f_files), 4) * 100
      if tmp_usage > max_inode_usage:
        return "Disk Inodes are widely used: %s%%" % tmp_usage
  return ""

if __name__ == '__main__':
  home_path = '{{ home_path }}'
  config_file = '{{ config_file }}'

  min_free_size = 1024*1024*1024*2 # 2G by default
  if os.path.exists(config_file):
    with open(config_file) as f:
      min_size_str = f.read().strip()
    if min_size_str == '0':
      # disable check
      print "Free disk space check is disabled\n set a number greater than 0 to enable!"
      exit(0)
    if min_size_str.isdigit():
      value = int(min_size_str)
      if value >= 200:
        # Minimum value is 200Mb, it's already low
        min_free_size = int(min_size_str)*1024*1024
  else:
    with open(config_file, 'w') as f:
      f.write(str(min_free_size/(1024*1024)))

  real_free_space = user_free_space(home_path)
  if real_free_space > min_free_size:
    inode_usage = check_inode_usage(home_path)
    if inode_usage:
      print inode_usage
      exit(2)
    print "Disk usage: OK"
    exit(0)

  real_space_g = round(real_free_space/(1024.0*1024*1024), 2)
  min_space_g = round(min_free_size/(1024.0*1024*1024), 2)
  print 'Free disk space low: remaining %s G (threshold: %s G)' % (
    real_space_g, min_space_g)
  print 'You can modify minimum value on your monitor interface.'
  exit(1)
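The threshold file rendered at ${directory:etc}/min-free-disk-size therefore holds a plain number of megabytes; values below 200 are ignored (the 2 GB default then applies) and 0 disables the check. For example (path shortened, purely illustrative):

  echo 4096 > PARTITION_ROOT/etc/min-free-disk-size   # require at least 4 GB of free space
  echo 0 > PARTITION_ROOT/etc/min-free-disk-size      # disable the check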
...@@ -88,7 +88,7 @@ destination = ${buildout:directory}/template-parts.cfg.in ...@@ -88,7 +88,7 @@ destination = ${buildout:directory}/template-parts.cfg.in
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}.in url = ${:_profile_base_location_}/${:filename}.in
output = ${buildout:directory}/${:filename} output = ${buildout:directory}/${:filename}
md5sum = be2ebf91faa6b5b131995a05a907707f md5sum = 41e82859dc6b65e94a300a006d51536e
mode = 0644 mode = 0644
filename = template-resilient-templates.cfg filename = template-resilient-templates.cfg
...@@ -138,8 +138,10 @@ mode = 0644 ...@@ -138,8 +138,10 @@ mode = 0644
[rdiff-backup-build] [rdiff-backup-build]
# use our own version # use our own version
find-links = http://www.nexedi.org/static/packages/source/rdiff-backup-1.3.4nxd2.tar.gz find-links = http://www.nexedi.org/static/packages/source/rdiff-backup-1.3.4nxd2.tar.gz
patches =
${:_profile_base_location_}/rdiff-backup-1.3.4-librsync-1.0.0.patch#31fafc8bc4a00f002f52008a9f3b671f
[versions] [versions]
-rdiff-backup = 1.3.4nxd2
+# 1.3.4nxd2 is an invalid version string, thus the patched version string is not '1.3.4nxd2+SlapOSPatched001'
+# but '1.3.4nxd2-SlapOSPatched001'.
+rdiff-backup = 1.3.4nxd2-SlapOSPatched001
Patch by Roman Tereshonkov and Kari Hautio for rdiff-backup <= 1.2.8 to avoid a build failure with
librsync >= 1.0.0 (which is a security bugfix release). The discussion and solution finding can be
found at https://bugs.launchpad.net/duplicity/+bug/1416344 (for duplicity).
--- rdiff-backup-1.3.4/rdiff_backup/_librsyncmodule.c 2009-03-16 15:36:21.000000000 +0100
+++ rdiff-backup-1.3.4/rdiff_backup/_librsyncmodule.c.librsync-1.0.0 2015-03-02 00:54:24.000000000 +0100
@@ -59,8 +59,13 @@
if (sm == NULL) return NULL;
sm->x_attr = NULL;
+#ifdef RS_DEFAULT_STRONG_LEN
sm->sig_job = rs_sig_begin((size_t)blocklen,
(size_t)RS_DEFAULT_STRONG_LEN);
+#else
+ sm->sig_job = rs_sig_begin((size_t)blocklen,
+ (size_t)8, RS_MD4_SIG_MAGIC);
+#endif
return (PyObject*)sm;
}
...@@ -7,4 +7,4 @@ context = ...@@ -7,4 +7,4 @@ context =
key slapparameter_dict slap-configuration:configuration key slapparameter_dict slap-configuration:configuration
raw pbsready_template_path ${pbsready:output} raw pbsready_template_path ${pbsready:output}
raw bash_executable_location ${bash:location}/bin/bash raw bash_executable_location ${bash:location}/bin/bash
raw logrotate_executable_location ${logrotate:location}/sbin/logrotate raw logrotate_executable_location ${logrotate:location}/usr/sbin/logrotate
...@@ -105,9 +105,9 @@ zc.recipe.egg = 1.3.2.post5 ...@@ -105,9 +105,9 @@ zc.recipe.egg = 1.3.2.post5
# Use own version of h.r.download to be able to open .xz and .lz archives # Use own version of h.r.download to be able to open .xz and .lz archives
hexagonit.recipe.download = 1.7.post4 hexagonit.recipe.download = 1.7.post4
Jinja2 = 2.9.4 Jinja2 = 2.9.5
PyYAML = 3.12
MarkupSafe = 0.23 MarkupSafe = 0.23
PyYAML = 3.12
Werkzeug = 0.11.15 Werkzeug = 0.11.15
buildout-versions = 1.7 buildout-versions = 1.7
cffi = 1.9.1 cffi = 1.9.1
...@@ -115,7 +115,7 @@ click = 6.7 ...@@ -115,7 +115,7 @@ click = 6.7
cliff = 2.4.0 cliff = 2.4.0
cmd2 = 0.6.9 cmd2 = 0.6.9
collective.recipe.template = 2.0 collective.recipe.template = 2.0
cryptography = 1.7.1 cryptography = 1.7.2
decorator = 4.0.11 decorator = 4.0.11
idna = 2.2 idna = 2.2
inotifyx = 0.2.2 inotifyx = 0.2.2
...@@ -127,12 +127,12 @@ netaddr = 0.7.19 ...@@ -127,12 +127,12 @@ netaddr = 0.7.19
pbr = 1.10.0 pbr = 1.10.0
plone.recipe.command = 1.1 plone.recipe.command = 1.1
prettytable = 0.7.2 prettytable = 0.7.2
psutil = 5.0.1 psutil = 5.1.2
pyOpenSSL = 16.2.0 pyOpenSSL = 16.2.0
pyasn1 = 0.1.9 pyasn1 = 0.2.1
pyparsing = 2.1.10 pyparsing = 2.1.10
pytz = 2016.10 pytz = 2016.10
requests = 2.12.5 requests = 2.13.0
setuptools = 19.6.2 setuptools = 19.6.2
six = 1.10.0 six = 1.10.0
slapos.cookbook = 1.0.48 slapos.cookbook = 1.0.48
...@@ -150,20 +150,20 @@ xml-marshaller = 0.9.7 ...@@ -150,20 +150,20 @@ xml-marshaller = 0.9.7
Flask = 0.12 Flask = 0.12
# Required by: # Required by:
# cryptography==1.7.1 # cryptography==1.7.2
enum34 = 1.1.6 enum34 = 1.1.6
# Required by: # Required by:
# jsonschema==2.5.1 # jsonschema==2.6.0
functools32 = 3.2.3.post2 functools32 = 3.2.3.post2
# Required by: # Required by:
# cryptography==1.7.1 # cryptography==1.7.2
ipaddress = 1.0.18 ipaddress = 1.0.18
# Required by: # Required by:
# slapos.cookbook==1.0.48 # slapos.cookbook==1.0.48
jsonschema = 2.5.1 jsonschema = 2.6.0
# Required by: # Required by:
# slapos.core==1.3.18 # slapos.core==1.3.18
......