Commit 0569696f authored by Alain Takoudjou's avatar Alain Takoudjou

Update Release Candidate

parents 2ec3b9f0 5691fd6f
@@ -59,12 +59,16 @@ location = ${buildout:parts-directory}/ZEO4
git-executable = ${git:location}/bin/git
# ZEO5 is plain upstream egg
[ZEO5]
# ZEO6 and ZEO5 are plain upstream eggs
[ZEO6]
recipe = zc.recipe.egg:eggs
egg = ZEO
eggs = ${:egg}
egg-versions =
ZEO = 6.0.0
[ZEO5]
<= ZEO6
egg-versions =
ZEO = 5.4.0
trollius = 2.2.1
@@ -9,16 +9,16 @@ extends =
parts = ZODB/scripts
# ZODB allows to use either ZODB4, ZODB4-wc2 or ZODB5.
# ZODB allows to use either ZODB4, ZODB4-wc2, ZODB5 or ZODB6.
# To select which version to use users should do:
#
# [ZODB]
# major = <ZODB-version-major>
#
# By default ZODB5 is used.
# By default latest is used: ZODB6 on python3 and ZODB5 on python2
[ZODB]
recipe = slapos.recipe.build
major = 5
major = 6
init =
# link/depend ZODB -> ZODB<X>
zodb_x = 'ZODB'+options['major']
@@ -36,13 +36,15 @@ init =
import zc.buildout.easy_install
zc.buildout.easy_install.default_versions(versions)
[ZODB:python2]
major = 5
# ZODB/scripts installs scripts from ZODB
[ZODB/scripts]
recipe = zc.recipe.egg:scripts
eggs = ${ZODB:egg}
# ZODB4 and ZODB5 are plain upstream eggs
# ZODB4, ZODB5 and ZODB6 are plain upstream eggs
[_ZODB]
recipe = zc.recipe.egg:eggs
egg = ZODB
@@ -57,10 +59,21 @@ egg-versions =
[ZODB5]
<= _ZODB
egg-versions =
ZODB = 5.8.1
transaction = 4.0.0
[ZODB5:python2]
<= _ZODB
egg-versions =
ZODB = 5.8.1
transaction = 3.0.1
[ZODB6]
<= _ZODB
egg-versions =
ZODB = 6.0.0
transaction = 4.0.0
# ZODB4-wc2 is ZODB4 version with patches for wendelin.core 2 to work correctly.
# The main change is backport of the way MVCC is handled by always calling
@@ -92,13 +105,18 @@ egg = ${:_buildout_section_name_}
setup-eggs = ${python-cffi:egg}
# eggs that are common to ZODB4 and ZODB5.
# eggs that are common to ZODB4, ZODB5 and ZODB6.
[versions]
BTrees = 4.11.3
persistent = 4.9.3
zodbpickle = 2.6.0
BTrees = 5.1.0
persistent = 5.1.0
zodbpickle = 3.3.0
# Provide ZODB3 for those eggs that still care about ZODB3 compatibility -
# for example wendelin.core. ZODB3 3.11 is just a dependency egg on _latest_
# ZODB, persistent, BTrees and ZEO.
ZODB3 = 3.11.0
[versions:python2]
BTrees = 4.11.3
persistent = 4.9.3
zodbpickle = 2.6.0
@@ -2,6 +2,7 @@
extends =
../autoconf/buildout.cfg
../automake/buildout.cfg
../xz-utils/buildout.cfg
parts =
brctl
@@ -12,7 +13,7 @@ recipe = slapos.recipe.cmmi
url = https://mirrors.edge.kernel.org/pub/linux/utils/net/bridge-utils/bridge-utils-1.7.1.tar.xz
md5sum = 3e1fee4dc22cac5457c2f6ffb990a518
environment =
PATH=${autoconf:location}/bin:${automake:location}/bin:%(PATH)s
PATH=${autoconf:location}/bin:${automake:location}/bin:${xz-utils:location}/bin:%(PATH)s
pre-configure =
aclocal &&
autoconf
@@ -6,6 +6,7 @@ extends =
../pkgconfig/buildout.cfg
../json-c/buildout.cfg
../openssl/buildout.cfg
../xz-utils/buildout.cfg
[cryptsetup]
recipe = slapos.recipe.cmmi
@@ -25,7 +26,7 @@ configure-options =
--disable-kernel_crypto
--disable-blkid
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PATH=${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${json-c:location}/lib/pkgconfig:${openssl:location}/lib/pkgconfig
CFLAGS=-I${libuuid:location}/include -I${lvm2:location}/include -I${popt:location}/include -I${json-c:location}/include -I${openssl:location}/include
LDFLAGS=-L${libuuid:location}/lib -Wl,-rpath=${libuuid:location}/lib -L${lvm2:location}/lib -Wl,-rpath=${lvm2:location}/lib -L${popt:location}/lib -Wl,-rpath=${popt:location}/lib -L${json-c:location}/lib -Wl,-rpath=${json-c:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib
From 3666a7afd46ea6d069606450c520b8b7e2b5fddf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Thu, 22 Feb 2024 23:33:41 +0900
Subject: [PATCH] Make dict views behave like their unrestricted versions
unlike the restricted versions, the unrestricted versions:
- are not iterators, they are views
- have a len
- are false when the mapping is empty, true otherwise
- are instances of collections.abc.MappingView
During this refactoring, also change `.items()` to validate
each key and value, like `.keys()` and `.values()` do.
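As a rough standalone sketch (plain Python, outside AccessControl's restricted
environment), these are the properties of the unrestricted views that the new
Safe*View classes are aligned with:

    # plain dict views, for comparison with the restricted versions
    import collections.abc

    d = {1: 2, 3: 4}
    keys = d.keys()
    assert not isinstance(keys, collections.abc.Iterator)  # a view, not an iterator
    assert isinstance(keys, collections.abc.MappingView)
    assert len(keys) == 2                                   # has a len
    assert list(keys) == list(keys)                         # can be iterated repeatedly
    assert not {}.keys()                                    # false when the mapping is empty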
---
CHANGES.rst | 7 ++++
src/AccessControl/ZopeGuards.py | 50 ++++++++++++++++++-----
src/AccessControl/tests/actual_python.py | 33 +++++++++++++++
src/AccessControl/tests/testZopeGuards.py | 34 +++++++++++----
4 files changed, 104 insertions(+), 20 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index f35a8d2..073b791 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -8,6 +8,13 @@ For changes before version 3.0, see ``HISTORY.rst``.
- Nothing changed yet.
+- Make dict views (`.keys()`, `.items()` and `.values()`) behave like their
+ unrestricted versions.
+ (`#147 <https://github.com/zopefoundation/AccessControl/pull/147>`_)
+
+- Make `.items()` validate each key and value, like `.keys()` and
+  `.values()` do.
+
6.3 (2023-11-20)
----------------
diff --git a/src/AccessControl/ZopeGuards.py b/src/AccessControl/ZopeGuards.py
index 84c2e9e..bc24941 100644
--- a/src/AccessControl/ZopeGuards.py
+++ b/src/AccessControl/ZopeGuards.py
@@ -12,6 +12,7 @@
##############################################################################
+import collections.abc
import math
import random
import string
@@ -127,13 +128,18 @@ def guarded_pop(key, default=_marker):
return guarded_pop
-def get_iter(c, name):
- iter = getattr(c, name)
+def get_mapping_view(c, name):
- def guarded_iter():
- return SafeIter(iter(), c)
+ view_class = {
+ 'keys': SafeKeysView,
+ 'items': SafeItemsView,
+ 'values': SafeValuesView,
+ }
- return guarded_iter
+ def guarded_mapping_view():
+ return view_class[name](c)
+
+ return guarded_mapping_view
def get_list_pop(lst, name):
@@ -153,18 +159,15 @@ def guarded_pop(index=-1):
'copy': 1,
'fromkeys': 1,
'get': get_dict_get,
- 'items': 1,
+ 'items': get_mapping_view,
+ 'keys': get_mapping_view,
'pop': get_dict_pop,
'popitem': 1,
'setdefault': 1,
'update': 1,
+ 'values': get_mapping_view,
}
-_dict_white_list.update({
- 'keys': get_iter,
- 'values': get_iter,
-})
-
def _check_dict_access(name, value):
# Check whether value is a dict method
@@ -262,6 +265,31 @@ def __next__(self):
next = __next__
+class _SafeMappingView:
+ __allow_access_to_unprotected_subobjects__ = 1
+
+ def __iter__(self):
+ for e in super().__iter__():
+ guard(self._mapping, e)
+ yield e
+
+
+class SafeKeysView(_SafeMappingView, collections.abc.KeysView):
+ pass
+
+
+class SafeValuesView(_SafeMappingView, collections.abc.ValuesView):
+ pass
+
+
+class SafeItemsView(_SafeMappingView, collections.abc.ItemsView):
+ def __iter__(self):
+ for k, v in super().__iter__():
+ guard(self._mapping, k)
+ guard(self._mapping, v)
+ yield k, v
+
+
class NullIter(SafeIter):
def __init__(self, ob):
self._iter = ob
diff --git a/src/AccessControl/tests/actual_python.py b/src/AccessControl/tests/actual_python.py
index 3405b8e..866a480 100644
--- a/src/AccessControl/tests/actual_python.py
+++ b/src/AccessControl/tests/actual_python.py
@@ -123,6 +123,39 @@ def f7():
access = getattr(d, meth)
result = sorted(access())
assert result == expected[kind], (meth, kind, result, expected[kind])
+ assert len(access()) == len(expected[kind]), (meth, kind, "len")
+ iter_ = access() # iterate twice on the same view
+ assert list(iter_) == list(iter_)
+
+ assert sorted([k for k in getattr(d, meth)()]) == expected[kind]
+ assert sorted(k for k in getattr(d, meth)()) == expected[kind]
+ assert {k: v for k, v in d.items()} == d
+
+ assert 1 in d
+ assert 1 in d.keys()
+ assert 2 in d.values()
+ assert (1, 2) in d.items()
+
+ assert d
+ assert d.keys()
+ assert d.values()
+ assert d.items()
+
+ empty_d = {}
+ assert not empty_d
+ assert not empty_d.keys()
+ assert not empty_d.values()
+ assert not empty_d.items()
+
+ smaller_d = {1: 2}
+ for m, _ in methods:
+ assert getattr(d, m)() != getattr(smaller_d, m)()
+ assert not getattr(d, m)() == getattr(smaller_d, m)()
+ if m != 'values':
+ assert getattr(d, m)() > getattr(smaller_d, m)()
+ assert getattr(d, m)() >= getattr(smaller_d, m)()
+ assert getattr(smaller_d, m)() < getattr(d, m)()
+ assert getattr(smaller_d, m)() <= getattr(d, m)()
f7()
diff --git a/src/AccessControl/tests/testZopeGuards.py b/src/AccessControl/tests/testZopeGuards.py
index 533bfa2..50eeca9 100644
--- a/src/AccessControl/tests/testZopeGuards.py
+++ b/src/AccessControl/tests/testZopeGuards.py
@@ -258,23 +258,40 @@ def test_pop_validates(self):
self.assertTrue(sm.calls)
def test_keys_empty(self):
- from AccessControl.ZopeGuards import get_iter
- keys = get_iter({}, 'keys')
+ from AccessControl.ZopeGuards import get_mapping_view
+ keys = get_mapping_view({}, 'keys')
self.assertEqual(list(keys()), [])
+ def test_kvi_len(self):
+ from AccessControl.ZopeGuards import get_mapping_view
+ for attr in ("keys", "values", "items"):
+ with self.subTest(attr):
+ view = get_mapping_view({'a': 1}, attr)
+ self.assertEqual(len(view()), 1)
+
def test_keys_validates(self):
sm = SecurityManager()
old = self.setSecurityManager(sm)
keys = guarded_getattr({GuardTestCase: 1}, 'keys')
try:
- next(keys())
+ next(iter(keys()))
finally:
self.setSecurityManager(old)
self.assertTrue(sm.calls)
+ def test_items_validates(self):
+ sm = SecurityManager()
+ old = self.setSecurityManager(sm)
+ items = guarded_getattr({GuardTestCase: GuardTestCase}, 'items')
+ try:
+ next(iter(items()))
+ finally:
+ self.setSecurityManager(old)
+ self.assertEqual(len(sm.calls), 2)
+
def test_values_empty(self):
- from AccessControl.ZopeGuards import get_iter
- values = get_iter({}, 'values')
+ from AccessControl.ZopeGuards import get_mapping_view
+ values = get_mapping_view({}, 'values')
self.assertEqual(list(values()), [])
def test_values_validates(self):
@@ -282,18 +299,17 @@ def test_values_validates(self):
old = self.setSecurityManager(sm)
values = guarded_getattr({GuardTestCase: 1}, 'values')
try:
- next(values())
+ next(iter(values()))
finally:
self.setSecurityManager(old)
self.assertTrue(sm.calls)
def test_kvi_iteration(self):
- from AccessControl.ZopeGuards import SafeIter
d = dict(a=1, b=2)
for attr in ("keys", "values", "items"):
v = getattr(d, attr)()
- si = SafeIter(v)
- self.assertEqual(next(si), next(iter(v)))
+ si = guarded_getattr(d, attr)()
+ self.assertEqual(next(iter(si)), next(iter(v)))
class TestListGuards(GuardTestCase):
From 77f86b50f097dcf364e0d140e45593bf001d46bc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Fri, 1 Mar 2024 09:49:17 +0900
Subject: [PATCH] set metadata in setup.py for compatibility with old slapos
buildout
---
setup.py | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/setup.py b/setup.py
index 1bf0bcff5..a93fe7b22 100755
--- a/setup.py
+++ b/setup.py
@@ -987,6 +987,11 @@ ext_modules = [
try:
setup(
+ name='pillow',
+ version='10.2.0',
+ packages=["PIL"],
+ include_package_data=True,
+ package_dir={"": "src"},
cmdclass={"build_ext": pil_build_ext},
ext_modules=ext_modules,
zip_safe=not (debug_build() or PLATFORM_MINGW),
--
2.42.0
From c233d7278ae7089ba2ad32b8a178a3793273a47d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Sat, 1 Jun 2024 14:58:23 +0900
Subject: [PATCH] import from zope.lifecycleevent.interfaces to prevent
DeprecationWarnings
partial backport from https://github.com/zopefoundation/Products.CMFCore/pull/125
---
src/Products/CMFCore/CMFCatalogAware.py | 4 ++--
src/Products/CMFCore/CachingPolicyManager.py | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/src/Products/CMFCore/CMFCatalogAware.py b/src/Products/CMFCore/CMFCatalogAware.py
index a574660..502161f 100644
--- a/src/Products/CMFCore/CMFCatalogAware.py
+++ b/src/Products/CMFCore/CMFCatalogAware.py
@@ -25,11 +25,11 @@ from OFS.interfaces import IObjectClonedEvent
from OFS.interfaces import IObjectWillBeMovedEvent
from zope.component import queryUtility
from zope.component import subscribers
-from zope.container.interfaces import IObjectAddedEvent
-from zope.container.interfaces import IObjectMovedEvent
from zope.interface import implementer
+from zope.lifecycleevent.interfaces import IObjectAddedEvent
from zope.lifecycleevent.interfaces import IObjectCopiedEvent
from zope.lifecycleevent.interfaces import IObjectCreatedEvent
+from zope.lifecycleevent.interfaces import IObjectMovedEvent
from .interfaces import ICallableOpaqueItem
from .interfaces import ICatalogAware
diff --git a/src/Products/CMFCore/CachingPolicyManager.py b/src/Products/CMFCore/CachingPolicyManager.py
index 3722b7f..65a079d 100644
--- a/src/Products/CMFCore/CachingPolicyManager.py
+++ b/src/Products/CMFCore/CachingPolicyManager.py
@@ -27,9 +27,9 @@ from Persistence import PersistentMapping
from Products.PageTemplates.Expressions import SecureModuleImporter
from Products.PageTemplates.Expressions import getEngine
from zope.component import getUtility
-from zope.container.interfaces import IObjectMovedEvent
from zope.datetime import rfc1123_date
from zope.interface import implementer
+from zope.lifecycleevent.interfaces import IObjectMovedEvent
from .Expression import Expression
from .interfaces import ICachingPolicy
--
2.42.0
From 2a248ffc584082122776f3b51f5fdaf90c5e2905 Mon Sep 17 00:00:00 2001
From: Maurits van Rees <maurits@vanrees.org>
Date: Tue, 23 Apr 2024 22:22:53 +0200
Subject: [PATCH] Use the new resources keyword of registerClass if available.
This avoids a deprecation warning for using a non-callable constructor in Zope versions newer than 5.9.
See report in https://github.com/zopefoundation/Zope/issues/1202
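As a minimal sketch of the compatibility pattern this patch applies (the
`register_tool` helper and its arguments are hypothetical, standing in for the
real ToolInit/ContentInit code below):

    # Try the newer `resources` keyword first; older Zope raises TypeError.
    def register_tool(context, meta_type, constructors, resource, icon=None):
        try:
            context.registerClass(meta_type=meta_type,
                                  constructors=constructors,
                                  resources=(resource,),
                                  icon=icon)
        except TypeError:
            # 'resources' not supported: fall back to sneaking the object
            # into 'constructors', as before.
            context.registerClass(meta_type=meta_type,
                                  constructors=constructors + (resource,),
                                  icon=icon)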
---
CHANGES.rst | 6 ++++-
src/Products/CMFCore/utils.py | 51 ++++++++++++++++++++++++-----------
2 files changed, 40 insertions(+), 17 deletions(-)
diff --git a/src/Products/CMFCore/utils.py b/src/Products/CMFCore/utils.py
index 2f99d6d..565914d 100644
--- a/src/Products/CMFCore/utils.py
+++ b/src/Products/CMFCore/utils.py
@@ -599,13 +599,20 @@ def initialize(self, context):
# Add only one meta type to the folder add list.
productObject = context._ProductContext__prod
self.product_name = productObject.id
- context.registerClass(
- meta_type=self.meta_type,
- # This is a little sneaky: we add self to the
- # FactoryDispatcher under the name "toolinit".
- # manage_addTool() can then grab it.
- constructors=(manage_addToolForm, manage_addTool, self),
- icon=self.icon)
+ # We add self to the FactoryDispatcher under the name 'toolinit'.
+ # manage_addContentType() can then grab it.
+ try:
+ context.registerClass(
+ meta_type=self.meta_type,
+ constructors=(manage_addToolForm, manage_addTool),
+ resources=(self, ),
+ icon=self.icon)
+ except TypeError:
+ # The 'resources' keyword was only introduced after Zope 5.9.
+ context.registerClass(
+ meta_type=self.meta_type,
+ constructors=(manage_addToolForm, manage_addTool, self),
+ icon=self.icon)
if self.icon:
icon = os_path.split(self.icon)[1]
@@ -680,15 +687,27 @@ def __init__(self, meta_type, content_types, permission=None,
def initialize(self, context):
# Add only one meta type to the folder add list.
- context.registerClass(
- meta_type=self.meta_type,
- # This is a little sneaky: we add self to the
- # FactoryDispatcher under the name "contentinit".
- # manage_addContentType() can then grab it.
- constructors=(manage_addContentForm, manage_addContent,
- self) + self.extra_constructors,
- permission=self.permission,
- visibility=self.visibility)
+ # We add self to the FactoryDispatcher under the name 'contentinit'.
+ # manage_addContentType() can then grab it.
+ try:
+ context.registerClass(
+ meta_type=self.meta_type,
+ constructors=(
+ manage_addContentForm,
+ manage_addContent,
+ ) + self.extra_constructors,
+ resources=(self, ),
+ permission=self.permission,
+ visibility=self.visibility)
+ except TypeError:
+ # The 'resources' keyword was only introduced after Zope 5.9.
+ context.registerClass(
+ meta_type=self.meta_type,
+ constructors=(
+ manage_addContentForm, manage_addContent, self,
+ ) + self.extra_constructors,
+ permission=self.permission,
+ visibility=self.visibility)
for ct in self.content_types:
ct.__factory_meta_type__ = self.meta_type
From 3c6b815bbb2a9300984a7b50cb5ec5375bf4588e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Tue, 2 Apr 2024 21:54:07 +0900
Subject: [PATCH] Revive TRIGGER_WORKFLOW_METHOD support, ERP5 uses it
---
src/Products/DCWorkflow/DCWorkflow.py | 47 +++++++++++++++++++
src/Products/DCWorkflow/Transitions.py | 1 +
.../dtml/transition_properties.dtml | 10 ++++
src/Products/DCWorkflow/dtml/transitions.dtml | 3 +-
src/Products/DCWorkflow/exportimport.py | 2 +-
5 files changed, 61 insertions(+), 2 deletions(-)
diff --git a/src/Products/DCWorkflow/DCWorkflow.py b/src/Products/DCWorkflow/DCWorkflow.py
index 9adf05c..d0306dc 100644
--- a/src/Products/DCWorkflow/DCWorkflow.py
+++ b/src/Products/DCWorkflow/DCWorkflow.py
@@ -38,6 +38,7 @@ from .Expression import createExprContext
from .interfaces import IDCWorkflowDefinition
from .Transitions import TRIGGER_AUTOMATIC
from .Transitions import TRIGGER_USER_ACTION
+from .Transitions import TRIGGER_WORKFLOW_METHOD
from .utils import Message as _
from .utils import modifyRolesForGroup
from .utils import modifyRolesForPermission
@@ -278,6 +279,52 @@ class DCWorkflowDefinition(WorkflowUIMixin, Folder):
raise Unauthorized(action)
self._changeStateOf(ob, tdef, kw)
+ @security.private
+ def isWorkflowMethodSupported(self, ob, method_id):
+ '''
+ Returns a true value if the given workflow method
+ is supported in the current state.
+ '''
+ sdef = self._getWorkflowStateOf(ob)
+ if sdef is None:
+ return 0
+ if method_id in sdef.transitions:
+ tdef = self.transitions.get(method_id, None)
+ if (tdef is not None and
+ tdef.trigger_type == TRIGGER_WORKFLOW_METHOD and
+ self._checkTransitionGuard(tdef, ob)):
+ return 1
+ return 0
+
+ @security.private
+ def wrapWorkflowMethod(self, ob, method_id, func, args, kw):
+ '''
+ Allows the user to request a workflow action. This method
+ must perform its own security checks.
+ '''
+ sdef = self._getWorkflowStateOf(ob)
+ if sdef is None:
+ raise WorkflowException('Object is in an undefined state')
+ if method_id not in sdef.transitions:
+ raise Unauthorized(method_id)
+ tdef = self.transitions.get(method_id, None)
+ if tdef is None or tdef.trigger_type != TRIGGER_WORKFLOW_METHOD:
+ raise WorkflowException(
+ 'Transition %s is not triggered by a workflow method'
+ % method_id)
+ if not self._checkTransitionGuard(tdef, ob):
+ raise Unauthorized(method_id)
+ res = func(*args, **kw)
+ try:
+ self._changeStateOf(ob, tdef)
+ except ObjectDeleted:
+ # Re-raise with a different result.
+ raise ObjectDeleted(res)
+ except ObjectMoved as ex:
+ # Re-raise with a different result.
+ raise ObjectMoved(ex.getNewObject(), res)
+ return res
+
@security.private
def isInfoSupported(self, ob, name):
'''
diff --git a/src/Products/DCWorkflow/Transitions.py b/src/Products/DCWorkflow/Transitions.py
index a6e1e6f..b4e012c 100644
--- a/src/Products/DCWorkflow/Transitions.py
+++ b/src/Products/DCWorkflow/Transitions.py
@@ -31,6 +31,7 @@ from .utils import _dtmldir
TRIGGER_AUTOMATIC = 0
TRIGGER_USER_ACTION = 1
+TRIGGER_WORKFLOW_METHOD = 2
class TransitionDefinition(SimpleItem):
diff --git a/src/Products/DCWorkflow/dtml/transition_properties.dtml b/src/Products/DCWorkflow/dtml/transition_properties.dtml
index d6b8a74..6a0803e 100644
--- a/src/Products/DCWorkflow/dtml/transition_properties.dtml
+++ b/src/Products/DCWorkflow/dtml/transition_properties.dtml
@@ -55,6 +55,16 @@ Initiated by user action
</td>
</tr>
+<tr>
+<th></th>
+<td>
+<dtml-let checked="trigger_type==2 and 'checked' or ' '">
+<input type="radio" name="trigger_type" value="2" &dtml-checked; />
+Initiated by WorkflowMethod
+</dtml-let>
+</td>
+</tr>
+
<tr>
<th align="left">Script (before)</th>
<td>
diff --git a/src/Products/DCWorkflow/dtml/transitions.dtml b/src/Products/DCWorkflow/dtml/transitions.dtml
index 4cdd3d3..37e949c 100644
--- a/src/Products/DCWorkflow/dtml/transitions.dtml
+++ b/src/Products/DCWorkflow/dtml/transitions.dtml
@@ -17,7 +17,8 @@
<td>
Destination state: <code><dtml-if new_state_id>&dtml-new_state_id;<dtml-else>(Remain in state)</dtml-if></code> <br />
Trigger: <dtml-var expr="(trigger_type == 0 and 'Automatic') or
- (trigger_type == 1 and 'User action')">
+ (trigger_type == 1 and 'User action') or
+ (trigger_type == 2 and 'WorkflowMethod')">
<br />
<dtml-if script_name>
Script (before): &dtml-script_name;
diff --git a/src/Products/DCWorkflow/exportimport.py b/src/Products/DCWorkflow/exportimport.py
index f17264d..2374b6e 100644
--- a/src/Products/DCWorkflow/exportimport.py
+++ b/src/Products/DCWorkflow/exportimport.py
@@ -37,7 +37,7 @@ from .interfaces import IDCWorkflowDefinition
from .utils import _xmldir
-TRIGGER_TYPES = ('AUTOMATIC', 'USER')
+TRIGGER_TYPES = ('AUTOMATIC', 'USER', 'METHOD' )
_FILENAME = 'workflows.xml'
--
2.42.0
From a037f2a2e2090dcd63b83af9b06427dd8c7e9536 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Wed, 22 May 2024 23:58:45 +0900
Subject: [PATCH] Show Python Script source code in tracebacks
Expose a __loader__ in globals so that the linecache module is able to use
it to display the source code.
This requires changing the "filename" used when compiling the function,
because linecache uses code.co_filename as a cache key, so it is
necessary that each Python Script use a different filename.
WIP from https://github.com/zopefoundation/Products.PythonScripts/pull/65
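A minimal standalone sketch of the mechanism (standard library only, no Zope;
names are illustrative): linecache can recover source through a loader exposed
in the frame's globals when the code's filename does not exist on disk, which
is why each script needs its own filename:

    import sys
    import traceback

    class DemoLoader:                        # stand-in for PythonScriptLoader
        def __init__(self, source):
            self._source = source
        def get_source(self, name):
            return self._source

    src = "1 / 0\n"
    globs = {"__name__": "demo_script", "__loader__": DemoLoader(src)}
    try:
        # unique filename: linecache uses code.co_filename as its cache key
        exec(compile(src, "demo_script", "exec"), globs)
    except ZeroDivisionError:
        tb = "".join(traceback.format_exception(*sys.exc_info()))
        assert "1 / 0" in tb                 # source line recovered via __loader__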
---
CHANGES.rst | 2 +
src/Products/PythonScripts/PythonScript.py | 19 ++++++-
.../PythonScripts/tests/testPythonScript.py | 50 ++++++++++++++++++-
3 files changed, 67 insertions(+), 4 deletions(-)
diff --git a/src/Products/PythonScripts/PythonScript.py b/src/Products/PythonScripts/PythonScript.py
index fe4223a..5cb7f37 100644
--- a/src/Products/PythonScripts/PythonScript.py
+++ b/src/Products/PythonScripts/PythonScript.py
@@ -16,7 +16,9 @@
Python code.
"""
+import importlib.abc
import importlib.util
+import linecache
import marshal
import os
import re
@@ -56,7 +58,7 @@
Python_magic = importlib.util.MAGIC_NUMBER
# This should only be incremented to force recompilation.
-Script_magic = 4
+Script_magic = 5
_log_complaint = (
'Some of your Scripts have stale code cached. Since Zope cannot'
' use this code, startup will be slightly slower until these Scripts'
@@ -97,6 +99,16 @@ def manage_addPythonScript(self, id, title='', file=None, REQUEST=None,
return ''
+class PythonScriptLoader(importlib.abc.Loader):
+ """PEP302 loader to display source code in tracebacks
+ """
+ def __init__(self, source):
+ self._source = source
+
+ def get_source(self, name):
+ return self._source
+
+
class PythonScript(Script, Historical, Cacheable):
"""Web-callable scripts written in a safe subset of Python.
@@ -234,7 +246,7 @@ def _compile(self):
self._params,
body=self._body or 'pass',
name=self.id,
- filename=self.meta_type,
+ filename=getattr(self, '_filepath', None) or self.get_filepath(),
globalize=bind_names)
code = compile_result.code
@@ -261,6 +273,7 @@ def _compile(self):
fc.co_argcount)
self.Python_magic = Python_magic
self.Script_magic = Script_magic
+ linecache.clearcache()
self._v_change = 0
def _newfun(self, code):
@@ -331,6 +344,8 @@ def _exec(self, bound_names, args, kw):
PythonScriptTracebackSupplement, self, -1)
safe_globals['__file__'] = getattr(
self, '_filepath', None) or self.get_filepath()
+ safe_globals['__loader__'] = PythonScriptLoader(self._body)
+
function = types.FunctionType(
function_code, safe_globals, None, function_argument_definitions)
diff --git a/src/Products/PythonScripts/tests/testPythonScript.py b/src/Products/PythonScripts/tests/testPythonScript.py
index 60ef6c3..7cd2266 100644
--- a/src/Products/PythonScripts/tests/testPythonScript.py
+++ b/src/Products/PythonScripts/tests/testPythonScript.py
@@ -15,6 +15,7 @@
import io
import os
import sys
+import traceback
import unittest
import warnings
from urllib.error import HTTPError
@@ -241,7 +242,8 @@ def test_manage_DAVget(self):
self.assertEqual(ps.read(), ps.manage_DAVget())
def test_PUT_native_string(self):
- ps = makerequest(self._filePS('complete'))
+ container = DummyFolder('container')
+ ps = makerequest(self._filePS('complete').__of__(container))
self.assertEqual(ps.title, 'This is a title')
self.assertEqual(ps.body(), 'print(foo+bar+baz)\nreturn printed\n')
self.assertEqual(ps.params(), 'foo, bar, baz=1')
@@ -265,7 +267,8 @@ def test_PUT_native_string(self):
self.assertEqual(ps.params(), 'oops')
def test_PUT_bytes(self):
- ps = makerequest(self._filePS('complete'))
+ container = DummyFolder('container')
+ ps = makerequest(self._filePS('complete').__of__(container))
self.assertEqual(ps.title, 'This is a title')
self.assertEqual(ps.body(), 'print(foo+bar+baz)\nreturn printed\n')
self.assertEqual(ps.params(), 'foo, bar, baz=1')
@@ -588,3 +591,46 @@ def test_PythonScript_proxyroles_nonmanager(self):
# Cleanup
noSecurityManager()
+
+
+class TestTraceback(FunctionalTestCase, PythonScriptTestBase):
+
+ def _format_exception(self):
+ return "".join(traceback.format_exception(*sys.exc_info()))
+
+ def test_source_code_in_traceback(self):
+ ps = self._newPS("1 / 0")
+ try:
+ ps()
+ except ZeroDivisionError:
+ formatted_exception = self._format_exception()
+ self.assertIn("1 / 0", formatted_exception)
+
+ ps.write("2 / 0")
+ try:
+ ps()
+ except ZeroDivisionError:
+ formatted_exception = self._format_exception()
+ self.assertIn("2 / 0", formatted_exception)
+
+ def test_multiple_scripts_in_traceback(self):
+ from Products.PythonScripts.PythonScript import manage_addPythonScript
+
+ script1_body = "container.script2()"
+ manage_addPythonScript(
+ self.folder,
+ "script1",
+ file=script1_body,
+ )
+ script2_body = "1 / 0"
+ manage_addPythonScript(
+ self.folder,
+ "script2",
+ file=script2_body,
+ )
+ try:
+ self.folder.script1()
+ except ZeroDivisionError:
+ formatted_exception = self._format_exception()
+ self.assertIn(script1_body, formatted_exception)
+ self.assertIn(script2_body, formatted_exception)
From 21a91db138cca3ada0e4dff475b061066362410c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Sat, 17 Feb 2024 23:25:43 +0900
Subject: [PATCH] backport changes from 0.52.29
We can not use 0.52.29 directly because it does not have a setup.py
and our buildout / setuptools tooling is too old.
---
src/SOAPpy/Client.py | 3 ++-
src/SOAPpy/Types.py | 2 ++
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/src/SOAPpy/Client.py b/src/SOAPpy/Client.py
index e86c5ec..d2bbefb 100644
--- a/src/SOAPpy/Client.py
+++ b/src/SOAPpy/Client.py
@@ -45,6 +45,7 @@
ident = '$Id: Client.py 1496 2010-03-04 23:46:17Z pooryorick $'
from .version import __version__
+from io import StringIO
#import xml.sax
import urllib.request, urllib.parse, urllib.error
@@ -152,7 +153,7 @@ class HTTP:
return -1, e.line, None
self.headers = response.msg
- self.file = response.fp
+ self.file = StringIO(response.fp.read().decode('utf-8'))
return response.status, response.reason, response.msg
def close(self):
diff --git a/src/SOAPpy/Types.py b/src/SOAPpy/Types.py
index de9dcac..cf08d17 100644
--- a/src/SOAPpy/Types.py
+++ b/src/SOAPpy/Types.py
@@ -1451,6 +1451,8 @@ class arrayType(collections.UserList, compoundType):
def __getitem__(self, item):
try:
return self.data[int(item)]
+ except TypeError:
+ return self.data[item]
except ValueError:
return getattr(self, item)
--
2.42.0
From c56146829ab065183c709229a9daa682cc445212 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Fri, 26 Apr 2024 15:09:39 +0900
Subject: [PATCH] fix loading font for ean13
use same technique as for code128
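The technique, sketched standalone (the font file name is illustrative): build
the font path from the module's own location instead of relying on PIL's
search path:

    import os
    from PIL import ImageFont

    # go from .../hubarcode/ean13/ up to the package root, then into fonts/
    ean13_dir = os.path.dirname(os.path.abspath(__file__))
    root_dir = os.path.dirname(ean13_dir)
    font = ImageFont.load_path(os.path.join(root_dir, "fonts", "courR24.pil"))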
---
hubarcode/ean13/renderer.py | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/hubarcode/ean13/renderer.py b/hubarcode/ean13/renderer.py
index 654501e..ff5f518 100644
--- a/hubarcode/ean13/renderer.py
+++ b/hubarcode/ean13/renderer.py
@@ -78,8 +78,10 @@ class EAN13Renderer:
# Draw the text
font_size = font_sizes.get(bar_width, 24)
- # Use relative name, PIL will do searching for us
- fontfile = os.path.join("fonts", "courR%02d.pil" % font_size)
+ # Locate and load the font file relative to the module
+ ean13dir, _ = os.path.split(__file__)
+ rootdir, _ = os.path.split(ean13dir)
+ fontfile = os.path.join(rootdir, "fonts", "courR%02d.pil" % font_size)
font = ImageFont.load_path(fontfile)
draw = ImageDraw.Draw(img)
--
2.42.0
From 42fab4bbede61a384046646dbc2573bb79957a89 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Sat, 17 Feb 2024 16:07:18 +0900
Subject: [PATCH] python3 support
---
interval.py | 366 ++++++++++++++++++++++++++++------------------------
1 file changed, 194 insertions(+), 172 deletions(-)
diff --git a/interval.py b/interval.py
index fe9e595..8a4b2be 100644
--- a/interval.py
+++ b/interval.py
@@ -36,15 +36,18 @@ False
>>> "15:30" in myHours
True
>>> inOffice = officeHours & myHours
->>> print inOffice
+>>> print(inOffice)
['08:30'..'11:30'),('12:30'..'17:00']
>>> overtime = myHours - officeHours
->>> print overtime
+>>> print(overtime)
('17:00'..'19:30']
"""
import copy
+import functools
+
+@functools.total_ordering
class Smallest:
"""Represents the smallest value
@@ -70,11 +73,17 @@ class Smallest:
The opposite of negative infinity is infinity, the largest value.
- >>> print -Smallest()
+ >>> print(-Smallest())
~
"""
return Largest()
+ def __eq__(self, other):
+ return isinstance(other, self.__class__)
+
+ def __gt__(self, other):
+ return False
+
def __cmp__(self, other):
"""Compares this with another object
@@ -103,7 +112,7 @@ class Smallest:
The string for the smallest number is -~, which means negative infinity.
- >>> print Smallest()
+ >>> print(Smallest())
-~
"""
return "-~"
@@ -124,6 +133,7 @@ class Smallest:
return 0x55555555
+@functools.total_ordering
class Largest:
"""Class representing the universal largest value
@@ -149,11 +159,17 @@ class Largest:
The opposite of infinity is negative infinity, the smallest value.
- >>> print -Largest()
+ >>> print(-Largest())
-~
"""
return Smallest()
+ def __eq__(self, other):
+ return isinstance(other, self.__class__)
+
+ def __ge__(self, other):
+ return True
+
def __cmp__(self, other):
"""Compares object with another object
@@ -182,7 +198,7 @@ class Largest:
The largest number is displayed as ~ (it sort of looks like infinity...)
- >>> print Largest()
+ >>> print(Largest())
~
"""
return "~"
@@ -203,6 +219,8 @@ class Largest:
Inf = Largest()
# Use -Inf for the smallest value
+
+@functools.total_ordering
class Interval:
"""Represents a continuous range of values
@@ -255,7 +273,7 @@ class Interval:
Intervals that are not normalized, i.e. that have a lower bound
exceeding an upper bound, are silently normalized.
- >>> print Interval(5, 2, lower_closed=False)
+ >>> print(Interval(5, 2, lower_closed=False))
[2..5)
Intervals can represent an empty set.
@@ -343,27 +361,27 @@ class Interval:
consisting of only a single value are shown as that value. Empty
intervals are shown as the string <Empty>
- >>> print Interval.all()
+ >>> print(Interval.all())
(...)
- >>> print Interval.less_than(100)
+ >>> print(Interval.less_than(100))
(...100)
- >>> print Interval.less_than_or_equal_to(2593)
+ >>> print(Interval.less_than_or_equal_to(2593))
(...2593]
- >>> print Interval.greater_than(2378)
+ >>> print(Interval.greater_than(2378))
(2378...)
- >>> print Interval.between(26, 8234, False)
+ >>> print(Interval.between(26, 8234, False))
(26..8234)
- >>> print Interval(237, 2348, lower_closed=False)
+ >>> print(Interval(237, 2348, lower_closed=False))
(237..2348]
- >>> print Interval.greater_than_or_equal_to(347)
+ >>> print(Interval.greater_than_or_equal_to(347))
[347...)
- >>> print Interval(237, 278, upper_closed=False)
+ >>> print(Interval(237, 278, upper_closed=False))
[237..278)
- >>> print Interval.between(723, 2378)
+ >>> print(Interval.between(723, 2378))
[723..2378]
- >>> print Interval.equal_to(5)
+ >>> print(Interval.equal_to(5))
5
- >>> print Interval.none()
+ >>> print(Interval.none())
<Empty>
"""
if self.lower_bound == self.upper_bound:
@@ -399,20 +417,22 @@ class Interval:
retval = "".join([lbchar, lstr, between, ustr, ubchar])
return retval
- def __nonzero__(self):
+ def __bool__(self):
"""Tells whether the interval is empty
-
>>> if Interval(12, 12, closed=False):
- ... print "Non-empty"
+ ... print("Non-empty")
>>> if Interval(12, 12, upper_closed=False):
- ... print "Non-empty"
+ ... print("Non-empty")
>>> if Interval(12, 12):
- ... print "Non-empty"
+ ... print("Non-empty")
Non-empty
"""
return self.lower_bound != self.upper_bound \
or (self.upper_closed and self.lower_closed)
+ def __lt__(self, other):
+ return self.comes_before(other)
+
def __cmp__(self, other):
"""Compares two intervals for ordering purposes
@@ -442,15 +462,15 @@ class Interval:
def __and__(self, other):
"""Returns the intersection of two intervals
- >>> print Interval.greater_than(3) & Interval.greater_than(5)
+ >>> print(Interval.greater_than(3) & Interval.greater_than(5))
(5...)
- >>> print Interval.greater_than(3) & Interval.equal_to(3)
+ >>> print(Interval.greater_than(3) & Interval.equal_to(3))
<Empty>
- >>> print Interval.greater_than_or_equal_to(3) & Interval.equal_to(3)
+ >>> print(Interval.greater_than_or_equal_to(3) & Interval.equal_to(3))
3
- >>> print Interval.all() & Interval.all()
+ >>> print(Interval.all() & Interval.all())
(...)
- >>> print Interval.greater_than(3) & Interval.less_than(10)
+ >>> print(Interval.greater_than(3) & Interval.less_than(10))
(3..10)
"""
if self == other:
@@ -494,7 +514,7 @@ class Interval:
def none(cls):
"""Returns an empty interval
- >>> print Interval.none()
+ >>> print(Interval.none())
<Empty>
"""
return cls(0, 0, closed=False)
@@ -503,7 +523,7 @@ class Interval:
def all(cls):
"""Returns an interval encompassing all values
- >>> print Interval.all()
+ >>> print(Interval.all())
(...)
"""
return cls()
@@ -516,9 +536,9 @@ class Interval:
then the endpoints are included. Otherwise, the endpoints are
excluded.
- >>> print Interval.between(2, 4)
+ >>> print(Interval.between(2, 4))
[2..4]
- >>> print Interval.between(2, 4, False)
+ >>> print(Interval.between(2, 4, False))
(2..4)
"""
return cls(a, b, closed=closed)
@@ -529,7 +549,7 @@ class Interval:
Returns an interval containing only a.
- >>> print Interval.equal_to(32)
+ >>> print(Interval.equal_to(32))
32
"""
return cls(a, a)
@@ -541,7 +561,7 @@ class Interval:
Returns an interval containing all values less than a. If closed
is True, then all values less than or equal to a are returned.
- >>> print Interval.less_than(32)
+ >>> print(Interval.less_than(32))
(...32)
"""
return cls(upper_bound=a, upper_closed=False)
@@ -550,7 +570,7 @@ class Interval:
def less_than_or_equal_to(cls, a):
"""Returns an interval containing the given values and everything less
- >>> print Interval.less_than_or_equal_to(32)
+ >>> print(Interval.less_than_or_equal_to(32))
(...32]
"""
return cls(upper_bound=a, upper_closed=True)
@@ -559,7 +579,7 @@ class Interval:
def greater_than(cls, a):
"""Returns interval of all values greater than the given value
- >>> print Interval.greater_than(32)
+ >>> print(Interval.greater_than(32))
(32...)
"""
return cls(lower_bound=a, lower_closed=False)
@@ -568,7 +588,7 @@ class Interval:
def greater_than_or_equal_to(cls, a):
"""Returns interval of all values greater than or equal to the given value
- >>> print Interval.greater_than_or_equal_to(32)
+ >>> print(Interval.greater_than_or_equal_to(32))
[32...)
"""
return cls(lower_bound=a, lower_closed=True)
@@ -637,29 +657,29 @@ class Interval:
>>> r13 = Interval.greater_than(100)
>>> r14 = Interval.equal_to(100)
>>> r15 = Interval.greater_than_or_equal_to(100)
- >>> print r13.join(r15)
+ >>> print(r13.join(r15))
[100...)
- >>> print r7.join(r6)
+ >>> print(r7.join(r6))
(-100..100]
- >>> print r11.join(r2)
+ >>> print(r11.join(r2))
(...100]
- >>> print r4.join(r15)
+ >>> print(r4.join(r15))
(...)
- >>> print r8.join(r8)
+ >>> print(r8.join(r8))
(-100...)
- >>> print r3.join(r7)
+ >>> print(r3.join(r7))
(...100]
- >>> print r5.join(r10)
+ >>> print(r5.join(r10))
(...)
- >>> print r9.join(r1)
+ >>> print(r9.join(r1))
(...-100]
- >>> print r12.join(r5)
+ >>> print(r12.join(r5))
(...)
- >>> print r13.join(r1)
+ >>> print(r13.join(r1))
Traceback (most recent call last):
...
ArithmeticError: The Intervals are disjoint.
- >>> print r14.join(r2)
+ >>> print(r14.join(r2))
Traceback (most recent call last):
...
ArithmeticError: The Intervals are disjoint.
@@ -894,27 +914,27 @@ class BaseIntervalSet(object):
If no parameters are provided, then an empty IntervalSet is
constructed.
- >>> print IntervalSet() # An empty set
+ >>> print(IntervalSet()) # An empty set
<Empty>
Interval objects arguments are added directly to the IntervalSet.
- >>> print IntervalSet([Interval(4, 6, lower_closed=False)])
+ >>> print(IntervalSet([Interval(4, 6, lower_closed=False)]))
(4..6]
- >>> print IntervalSet([Interval.less_than_or_equal_to(2)])
+ >>> print(IntervalSet([Interval.less_than_or_equal_to(2)]))
(...2]
Each non-Interval value of an iterator is added as a discrete
value.
- >>> print IntervalSet(set([3, 7, 2, 1]))
+ >>> print(IntervalSet(set([3, 7, 2, 1])))
1,2,3,7
- >>> print IntervalSet(["Bob", "Fred", "Mary"])
+ >>> print(IntervalSet(["Bob", "Fred", "Mary"]))
'Bob','Fred','Mary'
- >>> print IntervalSet(range(10))
+ >>> print(IntervalSet(range(10)))
0,1,2,3,4,5,6,7,8,9
- >>> print IntervalSet(
- ... Interval.between(l, u) for l, u in [(10, 20), (30, 40)])
+ >>> print(IntervalSet(
+ ... Interval.between(l, u) for l, u in [(10, 20), (30, 40)]))
[10..20],[30..40]
"""
self.intervals = []
@@ -935,9 +955,9 @@ class BaseIntervalSet(object):
1
>>> nonempty = IntervalSet([3])
>>> if IntervalSet.empty():
- ... print "Non-empty"
+ ... print("Non-empty")
>>> if nonempty:
- ... print "Non-empty"
+ ... print("Non-empty")
Non-empty
"""
return len(self.intervals)
@@ -948,17 +968,17 @@ class BaseIntervalSet(object):
This function shows a string representation of an IntervalSet.
The string is shown sorted, with all intervals normalized.
- >>> print IntervalSet()
+ >>> print(IntervalSet())
<Empty>
- >>> print IntervalSet([62])
+ >>> print(IntervalSet([62]))
62
- >>> print IntervalSet([62, 56])
+ >>> print(IntervalSet([62, 56]))
56,62
- >>> print IntervalSet([23, Interval(26, 32, upper_closed=False)])
+ >>> print(IntervalSet([23, Interval(26, 32, upper_closed=False)]))
23,[26..32)
- >>> print IntervalSet.less_than(3) + IntervalSet.greater_than(3)
+ >>> print(IntervalSet.less_than(3) + IntervalSet.greater_than(3))
(...3),(3...)
- >>> print IntervalSet([Interval.less_than_or_equal_to(6)])
+ >>> print(IntervalSet([Interval.less_than_or_equal_to(6)]))
(...6]
"""
if len(self.intervals) == 0:
@@ -987,20 +1007,20 @@ class BaseIntervalSet(object):
...
IndexError: Index is out of range
>>> interval = IntervalSet.greater_than(5)
- >>> print interval[0]
+ >>> print(interval[0])
(5...)
- >>> print interval[1]
+ >>> print(interval[1])
Traceback (most recent call last):
...
IndexError: Index is out of range
- >>> print interval[-1]
+ >>> print(interval[-1])
(5...)
>>> interval = IntervalSet([3, 6])
- >>> print interval[1]
+ >>> print(interval[1])
6
- >>> print interval[0]
+ >>> print(interval[0])
3
- >>> print interval[2]
+ >>> print(interval[2])
Traceback (most recent call last):
...
IndexError: Index is out of range
@@ -1018,14 +1038,14 @@ class BaseIntervalSet(object):
that with the left-most lower bound to that with the right-most.
>>> for i in IntervalSet():
- ... print i
+ ... print(i)
...
>>> for i in IntervalSet.between(3, 5):
- ... print i
+ ... print(i)
...
[3..5]
>>> for i in IntervalSet([2, 5, 3]):
- ... print i
+ ... print(i)
...
2
3
@@ -1104,11 +1124,11 @@ class BaseIntervalSet(object):
def bounds(self):
"""Returns an interval that encompasses the entire BaseIntervalSet
- >>> print IntervalSet([Interval.between(4, 6), 2, 12]).bounds()
+ >>> print(IntervalSet([Interval.between(4, 6), 2, 12]).bounds())
[2..12]
- >>> print IntervalSet().bounds()
+ >>> print(IntervalSet().bounds())
<Empty>
- >>> print IntervalSet.all().bounds()
+ >>> print(IntervalSet.all().bounds())
(...)
"""
if len(self.intervals) == 0:
@@ -1240,7 +1260,7 @@ class BaseIntervalSet(object):
>>> for i in s:
... l.add(str(i))
...
- >>> print len(l)
+ >>> print(len(l))
6
>>> "2" in l
True
@@ -1284,15 +1304,15 @@ class BaseIntervalSet(object):
>>> zero = IntervalSet([0])
>>> nonzero = IntervalSet.not_equal_to(0)
>>> empty = IntervalSet.empty()
- >>> print evens + positives
+ >>> print(evens + positives)
-8,-6,-4,-2,[0...)
- >>> print negatives + zero
+ >>> print(negatives + zero)
(...0]
- >>> print empty + negatives
+ >>> print(empty + negatives)
(...0)
- >>> print empty + naturals
+ >>> print(empty + naturals)
[0...)
- >>> print nonzero + evens
+ >>> print(nonzero + evens)
(...)
"""
return self.__or__(other)
@@ -1309,17 +1329,17 @@ class BaseIntervalSet(object):
>>> nonzero = IntervalSet.not_equal_to(0)
>>> empty = IntervalSet.empty()
>>> all = IntervalSet.all()
- >>> print evens - nonzero
+ >>> print(evens - nonzero)
0
- >>> print empty - naturals
+ >>> print(empty - naturals)
<Empty>
- >>> print zero - naturals
+ >>> print(zero - naturals)
<Empty>
- >>> print positives - zero
+ >>> print(positives - zero)
(0...)
- >>> print naturals - negatives
+ >>> print(naturals - negatives)
[0...)
- >>> print all - zero
+ >>> print(all - zero)
(...0),(0...)
>>> all - zero == nonzero
True
@@ -1380,17 +1400,17 @@ class BaseIntervalSet(object):
>>> nonzero = IntervalSet.not_equal_to(0)
>>> empty = IntervalSet.empty()
>>> all = IntervalSet.all()
- >>> print evens.difference(nonzero)
+ >>> print(evens.difference(nonzero))
0
- >>> print empty.difference(naturals)
+ >>> print(empty.difference(naturals))
<Empty>
- >>> print zero.difference(naturals)
+ >>> print(zero.difference(naturals))
<Empty>
- >>> print positives.difference(zero)
+ >>> print(positives.difference(zero))
(0...)
- >>> print naturals.difference(negatives)
+ >>> print(naturals.difference(negatives))
[0...)
- >>> print all.difference(zero)
+ >>> print(all.difference(zero))
(...0),(0...)
>>> all.difference(zero) == nonzero
True
@@ -1413,15 +1433,15 @@ class BaseIntervalSet(object):
>>> zero = IntervalSet([0])
>>> nonzero = IntervalSet.not_equal_to(0)
>>> empty = IntervalSet.empty()
- >>> print naturals and naturals
+ >>> print(naturals and naturals)
[0...)
- >>> print evens & zero
+ >>> print(evens & zero)
0
- >>> print negatives & zero
+ >>> print(negatives & zero)
<Empty>
- >>> print nonzero & positives
+ >>> print(nonzero & positives)
(0...)
- >>> print empty & zero
+ >>> print(empty & zero)
<Empty>
>>> positives & [0]
Traceback (most recent call last):
@@ -1469,15 +1489,15 @@ class BaseIntervalSet(object):
>>> zero = IntervalSet([0])
>>> nonzero = IntervalSet.not_equal_to(0)
>>> empty = IntervalSet.empty()
- >>> print naturals.intersection(naturals)
+ >>> print(naturals.intersection(naturals))
[0...)
- >>> print evens.intersection(zero)
+ >>> print(evens.intersection(zero))
0
- >>> print negatives.intersection(zero)
+ >>> print(negatives.intersection(zero))
<Empty>
- >>> print nonzero.intersection(positives)
+ >>> print(nonzero.intersection(positives))
(0...)
- >>> print empty.intersection(zero)
+ >>> print(empty.intersection(zero))
<Empty>
"""
if isinstance(other, BaseIntervalSet):
@@ -1497,17 +1517,17 @@ class BaseIntervalSet(object):
>>> nonzero = IntervalSet.not_equal_to(0)
>>> empty = IntervalSet.empty()
>>> all = IntervalSet.all()
- >>> print evens | positives
+ >>> print(evens | positives)
-8,-6,-4,-2,[0...)
- >>> print negatives | zero
+ >>> print(negatives | zero)
(...0]
- >>> print empty | negatives
+ >>> print(empty | negatives)
(...0)
- >>> print empty | naturals
+ >>> print(empty | naturals)
[0...)
- >>> print nonzero | evens
+ >>> print(nonzero | evens)
(...)
- >>> print negatives | range(5)
+ >>> print(negatives | range(5))
Traceback (most recent call last):
...
TypeError: unsupported operand type(s) for |: expected BaseIntervalSet
@@ -1537,17 +1557,17 @@ class BaseIntervalSet(object):
>>> nonzero = IntervalSet.not_equal_to(0)
>>> empty = IntervalSet.empty()
>>> all = IntervalSet.all()
- >>> print evens.union(positives)
+ >>> print(evens.union(positives))
-8,-6,-4,-2,[0...)
- >>> print negatives.union(zero)
+ >>> print(negatives.union(zero))
(...0]
- >>> print empty.union(negatives)
+ >>> print(empty.union(negatives))
(...0)
- >>> print empty.union(naturals)
+ >>> print(empty.union(naturals))
[0...)
- >>> print nonzero.union(evens)
+ >>> print(nonzero.union(evens))
(...)
- >>> print negatives.union(range(5))
+ >>> print(negatives.union(range(5)))
(...0],1,2,3,4
"""
if isinstance(other, BaseIntervalSet):
@@ -1566,13 +1586,13 @@ class BaseIntervalSet(object):
>>> zero = IntervalSet([0])
>>> nonzero = IntervalSet.not_equal_to(0)
>>> empty = IntervalSet.empty()
- >>> print nonzero ^ naturals
+ >>> print(nonzero ^ naturals)
(...0]
- >>> print zero ^ negatives
+ >>> print(zero ^ negatives)
(...0]
- >>> print positives ^ empty
+ >>> print(positives ^ empty)
(0...)
- >>> print evens ^ zero
+ >>> print(evens ^ zero)
-8,-6,-4,-2,2,4,6,8
>>> negatives ^ [0]
Traceback (most recent call last):
@@ -1599,15 +1619,15 @@ class BaseIntervalSet(object):
>>> zero = IntervalSet([0])
>>> nonzero = IntervalSet.not_equal_to(0)
>>> empty = IntervalSet.empty()
- >>> print nonzero.symmetric_difference(naturals)
+ >>> print(nonzero.symmetric_difference(naturals))
(...0]
- >>> print zero.symmetric_difference(negatives)
+ >>> print(zero.symmetric_difference(negatives))
(...0]
- >>> print positives.symmetric_difference(empty)
+ >>> print(positives.symmetric_difference(empty))
(0...)
- >>> print evens.symmetric_difference(zero)
+ >>> print(evens.symmetric_difference(zero))
-8,-6,-4,-2,2,4,6,8
- >>> print evens.symmetric_difference(range(0, 9, 2))
+ >>> print(evens.symmetric_difference(range(0, 9, 2)))
-8,-6,-4,-2
"""
if isinstance(other, BaseIntervalSet):
@@ -1625,15 +1645,15 @@ class BaseIntervalSet(object):
>>> evens = IntervalSet([-8, -6, -4, -2, 0, 2, 4, 6, 8])
>>> zero = IntervalSet([0])
>>> nonzero = IntervalSet.not_equal_to(0)
- >>> print ~(IntervalSet.empty())
+ >>> print(~(IntervalSet.empty()))
(...)
>>> ~negatives == naturals
True
- >>> print ~positives
+ >>> print(~positives)
(...0]
>>> ~naturals == negatives
True
- >>> print ~evens
+ >>> print(~evens)
(...-8),(-8..-6),(-6..-4),(-4..-2),(-2..0),(0..2),(2..4),(4..6),(6..8),(8...)
>>> ~zero == nonzero
True
@@ -1954,9 +1974,9 @@ class BaseIntervalSet(object):
def less_than(cls, n):
"""Returns an IntervalSet containing values less than the given value
- >>> print IntervalSet.less_than(0)
+ >>> print(IntervalSet.less_than(0))
(...0)
- >>> print IntervalSet.less_than(-23)
+ >>> print(IntervalSet.less_than(-23))
(...-23)
"""
return cls([Interval.less_than(n)])
@@ -1966,9 +1986,9 @@ class BaseIntervalSet(object):
"""Returns an IntervalSet containing values less than or equal to the
given value
- >>> print IntervalSet.less_than_or_equal_to(0)
+ >>> print(IntervalSet.less_than_or_equal_to(0))
(...0]
- >>> print IntervalSet.less_than_or_equal_to(-23)
+ >>> print(IntervalSet.less_than_or_equal_to(-23))
(...-23]
"""
return cls([Interval.less_than_or_equal_to(n)])
@@ -1977,9 +1997,9 @@ class BaseIntervalSet(object):
def greater_than(cls, n):
"""Returns an IntervalSet containing values greater than the given value
- >>> print IntervalSet.greater_than(0)
+ >>> print(IntervalSet.greater_than(0))
(0...)
- >>> print IntervalSet.greater_than(-23)
+ >>> print(IntervalSet.greater_than(-23))
(-23...)
"""
return cls([Interval.greater_than(n)])
@@ -1989,9 +2009,9 @@ class BaseIntervalSet(object):
"""Returns an IntervalSet containing values greater than or equal to
the given value
- >>> print IntervalSet.greater_than_or_equal_to(0)
+ >>> print(IntervalSet.greater_than_or_equal_to(0))
[0...)
- >>> print IntervalSet.greater_than_or_equal_to(-23)
+ >>> print(IntervalSet.greater_than_or_equal_to(-23))
[-23...)
"""
return cls([Interval.greater_than_or_equal_to(n)])
@@ -2000,9 +2020,9 @@ class BaseIntervalSet(object):
def not_equal_to(cls, n):
"""Returns an IntervalSet of all values not equal to n
- >>> print IntervalSet.not_equal_to(0)
+ >>> print(IntervalSet.not_equal_to(0))
(...0),(0...)
- >>> print IntervalSet.not_equal_to(-23)
+ >>> print(IntervalSet.not_equal_to(-23))
(...-23),(-23...)
"""
return cls([Interval.less_than(n), Interval.greater_than(n)])
@@ -2014,9 +2034,9 @@ class BaseIntervalSet(object):
If closed is True, then the endpoints are included; otherwise, they
aren't.
- >>> print IntervalSet.between(0, 100)
+ >>> print(IntervalSet.between(0, 100))
[0..100]
- >>> print IntervalSet.between(-1, 1)
+ >>> print(IntervalSet.between(-1, 1))
[-1..1]
"""
return cls([Interval.between(a, b, closed)])
@@ -2025,7 +2045,7 @@ class BaseIntervalSet(object):
def all(cls):
"""Returns an interval set containing all values
- >>> print IntervalSet.all()
+ >>> print(IntervalSet.all())
(...)
"""
return cls([Interval.all()])
@@ -2034,7 +2054,7 @@ class BaseIntervalSet(object):
def empty(cls):
"""Returns an interval set containing no values.
- >>> print IntervalSet.empty()
+ >>> print(IntervalSet.empty())
<Empty>
"""
return cls()
@@ -2101,7 +2121,7 @@ class IntervalSet(BaseIntervalSet):
>>> del interval[1]
>>> len(interval)
2
- >>> print interval
+ >>> print(interval)
-2,7
"""
try:
@@ -2114,13 +2134,13 @@ class IntervalSet(BaseIntervalSet):
>>> r = IntervalSet()
>>> r.add(4)
- >>> print r
+ >>> print(r)
4
>>> r.add(Interval(23, 39, lower_closed=False))
- >>> print r
+ >>> print(r)
4,(23..39]
>>> r.add(Interval.less_than(25))
- >>> print r
+ >>> print(r)
(...39]
"""
BaseIntervalSet._add(self, obj)
@@ -2134,10 +2154,10 @@ class IntervalSet(BaseIntervalSet):
>>> r = IntervalSet.all()
>>> r.remove(4)
- >>> print r
+ >>> print(r)
(...4),(4...)
>>> r.remove(Interval(23, 39, lower_closed=False))
- >>> print r
+ >>> print(r)
(...4),(4..23],(39...)
>>> r.remove(Interval.less_than(25))
Traceback (most recent call last):
@@ -2157,13 +2177,13 @@ class IntervalSet(BaseIntervalSet):
>>> r = IntervalSet.all()
>>> r.discard(4)
- >>> print r
+ >>> print(r)
(...4),(4...)
>>> r.discard(Interval(23, 39, lower_closed=False))
- >>> print r
+ >>> print(r)
(...4),(4..23],(39...)
>>> r.discard(Interval.less_than(25))
- >>> print r
+ >>> print(r)
(39...)
"""
diff = self - IntervalSet([obj])
@@ -2177,18 +2197,18 @@ class IntervalSet(BaseIntervalSet):
>>> r = IntervalSet.all()
>>> r.difference_update([4])
- >>> print r
+ >>> print(r)
(...4),(4...)
>>> r.difference_update(
... IntervalSet([Interval(23, 39, lower_closed=False)]))
- >>> print r
+ >>> print(r)
(...4),(4..23],(39...)
>>> r.difference_update(IntervalSet.less_than(25))
- >>> print r
+ >>> print(r)
(39...)
>>> r2 = IntervalSet.all()
>>> r.difference_update(r2)
- >>> print r
+ >>> print(r)
<Empty>
"""
diff = self.difference(other)
@@ -2198,10 +2218,10 @@ class IntervalSet(BaseIntervalSet):
"""Removes all Intervals from the object
>>> s = IntervalSet([2, 7, Interval.greater_than(8), 2, 6, 34])
- >>> print s
+ >>> print(s)
2,6,7,(8...)
>>> s.clear()
- >>> print s
+ >>> print(s)
<Empty>
"""
self.intervals = []
@@ -2214,17 +2234,17 @@ class IntervalSet(BaseIntervalSet):
>>> r = IntervalSet()
>>> r.update([4])
- >>> print r
+ >>> print(r)
4
>>> r.update(IntervalSet([Interval(23, 39, lower_closed=False)]))
- >>> print r
+ >>> print(r)
4,(23..39]
>>> r.update(IntervalSet.less_than(25))
- >>> print r
+ >>> print(r)
(...39]
>>> r2 = IntervalSet.all()
>>> r.update(r2)
- >>> print r
+ >>> print(r)
(...)
"""
union = self.union(other)
@@ -2238,19 +2258,19 @@ class IntervalSet(BaseIntervalSet):
>>> r = IntervalSet.all()
>>> r.intersection_update([4])
- >>> print r
+ >>> print(r)
4
>>> r = IntervalSet.all()
>>> r.intersection_update(
... IntervalSet([Interval(23, 39, lower_closed=False)]))
- >>> print r
+ >>> print(r)
(23..39]
>>> r.intersection_update(IntervalSet.less_than(25))
- >>> print r
+ >>> print(r)
(23..25)
>>> r2 = IntervalSet.all()
>>> r.intersection_update(r2)
- >>> print r
+ >>> print(r)
(23..25)
"""
intersection = self.intersection(other)
@@ -2264,18 +2284,18 @@ class IntervalSet(BaseIntervalSet):
>>> r = IntervalSet.empty()
>>> r.symmetric_difference_update([4])
- >>> print r
+ >>> print(r)
4
>>> r.symmetric_difference_update(
... IntervalSet([Interval(23, 39, lower_closed=False)]))
- >>> print r
+ >>> print(r)
4,(23..39]
>>> r.symmetric_difference_update(IntervalSet.less_than(25))
- >>> print r
+ >>> print(r)
(...4),(4..23],[25..39]
>>> r2 = IntervalSet.all()
>>> r.symmetric_difference_update(r2)
- >>> print r
+ >>> print(r)
4,(23..25),(39...)
"""
xor = self.symmetric_difference(other)
@@ -2294,7 +2314,7 @@ class IntervalSet(BaseIntervalSet):
True
>>> "7" in l
True
- >>> print s
+ >>> print(s)
<Empty>
>>> i = s.pop()
Traceback (most recent call last):
@@ -2341,7 +2361,7 @@ class FrozenIntervalSet(BaseIntervalSet):
... FrozenIntervalSet.less_than(3) : 3}
"""
- def __new__(cls, items=[]):
+ def __new__(cls, items=None):
"""Constructs a new FrozenInteralSet
Object creation is just like with a regular IntervalSet, except for
@@ -2354,11 +2374,13 @@ class FrozenIntervalSet(BaseIntervalSet):
>>> id(fs1) == id(fs2)
True
"""
+ if items is None:
+ items = []
if (cls == FrozenIntervalSet) and isinstance(items, FrozenIntervalSet):
result = items
else:
s = IntervalSet(items)
- result = super(FrozenIntervalSet, cls).__new__(cls, items)
+ result = super(FrozenIntervalSet, cls).__new__(cls)
result.intervals = s.intervals
return result
--
2.42.0
diff -ur urlnorm-1.1.4.orig/setup.py urlnorm-1.1.4/setup.py
--- urlnorm-1.1.4.orig/setup.py 2016-08-05 20:07:24.000000000 +0200
+++ urlnorm-1.1.4/setup.py 2022-10-21 09:32:35.377477901 +0200
@@ -9,8 +9,15 @@
description="Normalize a URL to a standard unicode encoding",
py_modules=['urlnorm'],
license='MIT License',
+ install_requires=['six'],
author='Jehiah Czebotar',
author_email='jehiah@gmail.com',
url='http://github.com/jehiah/urlnorm',
download_url="http://github.com/downloads/jehiah/urlnorm/urlnorm-%s.tar.gz" % version,
+ classifiers=[
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.6',
+ ],
)
diff -ur urlnorm-1.1.4.orig/urlnorm.py urlnorm-1.1.4/urlnorm.py
--- urlnorm-1.1.4.orig/urlnorm.py 2016-08-05 20:06:32.000000000 +0200
+++ urlnorm-1.1.4/urlnorm.py 2022-10-21 09:32:35.377477901 +0200
@@ -41,6 +41,10 @@
- more fine-grained authority parsing and normalisation
"""
+from __future__ import absolute_import
+from six import unichr
+import six
+from six.moves import range
__license__ = """
Copyright (c) 1999-2002 Mark Nottingham <mnot@pobox.com>
Copyright (c) 2010 Jehiah Czebotar <jehiah@gmail.com>
@@ -67,8 +71,7 @@
# also update in setup.py
__version__ = "1.1.4"
-from urlparse import urlparse, urlunparse
-from string import lower
+from six.moves.urllib.parse import urlparse, urlunparse, unquote
import re
@@ -108,8 +111,8 @@
qs_unsafe_list = set('?&=+%#')
fragment_unsafe_list = set('+%#')
path_unsafe_list = set('/?;%+#')
-_hextochr = dict(('%02x' % i, chr(i)) for i in range(256))
-_hextochr.update(('%02X' % i, chr(i)) for i in range(256))
+_hextochr = dict((b'%02x' % i, six.int2byte(i)) for i in range(256))
+_hextochr.update((b'%02X' % i, six.int2byte(i)) for i in range(256))
def unquote_path(s):
@@ -132,22 +135,23 @@
"""unquote percent escaped string except for percent escape sequences that are in unsafe_list"""
# note: this build utf8 raw strings ,then does a .decode('utf8') at the end.
# as a result it's doing .encode('utf8') on each block of the string as it's processed.
- res = _utf8(s).split('%')
- for i in xrange(1, len(res)):
+ unsafe_list = [_utf8(i) for i in unsafe_list]
+ res = _utf8(s).split(b'%')
+ for i in range(1, len(res)):
item = res[i]
try:
raw_chr = _hextochr[item[:2]]
if raw_chr in unsafe_list or ord(raw_chr) < 20:
# leave it unescaped (but uppercase the percent escape)
- res[i] = '%' + item[:2].upper() + item[2:]
+ res[i] = b'%' + item[:2].upper() + item[2:]
else:
res[i] = raw_chr + item[2:]
except KeyError:
- res[i] = '%' + item
+ res[i] = b'%' + item
except UnicodeDecodeError:
# note: i'm not sure what this does
res[i] = unichr(int(item[:2], 16)) + item[2:]
- o = "".join(res)
+ o = b"".join(res)
return _unicode(o)
@@ -160,7 +164,7 @@
def norm_tuple(scheme, authority, path, parameters, query, fragment):
"""given individual url components, return its normalized form"""
- scheme = lower(scheme)
+ scheme = scheme.lower()
if not scheme:
raise InvalidUrl('missing URL scheme')
authority = norm_netloc(scheme, authority)
@@ -203,7 +207,7 @@
return '/'
return path
-MAX_IP = 0xffffffffL
+MAX_IP = 0xffffffff
def int2ip(ipnum):
@@ -238,7 +242,7 @@
if '.' not in host and not (host[0] == '[' and host[-1] == ']'):
raise InvalidUrl('host %r is not valid' % host)
- authority = lower(host)
+ authority = host.lower()
if 'xn--' in authority:
subdomains = [_idn(subdomain) for subdomain in authority.split('.')]
authority = '.'.join(subdomains)
@@ -260,14 +264,14 @@
def _utf8(value):
- if isinstance(value, unicode):
+ if isinstance(value, six.text_type):
return value.encode("utf-8")
assert isinstance(value, str)
return value
def _unicode(value):
- if isinstance(value, str):
+ if isinstance(value, six.binary_type):
return value.decode("utf-8")
- assert isinstance(value, unicode)
+ assert isinstance(value, six.text_type)
return value
# ethtool - query or control network driver and hardware settings
# https://www.kernel.org/pub/software/network/ethtool
[buildout]
extends =
../xz-utils/buildout.cfg
parts = ethtool
[ethtool]
recipe = slapos.recipe.cmmi
url = https://www.kernel.org/pub/software/network/ethtool/ethtool-4.11.tar.xz
md5sum = 16d38f4ebe23e44f96f7d8b38ed3652c
environment =
PATH=${xz-utils:location}/bin:%(PATH)s
......@@ -8,6 +8,7 @@ extends =
../perl/buildout.cfg
../tar/buildout.cfg
../binutils/buildout.cfg
../xz-utils/buildout.cfg
parts =
gcc-10.5
......@@ -56,7 +57,7 @@ post-install =
chmod +x ld
environment =
LDFLAGS=-Wl,-rpath=${gmp:location}/lib -Wl,-rpath=${isl:location}/lib -Wl,-rpath=${mpc:location}/lib -Wl,-rpath=${mpfr:location}/lib
PATH=${binutils:location}/bin:${gettext:location}/bin:${perl:location}/bin:${tar:location}/bin:%(PATH)s
PATH=${binutils:location}/bin:${gettext:location}/bin:${perl:location}/bin:${tar:location}/bin:${xz-utils:location}/bin:%(PATH)s
[gcc-common:os.path.isdir('/usr/lib/arm-linux-gnueabihf') and os.access('/proc/device-tree/model', os.R_OK) and 'OLinuXino' in open('/proc/device-tree/model').read()]
extra-configure-options =
......
......@@ -18,14 +18,13 @@ shared = true
url = https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs10031/ghostscript-10.03.1.tar.gz
md5sum = 01c515f190de2d8dcce4767407720855
pkg_config_depends = ${libidn:location}/lib/pkgconfig:${libtiff:location}/lib/pkgconfig:${libjpeg:location}/lib/pkgconfig:${fontconfig:location}/lib/pkgconfig:${fontconfig:pkg_config_depends}
# XXX --with-tessdata works around a slaprunner bug of software being installed in a path containing //
configure-options =
--disable-cups
--with-system-libtiff
--without-so
--without-x
--with-drivers=FILES
--with-tessdata=$(python -c 'print("""${:tessdata-location}""".replace("//", "/"))')
--with-tessdata=${:tessdata-location}
environment =
PATH=${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${:pkg_config_depends}
......
......@@ -33,6 +33,9 @@ environment =
PATH=${curl:location}/bin:${gettext:location}/bin:${tar:location}/bin:${xz-utils:location}/bin:%(PATH)s
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-L${zlib:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${curl:location}/lib -Wl,-rpath=${libexpat:location}/lib
patch-options = -p1
patches =
${:_profile_base_location_}/prevent-git-fetch-pack-segfault.patch#f18e84a5ff8951aad094eddf32fbf8a1
[gitweb]
<= git
......
From 93be30997053105b5e3bad9203e5d07cbea0f0e6 Mon Sep 17 00:00:00 2001
From: Alain Takoudjou <alain.takoudjou@nexedi.com>
Date: Fri, 7 Jun 2024 12:13:50 +0200
Subject: [PATCH] prevent-git-fetch-pack-segfault
---
fetch-pack.c | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/fetch-pack.c b/fetch-pack.c
index 65c1ff4bb4..a9b1558ce2 100644
--- a/fetch-pack.c
+++ b/fetch-pack.c
@@ -1037,7 +1037,7 @@ static int get_pack(struct fetch_pack_args *args,
cmd.git_cmd = 1;
if (start_command(&cmd))
die(_("fetch-pack: unable to fork off %s"), cmd_name);
- if (do_keep && (pack_lockfiles || fsck_objects)) {
+ if (do_keep && (pack_lockfiles /*|| fsck_objects*/)) {
int is_well_formed;
char *pack_lockfile = index_pack_lockfile(cmd.out, &is_well_formed);
--
2.34.1
......@@ -96,6 +96,15 @@ patches +=
https://lab.nexedi.com/kirr/go/commit/6dfc2256e2cd1ad46f3a5e9da85af6a70be7fba3.patch#fa9de83838b198490abec0a6a924ad8d
https://lab.nexedi.com/kirr/go/commit/28fbdd01d6c89db1e81e12bd05910fdec4c3b9b3.patch#c75e2eb7aaaeffdd3c507ec1beacd3ca
[golang1.15]
<= golang-common-pre-1.19
url = https://go.dev/dl/go1.15.15.src.tar.gz
md5sum = 05fedd8289291eb2d91cd0c092b41aaa
# go1.15 needs go1.4 to bootstrap
environment-extra =
GOROOT_BOOTSTRAP=${golang14:location}
[golang1.16]
<= golang-common-pre-1.19
url = https://golang.org/dl/go1.16.15.src.tar.gz
......
......@@ -10,11 +10,12 @@ parts =
LDFLAGS = -L${hdf5:location}/lib -Wl,-rpath=${hdf5:location}/lib
CPPFLAGS = -I${hdf5:location}/include
LD_LIBRARY_PATH=${hdf5:location}/lib
HDF5_DIR=${hdf5:location}
[h5py]
recipe = zc.recipe.egg:custom
egg = h5py
setup-eggs =
setup-eggs =
${cython:egg}
${numpy:egg}
pkgconfig
......
......@@ -7,10 +7,14 @@ parts =
[hdf5]
recipe = slapos.recipe.cmmi
shared = true
url = https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.0-patch1/src/hdf5-1.10.0-patch1.tar.bz2
md5sum = f6d980febe2c35c11670a9b34fa3b487
url = https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.3/src/hdf5-1.14.3.tar.gz
md5sum = 075d923171c7785b2ee7d820b0ef8707
configure-options =
--with-zlib=${zlib:location}
environment =
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
[hdf5:python2]
url = https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.0-patch1/src/hdf5-1.10.0-patch1.tar.bz2
md5sum = f6d980febe2c35c11670a9b34fa3b487
......@@ -11,7 +11,7 @@ parts =
<= numpy-env
[ipython]
recipe = zc.recipe.egg:custom
recipe = zc.recipe.egg
egg = ipython
environment = ipython-env
setup-eggs =
......
......@@ -75,7 +75,7 @@ class ERP5Kernel(Kernel):
self.title = None
# Allowed HTTP request code list for making request to erp5 from Kernel
# This list should be to used check status_code before making requests to erp5
self.allowed_HTTP_request_code_list = range(500, 511)
self.allowed_HTTP_request_code_list = list(range(500, 511))
# Append request code 200 in the allowed HTTP status code list
self.allowed_HTTP_request_code_list.append(200)
......
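The list() wrapping above is needed because on Python 3 range() returns a lazy sequence object with no append() method, so the later .append(200) would fail; on Python 2 range() already returned a list. A quick illustration:

# Python 3: range() is a lazy sequence, not a list, so it has no append().
codes = range(500, 511)
try:
    codes.append(200)
except AttributeError:
    pass  # 'range' object has no attribute 'append'

# Materialising it first, as the kernel now does, makes append() work:
codes = list(range(500, 511))
codes.append(200)
assert codes[0] == 500 and codes[-1] == 200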
......@@ -14,9 +14,8 @@ parts +=
# Always build GCC for Fortran (see openblas).
max_version = 0
[jupyter]
[jupyter:python2]
extra-eggs =
python_executable = ${buildout:bin-directory}/${:interpreter}
[download-file-base]
recipe = slapos.recipe.build:download
......@@ -46,7 +45,7 @@ context =
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key openssl_output openssl-output:openssl
key python_executable jupyter:python_executable
key python_executable jupyter:python-executable
key jupyter_config_location jupyter-notebook-config:location
key jupyter_config_filename jupyter-notebook-config:filename
key jupyter_set_password_location jupyter-set-password:location
......@@ -59,7 +58,7 @@ context =
key custom_js_filename custom-js:filename
key monitor_template_rendered buildout:directory
[versions]
[versions:python2]
Pygments = 2.2.0
ipykernel = 4.5.2
ipython = 5.3.0
......
......@@ -15,11 +15,11 @@
[instance-jupyter-notebook]
filename = instance.cfg.in
md5sum = fd7ed44da8d8723983b8666df2971a36
md5sum = c335782940a8f3b1ff7d4280aeec336e
[jupyter-notebook-config]
filename = jupyter_notebook_config.py.jinja
md5sum = 9d579353b579b6e488ae6330c7f4ad68
md5sum = 10b9a9892d50c5d085ff0be5936ab88b
[jupyter-set-password]
filename = jupyter_set_password.cgi.jinja
......@@ -27,7 +27,7 @@ md5sum = ac10fbcf790bd8e58750cfdd069812d2
[erp5-kernel]
filename = ERP5kernel.py
md5sum = 7d5309fe79afbcb455c0d8181b42e56c
md5sum = da04b99b70b2e327c9e9b4cdd056098e
[kernel-json]
filename = kernel.json.jinja
......
......@@ -55,7 +55,7 @@ key_file = ${directory:etc}/jupyter_cert.key
[instance]
recipe = slapos.cookbook:wrapper
command-line =
{{ bin_directory }}/jupyter-lab
{{ bin_directory }}/jupyter-notebook
--no-browser
--ip=${instance-parameter:host}
--port=${instance-parameter:port}
......@@ -69,15 +69,19 @@ environment =
JUPYTER_PATH=${directory:jupyter_dir}
JUPYTER_CONFIG_DIR=${directory:jupyter_config_dir}
JUPYTER_RUNTIME_DIR=${directory:jupyter_runtime_dir}
JUPYTERLAB_DIR=${directory:jupyterlab-dir}
LANG=C.UTF-8
[jupyter-notebook-config]
recipe = slapos.recipe.template:jinja2
url = {{ jupyter_config_location }}/{{ jupyter_config_filename }}
output = ${directory:jupyter_config_dir}/jupyter_notebook_config.py
output = ${directory:jupyter_config_dir}/jupyter_server_config.py
context =
raw config_cfg ${buildout:directory}/knowledge0.cfg
[jupyter-notebook-config:python2]
output = ${directory:jupyter_config_dir}/jupyter_notebook_config.py
[directory]
recipe = slapos.cookbook:mkdirectory
home = ${buildout:directory}
......@@ -95,12 +99,13 @@ jupyter_runtime_dir = ${:jupyter_dir}/runtime
jupyter_custom_dir = ${:jupyter_config_dir}/custom
jupyter_nbextensions_dir = ${:jupyter_dir}/nbextensions
erp5_kernel_dir = ${:jupyter_kernel_dir}/ERP5
jupyterlab-dir = ${:jupyter_dir}/lab
[jupyter_notebook]
# This part is named like this because knowledge0.write uses the part name as
# the section name in the config file.
recipe = slapos.cookbook:zero-knowledge.write
password =
password =
filename = knowledge0.cfg
[read-knowledge0]
......@@ -136,7 +141,7 @@ output = ${directory:erp5_kernel_dir}/kernel.json
context =
raw python_executable {{ python_executable }}
raw kernel_dir ${erp5-kernel:target-directory}/{{ erp5_kernel_filename }}
key erp5_url slapconfiguration:configuration.erp5-url
key erp5_url slapconfiguration:configuration.erp5-url
raw display_name ERP5
raw language_name python
......
'''
This script initializes Jupyter's configuration, such as passwords and other
settings. It is run by IPython, which is why it can use functions like get_config().
'''
import os
import ssl
import sys
import six
from six.moves.configparser import ConfigParser
if six.PY3:
from jupyter_server.auth import passwd
import secrets
random_password = secrets.token_hex
else:
from notebook.auth import passwd
import random
def random_password(length=10):
result = ""
for i in range(0, length):
result = result + chr(random.randint(0, 25) + ord('a'))
return result
knowledge_0 = '{{ config_cfg }}'
if not os.path.exists(knowledge_0):
print ("Your software does <b>not</b> embed 0-knowledge.\n"
"This interface is useless in this case</body></html>")
exit(0)
c = get_config()
parser = ConfigParser()
parser.read(knowledge_0)
if not parser.has_section("jupyter_notebook"):
parser.add_section("jupyter_notebook")
if not parser.has_option("jupyter_notebook", "password") or \
parser.get("jupyter_notebook", "password") == "":
parser.set("jupyter_notebook", "password", random_password())
if six.PY3: # This supports old jupyterlab on python2 and recent jupyterlab on python3
import pathlib
import jupyterlab
jupyterlab_dir = pathlib.Path(os.environ['JUPYTERLAB_DIR'])
# symlink all schemas in a folder, jupyter seems to assume that everything is installed
# in the same place.
schemas_dir = jupyterlab_dir / 'schemas'
if not schemas_dir.exists():
schemas_dir.mkdir()
for p in sys.path:
for schema in (pathlib.Path(p) / 'share' / 'jupyter' / 'lab' / 'schemas').glob('*/'):
dest = (schemas_dir / schema.name)
if dest.exists():
dest.unlink()
dest.symlink_to(schema)
c.LabServerApp.schemas_dir = str(schemas_dir)
# static really needs to be a sub-folder of $JUPYTERLAB_DIR
static = pathlib.Path(jupyterlab.__file__).parent.parent / 'share' / 'jupyter' / 'lab' / 'static'
static_dir = jupyterlab_dir / 'static'
if static_dir.exists():
static_dir.unlink()
static_dir.symlink_to(static)
c.LabServerApp.themes_dir = str(pathlib.Path(jupyterlab.__file__).parent / 'themes')
c.ServerApp.jpserver_extensions = {
'notebook': True,
'jupyter_lsp':True,
'jupyter_server_terminals': True,
'jupyterlab': True,
'notebook_shim': True,
}
c.ServerApp.password = passwd(parser.get("jupyter_notebook", "password"))
else:
c.NotebookApp.password = passwd(parser.get("jupyter_notebook", "password"))
c.NotebookApp.ssl_options = {
'ssl_version': ssl.PROTOCOL_TLSv1_2,
}
with open(knowledge_0, 'w') as file:
parser.write(file)
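The script above generates a password on first run, stores it in the [jupyter_notebook] section of knowledge0.cfg and passes its hash to the Jupyter server configuration. A sketch of how the stored password can be read back (the file path is illustrative; the real knowledge0.cfg lives in the instance directory):

# Sketch: read back the password stored by the script above.
# The path is illustrative only.
from six.moves.configparser import ConfigParser

parser = ConfigParser()
parser.read('knowledge0.cfg')
if parser.has_option('jupyter_notebook', 'password'):
    print(parser.get('jupyter_notebook', 'password'))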
'''
This script initializes Jupyter's configuration, such as passwords and other
settings. It is run by IPython, which is why it can use functions like get_config().
'''
import ConfigParser
import random
from notebook.auth import passwd
import os
import ssl
def random_password(length = 10):
result = ""
for i in range(0, length):
result = result + chr(random.randint(0, 25) + ord('a'))
return result
knowledge_0 = '{{ config_cfg }}'
if not os.path.exists(knowledge_0):
print "Your software does <b>not</b> embed 0-knowledge. \
This interface is useless in this case</body></html>"
exit(0)
c = get_config()
parser = ConfigParser.ConfigParser()
parser.read(knowledge_0)
if not parser.has_section("jupyter_notebook"):
parser.add_section("jupyter_notebook")
if not parser.has_option("jupyter_notebook", "password") or \
parser.get("jupyter_notebook", "password") == "":
parser.set("jupyter_notebook", "password", random_password())
c.NotebookApp.password = passwd(parser.get("jupyter_notebook", "password"))
c.NotebookApp.ssl_options = {
'ssl_version': ssl.PROTOCOL_TLSv1_2,
}
with open(knowledge_0, 'w') as file:
parser.write(file)
......@@ -3,7 +3,7 @@ extends =
../numpy/openblas.cfg
../matplotlib/buildout.cfg
../ipython/buildout.cfg
../python-cffi/buildout.cfg
../python-argon2-cffi/buildout.cfg
../python-pyzmq/buildout.cfg
../scipy/buildout.cfg
../scikit-learn/buildout.cfg
......@@ -15,10 +15,6 @@ parts =
jupyter
jupyter-notebook-scripts
[argon2-cffi]
recipe = zc.recipe.egg:custom
egg = ${:_buildout_section_name_}
setup-eggs = ${python-cffi:egg}
[jupyter-env]
<= numpy-env
......@@ -74,6 +70,7 @@ scripts =
jupyter-migrate
jupyter-troubleshoot
jupyter-run
python-executable = ${buildout:bin-directory}/${:interpreter}
[jupyter-notebook-initialized-scripts]
recipe = zc.recipe.egg:scripts
......@@ -82,9 +79,7 @@ environment = jupyter-env
scripts =
jupyter-nbconvert
jupyter-nbextension
jupyter-notebook
jupyter-serverextension
jupyter-lab
jupyter-labextension
jupyter-labhub
......
From 386b9a90dec9be74a271258833acb8c7ae993eed Mon Sep 17 00:00:00 2001
From: Alain Takoudjou <alain.takoudjou@nexedi.com>
Date: Tue, 23 Apr 2024 20:51:04 +0200
Subject: [PATCH] prefer use python 3 for tests
---
tests/CMakeLists.txt | 1 +
1 file changed, 1 insertion(+)
diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index 6f8a18ec0..04ccf99e7 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -1,3 +1,4 @@
+set(Python_ADDITIONAL_VERSIONS 3 2.7)
FIND_PACKAGE(PythonInterp)
IF(NOT PYTHONINTERP_FOUND)
--
2.42.0
[buildout]
extends =
../defaults.cfg
../cmake/buildout.cfg
../openssl/buildout.cfg
../pkgconfig/buildout.cfg
../pcre/buildout.cfg
../xz-utils/buildout.cfg
../zlib/buildout.cfg
parts = libgit2
[libgit2]
recipe = slapos.recipe.cmmi
url = https://github.com/libgit2/libgit2/archive/refs/tags/v${:version}.tar.gz
md5sum = 9f4ca15249e703ab88cbc929187750cd
version = 1.1.0
shared = true
configure-command =
cmake
configure-options =
-Bbuild
-DCMAKE_BUILD_TYPE=Release
-DCMAKE_INSTALL_PREFIX=@@LOCATION@@
-DCMAKE_C_FLAGS="-I${openssl:location}/include -I${zlib:location}/include -I${pcre:location}/include"
-DCMAKE_INSTALL_RPATH=${zlib:location}/lib:${openssl:location}/lib:${pcre:location}/lib
-G"Unix Makefiles"
patch-options = -p1
patches =
${:_profile_base_location_}/0001-prefer-use-python-3-for-tests.patch#6f2a6e83db45b33fc7da86279f06595b
make-options = -C build
environment =
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig:${xz-utils:location}/lib/pkgconfig
PATH=${python3:location}/bin:${pkgconfig:location}/bin:${cmake:location}/bin:%(PATH)s
LDFLAGS=-L${openssl-1.0:location}/lib -Wl,-rpath=${openssl:location}/lib -L${pcre:location}/lib -Wl,-rpath=${pcre:location}/lib
[buildout]
extends =
../gnutls/buildout.cfg
../gnupg/buildout.cfg
parts =
libgpgme
[libgpgme]
recipe = slapos.recipe.cmmi
url = https://www.gnupg.org/ftp/gcrypt/gpgme/gpgme-1.23.2.tar.bz2
md5sum = 01a8c05b409847e87daf0543e91f8c37
configure-options =
--disable-gpg-test
--with-libgpg-error-prefix=${libgpg-error:location}
--with-libassuan-prefix=${libassuan:location}
make-options =
PYTHONS=
environment =
LDFLAGS=-Wl,-rpath=${libgpg-error:location}/lib -Wl,-rpath=${libassuan:location}/lib
......@@ -3,6 +3,7 @@
extends =
../bzip2/buildout.cfg
../cmake/buildout.cfg
../xz-utils/buildout.cfg
../zlib/buildout.cfg
parts = libzip
......@@ -22,7 +23,7 @@ configure-options =
-DCMAKE_LIBRARY_PATH=${zlib:location}/lib:${bzip2:location}/lib
make-options = -C build
environment =
PATH=${cmake:location}/bin:%(PATH)s
PATH=${cmake:location}/bin:${xz-utils:location}/bin:%(PATH)s
CMAKE_INCLUDE_PATH=${zlib:location}/include:${bzip2:location}/include
CMAKE_LIBRARY_PATH=${zlib:location}/lib:${bzip2:location}/lib
LDFLAGS=-Wl,-rpath=${:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${bzip2:location}/lib -Wl,-rpath=${bzip2:location}/lib
......@@ -40,6 +40,7 @@ need-matplotlibrc = ${matplotlibrc:location}
[versions]
matplotlib = 2.1.2
cycler = 0.11.0
matplotlib-inline = 0.1.6:whl
[versions:sys.version_info < (3,8)]
cycler = 0.10.0
......@@ -6,6 +6,7 @@ extends =
../file/buildout.cfg
../zlib/buildout.cfg
../ncurses/buildout.cfg
../xz-utils/buildout.cfg
[nano]
recipe = slapos.recipe.cmmi
......@@ -15,6 +16,7 @@ md5sum = 12784a5c245518d7580125ebbd6b7601
# The dummy PKG_CONFIG is for the case where both pkg-config and ncursesw
# are installed on the system.
environment=
PATH=${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG=false
NCURSESW_CONFIG=${ncurses:location}/bin/ncursesw6-config
CPPFLAGS=-I${file:location}/include -I${zlib:location}/include
......
......@@ -21,7 +21,7 @@ environment = numpy-env
eggs = ${cython:egg}
[versions]
numpy = 1.22.0
numpy = 1.24.4
[numpy:sys.version_info < (3,8)]
depends =
......
# SlapOS software release to test nxdtest on Nexedi testing infrastructure.
[buildout]
extends =
../defaults.cfg
test.cfg
[python]
part = python2.7
[openssl]
<= openssl-1.1
../../stack/slapos-py2.cfg
......@@ -5,9 +5,10 @@ parts =
open62541
extends =
../cmake/buildout.cfg
../git/buildout.cfg
../mbedtls/buildout.cfg
../patch/buildout.cfg
../python3/buildout.cfg
../gcc/buildout.cfg
../defaults.cfg
[gcc]
......@@ -27,6 +28,7 @@ pre-configure =
# remove old FindPython3 which does not know about python > 3.8 to
# use FindPython3 bundled with slapos' cmake
rm tools/cmake/FindPython3.cmake tools/cmake/FindPython/Support.cmake
${git:location}/bin/git clone -b v1.1.6 https://github.com/LiamBindle/MQTT-C.git deps/mqtt-c
configure-command =
${cmake:location}/bin/cmake
configure-options =
......@@ -34,9 +36,20 @@ configure-options =
-DCMAKE_BUILD_TYPE=Release
-DCMAKE_INSTALL_PREFIX=@@LOCATION@@
-DUA_ENABLE_PUBSUB=ON
-DUA_ENABLE_PUBSUB_MONITORING=ON
-DUA_ENABLE_PUBSUB_ETH_UADP=ON
-DUA_ENABLE_SUBSCRIPTIONS=ON
-DUA_NAMESPACE_ZERO=REDUCED
-DUA_ENABLE_ENCRYPTION=MBEDTLS
-DUA_ENABLE_ENCRYPTION_MBEDTLS=ON
-DMBEDTLS_INCLUDE_DIRS=${mbedtls:location}/include
-DMBEDTLS_LIBRARY=${mbedtls:location}/lib/libmbedtls.so
-DMBEDX509_LIBRARY=${mbedtls:location}/lib/libmbedx509.so
-DMBEDCRYPTO_LIBRARY=${mbedtls:location}/lib/libmbedcrypto.so
-DUA_ENABLE_PUBSUB_INFORMATIONMODEL=ON
-DUA_ENABLE_PUBSUB_MQTT=ON
post-install =
cp src/pubsub/*.h deps/open62541_queue.h @@LOCATION@@/include
environment =
PATH=${gcc:prefix}/bin:${python3:location}/bin:${patch:location}/bin:%(PATH)s
LDFLAGS=-L${mbedtls:location}/lib -Wl,-rpath=${mbedtls:location}/lib
[buildout]
extends =
../git/buildout.cfg
../mbedtls/buildout.cfg
../open62541/buildout.cfg
../defaults.cfg
parts = osie-coupler
[gcc]
# we want this SR to use a fixed gcc (so that we have the same gcc as the open62541 component)
max_version = 0
[osie-repository]
recipe = slapos.recipe.build:gitclone
git-executable = ${git:location}/bin/git
repository = https://lab.nexedi.com/nexedi/osie.git
revision = dd9aea8
[osie-coupler]
recipe = slapos.recipe.cmmi
path = ${osie-repository:location}/coupler
bin_dir = ${:path}/bin/
environment =
PATH=${gcc:prefix}/bin:/usr/bin
C_INCLUDE_PATH=${open62541:location}/include:${open62541:location}/deps:${open62541:location}/src/pubsub
LDFLAGS=-L${open62541:location}/lib -Wl,-rpath=${open62541:location}/lib -L ${mbedtls:location}/lib -Wl,-rpath=${mbedtls:location}/lib
configure-command = true
......@@ -34,3 +34,8 @@ rpath =
${libjpeg:location}/lib
${libtiff:location}/lib
${zlib:location}/lib
Pillow-patches = ${:_profile_base_location_}/../../component/egg-patch/Pillow/0001-set-metadata-in-setup.py-for-compatibility-with-old-.patch#0a06cc5a94d3db24688938731e4b15e2
Pillow-patch-options = -p1
[pillow-python:python2]
Pillow-patches =
......@@ -33,7 +33,7 @@ configure-options =
# build core PostgreSQL + pg_trgm contrib extension for GitLab
# unaccent contrib extension is for peertube
# citext contrib extension is for metabase
make-targets = install && make -C contrib/pg_trgm/ install && make -C contrib/unaccent/ install && make -C contrib/citext/ install
make-targets = install && make -C contrib/pg_trgm/ install && make -C contrib/unaccent/ install && make -C contrib/citext/ install && make -C contrib/btree_gist/ install
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
CPPFLAGS=-I${zlib:location}/include -I${readline:location}/include -I${openssl:location}/include -I${ncurses:location}/include
......
# SlapOS software release to test pygolang/py2 on Nexedi testing infrastructure.
[buildout]
extends = test.cfg
[python]
part = python2.7
[openssl]
<= openssl-1.1
extends =
test.cfg
../../stack/slapos-py2.cfg
......@@ -55,4 +55,3 @@ ipython-genutils = 0.2.0
Pygments = 2.5.2
prompt-toolkit = 1.0.18
pickleshare = 0.7.5
ptyprocess = 0.6.0
......@@ -5,19 +5,27 @@ parts =
extends =
../patch/buildout.cfg
[astroid]
[pylint]
recipe = zc.recipe.egg
egg = pylint
[pylint:python2]
recipe = zc.recipe.egg:custom
egg = astroid
patches =
${:_profile_base_location_}/astroid-six_moves_import_error.patch#377beb0c50f52b9608bb6be7bf93096e
${:_profile_base_location_}/pylint-super_on_old_class.patch#cb0c3f8c091bf4980be395c917edc435
${:_profile_base_location_}/pylint-redefining-builtins-modules.patch#043defc6e9002ac48b40e078797d4d17
patch-options = -p1
patch-binary = ${patch:location}/bin/patch
[pylint]
[astroid]
recipe = zc.recipe.egg
egg = astroid
[astroid:python2]
recipe = zc.recipe.egg:custom
egg = pylint
patches =
${:_profile_base_location_}/pylint-super_on_old_class.patch#cb0c3f8c091bf4980be395c917edc435
${:_profile_base_location_}/pylint-redefining-builtins-modules.patch#043defc6e9002ac48b40e078797d4d17
${:_profile_base_location_}/astroid-six_moves_import_error.patch#377beb0c50f52b9608bb6be7bf93096e
${:_profile_base_location_}/fix-import-six.moves.urllib.request-on-astroid-1.3.8.patch#266139a893d0eba377ac510fb0fa75f1
patch-options = -p1
patch-binary = ${patch:location}/bin/patch
From 67abf302360eab857fb02d1e83a97aff86f31aa5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Tue, 9 Apr 2024 11:04:38 +0900
Subject: [PATCH] fix "import six.moves.urllib.request" on astroid 1.3.8
---
astroid/brain/pysix_moves.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/astroid/brain/pysix_moves.py b/astroid/brain/pysix_moves.py
index 4a82b499..9bf31146 100644
--- a/astroid/brain/pysix_moves.py
+++ b/astroid/brain/pysix_moves.py
@@ -164,6 +164,7 @@ if sys.version_info[0] == 2:
urllib_parse = UrllibParse()
urllib_error = UrllibError()
+ urllib_request = UrllibRequest()
urllib = DummyModule()
urllib.request = UrllibRequest()
urllib.parse = UrllibParse()
--
2.42.0
[buildout]
extends =
../../component/cython/buildout.cfg
[PyStemmer]
recipe = zc.recipe.egg:custom
egg = PyStemmer
setup-eggs =
${cython:egg}
setuptools-dso
[buildout]
extends =
../python-cffi/buildout.cfg
parts = argon2-cffi
[argon2-cffi]
recipe = zc.recipe.egg:custom
egg = ${:_buildout_section_name_}
setup-eggs = ${python-cffi:egg}
[buildout]
parts =
python-ldap
python-ldap-python
extends =
../cyrus-sasl/buildout.cfg
../openldap/buildout.cfg
......@@ -10,19 +10,25 @@ extends =
[python-ldap-python]
recipe = zc.recipe.egg:custom
egg = python-ldap
patches =
${:_profile_base_location_}/python-ldap-no_default_dirs.patch#959115f13f1de5c63654c69b8dfacd69
patch-options = -p1
patch-binary = ${patch:location}/bin/patch
rpath =
${openldap:location}/lib
${cyrus-sasl:location}/lib
${openssl:location}/lib
include-dirs =
${openldap:location}/include
${cyrus-sasl:location}/include/sasl
${cyrus-sasl:location}/include
${openssl:location}/include
library-dirs =
${openldap:location}/lib
${cyrus-sasl:location}/lib
${openssl:location}/lib
[python-ldap-python:python2]
patches =
${:_profile_base_location_}/python-ldap-no_default_dirs.patch#959115f13f1de5c63654c69b8dfacd69
patch-options = -p1
patch-binary = ${patch:location}/bin/patch
include-dirs =
${openldap:location}/include
${cyrus-sasl:location}/include/sasl
${openssl:location}/include
......@@ -16,3 +16,5 @@ egg = pyzmq
environment = python-pyzmq-env
rpath =
${libzmq:location}/lib
setup-eggs =
packaging
......@@ -21,9 +21,19 @@ setup-eggs =
pkgconfig
pathlib2
setuptools-scm
toml
tomli
environment = python-xmlsec-env
[python-xmlsec:python2]
setup-eggs =
${lxml-python:egg}
pkgconfig
pathlib2
setuptools-scm
toml
[python-xmlsec-env]
PKG_CONFIG=${pkgconfig:location}/bin/pkg-config
PKG_CONFIG_PATH=${libxml2:location}/lib/pkgconfig:${libxslt:location}/lib/pkgconfig:${xmlsec:location}/lib/pkgconfig
......
......@@ -19,6 +19,7 @@ extends =
../zlib/buildout.cfg
../glib/buildout.cfg
../ceph/buildout.cfg
../xz-utils/buildout.cfg
[gcc]
min_version = 7.4
......@@ -63,7 +64,7 @@ PKG_CONFIG_PATH-rbd = :${librbd:location}/lib/pkgconfig
environment =
CFLAGS=${:CFLAGS-rbd} -I${gettext:location}/include -I${libaio:location}/include -I${liburing:location}/include -I${libcap-ng:location}/include
LDFLAGS=${:LDFLAGS-rbd} -L${gettext:location}/lib -L${libaio:location}/lib -L${libcap-ng:location}/lib -Wl,-rpath=${libcap-ng:location}/lib -Wl,-rpath=${glib:location}/lib -Wl,-rpath=${gnutls:location}/lib -Wl,-rpath=${nettle:location}/lib -Wl,-rpath=${pixman:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${libpng:location}/lib -Wl,-rpath=${libaio:location}/lib -Wl,-rpath=${liburing:location}/lib -Wl,-rpath=${libcap-ng:location}/lib
PATH=${pkgconfig:location}/bin:${bzip2:location}/bin:%(PATH)s
PATH=${pkgconfig:location}/bin:${bzip2:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig:${glib:pkg_config_depends}:${gnutls:location}/lib/pkgconfig:${gnutls:pkg-config-path}:${libpng:location}/lib/pkgconfig:${liburing:location}/lib/pkgconfig:${ncurses:location}/lib/pkgconfig:${pixman:location}/lib/pkgconfig${:PKG_CONFIG_PATH-rbd}
[qemu:python2 or [int(q) for q in platform.libc_ver()[1].split(".")] < [2,25]]
......
......@@ -8,20 +8,18 @@ extends =
../tcl/buildout.cfg
[redis]
<= redis28
[redis28]
recipe = slapos.recipe.cmmi
url = http://download.redis.io/releases/redis-2.8.24.tar.gz
md5sum = 7b6eb6e4ccc050c351df8ae83c55a035
url = https://download.redis.io/releases/redis-6.2.9.tar.gz
md5sum = 5251b9cfe7d799e6b63b7bd07410f97c
configure-command = true
prefix =
prefix =
make-options =
PREFIX=${buildout:parts-directory}/${:_buildout_section_name_}
V=1
make-targets =
make-targets =
install
test
# There are many tests failing, so disable make test for now
# test
environment =
PATH=${patch:location}/bin:${tcl:location}/bin:%(PATH)s
patch-options = -p1
......
......@@ -25,10 +25,10 @@ environment =
PKG_CONFIG_PATH=${libyaml:location}/lib/
[ruby2.6]
[ruby2.7]
<= ruby-common
url = https://ftp.ruby-lang.org/pub/ruby/2.6/ruby-2.6.5.tar.xz
md5sum = b8a4e2bdbb76485c3d6690e57be67750
url = https://ftp.ruby-lang.org/pub/ruby/2.7/ruby-2.7.8.tar.xz
md5sum = 27af2c340d0524ab272d564ddfd733d9
[ruby]
<= ruby2.6
<= ruby2.7
......@@ -23,9 +23,21 @@ setup-eggs =
${PyWavelets:egg}
${pillow-python:egg}
networkx
pythran
packaging
rpath =
${openblas:location}/lib
[scikit-image:python2]
setup-eggs =
${numpy:egg}
${scipy:egg}
${cython:egg}
${PyWavelets:egg}
${pillow-python:egg}
networkx
[scikit-image-repository]
recipe = slapos.recipe.build:gitclone
git-executable = ${git:location}/bin/git
......
......@@ -15,6 +15,7 @@ recipe = zc.recipe.egg:custom
egg = scikit-learn
environment = scikit-learn-env
setup-eggs =
${cython:egg}
${numpy:egg}
${scipy:egg}
rpath =
......
......@@ -36,15 +36,19 @@ init =
zc.buildout.easy_install.default_versions(versions)
# tempstorage5 is plain upstream egg
[tempstorage5]
# tempstorage6 and tempstorage5 are plain upstream eggs
[tempstorage6]
recipe = zc.recipe.egg:eggs
egg = tempstorage
eggs = ${:egg}
egg-versions =
tempstorage = 5.2
tempstorage = 6.0.0
[tempstorage5]
<= tempstorage6
egg-versions =
tempstorage = 5.2
# tempstorage4-wc2 is tempstorage 3 + backports for loadBefore fixes
[tempstorage4-wc2]
......
......@@ -26,9 +26,6 @@ md5sum = 51fe2bcbff1bbce77a25d180fd247f7d
pkg_config_depends = ${leptonica:location}/lib/pkgconfig:${fontconfig:location}/lib/pkgconfig:${fontconfig:pkg_config_depends}:${lcms2:location}/lib/pkgconfig:${xz-utils:location}/lib/pkgconfig
pre-configure =
autoreconf -ivf -I${pkgconfig:location}/share/aclocal -I${libtool:location}/share/aclocal -Wno-portability
# XXX workaround path on slaprunner with a double slash
# https://github.com/tesseract-ocr/tesseract/issues/3527
configure-options = --prefix=$(python -c 'print("""@@LOCATION@@""".replace("//", "/"))')
environment =
PATH=${pkgconfig:location}/bin:${autoconf:location}/bin:${automake:location}/bin:${libtool:location}/bin:${m4:location}/bin:${patch:location}/bin:%(PATH)s
......
......@@ -42,4 +42,7 @@ inline =
[versions]
freezegun = 1.0.0
freezegun = 1.5.1:whl
[versions:python2]
freezegun = 0.3.15
[python]
part = python2.7
[openssl]
<= openssl-1.1
[buildout]
extends =
../../stack/slapos-py2.cfg
......@@ -15,6 +15,9 @@ extends =
../../stack/slapos.cfg
../../stack/monitor/buildout.cfg
# Python2 versions for buildout (keep last)
../../stack/slapos-py2.cfg
parts =
dcron
logrotate
......@@ -27,12 +30,6 @@ parts =
template-crontab-line
slapos-cookbook
[python]
part = python2.7
[openssl]
<= openssl-1.1
[rssgen-eggs]
recipe = zc.recipe.egg
interpreter = python-${:_buildout_section_name_}
......
......@@ -28,6 +28,9 @@ extends =
../../stack/nxdtest.cfg
./buildout.hash.cfg
# Python2 versions for buildout (keep last)
../../stack/slapos-py2.cfg
parts +=
slapos-cookbook
beremiz-eggs
......@@ -41,13 +44,6 @@ parts +=
# Always build GCC for Fortran (see openblas).
max_version = 0
[python]
# Beremiz works with python2.7 for now, the code is not yet upgraded for python3
part = python2.7
[openssl]
<= openssl-1.1
[open62541]
# Beremiz needs it to be in the folder parts/open62541
# as Beremiz searches for open62541 at BEREMIZ_PATH/../open62541
......
......@@ -11,6 +11,9 @@ extends =
../../stack/monitor/buildout.cfg
../../stack/slapos.cfg
# Python2 versions for buildout (keep last)
../../stack/slapos-py2.cfg
parts =
beremiz-source
slapos-cookbook
......@@ -19,12 +22,6 @@ parts =
matiec
open62541
[python]
part = python2.7
[openssl]
<= openssl-1.1
[gcc]
# we want this SR to use a fixed gcc (so that we have the same gcc as the open62541 component)
max_version = 0
......
......@@ -3,17 +3,15 @@ extends =
../../stack/slapos.cfg
../../component/vm-img/debian.cfg
../../component/rina-tools/buildout.cfg
# Python2 versions for buildout (keep last)
../../stack/slapos-py2.cfg
parts =
slapos-cookbook
template
download-cache = ${:directory}/download-cache
[python]
part = python2.7
[openssl]
<= openssl-1.1
[template]
recipe = slapos.recipe.template:jinja2
# XXX: "template.cfg" is hardcoded in instanciation recipe
......
......@@ -114,6 +114,7 @@ zope.testrunner = 5.2
recipe = zc.recipe.egg
eggs =
${:recipe}
zc.buildout[test]
zope.testing
zope.testrunner
scripts =
......
......@@ -2,8 +2,5 @@
extends =
software.cfg
[python]
part = python2.7
[openssl]
<= openssl-1.1
# Python2 versions for buildout (keep last)
../../stack/slapos-py2.cfg
......@@ -6,6 +6,10 @@ extends =
../../component/manpy/buildout.cfg
../../stack/monitor/buildout.cfg
./buildout.hash.cfg
# Python2 versions for buildout (keep last)
../../stack/slapos-py2.cfg
parts =
slapos-cookbook
manpy
......@@ -21,12 +25,6 @@ max_version = 0
# also use old gcc version for old scipy version used in python2
part = gcc-8.5
[python]
part = python2.7
[openssl]
<= openssl-1.1
[dream-repository.git]
revision = f3bcf115741886835df8c0ca0fdbf510d77d8db8
......
......@@ -213,6 +213,7 @@
"type": "object"
}
},
"additionalProperties": false,
"type": "object"
},
"zope-partition-dict": {
......@@ -285,6 +286,7 @@
"type": "object"
}
},
"additionalProperties": false,
"type": "object"
},
"kumofs": {
......
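Adding "additionalProperties": false to these objects makes schema validation reject unknown keys (typically typos in request parameters) instead of silently ignoring them. A toy illustration with the jsonschema egg (the schema below is illustrative, not the ERP5 one):

# Toy example of what "additionalProperties": false buys: unknown keys are rejected.
import jsonschema

schema = {
    "type": "object",
    "properties": {"thread-amount": {"type": "integer"}},
    "additionalProperties": False,
}

jsonschema.validate({"thread-amount": 4}, schema)        # passes
try:
    jsonschema.validate({"thread-ammount": 4}, schema)   # typo in the key
except jsonschema.ValidationError:
    print("unknown property rejected")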
[buildout]
extends =
../../stack/erp5/buildout.cfg
[python]
part = python3
[openssl]
<= openssl-3.0
[erp5]
repository = https://lab.nexedi.com/nexedi/erp5.git
branch = zope4py3
develop = true
[neoppod-repository]
# Pinned to a revision before 6ffafcbd (Fix egg dependencies, 2024-05-17) which
# is incompatible with our versions:
# The requirement ('msgpack<1,>=0.5.6') is not allowed by your [versions] constraint (1.0.5)
revision = c4443632e3541c064f5b43096099f4a8b74cbf58
[template-zope]
link-binary +=
${python3:location}/bin/2to3
software.cfg.json
\ No newline at end of file
[buildout]
extends =
../../stack/erp5/buildout.cfg
../../stack/slapos-py2.cfg
[template-zope]
link-binary +=
${python2.7:location}/bin/2to3
# Test Suite: ERP5.UnitTest-Master ran at 2024/05/29 06:00:29.349755 UTC
......
......@@ -46,10 +46,14 @@ from cryptography.x509.oid import NameOID
from slapos.testing.testcase import ManagedResource, makeModuleSetUpAndTestCaseClass
from slapos.testing.utils import findFreeTCPPort
ERP5PY3 = os.environ['SLAPOS_SR_TEST_NAME'] == 'erp5-py3'
_setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', 'software.cfg')))
os.path.join(os.path.dirname(__file__), '..', '..', 'software%s.cfg' % (
'-py3' if ERP5PY3 else ''))),
software_id=os.environ['SLAPOS_SR_TEST_NAME'],
)
setup_module_executed = False
......@@ -191,7 +195,10 @@ def neo(instance_parameter_dict):
class ERP5InstanceTestCase(SlapOSInstanceTestCase, metaclass=ERP5InstanceTestMeta):
"""ERP5 base test case
"""
__test_matrix__ = matrix((zeo, neo)) # switch between NEO and ZEO mode
if ERP5PY3:
__test_matrix__ = matrix((zeo, )) # TODO: NEO is not yet enabled for py3
else:
__test_matrix__ = matrix((zeo, neo)) # switch between NEO and ZEO mode
@classmethod
def isNEO(cls):
......
......@@ -51,7 +51,7 @@ import urllib3
from slapos.testing.utils import CrontabMixin
import zc.buildout.configparser
from . import CaucaseService, ERP5InstanceTestCase, default, matrix, neo, setUpModule
from . import CaucaseService, ERP5InstanceTestCase, default, matrix, neo, setUpModule, ERP5PY3
setUpModule # pyflakes
......@@ -1302,60 +1302,80 @@ class TestNEO(ZopeSkinsMixin, CrontabMixin, ERP5InstanceTestCase):
__partition_reference__ = 'n'
__test_matrix__ = matrix((neo,))
def _getCrontabCommand(self, crontab_name: str) -> str:
"""Read a crontab and return the command that is executed.
overloaded to use crontab from neo partition
"""
with open(
os.path.join(
if ERP5PY3:
# NEO is not ready for python3 at this time, this test is here to become
# an unexpected success once it starts working, so that we remember to
# remove this and enable neo in ERP5InstanceTestCase.__test_matrix__
setup_failed_exception = None
@classmethod
def setUpClass(cls):
try:
super().setUpClass()
except BaseException as e:
cls.setup_failed_exception = e
cls.setUp = lambda self: None
cls.tearDownClass = classmethod(lambda cls: None)
@unittest.expectedFailure
def test_neo_py3(self):
self.assertIsNone(self.setup_failed_exception)
else:
def _getCrontabCommand(self, crontab_name: str) -> str:
"""Read a crontab and return the command that is executed.
overloaded to use crontab from neo partition
"""
with open(
os.path.join(
self.getComputerPartitionPath('neo-0'),
'etc',
'cron.d',
crontab_name,
)) as f:
crontab_spec, = f.readlines()
self.assertNotEqual(crontab_spec[0], '@', crontab_spec)
return crontab_spec.split(None, 5)[-1]
def test_log_rotation(self):
# first run to create state files
self._executeCrontabAtDate('logrotate', '2000-01-01')
def check_sqlite_log(path):
with self.subTest(path), contextlib.closing(sqlite3.connect(path)) as con:
con.execute('select * from log')
logfiles = ('neoadmin.log', 'neomaster.log', 'neostorage-0.log')
for f in logfiles:
check_sqlite_log(
os.path.join(
self.getComputerPartitionPath('neo-0'),
'etc',
'cron.d',
crontab_name,
)) as f:
crontab_spec, = f.readlines()
self.assertNotEqual(crontab_spec[0], '@', crontab_spec)
return crontab_spec.split(None, 5)[-1]
def test_log_rotation(self):
# first run to create state files
self._executeCrontabAtDate('logrotate', '2000-01-01')
def check_sqlite_log(path):
with self.subTest(path), contextlib.closing(sqlite3.connect(path)) as con:
con.execute('select * from log')
'var',
'log',
f))
logfiles = ('neoadmin.log', 'neomaster.log', 'neostorage-0.log')
for f in logfiles:
check_sqlite_log(
os.path.join(
self.getComputerPartitionPath('neo-0'),
'var',
'log',
f))
self._executeCrontabAtDate('logrotate', '2050-01-01')
self._executeCrontabAtDate('logrotate', '2050-01-01')
for f in logfiles:
check_sqlite_log(
os.path.join(
self.getComputerPartitionPath('neo-0'),
'srv',
'backup',
'logrotate',
f'{f}-20500101'))
for f in logfiles:
check_sqlite_log(
os.path.join(
self.getComputerPartitionPath('neo-0'),
'srv',
'backup',
'logrotate',
f'{f}-20500101'))
self._executeCrontabAtDate('logrotate', '2050-01-02')
requests.get(self._getAuthenticatedZopeUrl('/'), verify=False).raise_for_status()
self._executeCrontabAtDate('logrotate', '2050-01-02')
requests.get(self._getAuthenticatedZopeUrl('/'), verify=False).raise_for_status()
for f in logfiles:
check_sqlite_log(
os.path.join(
self.getComputerPartitionPath('neo-0'),
'var',
'log',
f))
for f in logfiles:
check_sqlite_log(
os.path.join(
self.getComputerPartitionPath('neo-0'),
'var',
'log',
f))
class TestPassword(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
__partition_reference__ = 'p'
......
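The Python 3 branch of TestNEO above turns a setup failure into an expected failure: once NEO starts working under Python 3 the test becomes an unexpected success, reminding us to re-enable NEO in the test matrix. A minimal standalone sketch of that canary pattern (names are illustrative):

# Minimal sketch of the "canary" pattern used above: swallow the setup error,
# then assert on it under @unittest.expectedFailure so the test turns into an
# unexpected success once setup starts working.
import unittest

class TestFeatureNotReadyYet(unittest.TestCase):
    setup_failed_exception = None

    @classmethod
    def setUpClass(cls):
        try:
            raise RuntimeError('feature not supported yet')  # stands in for the real setup
        except BaseException as e:
            cls.setup_failed_exception = e

    @unittest.expectedFailure
    def test_feature(self):
        self.assertIsNone(self.setup_failed_exception)

if __name__ == '__main__':
    unittest.main()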
......@@ -23,7 +23,7 @@ import unittest
from slapos.grid.utils import md5digest
from . import ERP5InstanceTestCase
from . import setUpModule as _setUpModule
from . import setUpModule as _setUpModule, ERP5PY3
from .test_erp5 import TestPublishedURLIsReachableMixin
......@@ -38,6 +38,8 @@ def setUpModule():
md5digest(cls.getSoftwareURL()),
'bin', 'wcfs')):
raise unittest.SkipTest("built with wendelin.core 1")
if ERP5PY3:
raise unittest.SkipTest("wendelin.core does not support python3 yet")
class TestWCFS(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
......
......@@ -14,7 +14,7 @@
# not need these here).
[instance.cfg]
filename = instance.cfg.in
md5sum = d1ca30a1b910b6b775f4f95bd91123a6
md5sum = 956ae53af22b551fbb087415e835868b
[watcher]
_update_hash_filename_ = watcher.in
......@@ -30,35 +30,35 @@ md5sum = 61d1d04b9347b3168a1ad7676e4681ef
[gitconfig.in]
_update_hash_filename_ = template/gitconfig.in
md5sum = eb1230fee50067924ba89f4dc6e82fa9
md5sum = c559a24ab6281268b608ed3bccb8e4ce
[gitlab-parameters.cfg]
_update_hash_filename_ = gitlab-parameters.cfg
md5sum = cfda6d959bb90bf0b9c947383f45ce0a
md5sum = 95b18789111ed239146d243e39ffefbe
[gitlab-shell-config.yml.in]
_update_hash_filename_ = template/gitlab-shell-config.yml.in
md5sum = 69e8ed76b06233d11932a5c0ef16f03b
md5sum = 70d394305f4e1482a5c1a673b0762c6a
[gitlab-unicorn-startup.in]
_update_hash_filename_ = gitlab-unicorn-startup.in
md5sum = 705825e6d8c6b37699f1321805d09de3
[gitlab-puma-startup.in]
_update_hash_filename_ = gitlab-puma-startup.in
md5sum = 838209b9246fa86d6a21fef910f17e25
[gitlab.yml.in]
_update_hash_filename_ = template/gitlab.yml.in
md5sum = 673c393e6728a8d82e6b9a44886785a8
md5sum = 6f8df1467a6168bde7d2f5f42abb1012
[gitaly-config.toml.in]
_update_hash_filename_ = template/gitaly-config.toml.in
md5sum = 58e3d5bbda32583d00cd8f44ec0525b0
md5sum = d769ea27820e932c596c35bbbf3f2902
[instance-gitlab.cfg.in]
_update_hash_filename_ = instance-gitlab.cfg.in
md5sum = b913c4a1f199a87ad71da6d102adffa4
md5sum = 6d8d20ded84622339d49c60b0e61380c
[instance-gitlab-export.cfg.in]
_update_hash_filename_ = instance-gitlab-export.cfg.in
md5sum = b8dea5ca4c6f9fc1ca54eb0265e1fdee
md5sum = c8231583d04bf0d3fe2d26230b94d78d
[macrolib.cfg.in]
_update_hash_filename_ = macrolib.cfg.in
......@@ -72,22 +72,18 @@ md5sum = 4980c1571a4dd7753aaa60d065270849
_update_hash_filename_ = template/nginx.conf.in
md5sum = 8c904510eb39dc212204f68f2b81b068
[rack_attack.rb.in]
_update_hash_filename_ = template/rack_attack.rb.in
md5sum = 7d0e6dc6b826f6df6b20d8574a29e2f8
[resque.yml.in]
_update_hash_filename_ = template/resque.yml.in
md5sum = 7c89a730889e3224548d9abe51a2d719
[smtp_settings.rb.in]
_update_hash_filename_ = template/smtp_settings.rb.in
md5sum = 4e1ced687a86e4cfff2dde91237e3942
md5sum = b1becd9ec4c2eeefe573af4bb53c9751
[template-gitlab-resiliency-restore.sh.in]
_update_hash_filename_ = template/template-gitlab-resiliency-restore.sh.in
md5sum = 87f16b4f4a2370acada46b2751ef3366
md5sum = 8ce31a27e814e750dfd38c92a278fb9e
[unicorn.rb.in]
_update_hash_filename_ = template/unicorn.rb.in
md5sum = b4758129a8d0c47b2c3adb10fefb8275
[puma.rb.in]
_update_hash_filename_ = template/puma.rb.in
md5sum = 707c0c713af41518d21724c1be8efe22
......@@ -15,8 +15,11 @@ configuration.external_url = https://lab.example.com
configuration.db_pool = 10
# rack-attack
configuration.rate_limit_requests_per_period = 10
configuration.rate_limit_period = 60
configuration.rack_attack_enable = true
configuration.rack_attack_max_retry = 10
configuration.rack_attack_find_time = 60
configuration.rack_attack_ban_time = 3600
configuration.rack_attack_ip_whitelist = 127.0.0.1
configuration.time_zone = UTC
......@@ -64,8 +67,10 @@ configuration.sidekiq_memory_killer_max_rss = 1000000
# unicorn
configuration.unicorn_worker_timeout = 60
configuration.unicorn_worker_processes = 2
configuration.puma_worker_timeout = 60
configuration.puma_worker_processes = 2
configuration.puma_min_threads = 1
configuration.puma_max_threads = 16
# unicorn advanced
configuration.unicorn_backlog_socket = 1024
......
......@@ -38,6 +38,8 @@ echo "I: PostgreSQL ready." 1>&2
# make sure pg_trgm extension is enabled for gitlab db
psql -c 'CREATE EXTENSION IF NOT EXISTS pg_trgm;' || die "pg_trgm setup failed"
psql -c 'CREATE EXTENSION IF NOT EXISTS btree_gist;' || die "btree_gist setup failed"
if echo "$pgtables" | grep -q '^Did not find any relations' ; then
$RAKE gitlab:setup RAILS_ENV=production force=yes || die "initial db setup failed"
fi
......@@ -70,8 +72,7 @@ $RAKE cache:clear || die "cache:clear failed"
force=yes $RAKE gitlab:shell:setup || die "gitlab:shell:setup failed"
# 3. finally exec to unicorn
exec {{ gitlab_unicorn }} \
-E production \
-c {{ unicorn_rb.output }} \
{{ gitlab_work.location }}/config.ru
# 3. finally exec to puma
exec {{ gitlab_puma }} \
-e production \
-C {{ puma_rb.output }}
......@@ -6,6 +6,7 @@ depends_gitfetch =
${go_github.com_pkg_errors:recipe}
${go_lab.nexedi.com_kirr_git-backup:recipe}
${go_lab.nexedi.com_kirr_go123:recipe}
${go_golang.org_x_crypto:recipe}
[go_github.com_libgit2_git2go]
......@@ -13,7 +14,7 @@ depends_gitfetch =
go.importpath = github.com/libgit2/git2go
repository = https://github.com/libgit2/git2go.git
# branch 'next' is required by git-backup
revision = next-g5d0a4c752a74258a5f42e40fccd2908ac4e336b8
revision = cbca5b82b8c22c08c183a1f44cad4b8b51ba6f25
[go_github.com_pkg_errors]
<= go-git-package
......@@ -25,10 +26,17 @@ revision = v0.8.0-12-g816c908556
<= go-git-package
go.importpath = lab.nexedi.com/kirr/git-backup
repository = https://lab.nexedi.com/kirr/git-backup.git
revision = da754af24da351291c99caa421a103db09e7a4c4
revision = 3327aedfbe903b210366105b8a4f9f85a313a0a3
[go_lab.nexedi.com_kirr_go123]
<= go-git-package
go.importpath = lab.nexedi.com/kirr/go123
repository = https://lab.nexedi.com/kirr/go123.git
revision = 95433de34f
revision = 8299741f
[go_golang.org_x_crypto]
<= go-git-package
go.importpath = golang.org/x/crypto
repository = https://go.googlesource.com/crypto.git
revision = 75b288015ac94e66e3d6715fb68a9b41bf046ec2
......@@ -54,7 +54,8 @@ input = inline: gitlab-shell-work*
srv/backup/logrotate/**
etc/service/postgres-start
srv/redis/**
srv/unicorn/unicorn.socket
srv/puma/puma.socket
.cache
output = ${directory:srv}/exporter.exclude
[gitlab-resiliency-restore-script]
......@@ -70,13 +71,14 @@ context =
raw git_location {{ git_location }}
raw bin_directory ${directory:bin}
raw etc_directory ${directory:etc}
raw run_directory ${directory:run}
raw var_directory ${directory:var}
raw postgress_script ${service-postgresql:services}/postgres-start
raw redis_script ${service-redis:wrapper}
raw unicorn_script ${service-unicorn:wrapper-path}
raw puma_script ${service-puma:wrapper-path}
raw sidekiq_script ${service-sidekiq:wrapper-path}
raw gitlab_backup_dir ${gitlab-backup-directory:backup-gitlab.git}
raw redis_pid_file ${service-redis:pid-file}
raw postgres_pid_file ${service-postgresql:pgdata-directory}/postmaster.pid
raw puma_pid_file ${puma:pid}/puma.pid
raw gitlab_work_location ${gitlab-work:location}
raw promise_lab_location ${directory:promise.slow}
......@@ -12,7 +12,7 @@ parts =
# gitlab-<prog>
# ? mailroom
{% set gitlab_progv = 'rails rake unicorn sidekiq unicorn-startup' .split() %}
{% set gitlab_progv = 'rails rake puma sidekiq puma-startup' .split() %}
{% for prog in gitlab_progv %}
gitlab-{{ prog }}
{% endfor %}
......@@ -23,7 +23,7 @@ parts =
gitlab-shell-work
service-gitlab-workhorse
service-unicorn
service-puma
service-sidekiq
service-nginx
......@@ -51,29 +51,29 @@ offline = true
[worker-processes]
recipe = slapos.recipe.build
unicorn-worker-processes = {{ instance_parameter_dict['configuration.unicorn_worker_processes'] }}
puma-worker-processes = {{ instance_parameter_dict['configuration.puma_worker_processes'] }}
init =
import multiprocessing
worker_count = int(options['unicorn-worker-processes'])
worker_count = int(options['puma-worker-processes'])
if worker_count == 0:
# automatically load all available CPUs
worker_count = multiprocessing.cpu_count() + 1
worker_count = 2 if worker_count < 2 else worker_count
options['unicorn-worker-processes'] = worker_count
options['puma-worker-processes'] = worker_count
options['nginx-worker-processes'] = worker_count -1
[instance-parameter]
{#- There are dangerous keys like recipe, etc #}
{#- XXX: Some other approach would be useful #}
{%- set DROP_KEY_LIST = ['recipe', '__buildout_signature__', 'computer', 'partition', 'url', 'key', 'cert',
'configuration.unicorn_worker_processes', 'configuration.nginx_worker_processes'] %}
'configuration.puma_worker_processes', 'configuration.nginx_worker_processes'] %}
{%- for key, value in instance_parameter_dict.items() -%}
{%- if key not in DROP_KEY_LIST %}
{{ key }} = {{ value }}
{%- endif -%}
{%- endfor %}
# settings for worker processes:
configuration.unicorn_worker_processes = ${worker-processes:unicorn-worker-processes}
configuration.puma_worker_processes = ${worker-processes:puma-worker-processes}
configuration.nginx_worker_processes = ${worker-processes:nginx-worker-processes}
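The [worker-processes] part above derives the puma and nginx worker counts from the requested value, treating 0 as "use all CPUs plus one, but at least two". The same logic as a standalone sketch:

# Sketch of the worker-count logic from the [worker-processes] part above.
import multiprocessing

def worker_counts(requested):
    count = int(requested)
    if count == 0:
        # 0 means "auto": all available CPUs plus one, but at least 2.
        count = multiprocessing.cpu_count() + 1
        count = 2 if count < 2 else count
    return {'puma': count, 'nginx': count - 1}

print(worker_counts(0))   # e.g. {'puma': 5, 'nginx': 4} on a 4-core machine
print(worker_counts(2))   # {'puma': 2, 'nginx': 1}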
......@@ -186,16 +186,18 @@ mode = 0700
[gitaly-dir]
recipe = slapos.cookbook:mkdirectory
gitaly = ${directory:var}/gitaly
sockets = ${:gitaly}/sockets
internal = ${directory:var}/int
sockets = ${:gitaly}/s
internal = ${:sockets}/int
log = ${directory:log}/gitaly
[gitaly]
socket = ${directory:var}/gitaly.socket
log = ${gitaly-dir:log}
socket = ${gitaly-dir:sockets}/gitaly.socket
logdir = ${gitaly-dir:log}
location = {{ gitaly_location }}
pid = ${directory:run}/gitaly.pid
internal_socket = ${gitaly-dir:internal}
basedir = ${gitaly-dir:gitaly}
num_workers = 2
[gitaly-socket-listening-promise]
<= monitor-promise-base
......@@ -249,7 +251,7 @@ context-extra =
section gitlab gitlab
section gitlab_shell gitlab-shell
section gitlab_shell_work gitlab-shell-work
section unicorn unicorn
section puma puma
section service_redis service-redis
raw redis_binprefix {{ redis_binprefix }}
......@@ -261,6 +263,7 @@ context-extra =
section gitlab gitlab
section gitlab_shell gitlab-shell
section gitlab_shell_work gitlab-shell-work
section gitlab_workhorse gitlab-workhorse
section gitaly gitaly
[nginx.conf]
......@@ -288,12 +291,10 @@ context-extra =
import urllib urllib
section gitlab gitlab
section gitlab_shell_work gitlab-shell-work
section gitlab_shell gitlab-shell
section gitlab_workhorse gitlab-workhorse
section gitaly gitaly
[rack_attack.rb]
<= gitlab-etc-template
url = {{ rack_attack_rb_in }}
[resque.yml]
<= gitlab-etc-template
url = {{ resque_yml_in }}
......@@ -306,11 +307,11 @@ url = {{ smtp_settings_rb_in }}
# contains smtp password
mode = 0600
[unicorn.rb]
[puma.rb]
<= gitlab-etc-template
url = {{ unicorn_rb_in }}
url = {{ puma_rb_in }}
context-extra =
section unicorn unicorn
section puma puma
section directory directory
section gitlab_work gitlab-work
......@@ -340,20 +341,20 @@ prog = {{ prog }}
{% endfor %}
[gitlab-unicorn-startup]
[gitlab-puma-startup]
recipe = slapos.recipe.template:jinja2
mode = 0755
url = {{ gitlab_unicorn_startup_in }}
url = {{ gitlab_puma_startup_in }}
output= ${directory:bin}/${:_buildout_section_name_}
context =
raw bash_bin {{ bash_bin }}
raw gitlab_rake ${gitlab-rake:wrapper-path}
raw gitlab_unicorn ${gitlab-unicorn:wrapper-path}
raw gitlab_puma ${gitlab-puma:wrapper-path}
raw psql_bin {{ postgresql_location }}/bin/psql
section pgsql service-postgresql
raw log_dir ${gitlab:log}
raw var_dir ${directory:var}
section unicorn_rb unicorn.rb
section puma_rb puma.rb
section gitlab_work gitlab-work
......@@ -421,14 +422,13 @@ tune-command =
ln -sf ${gitlab-workhorse:secret} .gitlab_workhorse_secret
# config/
cd config &&
ln -sf ${unicorn.rb:output} unicorn.rb &&
ln -sf ${puma.rb:output} puma.rb &&
ln -sf ${gitlab.yml:output} gitlab.yml &&
ln -sf ${database.yml:output} database.yml &&
ln -sf ${resque.yml:output} resque.yml &&
ln -sf ${secrets:secrets}/gitlab_secrets.yml secrets.yml &&
# config/initializers/
cd initializers &&
ln -sf ${rack_attack.rb:output} rack_attack.rb &&
ln -sf ${smtp_settings.rb:output} smtp_settings.rb &&
# public/
cd ../../public &&
......@@ -573,11 +573,12 @@ wrapper-path = ${directory:service}/gitlab-workhorse
command-line = {{ gitlab_workhorse }}
-listenNetwork unix
-listenAddr ${gitlab-workhorse:socket}
-authSocket ${unicorn:socket}
-authSocket ${puma:socket}
-documentRoot ${gitlab-work:location}/public
-secretPath ${gitlab-workhorse:secret}
-logFile ${gitlab-workhorse:log}
-repoPath ${gitlab-repo-dir:repositories}
# repoPath is for patched gitlab-workhorse
# -repoPath ${gitlab-repo-dir:repositories}
# NOTE for profiling
# -pprofListenAddr ...
......@@ -606,41 +607,47 @@ config-command = {{ curl_bin }} --unix-socket ${gitlab-workhorse:socket} ht
######################
# unicorn worker #
# puma worker #
######################
[unicorn-dir]
[puma-dir]
recipe = slapos.cookbook:mkdirectory
srv = ${directory:srv}/unicorn
log = ${directory:log}/unicorn
srv = ${directory:srv}/puma
log = ${directory:log}/puma
pid = ${directory:srv}/pids
[unicorn]
srv = ${unicorn-dir:srv}
log = ${unicorn-dir:log}
socket = ${directory:srv}/unicorn.socket
[puma]
srv = ${puma-dir:srv}
log = ${puma-dir:log}
socket = ${puma-dir:srv}/puma.socket
pid = ${puma-dir:pid}
[service-unicorn]
[service-puma]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:service}/unicorn
# NOTE we perform db setup / migrations as part of unicorn startup.
wrapper-path = ${directory:service}/puma
# NOTE we perform db setup / migrations as part of puma startup.
# Those operations require PG and Redis to be up and running already, that's
# why we do it here. See gitlab-unicorn-startup for details.
command-line = ${gitlab-unicorn-startup:output}
# why we do it here. See gitlab-puma-startup for details.
command-line = ${gitlab-puma-startup:output}
{% if instance_parameter_dict.get('configuration.root-password') -%}
environment =
GITLAB_ROOT_PASSWORD={{ instance_parameter_dict['configuration.root-password'] }}
{% endif %}
depend =
${promise-unicorn:recipe}
${promise-puma:recipe}
${promise-gitlab-app:recipe}
${promise-gitlab-shell:recipe}
${logrotate-entry-unicorn:recipe}
# gitlab is a service "run" under unicorn
${logrotate-entry-puma:recipe}
# gitlab is a service "run" under puma
# gitlab-shell is called by gitlab
# -> associate their logs rotation to here
${logrotate-entry-gitlab:recipe}
[promise-unicorn]
[promise-puma]
<= promise-byurl
config-command = {{ curl_bin }} --unix-socket ${unicorn:socket} http://localhost/
config-command = {{ curl_bin }} --unix-socket ${puma:socket} http://localhost/
[promise-rakebase]
recipe = slapos.cookbook:wrapper
......@@ -662,10 +669,10 @@ command-line = ${:rake} gitlab:gitlab_shell:check
# rake gitlab:repo:check (fsck all repos)
[logrotate-entry-unicorn]
[logrotate-entry-puma]
<= logrotate-entry-base
log = ${unicorn:log}/*.log
name = unicorn
log = ${puma:log}/*.log
name = puma
copytruncate = true
[logrotate-entry-gitlab]
......@@ -682,8 +689,8 @@ copytruncate = true
[logrotate-entry-gitlab-workhorse]
<= logrotate-entry-base
log = ${gitlab-workhorse-dir:log}//*.log
name = gitlab-shell
log = ${gitlab-workhorse-dir:log}/*.log
name = gitlab-workhorse
copytruncate = true
#######################################
......@@ -828,18 +835,17 @@ cron-entries = ${cron:cron-entries}
[service-gitaly]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:service}/gitaly
#command-line = ${gitlab-work:location}/bin/daemon_with_pidfile ${gitaly:pid}
command-line = {{ gitaly_location }}/gitaly ${gitaly-config.toml:output}
environment =
PATH={{ bundler_1_17_3_dir }}:{{ ruby_location }}/bin:/bin:/usr/bin
PATH={{ buildout_bin_directory }}:{{ ruby_location }}/bin:/bin:/usr/bin
# 6. on-reinstantiate actions
# NOTE here we only recompile assets. Other on-reinstantiate actions, which
# require pg and redis running, are performed as part of unicorn service -
# right before its startup (see gitlab-unicorn-startup).
# require pg and redis running, are performed as part of puma service -
# right before its startup (see gitlab-puma-startup).
[on-reinstantiate]
recipe = plone.recipe.command
stop-on-error = true
......
......@@ -53,6 +53,7 @@ context =
section instance_parameter_dict slap-configuration
# program binaries
raw buildout_bin_directory ${buildout:bin-directory}
raw bash_bin ${bash:location}/bin/bash
raw bzip2_location ${bzip2:location}
raw bundler_4gitlab ${bundler-4gitlab:bundle}
......@@ -64,7 +65,7 @@ context =
raw git_location ${git:location}
raw gitaly_location ${gitaly-repository:location}
raw gitlab_export ${gitlab-export:output}
raw gitlab_workhorse ${gowork:bin}/gitlab-workhorse
raw gitlab_workhorse ${gitlab-workhorse:binary}
raw gopath_bin ${gowork:bin}
raw gunzip_bin ${gzip:location}/bin/gunzip
raw grep_location ${grep:location}
......@@ -75,8 +76,8 @@ context =
raw nginx_mime_types ${nginx-output:mime}
raw node_bin_location ${nodejs:location}/bin/
raw openssl_bin ${openssl-output:openssl}
raw postgresql_location ${postgresql10:location}
raw redis_binprefix ${redis28:location}/bin
raw postgresql_location ${postgresql:location}
raw redis_binprefix ${redis:location}/bin
raw ruby_location ${bundler-4gitlab:ruby-location}
raw tar_location ${tar:location}
raw watcher ${watcher:output}
......@@ -88,17 +89,16 @@ context =
raw gitconfig_in ${gitconfig.in:target}
raw monitor_template ${monitor2-template:output}
raw gitlab_shell_config_yml_in ${gitlab-shell-config.yml.in:target}
raw gitlab_unicorn_startup_in ${gitlab-unicorn-startup.in:target}
raw gitlab_puma_startup_in ${gitlab-puma-startup.in:target}
raw gitlab_yml_in ${gitlab.yml.in:target}
raw gitaly_config_toml_in ${gitaly-config.toml.in:target}
raw macrolib_cfg_in ${macrolib.cfg.in:target}
raw nginx_conf_in ${nginx.conf.in:target}
raw nginx_gitlab_http_conf_in ${nginx-gitlab-http.conf.in:target}
raw rack_attack_rb_in ${rack_attack.rb.in:target}
raw resque_yml_in ${resque.yml.in:target}
raw smtp_settings_rb_in ${smtp_settings.rb.in:target}
raw gitlab_restore_sh_in ${template-gitlab-resiliency-restore.sh.in:target}
raw unicorn_rb_in ${unicorn.rb.in:target}
raw puma_rb_in ${puma.rb.in:target}
$${:context-extra}
context-extra =
......
......@@ -5,8 +5,10 @@ extends =
../../stack/slapos.cfg
../../stack/nodejs.cfg
../../stack/monitor/buildout.cfg
../../component/libgit2/buildout.cfg
../../component/ruby/buildout.cfg
../../component/golang/buildout.cfg
../../component/perl-Image-ExifTool/buildout.cfg
../../component/postgresql/buildout.cfg
../../component/redis/buildout.cfg
../../component/cmake/buildout.cfg
......@@ -28,19 +30,19 @@ extends =
../../component/gzip/buildout.cfg
../../component/dcron/buildout.cfg
../../component/logrotate/buildout.cfg
../../component/libgpgme/buildout.cfg
../../component/patchelf/buildout.cfg
parts =
golang1.13
golang1.15
git
postgresql10
redis28
postgresql
cmake
icu
pkgconfig
nginx-output
gowork
gitlab-workhorse
gitaly-build
gitlab-shell/vendor
gitlab/vendor/bundle
......@@ -65,16 +67,16 @@ parts =
revision = 571d6514f7290e8faa9439c4b86aa2f6c87df261
[nodejs]
<= nodejs-12.18.3
<= nodejs-14.16.0
[yarn]
<= yarn-1.16.0
[openssl]
<= openssl-1.1
# Gitlab backup (git-backup) is failing (segfault) with recent git versions (> 2.30.9).
# We will use git version 2.30.9 for the production upgrade.
# TODO: fix the issue with git and use the latest version
[git]
url = https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.30.9.tar.xz
md5sum = c1d42936036cc44a448738329c821569
[libgit2]
# This version is for rugged 1.1.0 needed by gitlab and gitaly 13.12.15,
# see: https://github.com/libgit2/rugged/tree/v1.1.0/vendor
version = 1.1.0
############################
# Software compilation #
......@@ -122,7 +124,7 @@ url = https://rubygems.org/rubygems/rubygems-3.1.2.zip
# - run gitlab services / jobs (via `bundle exec ...`)
[bundler-4gitlab]
<= rubygemsrecipe
ruby-location = ${ruby2.6:location}
ruby-location = ${ruby:location}
ruby-executable = ${:ruby-location}/bin/ruby
gems =
bundler==1.17.3
......@@ -148,7 +150,7 @@ bundle1.17.3 = ${buildout:parts-directory}/${:_buildout_section_name_}/lib/ruby/
# gitlab (via github-markup) wants to convert rst -> html via running: python (with docutils egg)
environment =
PATH = ${python-4gitlab:bin}:${yarn:location}/bin:${:ruby-location}/bin:${cmake:location}/bin:${pkgconfig:location}/bin:${nodejs:location}/bin:${postgresql10:location}/bin:${redis28:location}/bin:${git:location}/bin:${buildout:bin-directory}:%(PATH)s
PATH = ${python-4gitlab:bin}:${yarn:location}/bin:${:ruby-location}/bin:${cmake:location}/bin:${pkgconfig:location}/bin:${nodejs:location}/bin:${postgresql:location}/bin:${redis:location}/bin:${git:location}/bin:${libgpgme:location}/bin:${buildout:bin-directory}:%(PATH)s
# gitlab, gitlab-shell & gitlab-workhorse checked out as git repositories
......@@ -160,26 +162,21 @@ git-executable = ${git:location}/bin/git
[gitlab-repository]
<= git-repository
repository = https://lab.nexedi.com/nexedi/gitlab-ce.git
revision = v12.10.14-12-g7ce27b49193
revision = v13.12.15-10-gf4b76cb11f
location = ${buildout:parts-directory}/gitlab
[gitlab-shell-repository]
<= git-repository
repository = https://gitlab.com/gitlab-org/gitlab-shell.git
revision = v12.2.0
revision = v13.18.1
location = ${buildout:parts-directory}/gitlab-shell
[gitaly-repository]
<= git-repository
repository = https://gitlab.com/gitlab-org/gitaly.git
revision = v12.10.14
revision = v13.12.15
location = ${buildout:parts-directory}/gitaly
[gitlab-workhorse-repository]
<= git-repository
repository = https://lab.nexedi.com/nexedi/gitlab-workhorse.git
revision = v8.30.3-19-g919c9b532c
# build needed-by-gitlab gems via bundler
[gitlab/vendor/bundle]
recipe = slapos.recipe.cmmi
......@@ -188,19 +185,30 @@ bundle = ${bundler-4gitlab:bundle}
configure-command = cd ${:path} &&
${:bundle} config --local build.charlock_holmes --with-icu-dir=${icu:location} &&
${:bundle} config --local build.pg --with-pg-config=${postgresql10:location}/bin/pg_config &&
${:bundle} config --local build.pg --with-pg-config=${postgresql:location}/bin/pg_config &&
${:bundle} config --local build.re2 --with-re2-dir=${re2:location} &&
${:bundle} config --local build.nokogiri --with-zlib-dir=${zlib:location} --with-cflags=-I${xz-utils:location}/include --with-ldflags="-L${xz-utils:location}/lib -Wl,-rpath=${xz-utils:location}/lib"
${:bundle} config --local build.rugged --use-system-libraries --with-git2-dir=${libgit2:location}
${:bundle} config --local build.openssl --with-openssl-dir=${openssl:location}
${:bundle} config --local build.puma --with-openssl-dir=${openssl:location}
${:bundle} config --local build.gpgme --use-system-libraries
${:bundle} config set without 'development test mysql aws kerberos'
${:bundle} config set deployment 'true'
make-binary =
make-targets= cd ${:path} && ${:bundle} install
environment =
PKG_CONFIG_PATH=${openssl-1.0:location}/lib/pkgconfig:${re2:location}/lib/pkgconfig:${icu:location}/lib/pkgconfig:${xz-utils:location}/lib/pkgconfig
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${re2:location}/lib/pkgconfig:${icu:location}/lib/pkgconfig:${xz-utils:location}/lib/pkgconfig:${libgit2:location}/lib/pkgconfig
PATH=${pkgconfig:location}/bin:%(PATH)s
CFLAGS=-I${xz-utils:location}/include
gpme-rpath = ${ruby:location}/lib:${gdbm:location}/lib:${libffi:location}/lib:${libyaml:location}/lib:${ncurses:location}/lib:${readline:location}/lib:${zlib:location}/lib:${libgpgme:location}/lib:${libgpg-error:location}/lib:${libassuan:location}/lib
post-install =
# bundle does not set the libgpgme lib and its dependencies in the gpgme_n.so rpath,
# which results in `libgpgme.so.11 => not found`
find ${bundler-4gitlab:location}/lib/ruby/gems/ -type f -name gpgme_n.so -exec \
${patchelf:location}/bin/patchelf --set-rpath ${:gpme-rpath} {} \;
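# For illustration only: a rough Python equivalent of the find/patchelf step
# above (the gems directory and rpath values below are placeholders, not the
# real buildout-expanded paths).
#
#   import os
#   import subprocess
#
#   GEMS_DIR = "/path/to/bundler-4gitlab/lib/ruby/gems"       # placeholder
#   RPATH = "/path/to/libgpgme/lib:/path/to/libassuan/lib"    # placeholder
#
#   for root, _dirs, files in os.walk(GEMS_DIR):
#       for name in files:
#           if name == "gpgme_n.so":
#               # same effect as `patchelf --set-rpath <rpath> <file>`
#               subprocess.check_call(
#                   ["patchelf", "--set-rpath", RPATH, os.path.join(root, name)])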
################## Google re2
[re2]
recipe = slapos.recipe.cmmi
......@@ -208,7 +216,6 @@ url = https://github.com/google/re2/archive/2019-12-01.tar.gz
md5sum = 527eab0c75d6a1a0044c6eefd816b2fb
configure-command = :
[gitlab_npm]
recipe = slapos.recipe.cmmi
path = ${gitlab-repository:location}
......@@ -229,37 +236,39 @@ configure-command = :
make-binary =
make-targets= cd ${go_github.com_libgit2_git2go:location}
&& git submodule update --init
&& sed -i 's/.*--build.*/cmake --build . --target install/' script/build-libgit2-static.sh
&& make install
&& make install-static
environment =
PKG_CONFIG_PATH=${openssl-1.0:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig
PATH=${cmake:location}/bin:${pkgconfig:location}/bin:${git:location}/bin:${golang1.13:location}/bin:${buildout:bin-directory}:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig
PATH=${cmake:location}/bin:${pkgconfig:location}/bin:${git:location}/bin:${golang1.15:location}/bin:${buildout:bin-directory}:%(PATH)s
GOPATH=${gowork:directory}
[gowork.goinstall]
git2go = ${go_github.com_libgit2_git2go_prepare:path}/vendor/libgit2/install
git2go = ${go_github.com_libgit2_git2go_prepare:path}/static-build/install
command = bash -c ". ${gowork:env.sh} && CGO_CFLAGS=-I${:git2go}/include CGO_LDFLAGS='-L${:git2go}/lib -lgit2' go install ${gowork:buildflags} -v $(echo -n '${gowork:install}' |tr '\n' ' ') && go test -v lab.nexedi.com/kirr/git-backup"
[gowork]
golang = ${golang1.13:location}
# gitlab.com/gitlab-org/gitlab-workhorse
# gitlab.com/gitlab-org/gitlab-workhorse/cmd/gitlab-zip-cat
# gitlab.com/gitlab-org/gitlab-workhorse/cmd/gitlab-zip-metadata
golang = ${golang1.15:location}
install =
lab.nexedi.com/kirr/git-backup
cpkgpath =
${openssl-1.0:location}/lib/pkgconfig
${openssl:location}/lib/pkgconfig
${zlib:location}/lib/pkgconfig
${go_github.com_libgit2_git2go_prepare:path}/vendor/libgit2/install/lib/pkgconfig
${go_github.com_libgit2_git2go_prepare:path}/static-build/install/lib/pkgconfig
buildflags = --tags "static"
[gitlab-workhorse]
recipe = slapos.recipe.cmmi
path = ${gitlab-workhorse-repository:location}
path = ${gitlab-repository:location}/workhorse
configure-command = :
make-binary =
make-targets =
. ${gowork:env.sh} && make test && make install PREFIX=${gowork:directory}
. ${gowork:env.sh} && make test && make install PREFIX=${gowork:directory}
binary = ${gowork:bin}/${:_buildout_section_name_}
# Add ExifTool to the environment so that `make test` can work.
# ExifTool is not used in the gitlab-workhorse service for now, for security reasons.
# see: https://lab.nexedi.com/nexedi/slapos/-/merge_requests/1558?diff_id=54289&start_sha=725054eeef9dcd8018abfbf6147cbbbfccfbba5c#note_208105
environment =
PATH=${perl-Image-ExifTool:location}/bin/:%(PATH)s
[gitlab-backup]
recipe = plone.recipe.command
......@@ -275,6 +284,7 @@ bundle = ${bundler-4gitlab:bundle}
configure-command = cd ${:path}/ruby &&
${:bundle} config --local build.charlock_holmes --with-icu-dir=${icu:location}
${:bundle} config --local build.rugged --use-system-libraries --with-git2-dir=${libgit2:location}
make-binary =
make-targets =
. ${gowork:env.sh} &&
......@@ -284,8 +294,9 @@ post-install =
# fix the error "not executable: ruby/git-hooks/pre-receive"
chmod 755 ${:path}/ruby/git-hooks/gitlab-shell-hook
environment =
PKG_CONFIG_PATH=${openssl-1.0:location}/lib/pkgconfig:${icu:location}/lib/pkgconfig
PATH=${pkgconfig:location}/bin:${ruby2.6:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${icu:location}/lib/pkgconfig:${libgit2:location}/lib/pkgconfig
PATH=${cmake:location}/bin:${pkgconfig:location}/bin:${ruby:location}/bin:%(PATH)s
OPENSSL_ROOT_DIR=${openssl:location}
[xnice-repository]
# to get kirr's misc repo containing xnice script for executing processes
......@@ -310,7 +321,7 @@ make-targets= cd ${:path} &&
. ${gowork:env.sh} && make build &&
${:bundle} install --deployment --without development test
environment =
PATH=${ruby2.6:location}/bin:%(PATH)s
PATH=${ruby:location}/bin:%(PATH)s
###############################
# Trampoline for instance #
......@@ -368,7 +379,7 @@ destination = ${buildout:directory}/${:_buildout_section_name_}
[gitlab-shell-config.yml.in]
<= download-file
[gitlab-unicorn-startup.in]
[gitlab-puma-startup.in]
<= download-file
[gitlab.yml.in]
......@@ -392,9 +403,6 @@ destination = ${buildout:directory}/${:_buildout_section_name_}
[nginx.conf.in]
<= download-file
[rack_attack.rb.in]
<= download-file
[resque.yml.in]
<= download-file
......@@ -404,16 +412,12 @@ destination = ${buildout:directory}/${:_buildout_section_name_}
[template-gitlab-resiliency-restore.sh.in]
<= download-file
[unicorn.rb.in]
[puma.rb.in]
<= download-file
[gitlab-demo-backup.git]
recipe = slapos.recipe.build:download-unpacked
url = https://lab.nexedi.com/alain.takoudjou/labdemo.backup/repository/archive.tar.gz?ref=master
md5sum = d40e5e211dc9a4e5ada9c0250377c639
[versions]
docutils = 0.16
cns.recipe.symlink = 0.2.3
plone.recipe.command = 1.1
z3c.recipe.scripts = 1.0.1
beautifulsoup4 = 4.12.3
......@@ -7,7 +7,12 @@ socket_path = "{{ gitaly.socket }}"
# The directory where Gitaly's executables are stored
bin_dir = "{{ gitaly.location }}"
# # Optional: listen on a TCP socket. This is insecure (no authentication)
# # Optional. The directory where Gitaly can create all files required to
# # properly operate at runtime. If not set, Gitaly will create a directory in
# # the global temporary directory. This directory must exist.
runtime_dir = "{{ gitaly.basedir }}"
# # Optional if socket_path is set. TCP address for Gitaly to listen on. This is insecure (unencrypted connection).
# listen_addr = "localhost:9999"
# tls_listen_addr = "localhost:8888"
......@@ -33,6 +38,10 @@ internal_socket_dir = "{{ gitaly.internal_socket }}"
bin_path = "{{ git }}"
# catfile_cache_size = 100
# [[git.config]]
# key = fetch.fsckObjects
# value = true
[[storage]]
name = "default"
path = "{{ gitlab.repositories }}"
......@@ -47,9 +56,9 @@ path = "{{ gitlab.repositories }}"
# You can optionally configure Gitaly to output JSON-formatted log messages to stdout
[logging]
# The directory where Gitaly stores extra log files
dir = "{{ gitaly.log }}"
# format = "json"
# format = "json"
dir = "{{ gitaly.logdir }}"
# format = "text"
format = "json"
# # Optional: Set log level to only log entries with that severity or above
# # One of, in order: debug, info, warn, error, fatal, panic
# # Defaults to "info"
......@@ -79,7 +88,7 @@ dir = "{{ gitaly.location }}/ruby"
# restart_delay = "5m"
#
# # Number of gitaly-ruby worker processes
# num_workers = 2
num_workers = {{ gitaly.num_workers }}
#
# # Search path for system gitconfig file (e.g. /etc, /opt/gitlab/embedded/etc)
# # NOTE: This only affects RPCs that use Rugged.
......@@ -89,7 +98,47 @@ dir = "{{ gitaly.location }}/ruby"
# The directory where gitlab-shell is installed
dir = "{{ gitlab_shell_work.location }}"
[hooks]
custom_hooks_dir = "{{ gitlab_shell_work.location }}/hooks/"
[gitlab]
secret_file = "{{ gitlab_shell.secret }}"
url = "http+unix://{{ urllib.parse.unquote_plus(gitlab_workhorse.socket) }}"
# Only needed if a UNIX socket is used in `url` and GitLab is configured to
# use a relative path (e.g. /gitlab).
# relative_url_root = '/'
[gitlab.http-settings]
# read_timeout = 300
# user = someone
# password = somepass
# ca_file = /etc/ssl/cert.pem
# ca_path = /etc/pki/tls/certs
# self_signed_cert = false
# # You can adjust the concurrency of each RPC endpoint
# [[concurrency]]
# rpc = "/gitaly.RepositoryService/GarbageCollect"
# max_per_repo = 1
# Daily maintenance designates time slots to run daily to optimize and maintain
# enabled storages.
# [daily_maintenance]
# start_hour = 23
# start_minute = 30
# duration = "45m"
# storages = ["default"]
# disabled = false
# [cgroups]
# count = 10
# mountpoint = "/sys/fs/cgroup"
# hierarchy_root = "gitaly"
# [cgroups.memory]
# enabled = true
# limit = 1048576
# [cgroups.cpu]
# enabled = true
# shares = 512
......@@ -12,15 +12,24 @@
[pack]
threads = 1
# Enable packfile bitmaps
[repack]
writeBitmaps = true
# don't allow corrupt/broken objects to go in
# Enable push options (advertisePushOptions)
[receive]
fsckObjects = true
advertisePushOptions = true
[user]
name = {{ cfg('email_display_name') }}
email = {{ cfg('email_from') }}
# Enable fsyncObjectFiles to reduce risk of repository corruption if the server crashes
[core]
autocrlf = input
fsyncObjectFiles = true
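# Disable automatic garbage collection ("git gc --auto")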
[gc]
auto = 0
......@@ -7,13 +7,15 @@
# GitLab user. git by default
user: {{ backend_info.user }}
# Url to gitlab instance. Used for api calls. Should end with a slash.
gitlab_url: "http+unix://{{ urllib.parse.quote_plus(unicorn.socket) }}/"
# URL to GitLab instance, used for API calls. Default: http://localhost:8080.
# For relative URL support read http://doc.gitlab.com/ce/install/relative_url.html
gitlab_url: "http+unix://{{ urllib.parse.quote_plus(puma.socket) }}/"
http_settings:
{# we don't need any
<%= @http_settings.to_json if @http_settings %>
#}
# read_timeout: 300
# user: someone
# password: somepass
# ca_file: /etc/ssl/cert.pem
......@@ -34,35 +36,17 @@ auth_file: "{{ gitlab.var }}/sshkeys-notused"
# Default is .gitlab_shell_secret in the root directory.
secret_file: "{{ gitlab_shell.secret }}"
# Parent directory for global custom hook directories (pre-receive.d, update.d, post-receive.d)
# Default is hooks in the gitlab-shell directory.
custom_hooks_dir: "{{ gitlab_shell_work.location }}/hooks/"
# Redis settings used for pushing commit notices to gitlab
redis:
bin: {{ redis_binprefix }}/redis-cli
host: {# <%= @redis_host %> #}
port: {# <%= @redis_port %> #}
socket: {{ service_redis.unixsocket }}
database: {# <%= @redis_database %> #}
namespace: resque:gitlab
# Log file.
# Default is gitlab-shell.log in the root directory.
log_file: "{{ gitlab_shell.log }}/gitlab-shell.log"
# Log level. INFO by default
log_level:
log_level: INFO
# Log format. 'text' by default
log_format: text
# Audit usernames.
# Set to true to see real usernames in the logs instead of key ids, which is easier to follow, but
# incurs an extra API call on every gitlab-shell command.
audit_usernames:
# Enable git-annex support
# git-annex allows managing files with git, without checking the file contents into git
# See https://git-annex.branchable.com/ for documentation
# If enabled, git-annex needs to be installed on the server where gitlab-shell is setup
# For Debian and Ubuntu systems this can be done with: sudo apt-get install git-annex
# For CentOS: sudo yum install epel-release && sudo yum install git-annex
git_annex_enabled:
audit_usernames: false
{{ autogenerated }}
# see:
# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/config/gitlab.yml.example
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/gitlab.yml.erb
# (last updated for omnibus-gitlab 8.8.9+ce.0-g25376053)
{% from 'macrolib.cfg.in' import cfg, cfg_https, external_url with context %}
# # # # # # # # # # # # # # # # # #
# GitLab application config file #
# # # # # # # # # # # # # # # # # #
#
########################### NOTE #####################################
# This file should not receive new settings. All configuration options #
# * are being moved to ApplicationSetting model! #
# If a setting requires an application restart say so in that screen. #
# If you change this file in a merge request, please also create #
# a MR on https://gitlab.com/gitlab-org/omnibus-gitlab/merge_requests. #
# For more details see https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/doc/settings/gitlab.yml.md #
########################################################################
#
#
# How to use:
# 1. Copy file as gitlab.yml
# 2. Update gitlab -> host with your fully qualified domain name
# 3. Update gitlab -> email_from
# 4. If you installed Git from source, change git -> bin_path to /usr/local/bin/git
# IMPORTANT: If Git was installed in a different location use that instead.
# You can check with `which git`. If a wrong path of Git is specified, it will
# result in various issues such as failures of GitLab CI builds.
# 5. Review this configuration file for other settings you may want to adjust
production: &base
#
# 1. GitLab app settings
......@@ -18,6 +40,9 @@ production: &base
host: {{ external_url.hostname }}
port: {{ external_url.port or default_port[external_url.scheme] }}
https: {{ cfg_https }}
# The maximum time unicorn/puma can spend on the request. This needs to be smaller than the worker timeout.
# Default is 95% of the worker timeout
max_request_duration_seconds: 57
{# ssh is disabled completely in slapos version
# Uncomment the line below if your ssh host is different from the HTTP/HTTPS one
......@@ -55,6 +80,8 @@ production: &base
worker_src: "'self' blob:"
report_uri:
allowed_hosts: []
# Trusted Proxies
# Customize if you have GitLab behind a reverse proxy which is running on a different machine.
# Add the IP address for your reverse proxy to the list, otherwise users will appear signed in from that address.
......@@ -122,6 +149,15 @@ production: &base
repository_downloads_path: <%= @gitlab_repository_downloads_path %>
#}
## Impersonation settings
impersonation_enabled: true
## Disable jQuery and CSS animations
# disable_animations: true
## Application settings cache expiry in seconds (default: 60)
# application_settings_cache_seconds: 60
{# we do not support reply by email
## Reply by email
# Allow users to comment on issues and merge requests by replying to notification emails.
......@@ -414,7 +450,9 @@ production: &base
# Gitaly settings
gitaly:
# Default Gitaly authentication token. Can be overriden per storage. Can
# Path to the directory containing Gitaly client executables.
client_path: {{ gitaly.location }}
# Default Gitaly authentication token. Can be overridden per storage. Can
# be left blank when Gitaly is running locally on a Unix socket, which
# is the normal way to deploy Gitaly.
token:
......@@ -463,7 +501,6 @@ production: &base
authorized_keys_file: {{ gitlab.var }}/sshkeys-notused
repos_path: {{ gitlab.repositories }}
hooks_path: {{ gitlab_shell_work.location }}/hooks/
secret_file: {{ gitlab_shell.secret }}
# Git over HTTP
......@@ -483,17 +520,16 @@ production: &base
# gitlab-shell needs to be set to true
git_annex_enabled: <%= @git_annex_enabled %>
workhorse:
# File that contains the secret key for verifying access for gitlab-workhorse.
secret_file: {{ gitlab_workhorse.secret }}
## Git settings
# CAUTION!
# Use the default values unless you really know what you are doing
git:
bin_path: {{ git }}
# The next value is the maximum memory size grit can use
# Given in number of bytes per git object (e.g. a commit)
# This value can be increased if you have very large commits
max_size: {{ cfg('git_max_size') }}
# Git timeout to read a commit, in seconds
timeout: {{ cfg('git_timeout') }}
#
# 5. Extra customization
......@@ -515,11 +551,6 @@ production: &base
<% end %>
#}
{# we are ok (for now) with default rack-attack git settings
rack_attack:
git_basic_auth: <%= @rack_attack_git_basic_auth.to_json if @rack_attack_git_basic_auth %>
#}
## Site ICP License
# XXX unquote needed only for slapos.core earlier than
......@@ -530,6 +561,22 @@ production: &base
{# ICP: '{{ cfg("icp_license") }}' #}
{% endif %}
rack_attack:
git_basic_auth:
# Rack Attack IP banning enabled
enabled: {{ cfg("rack_attack_enable") }}
#
# Whitelist requests from 127.0.0.1 for web proxies (NGINX/Apache) with incorrect headers
ip_whitelist: [{{ cfg("rack_attack_ip_whitelist")}}]
#
# Limit the number of Git HTTP authentication attempts per IP
maxretry: {{ cfg("rack_attack_max_retry") }}
#
# Reset the auth attempt counter per IP after 60 seconds
findtime: {{ cfg("rack_attack_find_time") }}
#
# Ban an IP for one hour (3600s) after too many auth attempts
bantime: {{ cfg("rack_attack_ban_time") }}
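# For illustration only, a minimal Python sketch of how these three settings
# interact (parameter names mirror the keys above; the real enforcement is done
# by Rack::Attack inside GitLab, so this is an approximation, not its actual
# implementation):
#
#   import time
#
#   class BanSketch:
#       def __init__(self, maxretry, findtime, bantime):
#           self.maxretry, self.findtime, self.bantime = maxretry, findtime, bantime
#           self.failures = {}   # ip -> (window_start, failure_count)
#           self.banned = {}     # ip -> ban_expiry
#
#       def is_banned(self, ip, now=None):
#           now = time.time() if now is None else now
#           return self.banned.get(ip, 0) > now
#
#       def record_failure(self, ip, now=None):
#           now = time.time() if now is None else now
#           start, count = self.failures.get(ip, (now, 0))
#           if now - start > self.findtime:    # counting window expired: reset
#               start, count = now, 0
#           count += 1
#           self.failures[ip] = (start, count)
#           if count >= self.maxretry:         # too many failures: ban the IP
#               self.banned[ip] = now + self.bantime
#
#   e.g. BanSketch(10, 60, 3600) bans an IP for one hour after 10 failed
#   attempts within 60 seconds.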
development:
<<: *base
......
# see: https://gitlab.com/gitlab-org/omnibus-gitlab/-/blob/master/files/gitlab-cookbooks/gitlab/templates/default/puma.rb.erb
{% from 'macrolib.cfg.in' import cfg with context %}
# frozen_string_literal: true
# Load "path" as a rackup file.
#
# The default is "config.ru".
#
rackup 'config.ru'
pidfile '{{ puma.pid }}/puma.pid'
state_path '{{ puma.pid }}/puma.state'
stdout_redirect '{{ puma.log }}/puma.stdout.log',
'{{ puma.log }}/puma.stderr.log',
true
# Configure "min" to be the minimum number of threads to use to answer
# requests and "max" the maximum.
#
# The default is "0, 16".
#
threads {{ cfg("puma_min_threads") }}, {{ cfg("puma_max_threads") }}
# By default, workers accept all requests and queue them to pass to handlers.
# When false, workers accept the number of simultaneous requests configured.
#
# Queueing requests generally improves performance, but can cause deadlocks if
# the app is waiting on a request to itself. See https://github.com/puma/puma/issues/612
#
# When set to false this may require a reverse proxy to handle slow clients and
# queue requests before they reach puma. This is due to disabling HTTP keepalive
queue_requests false
# Bind the server to "url". "tcp://", "unix://" and "ssl://" are the only
# accepted protocols.
bind 'unix://{{ puma.socket }}'
directory '{{ gitlab_work.location }}'
workers {{ cfg("puma_worker_processes") }}
require_relative "{{ gitlab_work.location }}/lib/gitlab/cluster/lifecycle_events"
require_relative "{{ gitlab_work.location }}/lib/gitlab/cluster/puma_worker_killer_initializer"
on_restart do
# Signal application hooks that we're about to restart
Gitlab::Cluster::LifecycleEvents.do_before_master_restart
end
options = { workers: {{ cfg("puma_worker_processes") }} }
before_fork do
# Signal to the puma killer
Gitlab::Cluster::PumaWorkerKillerInitializer.start options unless ENV['DISABLE_PUMA_WORKER_KILLER']
# Signal application hooks that we're about to fork
Gitlab::Cluster::LifecycleEvents.do_before_fork
end
Gitlab::Cluster::LifecycleEvents.set_puma_options options
on_worker_boot do
# Signal application hooks of worker start
Gitlab::Cluster::LifecycleEvents.do_worker_start
end
# Preload the application before starting the workers; this conflicts with
# phased restart feature. (off by default)
preload_app!
tag 'gitlab-puma-worker'
# Verifies that all workers have checked in to the master process within
# the given timeout. If not the worker process will be restarted. Default
# value is 60 seconds.
#
worker_timeout {{ cfg("puma_worker_timeout") }}
# https://github.com/puma/puma/blob/master/5.0-Upgrade.md#lower-latency-better-throughput
wait_for_less_busy_worker ENV.fetch('PUMA_WAIT_FOR_LESS_BUSY_WORKER', 0.001).to_f
# https://github.com/puma/puma/blob/master/5.0-Upgrade.md#nakayoshi_fork
nakayoshi_fork unless ENV['DISABLE_PUMA_NAKAYOSHI_FORK'] == 'true'
# Use json formatter
require_relative "{{ gitlab_work.location }}/lib/gitlab/puma_logging/json_formatter"
json_formatter = Gitlab::PumaLogging::JSONFormatter.new
log_formatter do |str|
json_formatter.call(str)
end
{{ autogenerated }}
# see:
# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/config/initializers/rack_attack.rb.example
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/rack_attack.rb.erb
# (last updated for omnibus-gitlab 8.8.9+ce.0-g25376053)
{% from 'macrolib.cfg.in' import cfg with context %}
# 1. Rename this file to rack_attack.rb
# 2. Review the paths_to_be_protected and add any other path you need protecting
#
paths_to_be_protected = [
"#{Rails.application.config.relative_url_root}/users/password",
"#{Rails.application.config.relative_url_root}/users/sign_in",
"#{Rails.application.config.relative_url_root}/api/#{API::API.version}/session.json",
"#{Rails.application.config.relative_url_root}/api/#{API::API.version}/session",
"#{Rails.application.config.relative_url_root}/users",
"#{Rails.application.config.relative_url_root}/users/confirmation",
"#{Rails.application.config.relative_url_root}/unsubscribes/"
]
# Create one big regular expression that matches strings starting with any of
# the paths_to_be_protected.
paths_regex = Regexp.union(paths_to_be_protected.map { |path| /\A#{Regexp.escape(path)}/ })
rack_attack_enabled = Gitlab.config.rack_attack.git_basic_auth['enabled']
unless Rails.env.test? || !rack_attack_enabled
Rack::Attack.throttle('protected paths', limit: {{ cfg('rate_limit_requests_per_period') }}, period: {{ cfg('rate_limit_period') }}.seconds) do |req|
if req.post? && req.path =~ paths_regex
req.ip
end
end
end
......@@ -21,8 +21,6 @@ if Rails.env.production?
enable_starttls_auto: {{ cfg('smtp_enable_starttls_auto') }},
# ssl:
openssl_verify_mode: '{{ cfg("smtp_openssl_verify_mode") }}'
# ca_path:
# ca_file:
}
end
{% else %}
......
......@@ -21,15 +21,15 @@ redis_pid_file="{{ redis_pid_file }}"
postgres_pid_file="{{ postgres_pid_file }}"
bin_location="{{ bin_directory }}"
run_location="{{ run_directory }}"
git_location="{{ git_location }}"
go_work_bin="{{ go_work_bin }}"
etc_location="{{ etc_directory }}"
gitlab_work="{{ gitlab_work_location }}"
promise_check="{{ promise_lab_location }}"
unicorn_script="{{ unicorn_script }}"
puma_script="{{ puma_script }}"
puma_pid_file="{{ puma_pid_file }}"
sidekiq_script="{{ sidekiq_script }}"
var_location="{{ run_directory }}/.."
var_location="{{ var_directory }}"
# export GIT_EXEC_PATH=$git_location/libexec/git-core/
......@@ -56,7 +56,7 @@ kill_process () {
check_process $postgres_pid_file "Postgres"
check_process $redis_pid_file "Redis"
check_process $run_location/unicorn.pid "Unicorn"
check_process $puma_pid_file "Puma"
if [ -f "$postgres_pid_file" ]; then
rm $postgres_pid_file
......@@ -90,14 +90,14 @@ echo "Checking gitlab promises..."
echo "[info] Not all promises are checked!"
$promise_check/gitlab-app
echo "Starting Unicorn to check gitlab-shell promise..."
$unicorn_script &
unicorn_pid=$!
trap "kill $postgres_pid $redis_pid $unicorn_pid" EXIT TERM INT
echo "Starting Puma to check gitlab-shell promise..."
$puma_script &
puma_pid=$!
trap "kill $postgres_pid $redis_pid $puma_pid" EXIT TERM INT
sleep 60
if [ -s "$run_location/unicorn.pid" ]; then
unicorn_ppid=$(head -n 1 $run_location/unicorn.pid) > /dev/null 2>&1
trap "kill $postgres_pid $redis_pid $unicorn_ppid" EXIT TERM INT
if [ -s "$puma_pid_file" ]; then
puma_pid=$(head -n 1 $puma_pid_file) > /dev/null 2>&1
trap "kill $postgres_pid $redis_pid $puma_pid" EXIT TERM INT
fi
$promise_check/gitlab-shell
......@@ -109,7 +109,7 @@ $promise_check/gitlab-shell
kill_process $postgres_pid
kill_process $redis_pid
kill_process $unicorn_pid
kill_process $puma_pid
RESTORE_EXIT_CODE=$?
......
{{ autogenerated }}
# see:
# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/config/unicorn.rb.example
# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/config/unicorn.rb.example.development
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/templates/default/unicorn.rb.erb
# (last updated for omnibus-gitlab 8.7.9+ce.1-0-gf589ad7)
{% from 'macrolib.cfg.in' import cfg with context %}
# What ports/sockets to listen on, and what options for them.
# we listen only on unix socket
listen "{{ unicorn.socket }}", :backlog => {{ cfg('unicorn_backlog_socket') }}
#listen "127.0.0.1:8888", :tcp_nopush => true
working_directory '{{ gitlab_work.location }}'
# What the timeout for killing busy workers is, in seconds
timeout {{ cfg('unicorn_worker_timeout') }}
# combine Ruby 2.0.0dev or REE with "preload_app true" for memory savings
# http://rubyenterpriseedition.com/faq.html#adapt_apps_for_cow
preload_app true
# Enable this flag to have unicorn test client connections by writing the
# beginning of the HTTP headers before calling the application. This
# prevents calling the application for connections that have disconnected
# while queued. This is only guaranteed to detect clients on the same
# host unicorn runs on, and unlikely to detect disconnects even on a
# fast LAN.
check_client_connection false
require_relative '{{ gitlab_work.location }}/lib/gitlab/cluster/lifecycle_events'
before_exec do |server|
# Signal application hooks that we're about to restart
Gitlab::Cluster::LifecycleEvents.do_before_master_restart
end
# How many worker processes
worker_processes {{ cfg('unicorn_worker_processes') }}
# about before_fork / after_fork - see:
# https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/definitions/unicorn_service.rb
# http://bogomips.org/unicorn.git/tree/examples/unicorn.conf.rb?id=3312aca8#n75
# What to do before we fork a worker
before_fork do |server, worker|
# Signal application hooks that we're about to fork
Gitlab::Cluster::LifecycleEvents.do_before_fork
# The following is only recommended for memory/DB-constrained
# installations. It is not needed if your system can house
# twice as many worker_processes as you have configured.
#
# This allows a new master process to incrementally
# phase out the old master process with SIGTTOU to avoid a
# thundering herd (especially in the "preload_app false" case)
# when doing a transparent upgrade. The last worker spawned
# will then kill off the old master process with a SIGQUIT.
old_pid = "#{server.config[:pid]}.oldbin"
if old_pid != server.pid
begin
sig = (worker.nr + 1) >= server.worker_processes ? :QUIT : :TTOU
Process.kill(sig, File.read(old_pid).to_i)
rescue Errno::ENOENT, Errno::ESRCH
end
end
#
# Throttle the master from forking too quickly by sleeping. Due
# to the implementation of standard Unix signal handlers, this
# helps (but does not completely) prevent identical, repeated signals
# from being lost when the receiving process is busy.
# sleep 1
end
# What to do after we fork a worker
after_fork do |server, worker|
# Signal application hooks of worker start
Gitlab::Cluster::LifecycleEvents.do_worker_start
# per-process listener ports for debugging/admin/migrations
# addr = "127.0.0.1:#{9293 + worker.nr}"
# server.listen(addr, :tries => -1, :delay => 5, :tcp_nopush => true)
end
# Where to drop a pidfile
pid '{{ directory.run }}/unicorn.pid'
# Where stderr gets logged
stderr_path '{{ unicorn.log }}/unicorn_stderr.log'
# Where stdout gets logged
stdout_path '{{ unicorn.log }}/unicorn_stdout.log'
{# we do not support Relative url
<%- if @relative_url %>
# Relative url from where GitLab is served
ENV['RAILS_RELATIVE_URL_ROOT'] = "<%= @relative_url %>"
<%- end %>
#}
# Min memory size (RSS) per worker
ENV['GITLAB_UNICORN_MEMORY_MIN'] = ({{ cfg('unicorn_worker_memory_limit_min') }}).to_s
# Max memory size (RSS) per worker
ENV['GITLAB_UNICORN_MEMORY_MAX'] = ({{ cfg('unicorn_worker_memory_limit_max') }}).to_s
......@@ -46,6 +46,7 @@ setup(
'erp5.util',
'supervisor',
'requests',
'beautifulsoup4'
],
zip_safe=True,
test_suite='test',
......
......@@ -26,10 +26,10 @@
##############################################################################
import os
import logging
from urllib.parse import urlparse
import requests
import functools
import bs4
from urllib.parse import urljoin
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
......@@ -46,6 +46,10 @@ class TestGitlab(SlapOSInstanceTestCase):
def getInstanceSoftwareType(cls):
return 'gitlab'
@classmethod
def getInstanceParameterDict(cls):
return {'root-password': 'admin1234'}
def setUp(self):
self.backend_url = self.computer_partition.getConnectionParameterDict(
)['backend_url']
......@@ -54,3 +58,33 @@ class TestGitlab(SlapOSInstanceTestCase):
resp = requests.get(self.backend_url, verify=False)
self.assertTrue(
resp.status_code in [requests.codes.ok, requests.codes.found])
def test_rack_attack_sign_in_rate_limiting(self):
session = requests.session()
# Load the login page to get a CSRF token.
response = session.get(urljoin(self.backend_url, 'users/sign_in'), verify=False)
self.assertEqual(response.status_code, 200)
# Extract the CSRF token and param.
bsoup = bs4.BeautifulSoup(response.text, 'html.parser')
csrf_param = bsoup.find('meta', dict(name='csrf-param'))['content']
csrf_token = bsoup.find('meta', dict(name='csrf-token'))['content']
request_data = {
'user[login]': 'test',
'user[password]': 'random',
csrf_param: csrf_token}
sign_in = functools.partial(
session.post,
response.url,
data=request_data,
verify=False)
for _ in range(10):
sign_in(headers={'X-Forwarded-For': '1.2.3.4'})
# after 10 authentication failures, this client is rate limited
self.assertEqual(sign_in(headers={'X-Forwarded-For': '1.2.3.4'}).status_code, 429)
# but other clients are not
self.assertNotEqual(sign_in(headers={'X-Forwarded-For': '5.6.7.8'}).status_code, 429)
......@@ -2,16 +2,14 @@
extends =
../../stack/slapos.cfg
../../component/rina-tools/buildout.cfg
# Python2 versions for buildout (keep last)
../../stack/slapos-py2.cfg
parts =
slapos-cookbook
template
[python]
part = python2.7
[openssl]
<= openssl-1.1
[file]
# For old GCC like 4.9.2 on Debian 8.
# XXX: This should be moved to component/rina-tools/buildout.cfg, next to where
......
......@@ -19,11 +19,11 @@ md5sum = 5f39952f94095b1f12f41db76867e71e
[instance-jupyter]
filename = instance-jupyter.cfg.in
md5sum = f9a0e5a134456d74ca8b4d87862f903d
md5sum = 1812fa797b9eb687a634ebe96134b504
[jupyter-notebook-config]
filename = jupyter_notebook_config.py.jinja
md5sum = 089e4c511a3c7b110471bf41ca2695a4
md5sum = 6c03113fb53d6ba98476f3353c083984
[erp5-kernel]
filename = ERP5kernel.py
......
......@@ -73,7 +73,7 @@ key_file = ${directory:etc}/jupyter_cert.key
[instance]
recipe = slapos.cookbook:wrapper
command-line =
{{ bin_directory }}/jupyter-lab
{{ bin_directory }}/jupyter-notebook
--no-browser
--ip=${instance-parameter:host}
--port=${instance-parameter:port}
......@@ -87,16 +87,16 @@ environment =
JUPYTER_PATH=${directory:jupyter_dir}
JUPYTER_CONFIG_DIR=${directory:jupyter_config_dir}
JUPYTER_RUNTIME_DIR=${directory:jupyter_runtime_dir}
JUPYTERLAB_DIR=${directory:jupyterlab-dir}
LANG=C.UTF-8
[jupyter-password]
recipe = slapos.cookbook:generate.password
bytes = 10
[jupyter-notebook-config]
recipe = slapos.recipe.template:jinja2
url = {{ jupyter_config_location }}/{{ jupyter_config_filename }}
output = ${directory:jupyter_config_dir}/jupyter_notebook_config.py
output = ${directory:jupyter_config_dir}/jupyter_server_config.py
context =
key password jupyter-password:passwd
raw gcc_location {{ gcc_location }}
......@@ -119,6 +119,7 @@ jupyter_runtime_dir = ${:jupyter_dir}/runtime
jupyter_custom_dir = ${:jupyter_config_dir}/custom
jupyter_nbextensions_dir = ${:jupyter_dir}/nbextensions
erp5_kernel_dir = ${:jupyter_kernel_dir}/ERP5
jupyterlab-dir = ${:jupyter_dir}/lab
[request-slave-frontend-base]
recipe = slapos.cookbook:requestoptional
......@@ -194,7 +195,7 @@ output = ${directory:erp5_kernel_dir}/kernel.json
context =
raw python_executable {{ python_executable }}
raw kernel_dir ${erp5-kernel:target-directory}/{{ erp5_kernel_filename }}
key erp5_url slapconfiguration:configuration.erp5-url
key erp5_url slapconfiguration:configuration.erp5-url
raw display_name ERP5
raw language_name python
......
......@@ -2,13 +2,47 @@
This script initializes Jupyter's configuration, such as passwords and other
things. It is run by IPython, which is why it can use functions like get_config().
'''
import configparser
from notebook.auth import passwd
import os
import pathlib
import sys
from jupyter_server.auth import passwd
import jupyterlab
c = get_config()
c.NotebookApp.password = passwd("{{ password }}")
c.ServerApp.password = passwd("{{ password }}")
jupyterlab_dir = pathlib.Path(os.environ['JUPYTERLAB_DIR'])
# Symlink all schemas into one folder; Jupyter seems to assume that everything
# is installed in the same place.
schemas_dir = jupyterlab_dir / 'schemas'
if not schemas_dir.exists():
schemas_dir.mkdir()
for p in sys.path:
for schema in (pathlib.Path(p) / 'share' / 'jupyter' / 'lab' / 'schemas').glob('*/'):
dest = (schemas_dir / schema.name)
if dest.exists():
dest.unlink()
dest.symlink_to(schema)
c.LabServerApp.schemas_dir = str(schemas_dir)
# static really needs to be a sub-folder of $JUPYTERLAB_DIR
static = pathlib.Path(jupyterlab.__file__).parent.parent / 'share' / 'jupyter' / 'lab' / 'static'
static_dir = jupyterlab_dir / 'static'
if static_dir.exists():
static_dir.unlink()
static_dir.symlink_to(static)
c.LabServerApp.themes_dir = str(pathlib.Path(jupyterlab.__file__).parent / 'themes')
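# Explicitly enable the Jupyter server extensions used by this instance.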
c.ServerApp.jpserver_extensions = {
'notebook': True,
'jupyter_lsp':True,
'jupyter_server_terminals': True,
'jupyterlab': True,
'notebook_shim': True,
}
try:
os.environ['PATH'] = "{{ gcc_location }}/bin" + os.pathsep + os.environ['PATH']
......
......@@ -47,7 +47,7 @@ class TestJupyter(InstanceTestCase):
def test(self):
connection_dict = self.computer_partition.getConnectionParameterDict()
self.assertTrue('password' in connection_dict)
self.assertIn('password', connection_dict)
password = connection_dict['password']
self.assertEqual(
......
......@@ -35,7 +35,7 @@ md5sum = a463a5e3cd2287d275d6943c2a11b7e4
[template-kvm-import-script]
filename = template/kvm-import.sh.jinja2
md5sum = cd0008f1689dfca9b77370bc4d275b70
md5sum = 013725987114c82ca3fd11097d0a7f9f
[template-kvm-export]
filename = instance-kvm-export.cfg.jinja2
......@@ -83,7 +83,7 @@ md5sum = 12779e690aa8341da660d833e102e552
[image-download-controller]
filename = template/image-download-controller.py.in
md5sum = 9636903e683e4712e313ca2c470cfb94
md5sum = 527b1f287233c0329648411ae1b62087
[image-download-config-creator]
filename = template/image-download-config-creator.py.in
......
......@@ -448,7 +448,7 @@
},
"boot-image-url-list": {
"title": "Boot image list",
"description": "The list shall be list of direct URLs to images, followed by hash (#), then by image MD5SUM. Each image shall appear on newline, like: \"https://example.com/image.iso#06226c7fac5bacfa385872a19bb99684<newline>https://example.com/another-image.iso#31b40d58b18e038498ddb46caea1361c\". They will be provided in KVM image list according to the order on the list. After updating the list, the instance has to be restarted to refresh it. Amount of images is limited to 4, and one image can be maximum 10G. Image will be downloaded and checked against its MD5SUM 4 times, then it will be considered as impossible to download with given MD5SUM. Each image has to be downloaded in time shorter than 4 hours, so in case of very slow images to access, it can take up to 16 hours to download all of them. Note: The instance has to be restarted in order to update the list of available images in the VM. Note: Maximum 3 ISOs are supported.",
"description": "The list shall be list of direct URLs to images, followed by hash (#), then by image MD5SUM. Each image shall appear on newline, like: \"https://example.com/image.iso#06226c7fac5bacfa385872a19bb99684<newline>https://example.com/another-image.iso#31b40d58b18e038498ddb46caea1361c\". They will be provided in KVM image list according to the order on the list. After updating the list, the instance has to be restarted to refresh it. Amount of images is limited to 4, and one image can be maximum 20GB. Image will be downloaded and checked against its MD5SUM 4 times, then it will be considered as impossible to download with given MD5SUM. Each image has to be downloaded in time shorter than 4 hours, so in case of very slow images to access, it can take up to 16 hours to download all of them. Note: The instance has to be restarted in order to update the list of available images in the VM. Note: Maximum 3 ISOs are supported.",
"type": "string",
"textarea": true
},
......
......@@ -311,7 +311,7 @@
},
"boot-image-url-list": {
"title": "Boot image list",
"description": "The list shall be list of direct URLs to images, followed by hash (#), then by image MD5SUM. Each image shall appear on newline, like: \"https://example.com/image.iso#06226c7fac5bacfa385872a19bb99684<newline>https://example.com/another-image.iso#31b40d58b18e038498ddb46caea1361c\". They will be provided in KVM image list according to the order on the list. After updating the list, the instance has to be restarted to refresh it. Amount of images is limited to 4, and one image can be maximum 10G. Image will be downloaded and checked against its MD5SUM 4 times, then it will be considered as impossible to download with given MD5SUM. Each image has to be downloaded in time shorter than 4 hours, so in case of very slow images to access, it can take up to 16 hours to download all of them. Note: The instance has to be restarted in order to update the list of available images in the VM. Note: Maximum 3 ISOs are supported.",
"description": "The list shall be list of direct URLs to images, followed by hash (#), then by image MD5SUM. Each image shall appear on newline, like: \"https://example.com/image.iso#06226c7fac5bacfa385872a19bb99684<newline>https://example.com/another-image.iso#31b40d58b18e038498ddb46caea1361c\". They will be provided in KVM image list according to the order on the list. After updating the list, the instance has to be restarted to refresh it. Amount of images is limited to 4, and one image can be maximum 20GB. Image will be downloaded and checked against its MD5SUM 4 times, then it will be considered as impossible to download with given MD5SUM. Each image has to be downloaded in time shorter than 4 hours, so in case of very slow images to access, it can take up to 16 hours to download all of them. Note: The instance has to be restarted in order to update the list of available images in the VM. Note: Maximum 3 ISOs are supported.",
"type": "string",
"textarea": true
},
......
......@@ -98,7 +98,7 @@ if __name__ == "__main__":
'--location', # follow redirects
'--no-progress-meter', # do not tell too much
'--max-time', '14400', # maximum time for download is 4 hours
'--max-filesize', '10737418240', # maximum 10G for an image
'--max-filesize', '21474836480', # maximum 20GB for an image
'--output', destination_tmp, image['url']],
stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
......
#!/bin/bash
set -e
VM_DIR={{ directory['srv'] }}
BACKUP_DIR={{ directory['backup'] }}
VM_FILE=virtual.qcow2
......@@ -10,7 +12,7 @@ umask 077
write_backup_proof () {
cd {{ directory['backup'] }}
find -type f ! -name backup.signature ! -wholename "./rdiff-backup-data/*" -print0 | xargs -0 sha256sum | LC_ALL=C sort -k 66 > {{ directory['srv'] }}/proof.signature
diff -ruw {{ directory['backup'] }} {{ directory['srv'] }}/proof.signature > {{ directory['srv'] }}/backup.diff
diff -ruw {{ directory['backup'] }}/backup.signature {{ directory['srv'] }}/proof.signature > {{ directory['srv'] }}/backup.diff
}
# For now we just make the diff before
......
......@@ -66,13 +66,20 @@ scripts =
surykatka = 0.8.0
# For surykatka 0.7.1
dnspython = 2.1.0
dnspython = 2.1.0:whl
forcediphttpsadapter = 1.0.1
miniupnpc = 2.0.2
peewee = 3.14.4
python-whois = 0.7.3
future = 0.18.3
# For statsmodels = 0.11.1
numpy = 1.22.0
# For numpy = 1.22.0
setuptools = 44.1.1
# Build GCC with Fortran for OpenBLAS (scipy & numpy)
[gcc]
max_version = 0
......@@ -82,4 +89,3 @@ eggs +=
${pandas:egg}
${statsmodels:egg}
${scipy:egg}
......@@ -22,7 +22,7 @@ md5sum = 102a7f1c1bc46a9b3fa5bd9b9a628e1d
[instance-neo-admin]
filename = instance-neo-admin.cfg.in
md5sum = b6e1ccb1d90160110202e5111eec2afa
md5sum = a0ec1dce4c7a237fbeef3f8aee62e55a
[instance-neo-master]
filename = instance-neo-master.cfg.in
......@@ -34,7 +34,7 @@ md5sum = bc647a29f9d6ece2e4117ce8f04d27c5
[template-neo-my-cnf]
filename = my.cnf.in
md5sum = 56ea8f452d9e1526157ab9d03e631e1a
md5sum = 3ae93702f3890a504cc8a93eb5ad52bc
[template-neo]
filename = instance.cfg.in
......
......@@ -45,7 +45,7 @@ ssl = {{ dumps(bool(slapparameter_dict['ssl'])) }}
cluster = {{ dumps(slapparameter_dict['cluster']) }}
masters = {{ dumps(slapparameter_dict['masters']) }}
extra-options =
{%- for k, v in monitor_dict.iteritems() %}
{%- for k, v in six.iteritems(monitor_dict) %}
{%- if k == 'backup' %}
{%- set k = 'monitor-backup' %}
{%- endif %}
......
......@@ -45,7 +45,7 @@ innodb_locks_unsafe_for_binlog = 1
{{x}}sync_frm = 0
# Extra parameters.
{%- for k, v in extra_dict.iteritems() %}
{%- for k, v in six.iteritems(extra_dict) %}
{%- do assert('-' not in k) %}
{{ k }} = {{ v }}
{%- endfor %}
......
......@@ -30,12 +30,6 @@ parts =
neoppod
slapos-cookbook
[python]
part = python2.7
[openssl]
<= openssl-1.1
[gcc:python2]
# use old gcc version for old scipy version used in python2
part = gcc-8.5
......@@ -138,14 +132,18 @@ inline =
exec "$basedir/bin/mysqld" --defaults-file='{{defaults_file}}' "$@"
[versions]
coverage = 5.5
coverage = 7.5.1
ecdsa = 0.13
mysqlclient = 1.3.12
mysqlclient = 2.0.1
PyMySQL = 0.10.1
pycrypto = 2.6.1
cython-zstd = 0.2
funcsigs = 1.0.2
[versions:python2]
coverage = 5.5
mysqlclient = 1.3.12
pycrypto = 2.6.1
# Test Suite: NEO.UnitTest-Master ran at 2024/05/28 10:46:0.013466 UTC
# 39 failures, 4 errors, 1423 total, status: FAIL
......
......@@ -2,7 +2,7 @@
extends =
buildout.hash.cfg
software-common.cfg
../../stack/slapos-py2.cfg
parts +=
# NEO instanciation
template-neo
......
......@@ -19,6 +19,8 @@ extends =
../../component/pygolang/buildout.cfg
../../stack/nxdtest.cfg
# Python2 versions for buildout (keep last)
../../stack/slapos-py2.cfg
parts =
gowork
......@@ -47,12 +49,6 @@ parts =
slapos-cookbook
instance.cfg
[python]
part = python2.7
[openssl]
<= openssl-1.1
[neo]
<= go-git-package
go.importpath = lab.nexedi.com/kirr/neo
......
[instance-profile]
filename = instance.cfg.in
md5sum = 17549c62b5ad20cfe2520fb2ef133c22
md5sum = 5bff4e422a2b22bec348bc5e4cb2f3b7
......@@ -16,8 +16,6 @@ extends = {{ template_monitor }}
[coupler-opc-ua]
recipe = slapos.cookbook:wrapper
environment =
LD_LIBRARY_PATH=$LD_LIBRARY_PATH:{{ open62541_location }}/lib:{{ mbedtls_location }}/lib
command-line =
{{ coupler_location }}server -d ${instance-parameter:configuration.coupler_block_device} -s ${instance-parameter:configuration.coupler_i2c_slave_list} -p ${instance-parameter:configuration.opc_ua_port} -u ${instance-parameter:configuration.username} -w ${instance-parameter:configuration.password} -b ${instance-parameter:configuration.heart_beat} -t ${instance-parameter:configuration.heart_beat_interval} -l ${instance-parameter:configuration.heart_beat_id_list} -n ${instance-parameter:configuration.network_address_url_data_type} -o ${instance-parameter:configuration.heart_beat_timeout_interval} -i ${instance-parameter:configuration.id} -m ${instance-parameter:configuration.mode} -j ${instance-parameter:configuration.network_interface}
wrapper-path = ${directory:service}/coupler-opc-ua
......
[buildout]
parts =
open62541
compile-coupler
slapos-cookbook
instance-profile
extends =
../../component/git/buildout.cfg
../../component/mbedtls/buildout.cfg
../../component/open62541/buildout.cfg
../../stack/monitor/buildout.cfg
../../stack/slapos.cfg
../../component/defaults.cfg
../../component/osie-coupler/buildout.cfg
[gcc]
# we want this SR to use a fixed gcc (so that we have the same gcc as open62541 component)
max_version = 0
[open62541]
pre-configure +=
${git:location}/bin/git clone -b v1.1.6 https://github.com/LiamBindle/MQTT-C.git deps/mqtt-c
configure-options =
-DBUILD_SHARED_LIBS=ON
-DCMAKE_BUILD_TYPE=Release
-DCMAKE_INSTALL_PREFIX=@@LOCATION@@
-DUA_ENABLE_PUBSUB=ON
-DUA_ENABLE_PUBSUB_MONITORING=ON
-DUA_ENABLE_PUBSUB_ETH_UADP=ON
-DUA_NAMESPACE_ZERO=REDUCED
-DUA_ENABLE_ENCRYPTION=MBEDTLS
-DUA_ENABLE_ENCRYPTION_MBEDTLS=ON
-DMBEDTLS_INCLUDE_DIRS=${mbedtls:location}/include
-DMBEDTLS_LIBRARY=${mbedtls:location}/lib/libmbedtls.so
-DMBEDX509_LIBRARY=${mbedtls:location}/lib/libmbedx509.so
-DMBEDCRYPTO_LIBRARY=${mbedtls:location}/lib/libmbedcrypto.so
-DUA_ENABLE_PUBSUB_INFORMATIONMODEL=ON
-DUA_ENABLE_PUBSUB_MQTT=ON
environment +=
LDFLAGS=-L${mbedtls:location}/lib -Wl,-rpath=${mbedtls:location}/lib
# we need open62541's sources even after compiling and linking in the [open62541]
# section. The reason is that the coupler's C application depends on them.
keep-compile-dir = true
source-location = @@LOCATION@@/.build/open62541-pack-${open62541:tag}
[osie-repository]
recipe = slapos.recipe.build:gitclone
git-executable = ${git:location}/bin/git
repository = https://lab.nexedi.com/nexedi/osie.git
revision = 5d193e57b50a7ebb3df147e403917201eb9cb6c6
[compile-coupler]
recipe = slapos.recipe.cmmi
path = ${osie-repository:location}/coupler
bin_dir = ${:path}/bin/
environment =
PATH=${gcc:prefix}/bin:/usr/bin
OPEN62541_HOME = ${open62541:location}
OPEN62541_SOURCE_HOME = ${open62541:source-location}
C_COMPILER_EXTRA_FLAGS = -L ${mbedtls:location}/lib -Wl,-rpath=${mbedtls:location}/lib -l:libopen62541.so -L${open62541:location}/lib -Wl,-rpath=${open62541:location}/lib -I${open62541:location}/include -I${open62541:source-location}/src/pubsub/ -I${open62541:source-location}/deps
configure-command = true
[instance-profile]
recipe = slapos.recipe.template:jinja2
......@@ -69,6 +18,4 @@ extensions = jinja2.ext.do
context =
section buildout buildout
raw template_monitor ${monitor2-template:output}
key open62541_location open62541:location
key mbedtls_location mbedtls:location
key coupler_location compile-coupler:bin_dir
key coupler_location osie-coupler:bin_dir
......@@ -35,10 +35,37 @@ setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
class OsieTestCase(SlapOSInstanceTestCase):
MULTICAST_INTERFACE = 'lo'
MULTICAST_GROUP_COUNT = 2
MULTICAST_GROUP = '224.0.0.22'
@classmethod
def getInstanceParameterDict(cls):
return {"mode": 1}
return {"mode": 1, "network_interface": "127.0.0.1", "heart_beat_id_list": "1"}
def test(self):
parameter_dict = self.computer_partition.getConnectionParameterDict()
self.assertIn('url-ipv6', parameter_dict)
def test_process(self):
with self.slap.instance_supervisor_rpc as supervisor:
process_names = [process['name']
for process in supervisor.getAllProcessInfo()]
self.assertIn('coupler-opc-ua-on-watch', process_names)
def IPV4_to_little_endian_hex_str(self, ipv4_str):
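# /proc/net/igmp shows group addresses as hex in host byte order, so on a
# little-endian host 224.0.0.22 appears as 160000E0.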
hex_str_list = []
for int_str in ipv4_str.split('.'):
hex_str_list.append('%0.2X' % int(int_str))
return ''.join(reversed(hex_str_list))
def test_joined_multicast_grp(self):
with open('/proc/net/igmp') as f:
igmp_content = f.readlines()
for igmp_line in (
'1\t%s : %s V3\n' % (self.MULTICAST_INTERFACE, self.MULTICAST_GROUP_COUNT),
'\t\t\t\t%s 1 0:00000000\t\t0\n' % self.IPV4_to_little_endian_hex_str(self.MULTICAST_GROUP),
):
self.assertIn(igmp_line, igmp_content)
......@@ -13,16 +13,13 @@ extends =
# Monitoring stack (keep on bottom)
../../stack/monitor/buildout.cfg
# Python2 versions for buildout (keep bottom-most)
../../stack/slapos-py2.cfg
parts +=
slapos-cookbook
template
[python]
part = python2.7
[openssl]
<= openssl-1.1
[re6stnet-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/re6stnet.git
......
......@@ -15,4 +15,4 @@
[template]
filename = instance.cfg
md5sum = f10fbca22d1d30dd7a4f36e1cd521b97
md5sum = 59a5b559b22ad0590e691226cea45055
......@@ -67,6 +67,7 @@ inline =
command,
cwd={{ repr(folder) }},
summaryf=UnitTest.summary,
envadj={ 'SLAPOS_SR_TEST_NAME': {{ repr(name) }} },
)
{%- endif %}
{%- endfor %}
......
......@@ -332,6 +332,7 @@ eggs +=
${recurls-setup:egg}
${pillow-python:egg}
${pycurl:egg}
beautifulsoup4
caucase
erp5.util
${python-pynacl:egg}
......@@ -344,7 +345,7 @@ eggs +=
supervisor
${slapos.cookbook-setup:egg}
${slapos.test.backupserver-setup:egg}
${slapos.test.beremiz-ide-setup:egg}
# ${slapos.test.beremiz-ide-setup:egg}
${slapos.test.caucase-setup:egg}
${slapos.test.cloudooo-setup:egg}
${slapos.test.dream-setup:egg}
......@@ -412,7 +413,7 @@ forbid-download-cache = true
[slapos-repository]
<= git-clone-repository
repository = https://lab.nexedi.com/nexedi/slapos.git
branch = ors-ims
branch = master
[template]
recipe = slapos.recipe.template:jinja2
......@@ -434,12 +435,13 @@ context =
tests =
json-schemas ${slapos.cookbook-setup:setup}
backupserver ${slapos.test.backupserver-setup:setup}
beremiz-ide ${slapos.test.beremiz-ide-setup:setup}
# beremiz-ide ${slapos.test.beremiz-ide-setup:setup}
caucase ${slapos.test.caucase-setup:setup}
cloudooo ${slapos.test.cloudooo-setup:setup}
dream ${slapos.test.dream-setup:setup}
dufs ${slapos.test.dufs-setup:setup}
erp5 ${slapos.test.erp5-setup:setup}
erp5-py3 ${slapos.test.erp5-setup:setup}
erp5testnode ${slapos.test.erp5testnode-setup:setup}
fluentd ${slapos.test.fluentd-setup:setup}
galene ${slapos.test.galene-setup:setup}
......@@ -487,7 +489,7 @@ recurls =
slapos.core =
# Various needed versions
Pillow = 9.2.0
Pillow = 10.2.0+SlapOSPatched001
forcediphttpsadapter = 1.0.1
image = 1.5.25
plantuml = 0.3.0:whl
......@@ -495,8 +497,6 @@ pysftp = 0.2.9
requests-toolbelt = 0.8.0
testfixtures = 6.11.0
mysqlclient = 2.1.1
pexpect = 4.8.0
ptyprocess = 0.6.0
paho-mqtt = 1.5.0
pcpp = 1.30
xmltodict = 0.13.0
......
......@@ -2,15 +2,17 @@
extends =
software.cfg
[python]
part = python2.7
[openssl]
<= openssl-1.1
# Python2 versions for buildout (keep last)
../../stack/slapos-py2.cfg
[extra-eggs]
eggs +=
${re6stnet-setup:egg}
[slapos.toolbox-setup]
recipe = zc.recipe.egg
eggs = ${:egg}
[versions]
pathlib = 1.0.1
slapos.toolbox = 0.128.1
......@@ -23,11 +23,11 @@ md5sum = 9658a11340c018de816d0de40683706a
[instance-import]
_update_hash_filename_ = instance-import.cfg.jinja.in
md5sum = c8d1be7aee980deb0f0d1a3126c9b9da
md5sum = d0ff7b93e392f9b9233f9ae6ba81583f
[instance-export]
_update_hash_filename_ = instance-export.cfg.jinja.in
md5sum = bb6d26c56b4bb9cf553c130fdd51000d
md5sum = b982e83fa42103b7391d97eb36591174
[instance-resilient]
_update_hash_filename_ = instance-resilient.cfg.jinja
......
......@@ -10,6 +10,10 @@ parts +=
$${:theia-environment-parts}
resilient-publish-connection-parameter
# Avoid publication conflicts
parts -=
publish-connection-parameter
# The resilient stack makes the 'resilient' instance
# request the 'export' instance with a 'namebase' parameter.
......
......@@ -14,6 +14,10 @@ parts +=
$${:theia-parts}
$${:theia-environment-parts}
# Avoid publication conflicts
parts -=
publish-connection-parameter
# The resilient stack makes the 'resilient' instance
# request the 'import' instance with a 'namebase' parameter.
......
......@@ -17,4 +17,4 @@ update =
json.dump(dict(os.environ), f)
[versions]
slapos.recipe.build = 0.48
slapos.recipe.build = 0.57
[buildout]
find-links +=
http://www.nexedi.org/static/packages/source/slapos.buildout/zc.buildout-2.7.1%2Bslapos010.tar.gz
http://www.nexedi.org/static/packages/source/zc.recipe.egg-2.0.3%2Bslapos003.tar.gz
http://www.nexedi.org/static/packages/source/
http://www.nexedi.org/static/packages/source/slapos.buildout/
parts =
instance-template
......@@ -20,8 +20,8 @@ url = ${:_profile_base_location_}/instance.cfg.in
output = ${buildout:directory}/instance.cfg
[versions]
setuptools = 44.0.0
zc.buildout = 2.7.1+slapos010
setuptools = 44.1.1
zc.buildout = 2.7.1+slapos019
zc.recipe.egg = 2.0.3+slapos003
Jinja2 = 2.11.2
MarkupSafe = 1.0
......@@ -263,7 +263,8 @@ class TestTheia(TheiaTestCase):
ipv6, *prefixlen = self._ipv6_address.split('/')
if not prefixlen:
raise unittest.SkipTest('No IPv6 range')
elif int(prefixlen[0]) >= 123:
elif int(prefixlen[0]) + 16 >= 123:
# Note: prefixlen-theia = prefixlen-sr-testing + 16
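# e.g. a /108 for the testing partition gives the nested theia a /124,
# which is considered too small.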
raise unittest.SkipTest('IPv6 range too small: %s' % self._ipv6_address)
with sqlite3.connect(proxy_path) as db:
......
......@@ -187,7 +187,7 @@ class ExportAndImportMixin(object):
old_value = self.slap._force_slapos_node_instance_all
self.slap._force_slapos_node_instance_all = True
try:
self.slap.waitForInstance(error_lines=0)
self.slap.waitForInstance(max_retry=2, error_lines=0)
except SlapOSNodeCommandError as e:
s = str(e)
self.assertNotIn("Promise 'resiliency-export-promise.py' failed", s)
......@@ -561,10 +561,7 @@ class TestTheiaResilience(TheiaSyncMixin, ResilientTheiaTestCase):
def _checkSync(self):
# Check that ~/etc still contains everything it did before
etc_listdir = os.listdir(self.getPartitionPath('import', 'etc'))
try:
self.assertTrue(set(self.etc_listdir).issubset(etc_listdir))
except AssertionError:
breakpoint()
self.assertTrue(set(self.etc_listdir).issubset(etc_listdir))
def _checkTakeover(self):
# Check that there is an export, import and frozen instance and get their new partition IDs
......
......@@ -11,7 +11,7 @@ import test
import test_resiliency
stable_software_url = "https://lab.nexedi.com/nexedi/slapos/raw/1.0.324/software/theia/software.cfg"
stable_software_url = "https://lab.nexedi.com/nexedi/slapos/raw/1.0.349/software/theia/software.cfg"
dev_software_url = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'software.cfg'))
......@@ -102,87 +102,3 @@ class TestTheiaResilienceWithInitialInstance(
def beforeUpgrade(cls):
# Check initial embedded instance
test.TestTheiaWithEmbeddedInstance.test(cls())
class TestResilientTheiaUpgradeWithInitialInstance(
UpgradeTestCase,
test_resiliency.ResilientTheiaTestCase,
test_resiliency.TheiaSyncMixin):
backup_max_tries = 70
backup_wait_interval = 10
old_flag_file = os.path.join('etc', 'embedded-instance-config.json.done')
old_exitcode_file = os.path.join('etc', 'embedded-request-exitcode')
flag_file = os.path.join('var', 'state', 'standalone-ran-before.flag')
exitcode_file = os.path.join('var', 'state', 'embedded-request.exitcode')
@classmethod
def getInstanceParameterDict(cls):
return {
'initial-embedded-instance': json.dumps({
'software-url': test_resiliency.dummy_software_url
}),
}
def assertExists(self, path):
self.assertTrue(os.path.exists(path))
def assertNotFound(self, path):
self.assertFalse(os.path.exists(path))
@classmethod
def beforeUpgrade(cls):
self = cls()
self.assertExists(cls.getPartitionPath('export', self.old_flag_file))
self.assertExists(cls.getPartitionPath('export', self.old_exitcode_file))
self.assertExists(self.getPartitionPath('import', self.old_flag_file))
self.assertNotFound(self.getPartitionPath('import', self.old_exitcode_file))
def _prepareExport(self): # after upgrade
self.assertNotFound(self.getPartitionPath('export', self.old_flag_file))
self.assertNotFound(self.getPartitionPath('export', self.old_exitcode_file))
self.assertNotFound(self.getPartitionPath('import', self.old_flag_file))
self.assertNotFound(self.getPartitionPath('import', self.old_exitcode_file))
self.assertExists(self.getPartitionPath('export', self.flag_file))
self.assertExists(self.getPartitionPath('export', self.exitcode_file))
self.assertExists(self.getPartitionPath('import', self.flag_file))
self.assertNotFound(self.getPartitionPath('import', self.exitcode_file))
def _checkSync(self):
self.assertExists(self.getPartitionPath('export', self.flag_file))
self.assertExists(self.getPartitionPath('export', self.exitcode_file))
self.assertExists(self.getPartitionPath('import', self.flag_file))
self.assertExists(self.getPartitionPath('import', self.exitcode_file))
def _checkTakeover(self):
self.assertNotFound(self.getPartitionPath('export', self.old_flag_file))
self.assertNotFound(self.getPartitionPath('export', self.old_exitcode_file))
self.assertNotFound(self.getPartitionPath('import', self.old_flag_file))
self.assertNotFound(self.getPartitionPath('import', self.old_exitcode_file))
self.assertExists(self.getPartitionPath('export', self.flag_file))
self.assertExists(self.getPartitionPath('export', self.exitcode_file))
self.assertExists(self.getPartitionPath('import', self.flag_file))
self.assertNotFound(self.getPartitionPath('import', self.exitcode_file))
class TestResilientTheiaUpgradeWithInitialInstanceAndSync(
TestResilientTheiaUpgradeWithInitialInstance):
@classmethod
def beforeUpgrade(cls):
cls()._doSync()
def _prepareExport(self): # after upgrade
self.assertNotFound(self.getPartitionPath('export', self.old_flag_file))
self.assertNotFound(self.getPartitionPath('export', self.old_exitcode_file))
self.assertNotFound(self.getPartitionPath('import', self.old_flag_file))
self.assertNotFound(self.getPartitionPath('import', self.old_exitcode_file))
self.assertExists(self.getPartitionPath('export', self.flag_file))
self.assertExists(self.getPartitionPath('export', self.exitcode_file))
self.assertExists(self.getPartitionPath('import', self.flag_file))
self.assertExists(self.getPartitionPath('import', self.exitcode_file))
......@@ -6,18 +6,15 @@ extends =
../../component/tsn-rt-measures/buildout.cfg
../../component/util-linux/buildout.cfg
# Python2 versions for buildout (keep last)
../../stack/slapos-py2.cfg
parts =
slapos-cookbook
eggs
util-linux
template
[python]
part = python2.7
[openssl]
<= openssl-1.1
# eggs for instance-rsn-client.cfg
[eggs]
recipe = zc.recipe.egg
......
......@@ -6,16 +6,13 @@ extends =
../../component/zabbix/buildout.cfg
../../stack/slapos.cfg
# Python2 versions for buildout (keep last)
../../stack/slapos-py2.cfg
parts =
slapos-cookbook
template
[python]
part = python2.7
[openssl]
<= openssl-1.1
[template]
# Default template for the instance.
recipe = slapos.recipe.template
......
......@@ -55,7 +55,7 @@ extends =
../../component/pylint/buildout.cfg
../../component/perl-Image-ExifTool/buildout.cfg
../../component/wendelin.core/buildout.cfg
../../component/jupyter-py2/buildout.cfg
../../component/jupyter-erp5/buildout.cfg
../../component/pygolang/buildout.cfg
../../component/bcrypt/buildout.cfg
../../component/python-pynacl/buildout.cfg
......@@ -737,7 +737,7 @@ PyPDF2 = 1.26.0+SlapOSPatched001
## https://lab.nexedi.com/nexedi/slapos/merge_requests/648
pylint = 1.4.4+SlapOSPatched002
# astroid 1.4.1 breaks testDynamicClassGeneration
astroid = 1.3.8+SlapOSPatched001
astroid = 1.3.8+SlapOSPatched002
# use newer version than specified in ZTK
PasteDeploy = 1.5.2
......
......@@ -2,7 +2,7 @@
extends =
# version pins from zope, vendored with:
# curl https://zopefoundation.github.io/Zope/releases/4.8.9/versions-prod.cfg > zope-versions.cfg
# curl https://zopefoundation.github.io/Zope/releases/5.10/versions-prod.cfg > zope-versions.cfg
# When updating, keep in mind that some versions are defined in other places,
# for example component/ZEO, component/ZODB and stack/slapos
zope-versions.cfg
......@@ -35,6 +35,7 @@ extends =
../../component/pycrypto-python/buildout.cfg
../../component/pytracemalloc/buildout.cfg
../../component/pysvn-python/buildout.cfg
../../component/pystemmer/buildout.cfg
../../component/python-ldap-python/buildout.cfg
../../component/scikit-learn/buildout.cfg
../../component/scikit-image/buildout.cfg
......@@ -58,21 +59,22 @@ extends =
../../component/pylint/buildout.cfg
../../component/perl-Image-ExifTool/buildout.cfg
../../component/wendelin.core/buildout.cfg
../../component/jupyter-py2/buildout.cfg
../../component/jupyter-erp5/buildout.cfg
../../component/pygolang/buildout.cfg
../../component/bcrypt/buildout.cfg
../../component/python-pynacl/buildout.cfg
../../component/python-pyzmq/buildout.cfg
../../component/python-xmlsec/buildout.cfg
../../component/selenium/buildout.cfg
../../stack/caucase/buildout.cfg
../../software/neoppod/software-common.cfg
# keep neoppod extends last
parts +=
erp5-util-develop
slapos-cookbook
mroonga-mariadb
tesseract
# Buildoutish
eggs-all-scripts
......@@ -268,7 +270,6 @@ link-binary =
${poppler:location}/bin/pdfinfo
${poppler:location}/bin/pdftohtml
${poppler:location}/bin/pdftotext
${python2.7:location}/bin/2to3
${sed:location}/bin/sed
${tesseract:location}/bin/tesseract
${w3m:location}/bin/w3m
......@@ -475,7 +476,6 @@ eggs +=
ipython_genutils
ipykernel
ipywidgets
requests
[egg-with-zope-proxy]
......@@ -490,24 +490,33 @@ egg = ${:_buildout_section_name_}
setup-eggs +=
${persistent:egg}
[wstools]
# a SOAPpy dependency
recipe = zc.recipe.egg:custom
egg = wstools
setup-eggs =
pbr
pytest-runner
[eggs]
<= neoppod
eggs = ${neoppod:eggs}
eggs =
${erp5-eggs-python-version-dependent:eggs}
${neoppod:eggs}
${caucase-eggs:eggs}
${wendelin.core:egg}
${numpy:egg}
${matplotlib:egg}
${lxml-python:egg}
${ocropy:egg}
${pandas:egg}
${pillow-python:egg}
${python-ldap-python:egg}
${python-xmlsec:egg}
${pysvn-python:egg}
${pycrypto-python:egg}
${scipy:egg}
${scikit-learn:egg}
${scikit-image:egg}
${PyStemmer:egg}
${python-PyYAML:egg}
sympy
more-itertools
......@@ -518,16 +527,13 @@ eggs = ${neoppod:eggs}
lock_file
astor
APacheDEX
PyStemmer
Pympler
SOAPpy
chardet
collective.recipe.template
erp5diff
interval
ipdb
Jinja2
jsonschema
mechanize
oauthlib
objgraph
......@@ -539,39 +545,30 @@ eggs = ${neoppod:eggs}
PyPDF2
python-magic
python-memcached
pytz
requests
responses
uritemplate
urlnorm
uuid
xml_marshaller
xupdate_processor
feedparser
validictory
erp5.util
z3c.etestbrowser
huBarcode
qrcode
spyne
httplib2
suds
pprofile
pycountry
xfw
jsonschema
${selenium:egg}
pytesseract
decorator
networkx
# Needed for checking ZODB Components source code
${astroid:egg}
${pylint:egg}
jedi
yapf
typing
# Used for Python 2 only
${pytracemalloc:egg}
xlrd
pydot
# Zope
......@@ -584,25 +581,16 @@ eggs = ${neoppod:eggs}
# for runzeo
${ZEO:egg}
# Other Zope 2 packages
Products.PluggableAuthService
Products.DCWorkflow
# Other products
Products.PluggableAuthService
Products.MimetypesRegistry
Products.TIDStorage
# Currently forked in our repository
# Products.PortalTransforms
# Dependency for our fork of PortalTransforms
StructuredText
# Needed for parsing .po files from our Localizer subset
polib
# Needed for Google OAuth
google-api-python-client
# Needed for Facebook OAuth
facebook-sdk
......@@ -628,11 +616,9 @@ eggs = ${neoppod:eggs}
docutils
zLOG
Products.ZSQLMethods
ZServer
Products.ExternalMethod
Products.SiteErrorLog
tempstorage
Products.DCWorkflow
Products.Sessions
Products.ZODBMountPoint
Record
......@@ -643,7 +629,6 @@ entry-points =
runwsgi=Products.ERP5.bin.zopewsgi:runwsgi
scripts =
apachedex
performance_tester_erp5
runwsgi
runzeo
tidstoraged
......@@ -658,24 +643,44 @@ extra-paths =
patch-binary = ${patch:location}/bin/patch
Acquisition-patches = ${:_profile_base_location_}/../../component/egg-patch/Acquisition/aq_dynamic-4.7.patch#85b0090e216cead0fc86c5c274450d96
Acquisition-patch-options = -p1
DateTime-patches =
${:_profile_base_location_}/../../component/egg-patch/DateTime/0001-Cast-int-to-float-in-compare-methods.patch#9898a58ce90dd31c884a7183aeec4361
${:_profile_base_location_}/../../component/egg-patch/DateTime/0002-Fix-compare-methods-between-DateTime-0-and-None-fix-.patch#733903a564c8b14df65c45c4f2eec262
${:_profile_base_location_}/../../component/egg-patch/DateTime/0003-Make-it-possible-to-pickle-datetimes-returned-by-asd.patch#e94a71ef40de130720e621e296537000
${:_profile_base_location_}/../../component/egg-patch/DateTime/0004-Repair-equality-comparison-between-DateTime-instance.patch#ea146c00dfbc31c7d96af8abc6f0b301
DateTime-patch-options = -p1
Products.BTreeFolder2-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.BTreeFolder2/0001-Add-a-confirmation-prompt-on-Delete-All-Objects-butt.patch#44de3abf382e287b8766c2f29ec1cf74
Products.BTreeFolder2-patch-options = -p1
Products.CMFCore-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.CMFCore/portal_skins_ZMI_find.patch#19ec05c0477c50927ee1df6eb75d1e7f
Products.CMFCore-patches =
${:_profile_base_location_}/../../component/egg-patch/Products.CMFCore/portal_skins_ZMI_find.patch#19ec05c0477c50927ee1df6eb75d1e7f
${:_profile_base_location_}/../../component/egg-patch/Products.CMFCore/resource-registerClass.patch#90dc91d4635b98fe28d673a24dc7ebea
${:_profile_base_location_}/../../component/egg-patch/Products.CMFCore/import-from-zope.lifecycleevent.interfaces.patch#77728241324583d2d1c8d2fb4de86122
Products.CMFCore-patch-options = -p1
Products.DCWorkflow-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.DCWorkflow/workflow_method-2.4.1.patch#ec7bb56a9f1d37fcbf960cd1e96e6e6d
Products.DCWorkflow-patch-options = -p1
PyPDF2-patches =
${:_profile_base_location_}/../../component/egg-patch/PyPDF2/0001-Custom-implementation-of-warnings.formatwarning-remo.patch#d25bb0f5dde7f3337a0a50c2f986f5c8
${:_profile_base_location_}/../../component/egg-patch/PyPDF2/0002-fix-pdf-reader-getting-stuck-when-trying-to-read-lar.patch#c06a29b6b6a5df612ae36731b938fb95
PyPDF2-patch-options = -p1
python-magic-patches = ${:_profile_base_location_}/../../component/egg-patch/python_magic/magic.patch#de0839bffac17801e39b60873a6c2068
python-magic-patch-options = -p1
urlnorm-patches = ${:_profile_base_location_}/../../component/egg-patch/urlnorm/urlnorm-1.1.4-py3.patch#5ef268fb44cbc005b62140099c33b641
urlnorm-patch-options = -p1
SOAPpy-py3-patches = ${:_profile_base_location_}/../../component/egg-patch/SOAPpy-py3/0001-backport-changes-from-0.52.29.patch#28a08e587bf2e287ec3491c5ae7e8f1a
SOAPpy-py3-patch-options = -p1
[eggs:python3]
interval-patches = ${:_profile_base_location_}/../../component/egg-patch/interval/0001-python3-support.patch#66ac345f0a6d73e0bd29e394b7646311
interval-patch-options = -p1
Products.DCWorkflow-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.DCWorkflow/workflow_method-3.0.0.patch#4cc8607213b1ef08331366d9873becaa
Products.DCWorkflow-patch-options = -p1
Products.PythonScripts-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.PythonScripts/65.patch#61bd90d4c1ead3669bfe7c959d957ab6
Products.PythonScripts-patch-options = -p1
[eggs:python2]
DateTime-patches =
${:_profile_base_location_}/../../component/egg-patch/DateTime/0001-Cast-int-to-float-in-compare-methods.patch#9898a58ce90dd31c884a7183aeec4361
${:_profile_base_location_}/../../component/egg-patch/DateTime/0002-Fix-compare-methods-between-DateTime-0-and-None-fix-.patch#733903a564c8b14df65c45c4f2eec262
${:_profile_base_location_}/../../component/egg-patch/DateTime/0003-Make-it-possible-to-pickle-datetimes-returned-by-asd.patch#e94a71ef40de130720e621e296537000
${:_profile_base_location_}/../../component/egg-patch/DateTime/0004-Repair-equality-comparison-between-DateTime-instance.patch#ea146c00dfbc31c7d96af8abc6f0b301
DateTime-patch-options = -p1
huBarcode-patches =
${:_profile_base_location_}/../../component/egg-patch/huBarcode/fix-loading-font-for-ean13.patch#77879186092d0b55ee009c6ef3c3e2e4
huBarcode-patch-options = -p1
Products.DCWorkflow-patches = ${:_profile_base_location_}/../../component/egg-patch/Products.DCWorkflow/workflow_method-2.4.1.patch#ec7bb56a9f1d37fcbf960cd1e96e6e6d
Products.DCWorkflow-patch-options = -p1
RestrictedPython-patches = ${:_profile_base_location_}/../../component/egg-patch/RestrictedPython/0001-compile-implicitly-enable-__future__.print_function-.patch#f746dccbf3b462e67386490b898512e4
RestrictedPython-patch-options = -p1
# backported security patches for waitress-1.4.4 from Debian 1.4.4-1.1+deb11u1 package.
......@@ -692,6 +697,30 @@ Zope-patches =
${:_profile_base_location_}/../../component/egg-patch/Zope/0001-Fix-redirections-to-URLS-with-host-given-as-IP-litte.patch#093ad5755094d537c6a4deadc959ade0
Zope-patch-options = -p1
# python version dependent eggs
[erp5-eggs-python-version-dependent:python2]
eggs =
huBarcode
${ocropy:egg}
${pytracemalloc:egg}
${pycrypto-python:egg}
Products.TIDStorage
${wstools:egg}
SOAPpy
suds
typing
uuid
xlrd
ZServer
[erp5-eggs-python-version-dependent:python3]
eggs =
${python-pyzmq:egg}
python-barcode
SOAPpy-py3
suds-py3
zodbupdate
# neoppod installs bin/coverage, so we inject the erp5 coverage plugin here so that the coverage script can use it in reports
[neoppod]
eggs +=
......@@ -722,129 +751,128 @@ depends =
# neoppod, mysqlclient, slapos.recipe.template
# patched eggs
Acquisition = 4.7+SlapOSPatched001
DateTime = 4.9+SlapOSPatched004
Products.DCWorkflow = 2.4.1+SlapOSPatched001
ocropy = 1.0+SlapOSPatched001
Acquisition = 5.2+SlapOSPatched001
PyPDF2 = 1.26.0+SlapOSPatched002
pysvn = 1.9.15+SlapOSPatched001
python-ldap = 2.4.32+SlapOSPatched001
python-magic = 0.4.12+SlapOSPatched001
RestrictedPython = 5.4+SlapOSPatched001
waitress = 1.4.4+SlapOSPatched006
Zope = 4.8.9+SlapOSPatched002
## https://lab.nexedi.com/nexedi/slapos/merge_requests/648
pylint = 1.4.4+SlapOSPatched002
# astroid 1.4.1 breaks testDynamicClassGeneration
astroid = 1.3.8+SlapOSPatched001
# modified version that works fine for buildout installation
SOAPpy = 0.12.0nxd001
# Pinned versions
alabaster = 0.7.12
APacheDEX = 1.8
APacheDEX = 2.0
astroid = 3.2.1:whl
Beaker = 1.11.0
cloudpickle = 0.5.3
cookies = 2.2.1
dask = 0.18.1
deepdiff = 3.3.0
deepdiff = 6.7.1
dill = 0.3.8:whl
docutils = 0.17.1
erp5-coverage-plugin = 0.0.1
erp5diff = 0.8.1.9
facebook-sdk = 2.0.0
five.formlib = 1.0.4
five.localsitemanager = 4.0
fpconst = 0.7.2
future = 0.18.2
google-api-python-client = 1.6.1
graphviz = 0.5.2
haufe.requestmonitoring = 0.6.0
html5lib = 1.1
huBarcode = 1.0.0
interval = 1.0.0
ipdb = 0.10.2
huBarcode = 1.0.0+SlapOSPatched001
imageio = 2.34.0
interval = 1.0.0+SlapOSPatched001
ipdb = 0.13.13
isort = 5.13.2:whl
jdcal = 1.3
jedi = 0.15.1
jsonpickle = 0.9.6
jsonpointer = 2.2
lazy-object-proxy = 1.10.0
logilab-common = 1.3.0
Mako = 1.1.4
mccabe = 0.7.0:whl
mechanize = 0.4.8
Missing = 5.0.0
mock = 4.0.3
mpmath = 0.19
munnel = 0.3
networkx = 2.1
networkx = 3.1
nt-svcutils = 2.13.0
numpy = 1.13.1
oauth2client = 4.0.0
oauthlib = 3.1.0
objgraph = 3.1.0
oic = 0.15.1
oic = 1.6.1
olefile = 0.44
openpyxl = 2.4.8
parso = 0.5.1
Pillow = 6.2.2
polib = 1.0.8
pprofile = 2.0.4
ordered-set = 4.1.0:whl
Pillow = 10.2.0+SlapOSPatched001
polib = 1.2.0
pprofile = 2.1.0
Products.BTreeFolder2 = 4.4+SlapOSPatched001
Products.CMFCore = 2.7.0+SlapOSPatched001
Products.ExternalMethod = 4.7
Products.GenericSetup = 2.3.0
Products.MailHost = 4.13
Products.MimetypesRegistry = 2.1.8
Products.PluggableAuthService = 2.8.1
Products.PluginRegistry = 1.11
Products.PythonScripts = 4.15
Products.Sessions = 4.15
Products.SiteErrorLog = 5.7
Products.StandardCacheManagers = 4.2
Products.TIDStorage = 5.5.0
Products.ZODBMountPoint = 1.3
Products.ZSQLMethods = 3.16
pyasn1-modules = 0.0.8
Products.CMFCore = 2.7.0+SlapOSPatched003
Products.DCWorkflow = 3.0.0+SlapOSPatched001
Products.ExternalMethod = 5.0
Products.GenericSetup = 3.0.2
Products.MailHost = 5.2
Products.MimetypesRegistry = 3.0.1
Products.PluggableAuthService = 3.0
Products.PluginRegistry = 2.0
Products.PythonScripts = 5.0+SlapOSPatched001
Products.Sessions = 5.0
Products.SiteErrorLog = 6.0
Products.StandardCacheManagers = 5.0
Products.ZCatalog = 7.0
Products.ZODBMountPoint = 2.0
Products.ZSQLMethods = 4.1
protobuf = 4.25.3
pyasn1-modules = 0.3
pycountry = 17.1.8
pycrypto = 2.6.1
pycryptodomex = 3.10.1
pydantic-settings = 2.2.1:whl
pydot = 1.4.2
pyflakes = 1.5.0
pyjwkest = 1.4.2
Pympler = 0.4.3
pylint = 3.2.1:whl
Pympler = 1.0.1
pyPdf = 1.13
PyStemmer = 1.3.0
PyStemmer = 2.2.0.1
pytesseract = 0.2.2
python-barcode = 0.15.1:whl
python-dotenv = 1.0.1
python-gettext = 4.1
python-ldap = 3.4.4
python-libmilter = 1.0.3
python-memcached = 1.58
pytracemalloc = 1.2
PyWavelets = 0.5.2
pythran = 0.15.0:whl
PyWavelets = 1.4.0
qrcode = 5.3
Record = 4.1.0
responses = 0.10.6
rfc3987 = 1.3.8
rsa = 3.4.2
scikit-image = 0.14.0
scipy = 0.19.0
spyne = 2.12.14
scikit-image = 0.19.3
SOAPpy-py3 = 0.52.26+SlapOSPatched001
spyne = 2.14.0
strict-rfc3339 = 0.7
StructuredText = 2.11.1
suds = 0.4
suds-py3 = 1.4.5.0
tifffile = 2024.2.12
tomlkit = 0.12.4:whl
toolz = 0.9.0
typed-ast = 1.5.5
typing = 3.10.0.0
unidiff = 0.5.5
urlnorm = 1.1.4
urlnorm = 1.1.4+SlapOSPatched001
uuid = 1.30
validictory = 1.1.0
webcolors = 1.10
webencodings = 0.5.1
WebOb = 1.8.5
WebTest = 2.0.33
wrapt = 1.16.0
WSGIProxy2 = 0.4.6
WSGIUtils = 0.7
wstools-py3 = 0.54.4
xfw = 0.10
xupdate-processor = 0.5
yapf = 0.28.0
z3c.etestbrowser = 3.0.1
zbarlight = 2.3
zLOG = 3.1
zodbupdate = 2.0
zope.app.appsetup = 4.2.0
zope.app.debug = 3.4.1
zope.app.dependable = 3.5.1
......@@ -855,5 +883,133 @@ zope.authentication = 5.0
zope.error = 4.6
zope.minmax = 2.3
zope.password = 4.4
zope.sendmail = 6.1
zope.session = 4.5
zope.testbrowser = 5.5.1
# temporary versions, until updated in zope-versions.cfg
[versions]
AccessControl = 7.0
DateTime = 5.5
[versions:python2]
AccessControl = 4.4
Acquisition = 4.13+SlapOSPatched001
APacheDEX = 1.8
astroid = 1.3.8+SlapOSPatched002
AuthEncoding = 4.3
Chameleon = 3.9.1
DateTime = 4.9+SlapOSPatched004
deepdiff = 3.3.0
DocumentTemplate = 3.4
ExtensionClass = 4.9
five.globalrequest = 99.1
five.localsitemanager = 3.4
interval = 1.0.0
ipdb = 0.10.2
ipython = 5.3.0
jedi = 0.15.1
Missing = 4.2
mock = 3.0.5
MultiMapping = 4.1
multipart = 0.1.1
networkx = 2.1
numpy = 1.13.1
ocropy = 1.0+SlapOSPatched001
oic = 0.15.1
openpyxl = 2.4.8
parso = 0.5.1
Paste = 3.5.2
PasteDeploy = 2.1.1
pbr = 5.11.0
Persistence = 3.6
Pillow = 6.2.2
Products.BTreeFolder2 = 4.4
Products.DCWorkflow = 2.4.1+SlapOSPatched001
Products.ExternalMethod = 4.7
Products.GenericSetup = 2.3.0
Products.MailHost = 4.13
Products.MimetypesRegistry = 2.1.8
Products.PluggableAuthService = 2.8.1
Products.PluginRegistry = 1.11
Products.PythonScripts = 4.15
Products.Sessions = 4.15
Products.SiteErrorLog = 5.7
Products.StandardCacheManagers = 4.2
Products.TIDStorage = 5.5.0
Products.ZCatalog = 5.4
Products.ZODBMountPoint = 1.3
Products.ZSQLMethods = 3.16
pyasn1-modules = 0.0.8
pylint = 1.4.4+SlapOSPatched002
Pympler = 0.4.3
PyStemmer = 1.3.0
python-ldap = 2.4.32+SlapOSPatched001
pytracemalloc = 1.2
pytz = 2022.7
PyWavelets = 0.5.2
Record = 3.6
RestrictedPython = 5.4+SlapOSPatched001
roman = 3.3
scikit-image = 0.14.0
scipy = 0.19.0
shutilwhich = 1.1.0
SOAPpy = 0.12.22
transaction = 3.0.1
waitress = 1.4.4+SlapOSPatched006
webcolors = 1.10
WebOb = 1.8.7
WebTest = 2.0.35
WSGIProxy2 = 0.4.6
wstools = 0.4.8
z3c.pt = 3.3.1
zc.lockfile = 2.0
zdaemon = 4.4
zExceptions = 4.3
Zope = 4.8.9+SlapOSPatched002
zope.annotation = 4.8
zope.authentication = 4.5.0
zope.browser = 2.4
zope.browsermenu = 4.4
zope.browserpage = 4.4.0
zope.browserresource = 4.4
zope.cachedescriptors = 4.4
zope.component = 5.0.1
zope.componentvocabulary = 2.3.0
zope.configuration = 4.4.1
zope.container = 4.10
zope.contentprovider = 4.2.1
zope.contenttype = 4.6
zope.datetime = 4.3.0
zope.deferredimport = 4.4
zope.deprecation = 4.4.0
zope.dottedname = 4.3
zope.event = 4.6
zope.exceptions = 4.6
zope.filerepresentation = 5.0.0
zope.formlib = 5.0.1
zope.globalrequest = 1.6
zope.hookable = 5.4
zope.i18n = 4.9.0
zope.i18nmessageid = 5.1.1
zope.lifecycleevent = 4.4
zope.location = 4.3
zope.pagetemplate = 4.6.0
zope.processlifetime = 2.4
zope.proxy = 4.6.1
zope.ptresource = 4.3.0
zope.publisher = 6.1.0
zope.ramcache = 2.4
zope.schema = 6.2.1
zope.security = 5.8
zope.sendmail = 5.3
zope.sequencesort = 4.2
zope.site = 4.6.1
zope.size = 4.4
zope.structuredtext = 4.4
zope.tal = 4.5
zope.tales = 5.2
zope.testbrowser = 5.6.1
zope.traversing = 4.4.1
zope.viewlet = 4.3
ZServer = 4.0.2
......@@ -30,7 +30,7 @@ md5sum = 93b2277185e4949a3d17be79d3710d2d
[template-kumofs]
filename = instance-kumofs.cfg.in
md5sum = 45cc45510b59ceb730b6e38448b5c0c3
md5sum = 97b70cdd32616e21cbf4e2f2e6828526
[template-zope-conf]
filename = zope.conf.in
......@@ -74,7 +74,7 @@ md5sum = ca0cb83950dd9079cc289891cce08e76
[template-erp5]
filename = instance-erp5.cfg.in
md5sum = edce1c63c13f0d8ec477711ea646444f
md5sum = d6f7d2fa1bde019892897c767f93e089
[template-zeo]
filename = instance-zeo.cfg.in
......@@ -86,11 +86,11 @@ md5sum = 0ac4b74436f554cd677f19275d18d880
[template-zope]
filename = instance-zope.cfg.in
md5sum = 34da5e6d80b2992689825bb00bcd911d
md5sum = 9547bacad0635b0f64cac48f15c4e9ae
[template-balancer]
filename = instance-balancer.cfg.in
md5sum = 727c6f045da382fe50916e6ea5ae6405
md5sum = 48b8b8b4b87973beaa1fd6299244ebd6
[template-haproxy-cfg]
filename = haproxy.cfg.in
......
......@@ -472,10 +472,9 @@ command = generate-apachedex-report
recipe = slapos.recipe.template
output = ${directory:etc}/${:_buildout_section_name_}
inline =
{% for line in slapparameter_dict['apachedex-configuration'] %}
{% for line in slapparameter_dict['apachedex-configuration'] -%}
{# apachedex config files use shlex.split, so we need to quote the arguments. #}
{# BBB: in python 3 we can use shlex.quote instead. #}
{{ repr(line.encode('utf-8')) }}
{{ six.moves.shlex_quote(line) }}
{% endfor %}
[apachedex-parameters]
......
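A hedged illustration of the quoting change above (the sample argument is invented): per the template comment, apachedex re-reads its configuration file with shlex.split, so each generated line must use shell-style quoting that survives the round trip.

import shlex
from six.moves import shlex_quote  # shlex.quote on python3, pipes.quote on python2

line = '--base "Site root" /erp5(/|$)'  # hypothetical apachedex argument
quoted = shlex_quote(line)
# shlex.split(), as applied by apachedex to its config file, recovers the
# original argument as a single token:
assert shlex.split(quoted) == [line]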
......@@ -200,7 +200,7 @@ config-zodb-dict = {{ dumps(zodb_dict) }}
{% for server_type, server_dict in six.iteritems(storage_dict) -%}
{% if server_type == 'neo' -%}
config-neo-cluster = ${publish-early:neo-cluster}
config-neo-name = {{ server_dict.keys()[0] }}
config-neo-name = {{ list(server_dict.keys())[0] }}
config-neo-masters = ${publish-early:neo-masters}
{% else -%}
config-zodb-zeo = ${request-zodb:connection-storage-dict}
......
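A brief sketch of the python3 issue behind the list() wrapper above (the dictionary contents are hypothetical): on python3, dict.keys() returns a view object that cannot be indexed directly.

server_dict = {'neo-cluster': ('[2001:db8::1]:2051',)}  # hypothetical shape
# server_dict.keys()[0]             # works on python2, TypeError on python3
name = list(server_dict.keys())[0]  # works on both python2 and python3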
......@@ -44,7 +44,7 @@ gateway-port = {{ tcpv4_port + 3 }}
data-path = *#capsiz={{ ram_storage_size }}m
{% else -%}
# (with 10M buckets and HDBTLARGE option)
data-path = ${directory:kumofs-data}/kumodb.tch#bnum=10485760#opts=l
data-path = ${directory:kumofs-data}/${:kumodb-tch}#bnum=10485760#opts=l
{% endif -%}
# Paths: Running wrappers
......@@ -63,6 +63,12 @@ kumo-manager-binary = {{ parameter_dict['kumo-location'] }}/bin/kumo-manager
kumo-server-binary = {{ parameter_dict['kumo-location'] }}/bin/kumo-server
shell-path = {{ parameter_dict['dash-location'] }}/bin/dash
kumodb-tch = kumodb.tch
[kumofs-instance:python3]
# use different filename on python3, so that we don't have to deal with data
# migration issues.
kumodb-tch = kumodb-py3.tch
[logrotate-entry-kumofs]
< = logrotate-entry-base
name = kumofs
......
......@@ -80,6 +80,8 @@ environment +=
TZ={{ slapparameter_dict['timezone'] }}
MATPLOTLIBRC={{ parameter_dict['matplotlibrc'] }}
PYTHONUNBUFFERED=1
OFS_IMAGE_USE_DENYLIST=1
DISALLOWED_INLINE_MIMETYPES=
INSTANCE_HOME=${:instance-home}
FONTCONFIG_FILE=${fontconfig-conf:output}
JUPYTER_PATH=${directory:jupyter-dir}
......@@ -372,17 +374,11 @@ instance-home = ${directory:instance}
port = {{ port }}
[{{ section("promise-" ~ name) }}]
{% if six.PY3 -%}
# Disable the promise in Python 3. ERP5 is not compatible with Python 3 yet, so
# the promise would always fail.
recipe =
{% else -%}
<= monitor-promise-base
promise = check_socket_listening
name = {{ name }}.py
config-host = {{ ipv4 }}
config-port = {{ port }}
{% endif -%}
{% if use_ipv6 -%}
[{{ zope_tunnel_section_name }}]
......@@ -408,8 +404,8 @@ config-port = {{ '${' ~ zope_tunnel_section_name ~ ':ipv6-port}' }}
promise = check_error_on_zope_longrequest_log
name = {{'check-' ~ name ~ '-longrequest-error-log.py'}}
config-log-file = {{ '${' ~ conf_parameter_name ~ ':longrequest-logger-file}' }}
config-error-threshold = {{ slapparameter_dict["zope-longrequest-logger-error-threshold"] }}
config-maximum-delay = {{ slapparameter_dict["zope-longrequest-logger-maximum-delay"] }}
config-error-threshold = {{ dumps(slapparameter_dict["zope-longrequest-logger-error-threshold"]) }}
config-maximum-delay = {{ dumps(slapparameter_dict["zope-longrequest-logger-maximum-delay"]) }}
{% endif -%}
[{{ section('logrotate-entry-' ~ name) }}]
......
......@@ -2,150 +2,91 @@
# Version pins for required and commonly used dependencies.
[versions]
Zope = 4.8.9
Zope = 5.10
Zope2 = 4.0
# AccessControl 5+ no longer supports Zope 4.
AccessControl = 4.4
Acquisition = 4.13
AuthEncoding = 4.3
BTrees = 4.11.3
Chameleon = 3.10.2
DateTime = 4.9
DocumentTemplate = 4.1
ExtensionClass = 4.9
Missing = 4.2
MultiMapping = 4.1
Paste = 3.5.2
PasteDeploy = 3.0.1
Persistence = 3.6
Products.BTreeFolder2 = 4.4
# ZCatalog 6+ no longer supports Zope 4.
Products.ZCatalog = 5.4
Record = 3.6
# RestrictedPython >= 6 no longer supports Zope 4
RestrictedPython = 5.4
AccessControl = 6.3
Acquisition = 5.1
AuthEncoding = 5.0
BTrees = 5.1
Chameleon = 4.2.0
DateTime = 5.3
DocumentTemplate = 4.6
ExtensionClass = 5.1
MultiMapping = 5.0
Paste = 3.7.1
PasteDeploy = 3.1.0
Persistence = 4.1
RestrictedPython = 7.1
WebTest = 3.0.0
WSGIProxy2 = 0.5.1
WebOb = 1.8.7
WebTest = 3.0.0
ZConfig = 3.6.1
ZEO = 5.3.0
ZODB = 5.8.0
five.globalrequest = 99.1
five.localsitemanager = 3.4
funcsigs = 1.0.2
future = 0.18.2
ipaddress = 1.0.23
mock = 4.0.3
ZConfig = 4.0
ZODB = 5.8.1
beautifulsoup4 = 4.12.2
cffi = 1.16.0
multipart = 0.2.4
pbr = 5.11.0
persistent = 4.9.3
pytz = 2022.7
roman = 3.3
shutilwhich = 1.1.0
persistent = 5.1
pycparser = 2.21
python-gettext = 5.0
pytz = 2023.3.post1
six = 1.16.0
transaction = 3.0.1
roman = 4.1
soupsieve = 2.5
transaction = 4.0
waitress = 2.1.2
z3c.pt = 3.3.1
zExceptions = 4.3
zc.lockfile = 2.0
zdaemon = 4.4
zodbpickle = 2.6
zope.annotation = 4.8
zope.browser = 2.4
zope.browsermenu = 4.4
zope.browserpage = 4.4.0
zope.browserresource = 4.4
zope.cachedescriptors = 4.4
zope.component = 5.0.1
zope.componentvocabulary = 2.3.0
zope.configuration = 4.4.1
zope.container = 5.1
zope.contentprovider = 4.2.1
zope.contenttype = 4.6
zope.datetime = 4.3.0
zope.deferredimport = 4.4
zope.deprecation = 4.4.0
zope.dottedname = 5.0
zope.event = 4.6
zope.exceptions = 4.6
zope.filerepresentation = 5.0.0
zope.formlib = 5.0.1
zope.globalrequest = 1.6
zope.hookable = 5.4
zope.i18n = 4.9.0
zope.i18nmessageid = 5.1.1
zope.interface = 5.5.2
zope.lifecycleevent = 4.4
zope.location = 4.3
zope.pagetemplate = 4.6.0
zope.processlifetime = 2.4
zope.proxy = 4.6.1
zope.ptresource = 4.3.0
zope.publisher = 6.1.0
zope.ramcache = 2.4
zope.schema = 6.2.1
zope.security = 5.8
zope.sendmail = 5.3
zope.sequencesort = 4.2
zope.site = 4.6.1
zope.size = 4.4
zope.structuredtext = 4.4
zope.tal = 4.5
zope.tales = 5.2
zope.testbrowser = 5.6.1
zope.testing = 4.10
zope.testrunner = 5.6
zope.traversing = 4.4.1
zope.viewlet = 4.3
[versions:python27]
# Chameleon 3.10 doesn't work on Python 2.7
Chameleon = 3.9.1
# DocumentTemplate 4+ requires Python 3.5 or higher
DocumentTemplate = 3.4
# PasteDeploy >3 requires Python 3.7
PasteDeploy = 2.1.1
# WSGIProxy 0.5 and up requires Python 3.7 and up
WSGIProxy2 = 0.4.6
# WebTest 3.0 and up requires Python 3.6 and up
WebTest = 2.0.35
# ZServer is only available for Python 2
ZServer = 4.0.2
# mock 4.0 and up requires Python 3.6 or higher
mock = 3.0.5
# multipart 0.2 and up requires Python 3
multipart = 0.1.1
# waitress 2 requires Python 3.6 or higher
waitress = 1.4.4
# zope.dottedname >= 5 requires Python 3.6 or higher
zope.dottedname = 4.3
# zope.container 5.x requires Python 3.7 or higher
zope.container = 4.10
[versions:python35]
# DocumentTemplate 4+ cannot be installed on Zope 4 for Python 3.5
DocumentTemplate = 3.4
# PasteDeploy >3 requires Python 3.7
PasteDeploy = 2.1.1
# WSGIProxy 0.5 and up requires Python 3.7 and up
WSGIProxy2 = 0.4.6
# WebTest 3.0 and up requires Python 3.6 and up
WebTest = 2.0.35
# mock 4.0 and up requires Python 3.6 or higher
mock = 3.0.5
# waitress 2 requires Python 3.6 or higher
waitress = 1.4.4
# zope.dottedname >= 5 requires Python 3.6 or higher
zope.dottedname = 4.3
# zope.container 5.x requires Python 3.7 or higher
zope.container = 4.10
z3c.pt = 4.0
zExceptions = 5.0
zc.lockfile = 3.0.post1
zc.recipe.egg = 2.0.7
zodbpickle = 3.1
zope.annotation = 5.0
zope.browser = 3.0
zope.browsermenu = 5.0
zope.browserpage = 5.0
zope.browserresource = 5.1
zope.cachedescriptors = 5.0
zope.component = 6.0
zope.configuration = 5.0
zope.container = 5.2
zope.contentprovider = 5.0
zope.contenttype = 5.1
zope.datetime = 5.0.0
zope.deferredimport = 5.0
zope.deprecation = 5.0
zope.dottedname = 6.0
zope.event = 5.0
zope.exceptions = 5.0.1
zope.filerepresentation = 6.0
zope.globalrequest = 2.0
zope.hookable = 6.0
zope.i18n = 5.1
zope.i18nmessageid = 6.1.0
zope.interface = 6.3
zope.lifecycleevent = 5.0
zope.location = 5.0
zope.pagetemplate = 5.0
zope.processlifetime = 3.0
zope.proxy = 5.1
zope.ptresource = 5.0
zope.publisher = 7.0
zope.schema = 7.0.1
zope.security = 6.2
zope.sequencesort = 5.0
zope.site = 5.0
zope.size = 5.0
zope.structuredtext = 5.0
zope.tal = 5.0.1
zope.tales = 6.0
zope.testbrowser = 6.0
zope.testing = 5.0.1
zope.traversing = 5.0
zope.viewlet = 5.0
[versions:python36]
# PasteDeploy >3 requires Python 3.7
PasteDeploy = 2.1.1
# WSGIProxy 0.5 and up requires Python 3.7 and up
WSGIProxy2 = 0.4.6
# waitress 2.1 requires Python 3.7 or higher
waitress = 2.0.0
# zope.container 5.x requires Python 3.7 or higher
zope.container = 4.10
## XXX our old buildout for bootstrap does not support `python37`
## [versions:python37]
## # PasteDeploy 3.x works on Python 3.7 but pulls tons of dependencies
## PasteDeploy = 2.1.1
## # SoupSieve 2.5 and up requires Python 3.8
## soupsieve = 2.4.1
## # cffi 1.16.0 requires Python 3.8
## cffi = 1.15.1
[buildout]
extends = slapos.cfg
[python]
part = python2.7
[openssl]
<= openssl-1.1
# Use Python2-compatible versions of zc.buildout dependencies from the start.
# These versions must be pinned unconditionally because slapos.rebootstrap
# will necessarily keep the same versions of zc.buildout and dependencies
# with the new Python. This is an intrinsic limitation of slapos.rebootstrap.
[versions]
setuptools = 44.1.1
pip = 20.3.4
# Avoid https://github.com/pypa/wheel/issues/331
wheel = 0.33.6
......@@ -135,35 +135,42 @@ eggs =
slapos.libnetworkcache
[versions]
setuptools = 44.1.1
# The last version of setuptools compatible with Python 3.7
setuptools = 67.8.0
# Use SlapOS patched zc.buildout
zc.buildout = 2.7.1+slapos020
zc.buildout = 3.0.1+slapos002
pip = 23.2.1
# Use SlapOS patched zc.recipe.egg (zc.recipe.egg 2.x is for Buildout 2)
zc.recipe.egg = 2.0.3+slapos003
zc.recipe.egg = 2.0.8.dev0+slapos002
aiohttp = 3.8.5:whl
aiosignal = 1.3.1:whl
annotated-types = 0.6.0:whl
anyio = 4.3.0:whl
apache-libcloud = 2.4.0
argon2-cffi = 20.1.0
asn1crypto = 1.3.0
astor = 0.5
astor = 0.8.1
asttokens = 2.4.1:whl
async-generator = 1.10
async-lru = 2.0.4:whl
async-timeout = 4.0.3
atomicwrites = 1.4.0
atomize = 0.2.0
attrs = 23.1.0:whl
Babel = 2.14.0
backcall = 0.2.0
backports-abc = 0.5
backports.functools-lru-cache = 1.6.1:whl
backports.lzma = 0.0.14
backports.shutil-get-terminal-size = 1.0.0
bcrypt = 3.1.4
beautifulsoup4 = 4.8.2
beautifulsoup4 = 4.12.2:whl
bleach = 5.0.1
blinker = 1.6.3:whl
CacheControl = 0.12.6:whl
cachetools = 5.3.1
cattrs = 22.2.0
cattrs = 22.2.0:whl
certifi = 2023.7.22
cffi = 1.15.0
chardet = 3.0.4
......@@ -172,22 +179,25 @@ click = 8.1.3
cliff = 2.8.3:whl
cmd2 = 0.7.0
collective.recipe.shelloutput = 0.1
collective.recipe.template = 2.0
collective.recipe.template = 2.2
comm = 0.2.1:whl
configparser = 4.0.2:whl
contextlib2 = 0.6.0.post1
croniter = 0.3.25
cryptography = 3.3.2+SlapOSPatched001
dataclasses = 0.8
dateparser = 0.7.6
debugpy = 1.8.1
decorator = 4.3.0
defusedxml = 0.7.1
distro = 1.7.0
dnspython = 1.16.0
entrypoints = 0.3
entrypoints = 0.3:whl
enum34 = 1.1.10
erp5.util = 0.4.76
et-xmlfile = 1.0.1
exceptiongroup = 1.1.3:whl
executing = 2.0.1:whl
fastjsonschema = 2.18.1
feedparser = 6.0.10
Flask = 3.0.0:whl
......@@ -201,8 +211,10 @@ gitdb = 4.0.10
GitPython = 3.1.30
greenlet = 3.0.1
h11 = 0.14.0
h5py = 2.7.1
h5py = 3.11.0
httpcore = 1.0.4:whl
httplib2 = 0.22.0
httpx = 0.27.0:whl
idna = 3.4:whl
igmp = 1.0.4
Importing = 1.10
......@@ -210,22 +222,34 @@ importlib-metadata = 6.8.0:whl
importlib-resources = 5.10.2:whl
inotify-simple = 1.1.1
ipaddress = 1.0.23
ipykernel = 5.3.4:whl
ipython = 7.16.3
ipython-genutils = 0.1.0
ipywidgets = 6.0.0
ipykernel = 6.29.3:whl
ipython = 8.18.1:whl
ipython-genutils = 0.2.0
ipywidgets = 8.1.2:whl
itsdangerous = 2.1.2
jdcal = 1.4
jedi = 0.17.2
Jinja2 = 3.1.2:whl
joblib = 1.3.2:whl
json5 = 0.9.20:whl
jsonpointer = 2.2
jsonschema = 4.17.3:whl
jupyter = 1.0.0
jupyter-client = 7.3.1
jupyter-console = 6.4.4
jupyter-core = 4.9.2
jupyterlab = 0.26.3
jupyterlab-launcher = 0.3.1
jupyterlab-pygments = 0.1.2
jupyter-client = 8.6.1:whl
jupyter-console = 6.6.3:whl
jupyter-core = 5.7.1:whl
jupyter-events = 0.6.3:whl
isoduration = 20.11.0
jupyter-lsp = 2.2.3:whl
jupyter-server = 2.10.0:whl
jupyter-server-terminals = 0.5.2:whl
jupyterlab = 4.1.3:whl
jupyterlab-launcher = 0.13.1
jupyterlab-pygments = 0.3.0:whl
jupyterlab-server = 2.24.0:whl
jupyterlab-widgets = 3.0.10:whl
arrow = 1.2.3
fqdn = 1.5.1
lock-file = 2.0
lockfile = 0.12.2:whl
lsprotocol = 2023.0.0b1:whl
......@@ -238,18 +262,20 @@ meld3 = 1.0.2
mistune = 0.8.4
mock = 3.0.5
more-itertools = 5.0.0
mpmath = 1.0.0
mpmath = 1.3.0
msgpack = 1.0.5
multidict = 6.0.4
nbclient = 0.5.1
nbclient = 0.10.0:whl
nbconvert = 6.5.4
nbformat = 5.9.2:whl
nest-asyncio = 1.5.6
netaddr = 0.7.19
netifaces = 0.10.7
notebook = 6.1.5
notebook = 7.1.2:whl
notebook-shim = 0.2.4:whl
openpyxl = 2.5.2
outcome = 1.2.0
overrides = 7.7.0
packaging = 23.2:whl
pandocfilters = 1.4.3
paramiko = 2.11.0
......@@ -261,23 +287,26 @@ pbr = 5.9.0
pexpect = 4.8.0
pickleshare = 0.7.4
pim-dm = 1.4.0nxd002
pkgconfig = 1.5.1
pkgutil-resolve-name = 1.3.10
pkgconfig = 1.5.1:whl
pkgutil-resolve-name = 1.3.10:whl
platformdirs = 4.2.0:whl
plone.recipe.command = 1.1
pluggy = 0.13.1:whl
ply = 3.11
prettytable = 0.7.2
prometheus-client = 0.9.0
prompt-toolkit = 3.0.19
prompt-toolkit = 3.0.43
psutil = 5.8.0
psycopg2 = 2.9.9
ptyprocess = 0.5.1
ptyprocess = 0.6.0:whl
pure-eval = 0.2.2:whl
py = 1.11.0:whl
py-mld = 1.0.3
pyasn1 = 0.4.5
pyasn1 = 0.5.1
pycparser = 2.20
pycurl = 7.45.0
pydantic = 1.9.1
pydantic = 2.6.3:whl
pydantic-core = 2.16.3:whl
pygls = 1.1.0:whl
Pygments = 2.9.0
PyNaCl = 1.3.0
......@@ -289,19 +318,23 @@ PyRSS2Gen = 1.1
PySocks = 1.7.1
pytest-runner = 5.2:whl
python-dateutil = 2.8.2:whl
python-json-logger = 2.0.7
pytz = 2022.2.1
PyYAML = 5.4.1
pyzmq = 22.3.0
qtconsole = 4.3.0
pyzmq = 24.0.1
qtconsole = 5.5.1
qtpy = 2.4.1:whl
random2 = 1.0.1
regex = 2020.9.27
requests = 2.31.0
rfc3339-validator = 0.1.4
rfc3986-validator = 0.1.1:whl
rpdb = 0.1.5
rubygemsrecipe = 0.4.4
scandir = 1.10.0
scikit-learn = 0.20.4
scikit-learn = 0.24.2
seaborn = 0.7.1
Send2Trash = 1.5.0
Send2Trash = 1.8.2:whl
setproctitle = 1.1.10
setuptools-dso = 2.10
sgmllib3k = 1.0.0
......@@ -312,7 +345,7 @@ slapos.cookbook = 1.0.360
slapos.core = 1.11.0
slapos.extension.shared = 1.0
slapos.libnetworkcache = 0.25
slapos.rebootstrap = 4.5
slapos.rebootstrap = 4.7
slapos.recipe.build = 0.57
slapos.recipe.cmmi = 0.20
slapos.recipe.template = 5.1
......@@ -321,6 +354,7 @@ smmap = 5.0.0
sniffio = 1.3.0
sortedcontainers = 2.4.0
soupsieve = 1.9.5
stack-data = 0.6.3:whl
statsmodels = 0.13.5+SlapOSPatched001
stevedore = 1.21.0:whl
subprocess32 = 3.5.4
......@@ -328,24 +362,28 @@ supervisor = 4.1.0
sympy = 1.1.1
terminado = 0.9.1
testpath = 0.4.4
threadpoolctl = 3.3.0:whl
tinycss2 = 1.2.1:whl
tornado = 6.1
traitlets = 5.11.2:whl
tomli = 2.0.1:whl
tornado = 6.4
traitlets = 5.14.1:whl
trio = 0.22.0
trio-websocket = 0.9.2
typeguard = 3.0.2:whl
typing-extensions = 4.8.0:whl
tzlocal = 1.5.1
unicodecsv = 0.14.1
uri-template = 1.2.0
uritemplate = 4.1.1
urllib3 = 1.26.12
wcwidth = 0.2.5
webcolors = 1.12
webencodings = 0.5.1
websocket-client = 1.5.1
websockets = 10.4
Werkzeug = 3.0.0:whl
wheel = 0.41.2:whl
widgetsnbextension = 2.0.0
widgetsnbextension = 4.0.10:whl
wsproto = 1.2.0
xlrd = 1.1.0
xml-marshaller = 1.0.2
......@@ -356,11 +394,11 @@ ZConfig = 3.6.1
zdaemon = 4.2.0
zipp = 3.12.0:whl
zodburi = 2.5.0
zope.event = 4.6.0
zope.exceptions = 4.6
zope.interface = 5.4.0
zope.testing = 4.7
zope.testrunner = 5.2
zope.event = 5.0
zope.exceptions = 5.0.1
zope.interface = 6.3
zope.testing = 5.0.1
zope.testrunner = 6.4
[versions:sys.version_info < (3,10)]
# keep old statsmodels by default until slapos.toolbox is updated
......@@ -378,6 +416,7 @@ Werkzeug = 2.0.2
[versions:python2]
attrs = 18.2.0
beautifulsoup4 = 4.8.2
certifi = 2020.4.5.1
charset-normalizer = 2.0.12
click = 6.7
......@@ -388,30 +427,53 @@ gevent = 20.9.0
gitdb2 = 2.0.5
GitPython = 2.1.11
greenlet = 0.4.17
h5py = 2.7.1
idna = 2.9
importlib-metadata = 1.7.0:whl
itsdangerous = 0.24
Jinja2 = 2.11.3
ipykernel = 5.3.4:whl
ipython = 7.16.3
ipython-genutils = 0.1.0
ipywidgets = 6.0.0
jsonschema = 3.0.2:whl
jupyter-client = 7.3.1
jupyter-console = 6.4.4
jupyter-core = 4.9.2
jupyterlab = 0.26.3
jupyterlab-launcher = 0.3.1
jupyterlab-pygments = 0.1.2
lxml = 4.9.1
MarkupSafe = 1.0
mpmath = 1.0.0
msgpack = 0.6.2
nbclient = 0.5.1
notebook = 6.1.5
packaging = 16.8
psycopg2 = 2.8.6
pycurl = 7.43.0
pyparsing = 2.4.7
pyrsistent = 0.16.1
pyzmq = 22.3.0
qtconsole = 4.3.0
requests = 2.27.1
scikit-learn = 0.20.4
selectors34 = 1.2
Send2Trash = 1.5.0
slapos.toolbox = 0.128.1
smmap = 0.9.0
smmap2 = 2.0.5
statsmodels = 0.11.1
statsmodels = 0.11.0
tornado = 6.1
traitlets = 4.3.3
uritemplate = 3.0.0
websocket-client = 0.59.0
Werkzeug = 1.0.1
wheel = 0.35.1:whl
widgetsnbextension = 2.0.0
zipp = 1.2.0:whl
zope.event = 4.6.0
zope.exceptions = 4.6
zope.interface = 5.5.2
zope.testing = 4.10
zope.testrunner = 5.6
[networkcache]
......