Commit 2c0e2f1b authored by Jérome Perrin

Merge branch 'master' into 1.0

parents b138ec22 e01663b1
Pipeline #27025 failed in 0 seconds
......@@ -15,10 +15,10 @@ parts = ZODB/scripts
# [ZODB]
# major = <ZODB-version-major>
#
# By default ZODB4 is used.
# By default ZODB5 is used.
[ZODB]
recipe = slapos.recipe.build
major = 4
major = 5
init =
# link/depend ZODB -> ZODB<X>
zodb_x = 'ZODB'+options['major']
......@@ -59,7 +59,7 @@ egg-versions =
<= _ZODB
egg-versions =
ZODB = 5.8.0
transaction = 2.4.0
transaction = 3.0.1
# ZODB4-wc2 is ZODB4 version with patches for wendelin.core 2 to work correctly.
......@@ -94,9 +94,9 @@ setup-eggs = ${python-cffi:egg}
# eggs that are common to ZODB4 and ZODB5.
[versions]
BTrees = 4.5.1
persistent = 4.6.4
zodbpickle = 2.0.0
BTrees = 4.11.3
persistent = 4.9.3
zodbpickle = 2.6.0
# Provide ZODB3 for those eggs that still care about ZODB3 compatibility -
# for example wendelin.core. ZODB3 3.11 is just a dependency egg on _latest_
......
diff -Naur Acquisition-4.7.orig/src/Acquisition/_Acquisition.c Acquisition-4.7-py2.7-linux-x86_64.egg/src/Acquisition/_Acquisition.c
--- Acquisition-4.7.orig/src/Acquisition/_Acquisition.c 2021-03-17 16:22:28.266539592 +0100
+++ Acquisition-4.7-py2.7-linux-x86_64.egg/src/Acquisition/_Acquisition.c 2021-03-17 16:28:59.609842948 +0100
@@ -543,6 +543,64 @@
}
static PyObject *
+Wrapper_GetAttr(PyObject *self, PyObject *attr_name, PyObject *orig)
+{
+ /* This function retrieves an attribute from an object by PyObject_GetAttr.
+
+ The main difference between Wrapper_GetAttr and PyObject_GetAttr is that
+ Wrapper_GetAttr calls _aq_dynamic to generate an attribute dynamically, if
+ the attribute is not found.
+ */
+ PyObject *r, *v, *tb;
+ PyObject *d, *m;
+ PyObject *o;
+
+ if (isWrapper (self))
+ o = WRAPPER(self)->obj;
+ else
+ o = self;
+
+ /* Try to get an attribute in the normal way first. */
+ r = PyObject_GetAttr(o, attr_name);
+ if (r)
+ return r;
+
+ /* If an unexpected error happens, return immediately. */
+ PyErr_Fetch(&r,&v,&tb);
+ if (r != PyExc_AttributeError)
+ {
+ PyErr_Restore(r,v,tb);
+ return NULL;
+ }
+
+ /* Try to get _aq_dynamic. */
+ m = PyObject_GetAttrString(o, "_aq_dynamic");
+ if (! m) {
+ PyErr_Restore(r,v,tb);
+ return NULL;
+ }
+
+ /* Call _aq_dynamic in the context of the original acquisition wrapper. */
+ if (PyECMethod_Check(m) && PyECMethod_Self(m)==o)
+ ASSIGN(m,PyECMethod_New(m,OBJECT(self)));
+ else if (has__of__(m)) ASSIGN(m,__of__(m,OBJECT(self)));
+ d = PyObject_CallFunction(m, "O", attr_name);
+ Py_DECREF(m);
+
+ /* In the case of None, assume that the attribute is not found. */
+ if (d == Py_None) {
+ Py_DECREF(d);
+ PyErr_Restore(r,v,tb);
+ return NULL;
+ }
+
+ Py_XDECREF(r);
+ Py_XDECREF(v);
+ Py_XDECREF(tb);
+ return d;
+}
+
+static PyObject *
Wrapper_acquire(Wrapper *self, PyObject *oname,
PyObject *filter, PyObject *extra, PyObject *orig,
int explicit, int containment);
@@ -677,8 +735,8 @@
return NULL;
}
- /* normal attribute lookup */
- else if ((r = PyObject_GetAttr(self->obj, oname))) {
+ /* Give _aq_dynamic a chance, then normal attribute lookup */
+ else if ((r = Wrapper_GetAttr(OBJECT(self), oname, orig))) {
if (r == Acquired) {
Py_DECREF(r);
return Wrapper_acquire(
@@ -806,7 +864,7 @@
return NULL;
}
- if ((r = PyObject_GetAttr(self->container, oname)) == NULL) {
+ if ((r = Wrapper_GetAttr(self->container, oname, orig)) == NULL) {
/* May be AttributeError or some other kind of error */
return NULL;
}
@@ -830,7 +888,7 @@
static PyObject *
Wrapper_getattro(Wrapper *self, PyObject *oname)
{
- return Wrapper_findattr(self, oname, NULL, NULL, NULL, 1, 1, 0, 0);
+ return Wrapper_findattr(self, oname, NULL, NULL, OBJECT(self), 1, 1, 0, 0);
}
static PyObject *
@@ -846,7 +904,7 @@
if (STR_EQ(PyBytes_AS_STRING(tmp), "acquire")) {
result = Py_FindAttr(OBJECT(self), oname);
} else {
- result = Wrapper_findattr(self, oname, NULL, NULL, NULL, 1, 0, 0, 0);
+ result = Wrapper_findattr(self, oname, NULL, NULL, OBJECT(self), 1, 0, 0, 0);
}
Py_DECREF(tmp);
diff -Naur Acquisition-4.7.orig/src/Acquisition/test_dynamic_acquisition.py Acquisition-4.7-py2.7-linux-x86_64.egg/src/Acquisition/test_dynamic_acquisition.py
--- Acquisition-4.7.orig/src/Acquisition/test_dynamic_acquisition.py 1970-01-01 01:00:00.000000000 +0100
+++ Acquisition-4.7-py2.7-linux-x86_64.egg/src/Acquisition/test_dynamic_acquisition.py 2021-03-17 16:30:07.082413986 +0100
@@ -0,0 +1,160 @@
+##############################################################################
+#
+# Copyright (c) 1996-2002 Zope Corporation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+import Acquisition
+
+def checkContext(self, o):
+ # Python equivalent to aq_inContextOf
+ from Acquisition import aq_base, aq_parent, aq_inner
+ subob = self
+ o = aq_base(o)
+ while 1:
+ if aq_base(subob) is o:
+ return True
+ self = aq_inner(subob)
+ if self is None: break
+ subob = aq_parent(self)
+ if subob is None: break
+ return False
+
+class B(Acquisition.Implicit):
+ color='red'
+
+ def __init__(self, name='b'):
+ self.name = name
+
+ def _aq_dynamic(self, attr):
+ if attr == 'bonjour': return None
+
+ def dynmethod():
+ chain = ' <- '.join(repr(obj) for obj in Acquisition.aq_chain(self))
+ print repr(self) + '.' + attr
+ print 'chain:', chain
+
+ return dynmethod
+
+ def __repr__(self):
+ return "%s(%r)" % (self.__class__.__name__, self.name)
+
+class A(Acquisition.Implicit):
+
+ def __init__(self, name='a'):
+ self.name = name
+
+ def hi(self):
+ print self, self.color
+
+ def _aq_dynamic(self, attr):
+ return None
+
+ def __repr__(self):
+ return "%s(%r)" % (self.__class__.__name__, self.name)
+
+def test_dynamic():
+ r'''
+ The _aq_dynamic functionality allows an object to dynamically provide an
+ attribute.
+
+ If an object doesn't have an attribute, Acquisition checks to see if the
+ object has a _aq_dynamic method, which is then called. It is functionally
+ equivalent to __getattr__, but _aq_dynamic is called with 'self' as the
+ acquisition-wrapped object, whereas __getattr__ is called with self as the
+ unwrapped object.
+
+ Let's see how this works. In the examples below, the A class defines
+ '_aq_dynamic', but returns 'None' for all attempts, which means that no new
+ attributes should be generated dynamically. It also doesn't define 'color'
+ attribute, even though it uses it in the 'hi' method.
+
+ >>> A().hi()
+ Traceback (most recent call last):
+ ...
+ AttributeError: color
+
+ The class B, on the other hand, generates all attributes dynamically,
+ except when the requested attribute is 'bonjour'.
+
+ First we need to check that, even if an object provides '_aq_dynamic',
+ "regular" Acquisition attribute access still works:
+
+ >>> b=B()
+ >>> b.a=A()
+ >>> b.a.hi()
+ A('a') red
+ >>> b.a.color='green'
+ >>> b.a.hi()
+ A('a') green
+
+ Now, let's see some dynamically generated action. B does not define a
+ 'salut' method, but remember that it dynamically generates a method for
+ every attribute access:
+
+ >>> b.a.salut()
+ B('b').salut
+ chain: B('b')
+
+ >>> a=A('a1')
+ >>> a.b=B('b1')
+ >>> a.b.salut()
+ B('b1').salut
+ chain: B('b1') <- A('a1')
+
+ >>> b.a.bonjour()
+ Traceback (most recent call last):
+ ...
+ AttributeError: bonjour
+
+ >>> a.b.bonjour()
+ Traceback (most recent call last):
+ ...
+ AttributeError: bonjour
+
+ '''
+
+def test_wrapper_comparisons():
+ r'''
+
+ Test wrapper comparisons in presence of _aq_dynamic
+
+ >>> b=B()
+ >>> b.a=A()
+ >>> foo = b.a
+ >>> bar = b.a
+ >>> assert( foo == bar )
+ >>> c = A('c')
+ >>> b.c = c
+ >>> b.c.d = c
+ >>> b.c.d == c
+ True
+ >>> b.c.d == b.c
+ True
+ >>> b.c == c
+ True
+
+ Test contextuality in presence of _aq_dynamic
+
+ >>> checkContext(b.c, b)
+ True
+ >>> checkContext(b.c, b.a)
+ False
+
+ >>> assert b.a.aq_inContextOf(b)
+ >>> assert b.c.aq_inContextOf(b)
+ >>> assert b.c.d.aq_inContextOf(b)
+ >>> assert b.c.d.aq_inContextOf(c)
+ >>> assert b.c.d.aq_inContextOf(b.c)
+ >>> assert not b.c.aq_inContextOf(foo)
+ >>> assert not b.c.aq_inContextOf(b.a)
+ >>> assert not b.a.aq_inContextOf('somestring')
+'''
+
diff -Naur Acquisition-4.7.orig/src/Acquisition/tests.py Acquisition-4.7-py2.7-linux-x86_64.egg/src/Acquisition/tests.py
--- Acquisition-4.7.orig/src/Acquisition/tests.py 2021-03-17 16:22:28.266539592 +0100
+++ Acquisition-4.7-py2.7-linux-x86_64.egg/src/Acquisition/tests.py 2021-03-17 16:31:31.971132854 +0100
@@ -3366,6 +3366,7 @@
suites = [
DocTestSuite(),
+ DocTestSuite('Acquisition.test_dynamic_acquisition'),
unittest.defaultTestLoader.loadTestsFromName(__name__),
]
diff -Naur Acquisition-4.7.orig/src/Acquisition.egg-info/SOURCES.txt Acquisition-4.7-py2.7-linux-x86_64.egg/src/Acquisition.egg-info/SOURCES.txt
--- Acquisition-4.7.orig/src/Acquisition.egg-info/SOURCES.txt 2021-03-17 16:22:28.262539558 +0100
+++ Acquisition-4.7-py2.7-linux-x86_64.egg/src/Acquisition.egg-info/SOURCES.txt 2021-03-17 16:32:31.619638229 +0100
@@ -15,9 +15,10 @@
src/Acquisition/__init__.py
src/Acquisition/interfaces.py
src/Acquisition/tests.py
+src/Acquisition/test_dynamic_acquisition.py
src/Acquisition.egg-info/PKG-INFO
src/Acquisition.egg-info/SOURCES.txt
src/Acquisition.egg-info/dependency_links.txt
src/Acquisition.egg-info/not-zip-safe
src/Acquisition.egg-info/requires.txt
-src/Acquisition.egg-info/top_level.txt
\ No newline at end of file
+src/Acquisition.egg-info/top_level.txt
diff -Naur Products.DCWorkflow-2.4.1.orig/Products/DCWorkflow/DCWorkflow.py Products.DCWorkflow-2.4.1/Products/DCWorkflow/DCWorkflow.py
--- Products.DCWorkflow-2.4.1.orig/Products/DCWorkflow/DCWorkflow.py 2020-03-09 22:05:43.000000000 +0100
+++ Products.DCWorkflow-2.4.1/Products/DCWorkflow/DCWorkflow.py 2021-03-18 15:43:47.791236880 +0100
@@ -38,6 +38,7 @@
from Products.DCWorkflow.interfaces import IDCWorkflowDefinition
from Products.DCWorkflow.Transitions import TRIGGER_AUTOMATIC
from Products.DCWorkflow.Transitions import TRIGGER_USER_ACTION
+from Products.DCWorkflow.Transitions import TRIGGER_WORKFLOW_METHOD
from Products.DCWorkflow.utils import Message as _
from Products.DCWorkflow.utils import modifyRolesForGroup
from Products.DCWorkflow.utils import modifyRolesForPermission
@@ -279,6 +280,52 @@
self._changeStateOf(ob, tdef, kw)
@security.private
+ def isWorkflowMethodSupported(self, ob, method_id):
+ '''
+ Returns a true value if the given workflow method
+ is supported in the current state.
+ '''
+ sdef = self._getWorkflowStateOf(ob)
+ if sdef is None:
+ return 0
+ if method_id in sdef.transitions:
+ tdef = self.transitions.get(method_id, None)
+ if (tdef is not None and
+ tdef.trigger_type == TRIGGER_WORKFLOW_METHOD and
+ self._checkTransitionGuard(tdef, ob)):
+ return 1
+ return 0
+
+ @security.private
+ def wrapWorkflowMethod(self, ob, method_id, func, args, kw):
+ '''
+ Allows the user to request a workflow action. This method
+ must perform its own security checks.
+ '''
+ sdef = self._getWorkflowStateOf(ob)
+ if sdef is None:
+ raise WorkflowException('Object is in an undefined state')
+ if method_id not in sdef.transitions:
+ raise Unauthorized(method_id)
+ tdef = self.transitions.get(method_id, None)
+ if tdef is None or tdef.trigger_type != TRIGGER_WORKFLOW_METHOD:
+ raise WorkflowException(
+ 'Transition %s is not triggered by a workflow method'
+ % method_id)
+ if not self._checkTransitionGuard(tdef, ob):
+ raise Unauthorized(method_id)
+ res = func(*args, **kw)
+ try:
+ self._changeStateOf(ob, tdef)
+ except ObjectDeleted:
+ # Re-raise with a different result.
+ raise ObjectDeleted(res)
+ except ObjectMoved as ex:
+ # Re-raise with a different result.
+ raise ObjectMoved(ex.getNewObject(), res)
+ return res
+
+ @security.private
def isInfoSupported(self, ob, name):
'''
Returns a true value if the given info name is supported.
diff -Naur Products.DCWorkflow-2.4.1.orig/Products/DCWorkflow/dtml/transition_properties.dtml Products.DCWorkflow-2.4.1/Products/DCWorkflow/dtml/transition_properties.dtml
--- Products.DCWorkflow-2.4.1.orig/Products/DCWorkflow/dtml/transition_properties.dtml 2020-03-09 22:05:43.000000000 +0100
+++ Products.DCWorkflow-2.4.1/Products/DCWorkflow/dtml/transition_properties.dtml 2021-03-18 15:37:55.144028451 +0100
@@ -56,6 +56,16 @@
</tr>
<tr>
+<th></th>
+<td>
+<dtml-let checked="trigger_type==2 and 'checked' or ' '">
+<input type="radio" name="trigger_type" value="2" &dtml-checked; />
+Initiated by WorkflowMethod
+</dtml-let>
+</td>
+</tr>
+
+<tr>
<th align="left">Script (before)</th>
<td>
<select name="script_name">
diff -Naur Products.DCWorkflow-2.4.1.orig/Products/DCWorkflow/dtml/transitions.dtml Products.DCWorkflow-2.4.1/Products/DCWorkflow/dtml/transitions.dtml
--- Products.DCWorkflow-2.4.1.orig/Products/DCWorkflow/dtml/transitions.dtml 2020-03-09 22:05:43.000000000 +0100
+++ Products.DCWorkflow-2.4.1/Products/DCWorkflow/dtml/transitions.dtml 2021-03-18 15:37:55.144028451 +0100
@@ -17,7 +17,8 @@
<td>
Destination state: <code><dtml-if new_state_id>&dtml-new_state_id;<dtml-else>(Remain in state)</dtml-if></code> <br />
Trigger: <dtml-var expr="(trigger_type == 0 and 'Automatic') or
- (trigger_type == 1 and 'User action')">
+ (trigger_type == 1 and 'User action') or
+ (trigger_type == 2 and 'WorkflowMethod')">
<br />
<dtml-if script_name>
Script (before): &dtml-script_name;
diff -Naur Products.DCWorkflow-2.4.1.orig/Products/DCWorkflow/exportimport.py Products.DCWorkflow-2.4.1/Products/DCWorkflow/exportimport.py
--- Products.DCWorkflow-2.4.1.orig/Products/DCWorkflow/exportimport.py 2020-03-09 22:05:43.000000000 +0100
+++ Products.DCWorkflow-2.4.1/Products/DCWorkflow/exportimport.py 2021-03-18 15:44:34.903667147 +0100
@@ -40,7 +40,7 @@
from Products.DCWorkflow.utils import _xmldir
-TRIGGER_TYPES = ('AUTOMATIC', 'USER')
+TRIGGER_TYPES = ('AUTOMATIC', 'USER', 'METHOD' )
_FILENAME = 'workflows.xml'
diff -Naur Products.DCWorkflow-2.4.1.orig/Products/DCWorkflow/Transitions.py Products.DCWorkflow-2.4.1/Products/DCWorkflow/Transitions.py
--- Products.DCWorkflow-2.4.1.orig/Products/DCWorkflow/Transitions.py 2020-03-09 22:05:43.000000000 +0100
+++ Products.DCWorkflow-2.4.1/Products/DCWorkflow/Transitions.py 2021-03-18 15:37:55.148028486 +0100
@@ -31,6 +31,7 @@
TRIGGER_AUTOMATIC = 0
TRIGGER_USER_ACTION = 1
+TRIGGER_WORKFLOW_METHOD = 2
class TransitionDefinition(SimpleItem):
From: Bert JW Regeer <bertjw@regeer.org>
Date: Sat, 12 Mar 2022 18:30:30 -0700
Subject: Add new regular expressions for Chunked Encoding
This also moves some regular expressions for QUOTED_PAIR/QUOTED_STRING
into this module from utilities so that they may be reused.
Part of CVE-2022-24761
---
src/waitress/rfc7230.py | 27 ++++++++++++++++++++++++++-
src/waitress/utilities.py | 28 +++-------------------------
2 files changed, 29 insertions(+), 26 deletions(-)
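The moved quoted-string pieces are the plain RFC 7230 section 3.2.6 constructions. A minimal standalone sketch of how they compose (constants copied from the rfc7230.py hunk below; the OBS_TEXT value is an assumption here, since its definition sits outside this hunk):

    import re

    # Reproduced from the hunk below for illustration only.
    OBS_TEXT = r"\x80-\xff"   # assumed value; defined elsewhere in rfc7230.py
    VCHAR = r"\x21-\x7e"
    QDTEXT = "[\t \x21\x23-\x5b\\\x5d-\x7e" + OBS_TEXT + "]"
    QUOTED_PAIR = r"\\" + "([\t " + VCHAR + OBS_TEXT + "])"
    QUOTED_STRING = '"(?:(?:' + QDTEXT + ")|(?:" + QUOTED_PAIR + '))*"'

    QUOTED_STRING_RE = re.compile(QUOTED_STRING)
    QUOTED_PAIR_RE = re.compile(QUOTED_PAIR)

    value = '"a \\"quoted\\" token"'              # wire form: "a \"quoted\" token"
    m = QUOTED_STRING_RE.match(value)
    assert m and m.end() == len(value)             # the whole value is a quoted-string
    print(QUOTED_PAIR_RE.sub(r"\1", value[1:-1]))  # unescape quoted-pairs, as undquote() does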
diff --git a/src/waitress/rfc7230.py b/src/waitress/rfc7230.py
index cd33c90..0b76a38 100644
--- a/src/waitress/rfc7230.py
+++ b/src/waitress/rfc7230.py
@@ -7,6 +7,9 @@ import re
from .compat import tobytes
+HEXDIG = "[0-9a-fA-F]"
+DIGIT = "[0-9]"
+
WS = "[ \t]"
OWS = WS + "{0,}?"
RWS = WS + "{1,}?"
@@ -27,6 +30,12 @@ TOKEN = TCHAR + "{1,}"
# ; visible (printing) characters
VCHAR = r"\x21-\x7e"
+# The '\\' between \x5b and \x5d is needed to escape \x5d (']')
+QDTEXT = "[\t \x21\x23-\x5b\\\x5d-\x7e" + OBS_TEXT + "]"
+
+QUOTED_PAIR = r"\\" + "([\t " + VCHAR + OBS_TEXT + "])"
+QUOTED_STRING = '"(?:(?:' + QDTEXT + ")|(?:" + QUOTED_PAIR + '))*"'
+
# header-field = field-name ":" OWS field-value OWS
# field-name = token
# field-value = *( field-content / obs-fold )
@@ -45,8 +54,24 @@ FIELD_CONTENT = FIELD_VCHAR + "+(?:[ \t]+" + FIELD_VCHAR + "+)*"
# Which allows the field value here to just see if there is even a value in the first place
FIELD_VALUE = "(?:" + FIELD_CONTENT + ")?"
-HEADER_FIELD = re.compile(
+# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
+# chunk-ext-name = token
+# chunk-ext-val = token / quoted-string
+
+CHUNK_EXT_NAME = TOKEN
+CHUNK_EXT_VAL = "(?:" + TOKEN + ")|(?:" + QUOTED_STRING + ")"
+CHUNK_EXT = (
+ "(?:;(?P<extension>" + CHUNK_EXT_NAME + ")(?:=(?P<value>" + CHUNK_EXT_VAL + "))?)*"
+)
+
+# Pre-compiled regular expressions for use elsewhere
+ONLY_HEXDIG_RE = re.compile(("^" + HEXDIG + "+$").encode("latin-1"))
+ONLY_DIGIT_RE = re.compile(("^" + DIGIT + "+$").encode("latin-1"))
+HEADER_FIELD_RE = re.compile(
tobytes(
"^(?P<name>" + TOKEN + "):" + OWS + "(?P<value>" + FIELD_VALUE + ")" + OWS + "$"
)
)
+QUOTED_PAIR_RE = re.compile(QUOTED_PAIR)
+QUOTED_STRING_RE = re.compile(QUOTED_STRING)
+CHUNK_EXT_RE = re.compile(("^" + CHUNK_EXT + "$").encode("latin-1"))
diff --git a/src/waitress/utilities.py b/src/waitress/utilities.py
index 556bed2..fa59657 100644
--- a/src/waitress/utilities.py
+++ b/src/waitress/utilities.py
@@ -22,7 +22,7 @@ import re
import stat
import time
-from .rfc7230 import OBS_TEXT, VCHAR
+from .rfc7230 import QUOTED_PAIR_RE, QUOTED_STRING_RE
logger = logging.getLogger("waitress")
queue_logger = logging.getLogger("waitress.queue")
@@ -216,32 +216,10 @@ def parse_http_date(d):
return retval
-# RFC 5234 Appendix B.1 "Core Rules":
-# VCHAR = %x21-7E
-# ; visible (printing) characters
-vchar_re = VCHAR
-
-# RFC 7230 Section 3.2.6 "Field Value Components":
-# quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
-# qdtext = HTAB / SP /%x21 / %x23-5B / %x5D-7E / obs-text
-# obs-text = %x80-FF
-# quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text )
-obs_text_re = OBS_TEXT
-
-# The '\\' between \x5b and \x5d is needed to escape \x5d (']')
-qdtext_re = "[\t \x21\x23-\x5b\\\x5d-\x7e" + obs_text_re + "]"
-
-quoted_pair_re = r"\\" + "([\t " + vchar_re + obs_text_re + "])"
-quoted_string_re = '"(?:(?:' + qdtext_re + ")|(?:" + quoted_pair_re + '))*"'
-
-quoted_string = re.compile(quoted_string_re)
-quoted_pair = re.compile(quoted_pair_re)
-
-
def undquote(value):
if value.startswith('"') and value.endswith('"'):
# So it claims to be DQUOTE'ed, let's validate that
- matches = quoted_string.match(value)
+ matches = QUOTED_STRING_RE.match(value)
if matches and matches.end() == len(value):
# Remove the DQUOTE's from the value
@@ -249,7 +227,7 @@ def undquote(value):
# Remove all backslashes that are followed by a valid vchar or
# obs-text
- value = quoted_pair.sub(r"\1", value)
+ value = QUOTED_PAIR_RE.sub(r"\1", value)
return value
elif not value.startswith('"') and not value.endswith('"'):
From: Bert JW Regeer <bertjw@regeer.org>
Date: Sat, 12 Mar 2022 18:32:24 -0700
Subject: Be more strict in parsing Content-Length
Validate that we are only parsing digits and nothing else. RFC7230 is
explicit in that the Content-Length can only consist of 1*DIGIT and may
not include any additional sign information.
The Python int() function parses `+10` as `10`, which means we were more
lenient than the standard intended.
Part of CVE-2022-24761
---
src/waitress/parser.py | 12 ++++++------
tests/test_parser.py | 24 ++++++++++++++++++++++++
2 files changed, 30 insertions(+), 6 deletions(-)
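To illustrate the leniency this commit removes, a small standalone sketch comparing int() with a digits-only check in the spirit of the ONLY_DIGIT_RE added to rfc7230.py above (re-declared here over str so the snippet runs on its own; the patched module compiles it over latin-1 bytes):

    import re

    ONLY_DIGIT_RE = re.compile(r"^[0-9]+$")   # 1*DIGIT, nothing else

    for raw in ("10", "0010", "+10", "-10", " 10"):
        try:
            lenient = int(raw)    # int() tolerates signs and surrounding whitespace
        except ValueError:
            lenient = None
        strict = bool(ONLY_DIGIT_RE.match(raw))
        print(f"{raw!r:>7}  int() -> {lenient!r:>5}  1*DIGIT -> {strict}")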
diff --git a/src/waitress/parser.py b/src/waitress/parser.py
index 765fe59..acaf494 100644
--- a/src/waitress/parser.py
+++ b/src/waitress/parser.py
@@ -22,6 +22,7 @@ from io import BytesIO
from waitress.buffers import OverflowableBuffer
from waitress.compat import tostr, unquote_bytes_to_wsgi, urlparse
from waitress.receiver import ChunkedReceiver, FixedStreamReceiver
+from waitress.rfc7230 import HEADER_FIELD_RE, ONLY_DIGIT_RE
from waitress.utilities import (
BadRequest,
RequestEntityTooLarge,
@@ -29,8 +30,6 @@ from waitress.utilities import (
ServerNotImplemented,
find_double_newline,
)
-from .rfc7230 import HEADER_FIELD
-
class ParsingError(Exception):
pass
@@ -209,7 +208,7 @@ class HTTPRequestParser(object):
headers = self.headers
for line in lines:
- header = HEADER_FIELD.match(line)
+ header = HEADER_FIELD_RE.match(line)
if not header:
raise ParsingError("Invalid header")
@@ -299,11 +298,12 @@ class HTTPRequestParser(object):
self.connection_close = True
if not self.chunked:
- try:
- cl = int(headers.get("CONTENT_LENGTH", 0))
- except ValueError:
+ cl = headers.get("CONTENT_LENGTH", "0")
+
+ if not ONLY_DIGIT_RE.match(cl.encode("latin-1")):
raise ParsingError("Content-Length is invalid")
+ cl = int(cl)
self.content_length = cl
if cl > 0:
buf = OverflowableBuffer(self.adj.inbuf_overflow)
diff --git a/tests/test_parser.py b/tests/test_parser.py
index 91837c7..eabf353 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -194,6 +194,30 @@ class TestHTTPRequestParser(unittest.TestCase):
else: # pragma: nocover
self.assertTrue(False)
+ def test_parse_header_bad_content_length_plus(self):
+ from waitress.parser import ParsingError
+
+ data = b"GET /foobar HTTP/8.4\r\ncontent-length: +10\r\n"
+
+ try:
+ self.parser.parse_header(data)
+ except ParsingError as e:
+ self.assertIn("Content-Length is invalid", e.args[0])
+ else: # pragma: nocover
+ self.assertTrue(False)
+
+ def test_parse_header_bad_content_length_minus(self):
+ from waitress.parser import ParsingError
+
+ data = b"GET /foobar HTTP/8.4\r\ncontent-length: -10\r\n"
+
+ try:
+ self.parser.parse_header(data)
+ except ParsingError as e:
+ self.assertIn("Content-Length is invalid", e.args[0])
+ else: # pragma: nocover
+ self.assertTrue(False)
+
def test_parse_header_multiple_content_length(self):
from waitress.parser import ParsingError
From: Bert JW Regeer <bertjw@regeer.org>
Date: Sat, 12 Mar 2022 18:35:01 -0700
Subject: Update tests to remove invalid chunked encoding chunk-size
RFC7230 states the following:
chunk = chunk-size [ chunk-ext ] CRLF
chunk-data CRLF
chunk-size = 1*HEXDIG
Where chunk-ext is:
chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
Only if there is a chunk-ext should there be a `;` after the 1*HEXDIG.
And a chunk-ext that is empty is invalid.
Part of CVE-2022-24761
---
tests/test_functional.py | 6 +++---
tests/test_parser.py | 2 +-
2 files changed, 4 insertions(+), 4 deletions(-)
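For reference, a hand-assembled chunked body that follows the grammar quoted above; the chunk line carries a ';' only when a well-formed chunk-ext actually follows (the 29-character payload mirrors the parser test below):

    # Sketch only: a syntactically valid chunked body per RFC 7230 section 4.1.
    body = (
        b"1d\r\n"                             # chunk-size = 1*HEXDIG (0x1d = 29 bytes)
        b"This string has 29 characters\r\n"  # chunk-data, then CRLF
        b"1d;name=value\r\n"                  # chunk-size followed by a chunk-ext
        b"This string has 29 characters\r\n"
        b"0\r\n"                              # last-chunk
        b"\r\n"                               # end of the chunked body
    )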
diff --git a/tests/test_functional.py b/tests/test_functional.py
index e894497..7a54b22 100644
--- a/tests/test_functional.py
+++ b/tests/test_functional.py
@@ -302,7 +302,7 @@ class EchoTests(object):
self.assertFalse("transfer-encoding" in headers)
def test_chunking_request_with_content(self):
- control_line = b"20;\r\n" # 20 hex = 32 dec
+ control_line = b"20\r\n" # 20 hex = 32 dec
s = b"This string has 32 characters.\r\n"
expected = s * 12
header = tobytes("GET / HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n")
@@ -321,7 +321,7 @@ class EchoTests(object):
self.assertFalse("transfer-encoding" in headers)
def test_broken_chunked_encoding(self):
- control_line = "20;\r\n" # 20 hex = 32 dec
+ control_line = "20\r\n" # 20 hex = 32 dec
s = "This string has 32 characters.\r\n"
to_send = "GET / HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n"
to_send += control_line + s + "\r\n"
@@ -346,7 +346,7 @@ class EchoTests(object):
self.assertRaises(ConnectionClosed, read_http, fp)
def test_broken_chunked_encoding_missing_chunk_end(self):
- control_line = "20;\r\n" # 20 hex = 32 dec
+ control_line = "20\r\n" # 20 hex = 32 dec
s = "This string has 32 characters.\r\n"
to_send = "GET / HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n"
to_send += control_line + s
diff --git a/tests/test_parser.py b/tests/test_parser.py
index eabf353..420f280 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -152,7 +152,7 @@ class TestHTTPRequestParser(unittest.TestCase):
b"Transfer-Encoding: chunked\r\n"
b"X-Foo: 1\r\n"
b"\r\n"
- b"1d;\r\n"
+ b"1d\r\n"
b"This string has 29 characters\r\n"
b"0\r\n\r\n"
)
From: Bert JW Regeer <bertjw@regeer.org>
Date: Sat, 12 Mar 2022 18:42:51 -0700
Subject: Error when receiving bad Chunk Extension
Waitress discards chunked extensions and does no further processing on
them; however, it failed to validate that the chunked encoding extension
did not contain invalid data.
We now validate that any chunked extensions are well-formed; if they are
not and contain invalid characters, Waitress will now correctly return a
Bad Request and stop any further
processing of the request.
Part of CVE-2022-24761
---
src/waitress/receiver.py | 11 ++++++++++-
tests/test_functional.py | 22 ++++++++++++++++++++++
tests/test_receiver.py | 37 +++++++++++++++++++++++++++++++++++++
3 files changed, 69 insertions(+), 1 deletion(-)
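A quick way to see the new check in action (this assumes a waitress tree with this patch series applied, since CHUNK_EXT_RE is only added by the rfc7230.py hunk earlier in the series; the sample extensions mirror the parametrized tests below):

    from waitress.rfc7230 import CHUNK_EXT_RE

    # The receiver hands everything from the ";" onwards to the regex.
    for extinfo in (b";valid=true", b";valid=true;other=true",
                    b";invalid=", b"; spaced = true"):
        ok = CHUNK_EXT_RE.match(extinfo)
        print(extinfo, "well-formed" if ok else "rejected -> 400 Bad Request")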
diff --git a/src/waitress/receiver.py b/src/waitress/receiver.py
index 5d1568d..106dbc7 100644
--- a/src/waitress/receiver.py
+++ b/src/waitress/receiver.py
@@ -14,6 +14,7 @@
"""Data Chunk Receiver
"""
+from waitress.rfc7230 import CHUNK_EXT_RE, ONLY_HEXDIG_RE
from waitress.utilities import BadRequest, find_double_newline
@@ -110,6 +111,7 @@ class ChunkedReceiver(object):
s = b""
else:
self.chunk_end = b""
+
if pos == 0:
# Chop off the terminating CR LF from the chunk
s = s[2:]
@@ -140,7 +142,14 @@ class ChunkedReceiver(object):
semi = line.find(b";")
if semi >= 0:
- # discard extension info.
+ extinfo = line[semi:]
+ valid_ext_info = CHUNK_EXT_RE.match(extinfo)
+
+ if not valid_ext_info:
+ self.error = BadRequest("Invalid chunk extension")
+ self.all_chunks_received = True
+
+ break
line = line[:semi]
try:
sz = int(line.strip(), 16) # hexadecimal
diff --git a/tests/test_functional.py b/tests/test_functional.py
index 7a54b22..853942c 100644
--- a/tests/test_functional.py
+++ b/tests/test_functional.py
@@ -345,6 +345,28 @@ class EchoTests(object):
self.send_check_error(to_send)
self.assertRaises(ConnectionClosed, read_http, fp)
+ def test_broken_chunked_encoding_invalid_extension(self):
+ control_line = b"20;invalid=\r\n" # 20 hex = 32 dec
+ s = b"This string has 32 characters.\r\n"
+ to_send = b"GET / HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n"
+ to_send += control_line + s + b"\r\n"
+ self.connect()
+ self.sock.send(to_send)
+ with self.sock.makefile("rb", 0) as fp:
+ line, headers, response_body = read_http(fp)
+ self.assertline(line, "400", "Bad Request", "HTTP/1.1")
+ cl = int(headers["content-length"])
+ self.assertEqual(cl, len(response_body))
+ self.assertIn(b"Invalid chunk extension", response_body)
+ self.assertEqual(
+ sorted(headers.keys()),
+ ["connection", "content-length", "content-type", "date", "server"],
+ )
+ self.assertEqual(headers["content-type"], "text/plain")
+ # connection has been closed
+ self.send_check_error(to_send)
+ self.assertRaises(ConnectionClosed, read_http, fp)
+
def test_broken_chunked_encoding_missing_chunk_end(self):
control_line = "20\r\n" # 20 hex = 32 dec
s = "This string has 32 characters.\r\n"
diff --git a/tests/test_receiver.py b/tests/test_receiver.py
index b4910bb..a6261ea 100644
--- a/tests/test_receiver.py
+++ b/tests/test_receiver.py
@@ -1,5 +1,7 @@
import unittest
+import pytest
+
class TestFixedStreamReceiver(unittest.TestCase):
def _makeOne(self, cl, buf):
@@ -226,6 +228,41 @@ class TestChunkedReceiver(unittest.TestCase):
self.assertEqual(inst.error, None)
+class TestChunkedReceiverParametrized:
+ def _makeOne(self, buf):
+ from waitress.receiver import ChunkedReceiver
+
+ return ChunkedReceiver(buf)
+
+ @pytest.mark.parametrize(
+ "invalid_extension", [b"\n", b"invalid=", b"\r", b"invalid = true"]
+ )
+ def test_received_invalid_extensions(self, invalid_extension):
+ from waitress.utilities import BadRequest
+
+ buf = DummyBuffer()
+ inst = self._makeOne(buf)
+ data = b"4;" + invalid_extension + b"\r\ntest\r\n"
+ result = inst.received(data)
+ assert result == len(data)
+ assert inst.error.__class__ == BadRequest
+ assert inst.error.body == "Invalid chunk extension"
+
+ @pytest.mark.parametrize(
+ "valid_extension", [b"test", b"valid=true", b"valid=true;other=true"]
+ )
+ def test_received_valid_extensions(self, valid_extension):
+ # While waitress may ignore extensions in Chunked Encoding, we do want
+ # to make sure that we don't fail when we do encounter one that is
+ # valid
+ buf = DummyBuffer()
+ inst = self._makeOne(buf)
+ data = b"4;" + valid_extension + b"\r\ntest\r\n"
+ result = inst.received(data)
+ assert result == len(data)
+ assert inst.error == None
+
+
class DummyBuffer(object):
def __init__(self, data=None):
if data is None:
From: Bert JW Regeer <bertjw@regeer.org>
Date: Sat, 12 Mar 2022 18:48:26 -0700
Subject: Validate chunk sizes in Chunked Encoding are HEXDIG
RFC7230 states that a chunk-size should be 1*HEXDIG; this is now
validated before passing the resulting string to int(), which would also
parse other formats for hex, such as `0x01` as `1` and `+0x01` as `1`.
This could lead a frontend proxy server and waitress to disagree on
where a chunk started and ended, thereby potentially leading to request
smuggling.
With the increased validation, if the size is not just hex digits,
Waitress now returns a Bad Request and stops processing the request.
Part of CVE-2022-24761
---
src/waitress/receiver.py | 19 ++++++++++++++-----
tests/test_functional.py | 22 ++++++++++++++++++++++
tests/test_receiver.py | 12 ++++++++++++
3 files changed, 48 insertions(+), 5 deletions(-)
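A small standalone sketch of the leniency described above: int(line, 16) happily accepts "0x" prefixes, signs and (with the old .strip()) surrounding whitespace, while a HEXDIG-only check in the spirit of ONLY_HEXDIG_RE does not (the regex is re-declared here so the snippet runs on its own):

    import re

    ONLY_HEXDIG_RE = re.compile(rb"^[0-9a-fA-F]+$")   # chunk-size = 1*HEXDIG

    for line in (b"1d", b"0x1d", b"+0x1d", b" 1d "):
        parsed = int(line.strip(), 16)        # the old code path: all four give 29
        ok = ONLY_HEXDIG_RE.match(line)
        print(line, parsed, "accepted" if ok else "rejected -> 400 Bad Request")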
diff --git a/src/waitress/receiver.py b/src/waitress/receiver.py
index 106dbc7..9e4bffe 100644
--- a/src/waitress/receiver.py
+++ b/src/waitress/receiver.py
@@ -150,12 +150,21 @@ class ChunkedReceiver(object):
self.all_chunks_received = True
break
+
line = line[:semi]
- try:
- sz = int(line.strip(), 16) # hexadecimal
- except ValueError: # garbage in input
- self.error = BadRequest("garbage in chunked encoding input")
- sz = 0
+
+ # Remove any whitespace
+ line = line.strip()
+
+ if not ONLY_HEXDIG_RE.match(line):
+ self.error = BadRequest("Invalid chunk size")
+ self.all_chunks_received = True
+
+ break
+
+ # Can not fail due to matching against the regular
+ # expression above
+ sz = int(line.strip(), 16) # hexadecimal
if sz > 0:
# Start a new chunk.
diff --git a/tests/test_functional.py b/tests/test_functional.py
index 853942c..448e0c0 100644
--- a/tests/test_functional.py
+++ b/tests/test_functional.py
@@ -345,6 +345,28 @@ class EchoTests(object):
self.send_check_error(to_send)
self.assertRaises(ConnectionClosed, read_http, fp)
+ def test_broken_chunked_encoding_invalid_hex(self):
+ control_line = b"0x20\r\n" # 20 hex = 32 dec
+ s = b"This string has 32 characters.\r\n"
+ to_send = b"GET / HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n"
+ to_send += control_line + s + b"\r\n"
+ self.connect()
+ self.sock.send(to_send)
+ with self.sock.makefile("rb", 0) as fp:
+ line, headers, response_body = read_http(fp)
+ self.assertline(line, "400", "Bad Request", "HTTP/1.1")
+ cl = int(headers["content-length"])
+ self.assertEqual(cl, len(response_body))
+ self.assertIn(b"Invalid chunk size", response_body)
+ self.assertEqual(
+ sorted(headers.keys()),
+ ["connection", "content-length", "content-type", "date", "server"],
+ )
+ self.assertEqual(headers["content-type"], "text/plain")
+ # connection has been closed
+ self.send_check_error(to_send)
+ self.assertRaises(ConnectionClosed, read_http, fp)
+
def test_broken_chunked_encoding_invalid_extension(self):
control_line = b"20;invalid=\r\n" # 20 hex = 32 dec
s = b"This string has 32 characters.\r\n"
diff --git a/tests/test_receiver.py b/tests/test_receiver.py
index a6261ea..17328d4 100644
--- a/tests/test_receiver.py
+++ b/tests/test_receiver.py
@@ -262,6 +262,18 @@ class TestChunkedReceiverParametrized:
assert result == len(data)
assert inst.error == None
+ @pytest.mark.parametrize("invalid_size", [b"0x04", b"+0x04", b"x04", b"+04"])
+ def test_received_invalid_size(self, invalid_size):
+ from waitress.utilities import BadRequest
+
+ buf = DummyBuffer()
+ inst = self._makeOne(buf)
+ data = invalid_size + b"\r\ntest\r\n"
+ result = inst.received(data)
+ assert result == len(data)
+ assert inst.error.__class__ == BadRequest
+ assert inst.error.body == "Invalid chunk size"
+
class DummyBuffer(object):
def __init__(self, data=None):
From: Bert JW Regeer <bertjw@regeer.org>
Date: Sat, 12 Mar 2022 19:16:23 -0700
Subject: Remove extraneous calls to .strip() in Chunked Encoding
To be valid chunked encoding, we should not be removing any whitespace, as
the standard does not allow for optional whitespace.
If whitespace is encountered in the wrong place, it should lead to a 400
Bad Request instead.
Part of CVE-2022-24761
---
src/waitress/receiver.py | 6 +-----
tests/test_receiver.py | 4 +++-
2 files changed, 4 insertions(+), 6 deletions(-)
diff --git a/src/waitress/receiver.py b/src/waitress/receiver.py
index 9e4bffe..806ff87 100644
--- a/src/waitress/receiver.py
+++ b/src/waitress/receiver.py
@@ -135,7 +135,6 @@ class ChunkedReceiver(object):
line = s[:pos]
s = s[pos + 2 :]
self.control_line = b""
- line = line.strip()
if line:
# Begin a new chunk.
@@ -153,9 +152,6 @@ class ChunkedReceiver(object):
line = line[:semi]
- # Remove any whitespace
- line = line.strip()
-
if not ONLY_HEXDIG_RE.match(line):
self.error = BadRequest("Invalid chunk size")
self.all_chunks_received = True
@@ -164,7 +160,7 @@ class ChunkedReceiver(object):
# Can not fail due to matching against the regular
# expression above
- sz = int(line.strip(), 16) # hexadecimal
+ sz = int(line, 16) # hexadecimal
if sz > 0:
# Start a new chunk.
diff --git a/tests/test_receiver.py b/tests/test_receiver.py
index 17328d4..014f785 100644
--- a/tests/test_receiver.py
+++ b/tests/test_receiver.py
@@ -262,7 +262,9 @@ class TestChunkedReceiverParametrized:
assert result == len(data)
assert inst.error == None
- @pytest.mark.parametrize("invalid_size", [b"0x04", b"+0x04", b"x04", b"+04"])
+ @pytest.mark.parametrize(
+ "invalid_size", [b"0x04", b"+0x04", b"x04", b"+04", b" 04", b" 0x04"]
+ )
def test_received_invalid_size(self, invalid_size):
from waitress.utilities import BadRequest
......@@ -18,8 +18,8 @@ parts =
fluentbit-plugin-wendelin
[fluentbit-plugin-wendelin]
url = https://lab.nexedi.com/nexedi/fluentbit-plugin-wendelin/-/archive/0.3.1/fluentbit-plugin-wendelin-0.3.1.tar.gz
md5sum = 7bafdcbeb2bf9634e041fde95b63b51f
url = https://lab.nexedi.com/nexedi/fluentbit-plugin-wendelin/-/archive/0.3.2/fluentbit-plugin-wendelin-0.3.2.tar.gz
md5sum = 3d5c1e0457ec1ab46a4e464eba6a1abc
[golang1.17]
# Using "./make.bash" instead of "./all.bash" disables golang tests. Some of these tests attempt to use the network, which fails on OBS' VM.
......
......@@ -18,8 +18,8 @@ parts =
[git]
recipe = slapos.recipe.cmmi
shared = true
url = https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.39.1.tar.xz
md5sum = 74b450a513504fd7b3f5016f80de5e54
url = https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.39.2.tar.xz
md5sum = 32d34dc65ae0955cc68c7152b5ca8b13
configure-options =
--with-curl=${curl:location}
--with-openssl=${openssl:location}
......
......@@ -15,8 +15,8 @@ extends =
[groonga]
recipe = slapos.recipe.cmmi
shared = true
url = https://packages.groonga.org/source/groonga/groonga-12.0.7.tar.gz
md5sum = 5ef412a6941994e623cb50d76a8be261
url = https://packages.groonga.org/source/groonga/groonga-13.0.0.tar.gz
md5sum = 76aae9bc04c4047cbb31cc543bde8540
groonga-plugin-dir = @@LOCATION@@/lib/groonga/plugins/
# temporary patch to respect more tokens in natural language mode.
patches =
......@@ -48,8 +48,8 @@ environment =
[groonga-normalizer-mysql]
recipe = slapos.recipe.cmmi
shared = true
url = https://packages.groonga.org/source/groonga-normalizer-mysql/groonga-normalizer-mysql-1.1.8.tar.gz
md5sum = a1520691da3083e14bdc65a9ec57a620
url = https://packages.groonga.org/source/groonga-normalizer-mysql/groonga-normalizer-mysql-1.2.1.tar.gz
md5sum = d9b48b3ea9dc7423e7c40eb326d86395
groonga-plugin-dir = @@LOCATION@@/lib/groonga/plugins/
pre-configure = mkdir -p ${:groonga-plugin-dir}
make-targets = GROONGA_PLUGINS_DIR=${:groonga-plugin-dir} install
......
......@@ -9,6 +9,9 @@ extends =
../pcre/buildout.cfg
../perl/buildout.cfg
../perl-XML-Parser/buildout.cfg
../meson/buildout.cfg
../nodejs/buildout.cfg
../ninja/buildout.cfg
../xorg/buildout.cfg
[at-spi2-core]
......@@ -33,6 +36,18 @@ environment =
PATH=${intltool:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${atk:location}/lib/pkgconfig:${at-spi2-core:location}/lib/pkgconfig:${dbus:location}/lib/pkgconfig:${glib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
[gtk-materia-theme]
recipe = slapos.recipe.cmmi
url = https://github.com/nana-4/materia-theme/archive/eb83bc174fad52cba9541efa78587664ca19542c.tar.gz
md5sum = 7072e92f8395770f0ebb43403bd754cc
configure-command =
meson _build -Dprefix="@@LOCATION@@" -Dcolors=default,light,dark -Dsizes=default,compact
make-targets =
make-binary = meson install -C _build
environment =
PATH=${ninja:location}/bin:${meson:location}/bin:${glib:location}/bin:${nodejs:location}/bin:%(PATH)s
[gtk-3]
recipe = slapos.recipe.cmmi
shared = true
......
......@@ -13,8 +13,8 @@ parts = haproxy
[haproxy]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.haproxy.org/download/2.6/src/haproxy-2.6.7.tar.gz
md5sum = cfa36413f2bc5187ab34ffcdf71914d4
url = https://www.haproxy.org/download/2.6/src/haproxy-2.6.9.tar.gz
md5sum = 8c8fa1a2f9592efa2793bcc74e2ea501
configure-command = true
# for Linux kernel 2.6.28 and above, we use "linux-glibc" as the TARGET,
# otherwise use "generic".
......
[buildout]
extends =
../golang/buildout.cfg
../../stack/slapos.cfg
parts =
slapos-cookbook
......
[buildout]
extends =
buildout.hash.cfg
../../stack/slapos.cfg
../openssl/buildout.cfg
../jupyter/buildout.cfg
../../stack/monitor/buildout.cfg
......
......@@ -22,7 +22,7 @@ configure-options =
--disable-webp
patch-options = -p1
patches =
${:_profile_base_location_}/debian_4.2.0-1+deb11u1.patch#2ded3a01abc353bad4aa1a1f128d6d1a
${:_profile_base_location_}/debian_4.2.0-1+deb11u3.patch#d4396255ca214694501f4a44e8e685c6
environment =
CPPFLAGS=-I${libjpeg:location}/include -I${jbigkit:location}/include -I${zlib:location}/include
LDFLAGS=-L${libjpeg:location}/lib -Wl,-rpath=${libjpeg:location}/lib -L${jbigkit:location}/lib -Wl,-rpath=${jbigkit:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
......
......@@ -30,8 +30,8 @@ parts =
recipe = slapos.recipe.cmmi
shared = true
url = https://archive.mariadb.org//mariadb-${:version}/source/mariadb-${:version}.tar.gz
version = 10.4.25
md5sum = 76e6ee973adb7deb15d7936f710eb5a4
version = 10.4.28
md5sum = d098e9af77a26260a6a6c21a7ed0daf6
pre-configure =
set '\bSET(PLUGIN_AUTH_PAM YES CACHE BOOL "")' cmake/build_configurations/mysql_release.cmake
grep -q "$@"
......@@ -59,6 +59,8 @@ configure-options =
-DWITH_ROCKSDB_snappy=ON
-DWITH_ROCKSDB_ZSTD=ON
-DWITH_SAFEMALLOC=OFF
# TokuDB is removed in 10.6
-DWITHOUT_TOKUDB=true
-DPLUGIN_DAEMON_EXAMPLE=NO
-DPLUGIN_EXAMPLE=NO
-DPLUGIN_MROONGA=NO
......@@ -99,8 +101,8 @@ post-install =
# as plugin-dir ( https://mariadb.com/kb/en/server-system-variables/#plugin_dir )
recipe = slapos.recipe.cmmi
shared = true
url = https://packages.groonga.org/source/mroonga/mroonga-12.09.tar.gz
md5sum = 637d73b86239cc9c3758e9486746d430
url = https://packages.groonga.org/source/mroonga/mroonga-13.00.tar.gz
md5sum = e7bda4edd284e788abf78e3a6f485fbe
pre-configure =
rm -rf fake_mariadb_source
mkdir -p fake_mariadb_source
......@@ -134,8 +136,8 @@ environment =
### (we just override here for easier revert)
[mariadb-10.3]
<= mariadb-10.4
version = 10.3.35
md5sum = b7a2e69d103eda3dd61c8bad8775c7bd
version = 10.3.38
md5sum = 4cb91021431f514afbb1c1c2ab13944f
post-install =
ldd=`ldd %(location)s/lib/plugin/ha_rocksdb.so`
for x in ${lz4:location} ${snappy:location} ${zstd:location}
......
......@@ -12,22 +12,31 @@ extends =
parts =
nodejs
#[nodejs]
#<= nodejs-X.Y.Z
[nodejs]
<= nodejs-16.19.0
# nodejs 16 needs gcc > 8.3
[gcc]
min_version = 8.3
[nodejs-16.13.2]
[nodejs-16.19.0]
<= nodejs-base
openssl_location = ${openssl:location}
version = v16.13.2
md5sum = ae3a05fc273536f83c685a7425a7882d
version = v16.19.0
md5sum = e72b698681aff62bf17146ad70dc9425
patches =
https://raw.githubusercontent.com/nxhack/openwrt-node-packages/9e3ab4cc9fd5f19c25ccd6f19be5a9b47e2c6933/node/patches/v16.x/010-execvp-arg-list-too-long.patch#17bb14ea3a1b5b4832e3680e4edfeded
patch-options = -p1
PATH = ${pkgconfig:location}/bin:${python3:location}/bin:${patch:location}/bin/:%(PATH)s
post-install =
# configure header tarball for node-gyp
# https://stackoverflow.com/a/64052237
@@LOCATION@@/bin/node @@LOCATION@@/bin/npm config set --global tarball ${nodejs-headers-base-16.19.0:target}
[nodejs-headers-base-16.19.0]
<= nodejs-headers-base
version = v16.19.0
md5sum = e7bfbf135ae54d1dcca63bf17be84818
[nodejs-14.16.0]
<= nodejs-base
......@@ -81,3 +90,10 @@ environment =
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-Wl,-rpath=${:openssl_location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
LD_LIBRARY_PATH=${:openssl_location}/lib
[nodejs-headers-base]
recipe = slapos.recipe.build:download
shared = true
version =
md5sum =
url = https://nodejs.org/download/release/${:version}/node-${:version}-headers.tar.gz
......@@ -17,8 +17,8 @@ parts =
[openssl]
recipe = slapos.recipe.cmmi
shared = true
url = https://www.openssl.org/source/openssl-1.1.1o.tar.gz
md5sum = d05e96e200d2ff0aef20c114cb5f17bf
url = https://www.openssl.org/source/openssl-1.1.1t.tar.gz
md5sum = 1cfee919e0eac6be62c88c5ae8bcd91e
location = @@LOCATION@@
# 'prefix' option to override --openssldir/--prefix (which is useful
# when combined with DESTDIR). Used by slapos.package.git/obs
......@@ -48,8 +48,8 @@ environment =
[openssl-quictls]
<= openssl
url = https://github.com/quictls/openssl/archive/refs/tags/openssl-3.0.7+quic1.tar.gz
md5sum = 8e27cd201b554a33ed03a59f6c679c77
url = https://github.com/quictls/openssl/archive/refs/tags/openssl-3.0.8-quic1.tar.gz
md5sum = a203b9b158429ca75539b55a137d317b
[openssl-output]
# Shared binary location to ease migration
......
......@@ -8,15 +8,15 @@ parts =
[pycurl-env]
PATH = ${curl:location}/bin:${openssl:location}/bin:%(PATH)s
PYCURL_SSL_LIBRARY=openssl
CPPFLAGS=-I${openssl:location}/include
CFLAGS=-I${openssl:location}/include
PYCURL_SSL_LIBRARY = openssl
[pycurl]
recipe = zc.recipe.egg:custom
egg = pycurl
rpath =
${curl:location}/lib/
${openssl:location}/lib/
include-dirs =
${openssl:location}/include
library-dirs =
${openssl:location}/lib
rpath = ${:library-dirs}
${curl:location}/lib
environment = pycurl-env
......@@ -6,50 +6,93 @@ extends =
../libffi/buildout.cfg
../ncurses/buildout.cfg
../openssl/buildout.cfg
../patch/buildout.cfg
../pkgconfig/buildout.cfg
../python-2.7/buildout.cfg
../sqlite3/buildout.cfg
../zlib/buildout.cfg
[pycparser-shared]
# XXX:
recipe = slapos.recipe.cmmi
shared = true
url = https://github.com/eliben/pycparser/archive/release_v2.20.tar.gz
md5sum = a5d9ea5350a8edb8239af73913ea2858
configure-command = :
make-binary =
make-targets = python setup.py install --install-lib @@LOCATION@@
[pypy2]
recipe = slapos.recipe.cmmi
recipe = slapos.recipe.build
shared = true
url = https://downloads.python.org/pypy/pypy2.7-v7.3.1-src.tar.bz2
md5sum = 7608bd58940ffc5403632c2c786d83bb
configure-command =
sed -i '/"_tkinter":/s/^/#/' lib_pypy/tools/build_cffi_imports.py
cat <<EOF > Makefile
PREFIX = @@LOCATION@@
export PYPY_USESSION_BASENAME=slapos
export TMPDIR=\$(realpath ..)
all: pypy/goal/pypy-c lib_pypy/tools/build_cffi_imports.py
\$^
c_src_dir:
cd pypy/goal && PYTHONPATH=${pycparser-shared:location} $${PYTHON:-python2} ../../rpython/bin/rpython --batch --source --opt=jit --shared targetpypystandalone
ln -s ../usession-\$\$PYPY_USESSION_BASENAME-\$\$USER/testing_1 \$@
pypy/goal/pypy-c: c_src_dir
\$(MAKE) -C \$<
mv \$</libpypy-c.so \$</pypy-c pypy/goal
touch \$@
install:
mkdir -p \$(PREFIX)/bin \$(PREFIX)/include
find lib_pypy lib-python/2.7 -type d '(' '(' -name __pycache__ -o -name _tkinter -o -name test -o -name tests ')' -prune -o -print ')' \
|while read d; do mkdir -p \$(PREFIX)/\$\$d && find \$\$d -maxdepth 1 -type f ! -name '*.o' ! -name '*.c' |xargs -r cp -t \$(PREFIX)/\$\$d; done
d=lib-python/2.7/test && mkdir -p \$(PREFIX)/\$\$d && for x in __init__ pystone regrtest test_support; do echo \$\$d/\$\$x.py; done |xargs -r cp -t \$(PREFIX)/\$\$d
cd lib-python && cp conftest.py stdlib-version.* \$(PREFIX)/lib-python
cp -r include/pypy_*.h include/PyPy.h pypy/module/cpyext/include/* pypy/module/cpyext/parse/* \$(PREFIX)/include
cd pypy/goal && cp libpypy-c.so \$(PREFIX)/bin && cp pypy-c \$(PREFIX)/bin/pypy
version = 2.7
url = https://downloads.python.org/pypy/pypy${:version}-v7.3.11-src.tar.bz2
md5sum = 249ad8b0ddffbb16a9d416c3ae85dd2c
pycparser-url = https://github.com/eliben/pycparser/archive/release_v2.21.tar.gz
pycparser-md5sum = cf4b60f0beca2a25bb599e9e858a8223
patch-binary = ${patch:location}/bin/patch
patches = ${python2.7-lib-patches:patches}
init =
import os, sys
options['executable'] = os.path.join(location, 'bin', 'pypy')
if sys.version_info.major == 2:
self.python = sys.executable
else:
# XXX: We don't care which Python 2 we use to build PyPy
# and we don't want to rebootstrap twice.
depends = options.depends
options.depends = set(depends)
self.python = self.buildout['python2.7']['executable']
options.depends = depends
install =
import os
env = self.environ
pycparser = self.extract(self.download(
options['pycparser-url'], options['pycparser-md5sum']))
pycparser_lib = os.path.join(pycparser, 'lib')
call((self.python, 'setup.py', 'install', '--install-lib', pycparser_lib),
cwd=guessworkdir(pycparser), env=env)
pypy_src = guessworkdir(self.extract(self.download()))
build_cffi_imports = 'lib_pypy/pypy_tools/build_cffi_imports.py'
version = options['version']
self.applyPatchList(options['patches'].replace('#',' '),
'--no-backup-if-mismatch -p2', options['patch-binary'],
os.path.join(pypy_src, 'lib-python', version))
with open(os.path.join(pypy_src, 'Makefile'), 'w') as f:
f.write(options['makefile'].replace('\n|','\n')[1:] % {
'build_cffi_imports': build_cffi_imports,
'location': location,
'pycparser': pycparser_lib,
'python': self.python,
'version': version,
})
for cmd in ( ('sed', '-i', '/"_tkinter",/s/^/#/', build_cffi_imports)
, ('make',)
, ('make', 'install')
):
call(cmd, cwd=pypy_src, env=env)
for x in '', version[0], version:
os.symlink('pypy', os.path.join(location, 'bin', 'python' + x))
# WKRD: Buildout does not preserve leading tabs in .installed.cfg
# so prefix with a dummy character.
makefile =
|PREFIX = %(location)s
|export PYPY_USESSION_BASENAME=slapos
|export TMPDIR=$(realpath ..)
|all: pypy/goal/pypy-c %(build_cffi_imports)s
| $^
|c_src_dir:
| cd pypy/goal && PYTHONPATH=%(pycparser)s %(python)s ../../rpython/bin/rpython --batch --source --opt=jit --shared targetpypystandalone
| ln -s ../usession-$$PYPY_USESSION_BASENAME-$$USER/testing_1 $@
|pypy/goal/pypy-c: c_src_dir
| $(MAKE) -C $<
| mv $</libpypy-c.so $</pypy-c pypy/goal
| touch $@
|install:
| mkdir -p $(PREFIX)/bin $(PREFIX)/include
| find lib_pypy lib-python/%(version)s -type d '(' '(' -name __pycache__ -o -name _tkinter -o -name test -o -name tests ')' -prune -o -print ')' \
| |while read d; do mkdir -p $(PREFIX)/$$d && find $$d -maxdepth 1 -type f ! -name '*.o' ! -name '*.c' ! -name '*.pyc' |xargs -r cp -t $(PREFIX)/$$d; done
| d=lib-python/%(version)s/test && mkdir -p $(PREFIX)/$$d && for x in __init__ pystone regrtest test_support; do echo $$d/$$x.py; done |xargs -r cp -t $(PREFIX)/$$d
| cd lib-python && cp conftest.py stdlib-version.* $(PREFIX)/lib-python
| cp -r include/pypy_*.h pypy/module/cpyext/include/* pypy/module/cpyext/parse/* $(PREFIX)/include
| cd pypy/goal && cp libpypy-c.so $(PREFIX)/bin && cp pypy-c $(PREFIX)/bin/pypy
| rm $(PREFIX)/lib_pypy/_cffi_ssl/tools/make_ssl_data.py # this is a Py3 script
| rmdir $(PREFIX)/lib_pypy/_cffi_ssl/tools
| cd $(PREFIX) && find lib_pypy lib-python/%(version)s -name '*.py' |bin/pypy -Bm py_compile -
# the entry "-Wl,-rpath=${file:location}/lib" below is needed by python-magic,
# which would otherwise load the system libmagic.so with ctypes
environment =
C_INCLUDE_PATH=${bzip2:location}/include:${gdbm:location}/include:${libexpat:location}/include:${ncurses:location}/include:${ncurses:location}/include:${openssl:location}/include:${sqlite3:location}/include:${zlib:location}/include
LDFLAGS=-L${bzip2:location}/lib -L${gdbm:location}/lib -L${libexpat:location}/lib -L${libffi:location}/lib -L${ncurses:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -L${zlib:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib
LDFLAGS=-L${bzip2:location}/lib -L${gdbm:location}/lib -L${libexpat:location}/lib -L${libffi:location}/lib -L${ncurses:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -L${zlib:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${file:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${libffi:location}/lib/pkgconfig
......@@ -26,6 +26,13 @@ scripts =
arguments = sys.argv[1:] + ["bootstrap"]
python = python2.7
[python2.7-lib-patches]
# Patches that apply to both CPython & PyPy.
patches =
${:_profile_base_location_}/asyncore_poll_insteadof_select.patch#ab6991c0ee6e25aeb8951e71f280a2f1
${:_profile_base_location_}/fix_compiler_module_issue_20613.patch#94443a77f903e9de880a029967fa6aa7
${:_profile_base_location_}/py27-subproc-closefds-fast.patch#e495e44491694a8972da11739206f2e6
[python2.7]
recipe = slapos.recipe.cmmi
shared = true
......@@ -37,12 +44,9 @@ version = 2.7
executable = @@LOCATION@@/bin/python${:version}
patch-options = -p1
patches =
${:_profile_base_location_}/fix_compiler_module_issue_20613.patch#94443a77f903e9de880a029967fa6aa7
${:_profile_base_location_}/pytracemalloc_pep445.patch#9f3145817afa2b7fad801fde8447e396
patches = ${python2.7-lib-patches:patches}
${:_profile_base_location_}/disabled_module_list.patch#e038a8016475574c810cbaaf0e42f4ac
${:_profile_base_location_}/asyncore_poll_insteadof_select.patch#ab6991c0ee6e25aeb8951e71f280a2f1
${:_profile_base_location_}/py27-subproc-closefds-fast.patch#e495e44491694a8972da11739206f2e6
${:_profile_base_location_}/pytracemalloc_pep445.patch#9f3145817afa2b7fad801fde8447e396
url =
http://www.python.org/ftp/python/${:package_version}/Python-${:package_version}${:package_version_suffix}.tar.xz
configure-options =
......@@ -70,4 +74,4 @@ post-install = cd '%(prefix)s' &&
environment =
PATH=${patch:location}/bin:${xz-utils:location}/bin:%(PATH)s
CPPFLAGS=-I${zlib:location}/include -I${readline:location}/include -I${libexpat:location}/include -I${libffi:location}/include -I${ncurses:location}/include -I${ncurses:location}/include -I${bzip2:location}/include -I${gdbm:location}/include -I${openssl:location}/include -I${sqlite3:location}/include -I${gettext:location}/include -fPIC
LDFLAGS=-L${zlib:location}/lib -L${readline:location}/lib -L${libexpat:location}/lib -L${libffi:location}/lib -L${ncurses:location}/lib -L${bzip2:location}/lib -L${gdbm:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${sqlite3:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${file:location}/lib
LDFLAGS=-L${bzip2:location}/lib -L${gdbm:location}/lib -L${gettext:location}/lib -L${libexpat:location}/lib -L${libffi:location}/lib -L${ncurses:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -L${readline:location}/lib -L${zlib:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${file:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib
......@@ -148,7 +148,7 @@ content =
"editor.tabSize": 2,
"plantuml.server": "https://plantuml.host.vifib.net/svg/",
"plantuml.render": "PlantUMLServer",
"python.pythonPath": "${python:executable}",
"python.defaultInterpreterPath": "${python:executable}",
"gitlens.remotes": [{ "domain": "lab.nexedi.com", "type": "GitLab" }]
},
"warnOnPotentiallyInsecureHostPattern": false
......
......@@ -19,8 +19,8 @@ md5sum = 6343592161a349bb40e0de16ce67aa51
[yarn.lock]
_update_hash_filename_ = yarn.lock
md5sum = c28b0fa56592066e7031de696a5d02b3
md5sum = 6435aaf48cbbfe7911505c2c45cc53ea
[ms-python-disable-jedi-buildout.patch]
_update_hash_filename_ = ms-python-disable-jedi-buildout.patch
md5sum = 0ec840ffecceaf7e91a037d439962415
md5sum = 0b45bdaf2a353d4644c2292e07a3f2f8
......@@ -35,7 +35,7 @@ urls = vscode-bat https://open-vsx.org/api/vscode/bat/1.66.2/file/vscode.bat-1.6
vscode-lua https://open-vsx.org/api/vscode/lua/1.66.2/file/vscode.lua-1.66.2.vsix 9dc3f27215ec54eb5826be1028fb9dea
vscode-make https://open-vsx.org/api/vscode/make/1.66.2/file/vscode.make-1.66.2.vsix 40e96e4b77a76d5a4d3bb3003213c984
vscode-markdown https://open-vsx.org/api/vscode/markdown/1.66.2/file/vscode.markdown-1.66.2.vsix 1e5255ea17d052b923adb5c9c348daf6
vscode-markdown-language-features https://open-vsx.org/api/vscode/markdown-language-features/1.66.2/file/vscode.markdown-language-features-1.66.2.vsix 1eb8385b1bd0b7e4423550527cfc9521
vscode-markdown-language-features https://open-vsx.org/api/vscode/markdown-language-features/1.64.2/file/vscode.markdown-language-features-1.64.2.vsix 965b7d0fed9ae49df6dd0c923da7ad7e
vscode-merge-conflict https://open-vsx.org/api/vscode/merge-conflict/1.66.2/file/vscode.merge-conflict-1.66.2.vsix e15004f78d0d543e99b229997c5b30c1
vscode-npm https://open-vsx.org/api/vscode/npm/1.66.2/file/vscode.npm-1.66.2.vsix f0b5566477b44dfbbb6c51442c0156c3
ms-vscode-node-debug https://open-vsx.org/api/ms-vscode/node-debug/1.45.0/file/ms-vscode.node-debug-1.45.0.vsix 676769e9901f5f51ed6a21d7c6a831fb
......@@ -64,16 +64,16 @@ urls = vscode-bat https://open-vsx.org/api/vscode/bat/1.66.2/file/vscode.bat-1.6
vscode-theme-solarized-dark https://open-vsx.org/api/vscode/theme-solarized-dark/1.66.2/file/vscode.theme-solarized-dark-1.66.2.vsix 4d4d5b8a9dc298614c1e33aab061e58d
vscode-theme-tomorrow-night-blue https://open-vsx.org/api/vscode/theme-tomorrow-night-blue/1.66.2/file/vscode.theme-tomorrow-night-blue-1.66.2.vsix 7177906c411b424e6fbfc56e529a2d76
vscode-typescript https://open-vsx.org/api/vscode/typescript/1.66.2/file/vscode.typescript-1.66.2.vsix 3ab8adbc1d624020ec53ce8886b379e9
vscode-typescript-language-features https://open-vsx.org/api/vscode/typescript-language-features/1.66.2/file/vscode.typescript-language-features-1.66.2.vsix 9bf14498209062cde3a642ce129ec6e0
vscode-typescript-language-features https://open-vsx.org/api/vscode/typescript-language-features/1.62.3/file/vscode.typescript-language-features-1.62.3.vsix de0fcfc97774ee2804c7952e1267d34a
vscode-vb https://open-vsx.org/api/vscode/vb/1.66.2/file/vscode.vb-1.66.2.vsix 75c5d545ef4b16d13ccfcc7f458b2336
vscode-vscode-theme-seti https://open-vsx.org/api/vscode/vscode-theme-seti/1.66.2/file/vscode.vscode-theme-seti-1.66.2.vsix 138ffbc1baad2a7e3211af579428ed4a
vscode-xml https://open-vsx.org/api/vscode/xml/1.66.2/file/vscode.xml-1.66.2.vsix 0866e13f45a18ce2f932411600bf8529
vscode-yaml https://open-vsx.org/api/vscode/yaml/1.66.2/file/vscode.yaml-1.66.2.vsix 2f9ab05c8f9bc676cecb59c26d8ff799
EditorConfig-EditorConfig https://open-vsx.org/api/EditorConfig/EditorConfig/0.16.6/file/EditorConfig.EditorConfig-0.16.6.vsix e787245e6c68617178ae995ad97c3ccb
dbaeumer-vscode-eslint https://open-vsx.org/api/dbaeumer/vscode-eslint/2.1.20/file/dbaeumer.vscode-eslint-2.1.20.vsix 1cb024ac02ebeb5ce6b0dfed6e51cdd2
dbaeumer-vscode-eslint https://open-vsx.org/api/dbaeumer/vscode-eslint/2.4.0/file/dbaeumer.vscode-eslint-2.4.0.vsix 35fa3a096ee97e327ff2fde4c2ea2309
ms-vscode-references-view https://open-vsx.org/api/ms-vscode/references-view/0.0.89/file/ms-vscode.references-view-0.0.89.vsix 7ec05cb01a77ee7f6c5198a5225fa707
ms-python-python https://open-vsx.org/api/ms-python/python/2020.10.332292344/file/ms-python.python-2020.10.332292344.vsix e5cb6d850db94278b9fda02e38851361
perrinjerome-vscode-zc-buildout https://open-vsx.org/api/perrinjerome/vscode-zc-buildout/0.9.0/file/perrinjerome.vscode-zc-buildout-0.9.0.vsix 82d96aab8f0960f956b93aca422a58ee
ms-python-python https://open-vsx.org/api/ms-python/python/2022.4.1/file/ms-python.python-2022.4.1.vsix 94ad676e762885fbd4e32d931aedd1de
perrinjerome-vscode-zc-buildout https://open-vsx.org/api/perrinjerome/vscode-zc-buildout/0.9.2/file/perrinjerome.vscode-zc-buildout-0.9.2.vsix 7782da250445b1b5781577aecf27c38c
jebbs-plantuml https://open-vsx.org/api/jebbs/plantuml/2.14.0/file/jebbs.plantuml-2.14.0.vsix 13fa7cbd14a30ecca166c41a307c7a73
rafaelmaiolla-diff https://open-vsx.org/api/rafaelmaiolla/diff/0.0.1/file/rafaelmaiolla.diff-0.0.1.vsix 1d8f868bc19b7d703c1be2bf99c4c7f9
perrinjerome-git-commit-syntax https://open-vsx.org/api/perrinjerome/git-commit-syntax/0.0.1/file/perrinjerome.git-commit-syntax-0.0.1.vsix 46625f2f05e244911c2cb9cc5032c0ef
......
import configparser
import hashlib
import logging
import os
import requests
import hashlib
urls = []
session = requests.Session()
......@@ -47,7 +49,9 @@ for plugin_and_version in '''\
vscode/lua/latest
vscode/make/latest
vscode/markdown/latest
vscode/markdown-language-features/latest
# Activating extension 'Markdown Language Features (built-in)' failed:
# i.workspace.onWillDropOnTextEditor is not a function
vscode/markdown-language-features/1.64.2
vscode/merge-conflict/latest
vscode/npm/latest
ms-vscode/node-debug/latest
......@@ -76,19 +80,15 @@ for plugin_and_version in '''\
vscode/theme-solarized-dark/latest
vscode/theme-tomorrow-night-blue/latest
vscode/typescript/latest
vscode/typescript-language-features/latest
vscode/typescript-language-features/1.62.3
vscode/vb/latest
vscode/vscode-theme-seti/latest
vscode/xml/latest
vscode/yaml/latest
EditorConfig/EditorConfig/latest
# latest (2.2.2) does not activate:
# Activating extension 'ESLint' failed: Class extends value undefined is not a constructor or null
dbaeumer/vscode-eslint/2.1.20
dbaeumer/vscode-eslint/latest
ms-vscode/references-view/latest
# golang.Go removed because it overwrites the PATH in theia shell
# golang/Go/0.16.2
ms-python/python/2020.10.332292344
ms-python/python/2022.4.1
perrinjerome/vscode-zc-buildout/latest
jebbs/plantuml/2.14.0
rafaelmaiolla/diff/latest
......@@ -112,7 +112,7 @@ cfg.set('theia-download-plugins', 'urls', '\n'.join(urls))
with open('download-plugins.cfg', 'w') as f:
f.write(f"""\
# This file is automatically generated from {__file__}
# This file is automatically generated from {os.path.basename(__file__)}
# Do not edit directly.
""")
cfg.write(f)
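
For reference, the plugin list above is assembled by this script: each entry is a "publisher-name url md5sum" line following the open-vsx URL pattern visible in the list. A minimal standalone sketch of that step, assuming that URL layout (the helper name is illustrative, not the script's own):

# Sketch: build one "name url md5sum" line for a pinned plugin version,
# following the open-vsx URL pattern visible in the list above.
import hashlib
import requests

def plugin_line(publisher, name, version, session=None):
    session = session or requests.Session()
    url = (f"https://open-vsx.org/api/{publisher}/{name}/{version}"
           f"/file/{publisher}.{name}-{version}.vsix")
    data = session.get(url).content          # download the .vsix once
    md5 = hashlib.md5(data).hexdigest()      # checksum recorded in the cfg
    return f"{publisher}-{name} {url} {md5}"

# e.g. plugin_line("vscode", "yaml", "1.66.2") should reproduce the
# vscode-yaml entry above, as long as the published file is unchanged.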
......@@ -5,13 +5,13 @@ This slows down jedi a lot and can make it crash on some scripts.
See also https://github.com/davidhalter/jedi/issues/1325
---
extension/pythonFiles/lib/python/jedi/inference/sys_path.py | 1 +
extension/pythonFiles/lib/jedilsp/jedi/inference/sys_path.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/extension/pythonFiles/lib/python/jedi/inference/sys_path.py b/extension/pythonFiles/lib/python/jedi/inference/sys_path.py
index 5b82ec1..256b19c 100644
--- a/extension/pythonFiles/lib/python/jedi/inference/sys_path.py
+++ b/extension/pythonFiles/lib/python/jedi/inference/sys_path.py
diff --git a/extension/pythonFiles/lib/jedilsp/jedi/inference/sys_path.py b/extension/pythonFiles/lib/jedilsp/jedi/inference/sys_path.py
index 062a0aa..7b5fd42 100644
--- a/extension/pythonFiles/lib/jedilsp/jedi/inference/sys_path.py
+++ b/extension/pythonFiles/lib/jedilsp/jedi/inference/sys_path.py
@@ -137,6 +137,7 @@ def check_sys_path_modifications(module_context):
def discover_buildout_paths(inference_state, script_path):
......
This diff is collapsed.
......@@ -589,6 +589,16 @@ environment =
CPPFLAGS=-I${libXt:location}/include
LDFLAGS=-L${libX11:location}/lib -Wl,-rpath=${libX11:location}/lib -L${libxkbfile:location}/lib -Wl,-rpath=${libxkbfile:location}/lib
[xdpyinfo]
recipe = slapos.recipe.cmmi
url = https://www.x.org/releases/individual/app/xdpyinfo-1.3.3.tar.xz
md5sum = f67116760888f2e06486ee3d179875d2
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${xorg-util-macros:location}/share/pkgconfig:${xproto:location}/lib/pkgconfig:${libX11:location}/lib/pkgconfig:${xextproto:location}/lib/pkgconfig:${libX11:location}/lib/pkgconfig:${libXext:location}/lib/pkgconfig:${libXtst:location}/lib/pkgconfig:${recordproto:location}/lib/pkgconfig:${kbproto:location}/lib/pkgconfig:${libXi:location}/lib/pkgconfig:${libXi:pkg_config_depends}
CPPFLAGS=-I${libXt:location}/include
LDFLAGS=-L${libX11:location}/lib -Wl,-rpath=${libX11:location}/lib -Wl,-rpath=${libXi:location}/lib -Wl,-rpath=${libXtst:location}/lib -Wl,-rpath=${libXext:location}/lib -Wl,-rpath=${libXau:location}/lib -Wl,-rpath=${libXdmcp:location}/lib -Wl,-rpath=${libxcb:location}/lib
[xserver]
# Adds Xvfb functionality
recipe = slapos.recipe.cmmi
......
This diff is collapsed.
......@@ -4,8 +4,8 @@
"./format-json"
],
"{component,software,stack}/**": [
"python -c 'import sys, os.path, subprocess; [subprocess.check_call((\"./update-hash\", buildout_hash)) for buildout_hash in { os.path.join(os.path.dirname(staged), \"buildout.hash.cfg\") for staged in sys.argv[1:]} if os.path.exists(buildout_hash)]'",
"python -c 'import sys, os.path, subprocess; [subprocess.check_call((\"git\", \"add\", buildout_hash)) for buildout_hash in { os.path.join(os.path.dirname(staged), \"buildout.hash.cfg\") for staged in sys.argv[1:]} if os.path.exists(buildout_hash)]'"
"python3 -c 'import sys, os.path, subprocess; [subprocess.check_call((\"./update-hash\", buildout_hash)) for buildout_hash in { os.path.join(os.path.dirname(staged), \"buildout.hash.cfg\") for staged in sys.argv[1:]} if os.path.exists(buildout_hash)]'",
"python3 -c 'import sys, os.path, subprocess; [subprocess.check_call((\"git\", \"add\", buildout_hash)) for buildout_hash in { os.path.join(os.path.dirname(staged), \"buildout.hash.cfg\") for staged in sys.argv[1:]} if os.path.exists(buildout_hash)]'"
]
},
"devDependencies": {
......
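
Unrolled for readability, the first python3 one-liner above does roughly the following for every staged file under component/, software/ or stack/ (the second one-liner is the same loop but runs git add instead). This is a sketch, not a drop-in replacement:

import os.path
import subprocess
import sys

# Staged paths are passed as arguments by lint-staged.
staged_files = sys.argv[1:]
# Deduplicate: one buildout.hash.cfg per directory containing a staged file.
hash_files = {
    os.path.join(os.path.dirname(staged), "buildout.hash.cfg")
    for staged in staged_files
}
for buildout_hash in hash_files:
    if os.path.exists(buildout_hash):
        subprocess.check_call(("./update-hash", buildout_hash))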
......@@ -15,15 +15,15 @@
[instance]
filename = instance.cfg.in
md5sum = e8aae0fe3a8bc3f006b8638ed326bbcb
md5sum = ed2bd38b78f2a66f474205249f6e6f2c
[template-instance-beremiz]
filename = instance-beremiz.cfg.jinja2.in
md5sum = 2b990148e527117bcfb366f8b700c807
md5sum = bca63fc8943e7c5fa7dd43841e49bf95
[template-instance-beremiz-test]
filename = instance-beremiz-test.cfg.jinja2.in
md5sum = a2fa2b9d3a225a1dd71db67bd4fea769
md5sum = 2d6d892d1cc4a09c598fa14c721069b6
[template-fluxbox-menu.in]
filename = fluxbox-menu.in
......
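
Each md5sum entry above is expected to be the MD5 checksum of the file named by the section's filename key (./update-hash refreshes them). A quick local check, as a sketch run from the directory that contains both files:

import configparser
import hashlib

# Sketch: verify one buildout.hash.cfg entry against the file on disk.
cfg = configparser.ConfigParser()
cfg.read("buildout.hash.cfg")
section = cfg["template-instance-beremiz"]
with open(section["filename"], "rb") as f:
    actual = hashlib.md5(f.read()).hexdigest()
print(actual == section["md5sum"])  # True when the recorded hash is current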
......@@ -13,6 +13,12 @@ environment =
XORG_LOCK_DIR=${xserver:lock-dir}
DISPLAY=${xserver:display}
[enable-matieria-theme]
inline =
[Settings]
gtk-font-name=FreeSans,12
gtk-theme-name=Materia
[xdotool]
recipe = slapos.cookbook:wrapper
command-line = {{ xdotool_bin }}
......@@ -24,9 +30,9 @@ environment =
[beremiz-env.sh]
# Add openssl needed to build matiec during tests
inline +=
#export OPENSSL_CRYPTO_LIBRARY={{ openssl_location }}/lib
#export OPENSSL_INCLUDE_DIR={{ openssl_location }}/include
export OPENSSL_ROOT_DIR={{ openssl_location }}
export LDFLAGS=-L{{ openssl_location }}/lib
export TESSDATAPATH={{ tesseract_data }}
[runTestSuite]
env.sh = ${beremiz-env.sh:output}
......
......@@ -11,6 +11,9 @@ var = ${buildout:directory}/var
tmp = ${buildout:directory}/tmp
log = ${:var}/log
vnc = ${buildout:directory}/.vnc
themes = ${buildout:directory}/.themes
config = ${buildout:directory}/.config
gtk3-config = ${:config}/gtk-3.0
scripts = ${:etc}/run
services = ${:etc}/service
promise = ${:etc}/promise
......@@ -159,6 +162,21 @@ recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:promise}/xserver-is-up
command-line = bash -c "[ -S ${xserver:lock-dir}/.X11-unix/X${xserver:display-num} ]"
[install-gtk-materia-theme]
recipe = plone.recipe.command
command =
cp -r {{ gtk_materia_theme }}/share/themes/* ${directory:themes}/
update-command = ${:command}
stop-on-error = true
[enable-matieria-theme]
recipe = slapos.recipe.template
inline =
[Settings]
gtk-font-name=FreeSans,10
gtk-theme-name=Materia
output = ${directory:gtk3-config}/settings.ini
[fluxbox-menu]
recipe = slapos.recipe.template:jinja2
url = {{ fluxbox_menu }}
......@@ -201,6 +219,7 @@ inline =
export LD_LIBRARY_PATH={{ mesa_location }}/lib
export PATH=${directory:bin}:{{ git_bin_dir }}:{{ cmake_bin }}:{{ autoconf_bin }}:{{ automake_bin }}:{{ matiec_location }}/bin:{{ bison_location }}/bin:{{ flex_location }}/bin:{{ bin_directory }}:{{ gcc_location }}/bin:$PATH
export XDG_DATA_DIR={{ gtk3_location }}/share
export HOME=${buildout:directory}
export GSETTINGS_SCHEMA_DIR={{ gtk3_location }}/share/glib-2.0/schemas
export FONTCONFIG_FILE=${font-config:output}
export DISPLAY=${xserver:display}
......@@ -208,6 +227,8 @@ inline =
export LANG=C.UTF-8
export LC_ALL=C.UTF-8
export BEREMIZPYTHONPATH={{ python_bin }}
export OPCUA_DEFAULT_HOST={{ ipv4 }}
export BEREMIZ_LOCAL_HOST={{ ipv4 }}
output = ${directory:bin}/beremiz-env.sh
......@@ -261,6 +282,8 @@ parts =
novnc-promise
xserver-promise
x11vnc-listen-promise
enable-matieria-theme
install-gtk-materia-theme
beremiz-x11
nginx-launcher
nginx-graceful
......
......@@ -61,6 +61,7 @@ extra-context =
raw gcc_location {{ gcc_location }}
raw git_bin_dir {{ git_location }}
raw gtk3_location {{ gtk3_location }}
raw gtk_materia_theme {{ gtk_materia_theme}}
raw matiec_location {{ matiec_location }}
raw mesa_location {{ mesa_location }}
raw nginx_executable {{ nginx_executable }}
......@@ -86,3 +87,4 @@ extra-context =
raw opencv_location {{ opencv_location }}
raw openssl_location {{ openssl_location }}
raw nxdtest_dir {{ buildout_directory }}
raw tesseract_data {{ tesseract_location }}/share/tessdata
......@@ -22,6 +22,7 @@ extends =
../../component/xorg/buildout.cfg
../../component/pytest/buildout.cfg
../../component/opencv/buildout.cfg
../../component/tesseract/buildout.cfg
../../component/xterm/buildout.cfg
../../stack/monitor/buildout.cfg
../../stack/nxdtest.cfg
......@@ -34,6 +35,7 @@ parts +=
Modbus
xterm
instance
xdpyinfo
[gcc]
# Always build GCC for Fortran (see openblas).
......@@ -86,6 +88,7 @@ eggs =
${beremiz-setup:egg}
opcua
msgpack
click
[python-interpreter]
eggs +=
......@@ -93,9 +96,9 @@ eggs +=
[beremiz]
recipe = slapos.recipe.build:download-unpacked
# download beremiz at revision caee3ad3b7bb2865c77328de5ffdaeec61dbf49f
url = https://github.com/beremiz/beremiz/archive/caee3ad3b7bb2865c77328de5ffdaeec61dbf49f.tar.gz
md5sum = 52f9407e1706cdecf01fabfc61090276
# download beremiz from github
url = https://github.com/beremiz/beremiz/archive/a8efd2fee83733939a1fa48ea924bf7a7a5ef819.tar.gz
md5sum = 2a141b97047896bb9206c221aa56fd55
[beremiz-setup]
recipe = zc.recipe.egg:develop
......@@ -139,6 +142,7 @@ context =
key gcc_location gcc:prefix
key git_location git:location
key gtk3_location gtk-3:location
key gtk_materia_theme gtk-materia-theme:location
key instance_template_type :type
key matiec_location matiec:location
key mesa_location mesa:location
......@@ -150,6 +154,7 @@ context =
key opencv_location opencv:location
key openssl_location openssl:location
key sikulix_bin sikuli:output
key tesseract_location tesseract:location
key xdotool_location xdotool:location
key xserver_location xserver:location
key xterm_location xterm:location
......@@ -205,3 +210,4 @@ futures = 3.3.0
trollius = 2.2.1
pathlib = 1.0.1
ddt = 1.4.4
click = 7.1.2
......@@ -45,8 +45,10 @@ inline =
#!/bin/sh -e
cd ${beremiz-repository:location}/tests/ide_tests/
testlist=$(ls -d *.sikuli)
cd ${beremiz-repository:location}/tests/cli_tests/
clitestlist=$(ls -d *.bash)
rm -f ${:nxdtest}
for test in $testlist; do
for test in $(echo $testlist $clitestlist); do
if [ -z "$test" ]; then
continue;
fi
......
{
"$schema": "https://json-schema.org/draft/2019-09/schema",
"description": "Parameters to instantiate beremiz-runtime.",
"type": "object",
"properties": {
"runtime_plc_url": {
"description": "The network (http / https) address of the gzipped PLC source project program file.",
"type": "string",
"default": ""
}
}
}
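
A hypothetical request parameter set matching this schema (the URL below is a placeholder, not a real project):

# Hypothetical parameters for a beremiz-runtime instance request;
# only "runtime_plc_url" comes from the schema above.
parameters = {
    "runtime_plc_url": "https://example.com/my-plc-project.tar.gz",
}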
{
"$schema": "https://json-schema.org/draft/2019-09/schema",
"description": "Values returned by Beremiz-runtime's instantiation.",
"additionalProperties": false,
"properties": {},
"type": "object"
}
{
"name": "Beremiz-runtime",
"description": "Beremiz-runtime is a headless implementation of Beremiz-ide which is responsible for compiling and running PLC programs.",
"serialisation": "xml",
"software-type": {
"default": {
"title": "Default",
"software-type": "default",
"description": "Default",
"request": "instance-input-schema.json",
"response": "instance-output-schema.json"
}
}
}
Available ``software-type`` values
==================================
- ``default``
Recommended for development and production use. Automatic creation of
erp5-site.
Notes
=====
This software release is not intended to be accessed directly, but through a
front-end instance which is expected to contain the RewriteRules_ (or
equivalent) needed to relocate Zope's URLs via its VirtualHostMonster_. See the
``frontend`` erp5 instance parameter.
By default, ERP5 connects to the public cloudooo at https://cloudooo.erp5.net/.
See the ``cloudooo`` Software Release to set up a cloudooo cluster if necessary.
Replication
===========
Replication allows setting up an ERP5 instance whose data follows another
instance.
Relations between ERP5 instances in a replication graph depend on what is
supported by individual data managers (ex: a neo cluster can replicate from a
neo cluster which itself replicates from a third).
Replication lag constraints (aka sync/async replication) depend on individual
data managers (ex: neo replication between clusters is always asynchronous).
Ignoring replication lag, replicated data can be strictly identical (ex: a
replicated ZODB or SQL database will contain the same data as upstream), or
may imply some remapping (ex: replicating Zope logs from an instance with 2 zope
families with 2 partitions of 2 zopes each to an instance with a single zope
total).
Data whose replication is supported
-----------------------------------
- neo database
Data whose replication will eventually be supported
---------------------------------------------------
- mariadb database
- zope ``zope-*-access.log`` and ``zope-*-Z2.log``
- ``mariadb-slow.log``
Data whose replication is not planned
-------------------------------------
- zeo: use neo instead
Setting up replication
----------------------
In addition to your usual parameter set, you need to provide the following parameters::
{
"zope-partition-dict": {}, So no zope is instantiated
"zodb": [
{
"storage-dict": {
"upstream-masters": ..., As published by to-become upstream ERP5 instance as "neo-masters"
},
"type": "neo", The only ZODB type supporting replication
...
}
...
]
...
}
Port ranges
===========
This software release assigns the following port ranges by default:
==================== ==========
Partition type Port range
==================== ==========
memcached-persistent 2000-2009
memcached-volatile 2010-2019
smtp 2025-2029
neo (admin, master) 2050-2052
mariadb 2099
zeo 2100-2149
balancer 2150-2199
zope 2200-*
jupyter 8888
caucase 8890,8891
==================== ==========
Non-zope partitions are unique in an ERP5 cluster, so you shouldn't have to
care about them as a user (but a Software Release developer needs to know
them).
Zope partitions should be assigned port ranges starting at 2200, incrementing
by some value which depends on how many zope processes you want per partition
(see the ``port-base`` parameter in ``zope-partition-dict``).
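
For illustration, a hypothetical fragment of such a request (only port-base comes from the text above; the family names and the overall shape of the zope-partition-dict entries are assumptions, not this Software Release's documented schema):

# Hypothetical zope-partition-dict fragment: two partitions with port
# ranges starting at 2200 and spaced apart; only "port-base" is taken
# from the documentation above, everything else is illustrative.
zope_partition_dict = {
    "main": {"port-base": 2200},        # this partition uses 2200 and up
    "activities": {"port-base": 2220},  # leaves ~20 ports for the first one
}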
Notes to the Software Release developer: These ranges are not strictly
defined. Not every port is actually used, so one may reduce already-assigned
ranges if needed (ex: memcached partitions actually use fewer ports). There
should be enough room for evolution (as between smtp and mariadb types). It is
important not to allocate any port after 2200, as users may have assigned ports
to their zope processes.
.. _RewriteRules: http://httpd.apache.org/docs/current/en/mod/mod_rewrite.html#rewriterule
.. _VirtualHostMonster: http://docs.zope.org/zope2/zope2book/VirtualHosting.html
\ No newline at end of file
This diff is collapsed.
{
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "Values returned by ERP5 instantiation",
"additionalProperties": false,
"properties": {
"hosts-dict": {
"description": "Hosts mapping, including auto-generated entries",
"patternProperties": {
".*": {
"description": "IP current entry resolves to",
"type": "string"
}
},
"type": "object"
},
"site-id": {
"description": "Chosen ERP5Site object identifier",
"type": "string"
},
"inituser-login": {
"description": "Initial user login",
"type": "string"
},
"inituser-password": {
"description": "Initial user password",
"type": "string"
},
"deadlock-debugger-password": {
"description": "Deadlock debugger password",
"type": "string"
},
"memcached-persistent-url": {
"description": "Persistent memcached access information",
"pattern": "^memcached://",
"type": "string"
},
"memcached-volatile-url": {
"description": "Volatile memcached access information",
"pattern": "^memcached://",
"type": "string"
},
"mariadb-database-list": {
"description": "Relational database access information",
"items": {
"pattern": "^mysql://",
"type": "string"
},
"uniqueItems": true,
"type": "array"
},
"mariadb-test-database-list": {
"description": "Relational database access information",
"items": {
"pattern": "^mysql://",
"type": "string"
},
"uniqueItems": true,
"type": "array"
},
"neo-masters": {
"$ref": "../neoppod/instance-neo-output-schema.json#/properties/masters"
},
"neo-admins": {
"$ref": "../neoppod/instance-neo-output-schema.json#/properties/admins"
},
"jupyter-url": {
"description": "Jupyter notebook web UI access information",
"pattern": "^https://",
"type": "string"
},
"caucase-http-url": {
"description": "Caucase url on HTTP. For HTTPS URL, uses https scheme, if port is explicitely specified in http URL, take that port and add 1 and use it as https port. If it is not specified.",
"pattern": "^http://",
"type": "string"
}
},
"patternProperties": {
"family-.*": {
"description": "Zope family access information",
"pattern": "^https://",
"type": "string"
}
},
"type": "object"
}
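
The caucase-http-url description above implies a small URL transformation to obtain the HTTPS endpoint. A minimal sketch of that rule, assuming an explicit port is present in the published HTTP URL (the function name is illustrative):

from urllib.parse import urlsplit, urlunsplit

def caucase_https_url(http_url):
    # Same host, https scheme, and the explicit http port plus one, as
    # described for "caucase-http-url" above.  The behaviour when no port
    # is given is not spelled out there, so refuse that case.  IPv6 hosts
    # would need their brackets re-added to the netloc.
    parts = urlsplit(http_url)
    if parts.port is None:
        raise ValueError("no explicit port in the published http URL")
    netloc = "%s:%d" % (parts.hostname, parts.port + 1)
    return urlunsplit(("https", netloc, parts.path, parts.query, parts.fragment))

# e.g. caucase_https_url("http://caucase.example.com:8890/")
#      -> "https://caucase.example.com:8891/"
#      (matching the 8890,8891 caucase pair in the port range table above)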
{
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"additionalProperties": false,
"properties": {
"tcpv4-port": {
"allOf": [
{
"$ref": "./schemas-definitions.json#/tcpv4port"
},
{
"description": "Start allocating ports at this value, going upward"
}
]
},
"ram-storage-size": {
"description": "If 0 use disk storage, otherwise use ram and limit data size to this many megabytes",
"default": 0,
"type": "integer"
}
}
}
This diff is collapsed.
{
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"additionalProperties": false,
"properties": {
"tcpv4-port": {
"allOf": [
{
"$ref": "./schemas-definitions.json#/tcpv4port"
},
{
"description": "Start allocating ports at this value, going upward"
}
]
},
"postmaster": {
"description": "Mail address to send technical mails to. Non-empty value required for smptd relay service to be deployed. Values will be put in alias-dict as 'postmaster' key (alias-dict takes precedence)",
"default": "",
"type": "string"
},
"alias-dict": {
"description": "Mail alias support",
"default": {},
"patternProperties": {
".*": {
"description": "List of addresses alias expands to",
"type": "array"
}
},
"type": "object"
},
"relay": {
"description": "Forward outgoing mails to a specific relay. If enabled, relay must support TLS-encrypted SASL authentication.",
"dependencies": {
"host": [
"sasl-credential"
]
},
"properties": {
"host": {
"description": "Host name or address of relay, with optional port (ex: '[example.com]:submission'). Enclosing hostname with [] prevents MX lookup.",
"type": "string"
},
"sasl-credential": {
"description": "SASL credential, in the login:password form",
"type": "string"
}
},
"default": {},
"type": "object"
},
"divert": {
"description": "Intercept all mails and send them to given addresses instead of original recipient",
"type": "array",
"items": {
"type": "string"
},
"uniqueItems": true
}
}
}
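
For the relay option above, a hypothetical value matching the schema (both values are placeholders):

# Hypothetical "relay" value; host and credential are placeholders.
# Enclosing the host name in [] prevents an MX lookup, as described above.
relay = {
    "host": "[smtp.example.com]:submission",
    "sasl-credential": "mylogin:mypassword",
}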
This diff is collapsed.
{
"$schema": "http://json-schema.org/draft-07/schema#",
"tcpv4port": {
"minimum": 0,
"maximum": 65535,
"type": "integer"
}
}
[buildout]
extends = software.cfg
shared-parts = /opt/slapgrid/shared-parts
eggs-directory = /opt/slapgrid/shared-eggs
abi-tag-eggs = true
  • something strange happened with the 1.0 branch; using ./update-rc caused a conflict:

    $ ./update-rc
    Already on 'master'
    Your branch is up to date with 'origin/master'.
    warning: redirecting to https://lab.nexedi.com/nexedi/slapos.git/
    Deleted branch 1.0 (was 238bb076b).
    branch '1.0' set up to track 'origin/1.0'.
    Switched to a new branch '1.0'
    HEAD is now at 238bb076b NEO: add profile to use MariaDB 10.4
    Auto-merging software/erp5/test/test/test_erp5.py
    CONFLICT (content): Merge conflict in software/erp5/test/test/test_erp5.py
    Auto-merging software/neoppod/software-common.cfg
    Auto-merging software/slapos-master/software.cfg
    Auto-merging stack/erp5/buildout.hash.cfg
    CONFLICT (content): Merge conflict in stack/erp5/buildout.hash.cfg
    Auto-merging stack/erp5/instance-zope.cfg.in
    Automatic merge failed; fix conflicts and then commit the result.
    

    some commits were both in master and 1.0 with different hashes: db979522 and 238bb076

    I don't understand, but this is probably because of some force push:

    image

    anyway state seems OK now

    Edited by Jérome Perrin