Commit 3780f5da authored by Ivan Tyagov's avatar Ivan Tyagov

Merge remote-tracking branch 'upstream/master' into beremiz-python3

parents fce9ef26 fef3c165
......@@ -58,7 +58,7 @@ egg-versions =
[ZODB5]
<= _ZODB
egg-versions =
ZODB = 5.8.0
ZODB = 5.8.1
transaction = 3.0.1
......
......@@ -20,8 +20,8 @@ parts =
[curl]
recipe = slapos.recipe.cmmi
shared = true
url = https://curl.se/download/curl-8.4.0.tar.bz2
md5sum = 1a61fde1fe5c7db5c29c1196435188a5
url = https://curl.se/download/curl-8.6.0.tar.xz
md5sum = 8f28f7e08c91cc679a45fccf66184fbc
configure-options =
--disable-static
--disable-ech
......@@ -57,6 +57,7 @@ configure-options =
--with-nghttp2=${nghttp2:location}
--without-ngtcp2
--without-nghttp3
--without-openssl-quic
--without-quiche
--without-zsh-functions-dir
--without-fish-functions-dir
......
From 4c132f622f33575aca8da1d0450caa3a33b8c0a0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Thu, 4 Jan 2024 00:13:08 +0900
Subject: [PATCH] compile: implicitly enable __future__.print_function when
compiling functions
---
src/RestrictedPython/compile.py | 5 +++--
tests/test_compile_restricted_function.py | 2 +-
2 files changed, 4 insertions(+), 3 deletions(-)
diff --git a/src/RestrictedPython/compile.py b/src/RestrictedPython/compile.py
index 3fc2881..98f3026 100644
--- a/src/RestrictedPython/compile.py
+++ b/src/RestrictedPython/compile.py
@@ -3,6 +3,7 @@ from RestrictedPython._compat import IS_CPYTHON
from RestrictedPython._compat import IS_PY2
from RestrictedPython.transformer import RestrictingNodeTransformer
+import __future__
import ast
import warnings
@@ -134,7 +135,7 @@ def compile_restricted_function(
name,
filename='<string>',
globalize=None, # List of globals (e.g. ['here', 'context', ...])
- flags=0,
+ flags=__future__.print_function.compiler_flag,
dont_inherit=False,
policy=RestrictingNodeTransformer):
"""Compile a restricted code object for a function.
@@ -144,7 +145,7 @@ def compile_restricted_function(
"""
# Parse the parameters and body, then combine them.
try:
- body_ast = ast.parse(body, '<func code>', 'exec')
+ body_ast = compile(body, '<func code>', 'exec', ast.PyCF_ONLY_AST | flags, dont_inherit)
except SyntaxError as v:
error = syntax_error_template.format(
lineno=v.lineno,
diff --git a/tests/test_compile_restricted_function.py b/tests/test_compile_restricted_function.py
index 5c81f86..a49e475 100644
--- a/tests/test_compile_restricted_function.py
+++ b/tests/test_compile_restricted_function.py
@@ -9,7 +9,7 @@ from types import FunctionType
def test_compile_restricted_function():
p = ''
body = """
-print("Hello World!")
+print("Hello", "World!")
return printed
"""
name = "hello_world"
--
2.42.0
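
The patch above makes compile_restricted_function() compile function bodies with __future__.print_function enabled, so print() works as a function in restricted code. A minimal usage sketch, assuming the patched RestrictedPython is installed (the guarded globals below are illustrative, not the project's exact setup):

from RestrictedPython import compile_restricted_function, safe_globals
from RestrictedPython.PrintCollector import PrintCollector

# Compile a restricted function whose body calls print() as a function;
# with this patch the __future__.print_function compiler flag is passed
# by default, so no 'from __future__ import print_function' is needed.
result = compile_restricted_function(
    p='',
    body='print("Hello", "World!")\nreturn printed\n',
    name='hello_world',
)
assert not result.errors, result.errors

# Execute the compiled code with guarded globals: _print_ collects the
# output and _getattr_ is the attribute guard (plain getattr for brevity).
restricted_globals = dict(safe_globals, _print_=PrintCollector, _getattr_=getattr)
exec(result.code, restricted_globals)
print(restricted_globals['hello_world']())  # -> Hello World!
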
From 8e7c9a6a86104e306aee2224ff5e517ee201b28f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9rome=20Perrin?= <jerome@nexedi.com>
Date: Tue, 9 Jan 2024 17:15:11 +0900
Subject: [PATCH] Fix redirections to URLs with host given as IP-literal
(#1192)
When redirecting to a URL with an IPv6 host enclosed in brackets,
we should not escape the surrounding brackets.
The patch updates the referenced RFC from 2396 to 3986, which obsoletes it,
and changes the safe characters for the netloc part to allow [ and ].
The RFC specifies that [ and ] are only allowed when they are the first
and last characters, but we don't need to be more specific here, because
using [ or ] in other places of the host is rejected by urlparse above.
Fixes #1191
---
src/ZPublisher/HTTPResponse.py | 14 +++++++-------
src/ZPublisher/tests/testHTTPResponse.py | 8 ++++++--
2 files changed, 13 insertions(+), 9 deletions(-)
diff --git a/src/ZPublisher/HTTPResponse.py b/src/ZPublisher/HTTPResponse.py
index b0b4ca2b1..b1a824151 100644
--- a/src/ZPublisher/HTTPResponse.py
+++ b/src/ZPublisher/HTTPResponse.py
@@ -230,24 +230,24 @@ class HTTPBaseResponse(BaseResponse):
# To be entirely correct, we must make sure that all non-ASCII
# characters are quoted correctly.
parsed = list(urlparse(location))
- rfc2396_unreserved = "-_.!~*'()" # RFC 2396 section 2.3
+ rfc3986_unreserved = "-_.!~*'()" # RFC 3986 section 2.3
for idx, idx_safe in (
# authority
- (1, ";:@?/&=+$,"), # RFC 2396 section 3.2, 3.2.1, 3.2.3
+ (1, "[];:@?/&=+$,"), # RFC 3986 section 3.2, 3.2.1, 3.2.3
# path
- (2, "/;:@&=+$,"), # RFC 2396 section 3.3
+ (2, "/;:@&=+$,"), # RFC 3986 section 3.3
# params - actually part of path; empty in Python 3
- (3, "/;:@&=+$,"), # RFC 2396 section 3.3
+ (3, "/;:@&=+$,"), # RFC 3986 section 3.3
# query
- (4, ";/?:@&=+,$"), # RFC 2396 section 3.4
+ (4, ";/?:@&=+,$"), # RFC 3986 section 3.4
# fragment
- (5, ";/?:@&=+$,"), # RFC 2396 section 4
+ (5, ";/?:@&=+$,"), # RFC 3986 section 4
):
# Make a hacky guess whether the component is already
# URL-encoded by checking for %. If it is, we don't touch it.
if '%' not in parsed[idx]:
parsed[idx] = quote(parsed[idx],
- safe=rfc2396_unreserved + idx_safe)
+ safe=rfc3986_unreserved + idx_safe)
location = urlunparse(parsed)
self.setStatus(status, lock=lock)
diff --git a/src/ZPublisher/tests/testHTTPResponse.py b/src/ZPublisher/tests/testHTTPResponse.py
index a7f816c04..08a1674ba 100644
--- a/src/ZPublisher/tests/testHTTPResponse.py
+++ b/src/ZPublisher/tests/testHTTPResponse.py
@@ -767,15 +767,19 @@ class HTTPResponseTests(unittest.TestCase):
self._redirectURLCheck(ENC_URL)
def test_redirect_unreserved_chars(self):
- # RFC 2396 section 2.3, characters that should not be encoded
+ # RFC 3986 section 2.3, characters that should not be encoded
url = "http://example.com/-_.!~*'()"
self._redirectURLCheck(url)
def test_redirect_reserved_chars(self):
- # RFC 2396 section 3.3, characters with reserved meaning in a path
+ # RFC 3986 section 3.3, characters with reserved meaning in a path
url = 'http://example.com/+/$/;/,/=/?/&/@@index.html'
self._redirectURLCheck(url)
+ def test_redirect_ipv6(self):
+ url = "http://[fe80::1ff:fe23:4567:890a]:1234"
+ self._redirectURLCheck(url)
+
def test__encode_unicode_no_content_type_uses_default_encoding(self):
UNICODE = u'<h1>Tr\u0039s Bien</h1>'
response = self._makeOne()
--
2.42.0
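
For illustration, a stdlib-only sketch of what the quoting change above achieves for the authority component of a redirect target (this mirrors, but is not, the ZPublisher code):

from urllib.parse import quote, urlparse

location = "http://[fe80::1ff:fe23:4567:890a]:1234/index.html"
netloc = urlparse(location).netloc
unreserved = "-_.!~*'()"

# Old safe set: the brackets get percent-encoded, breaking the IPv6 host.
print(quote(netloc, safe=unreserved + ";:@?/&=+$,"))
# -> %5Bfe80::1ff:fe23:4567:890a%5D:1234

# New safe set including [ and ]: the IPv6 literal is left intact.
print(quote(netloc, safe=unreserved + "[];:@?/&=+$,"))
# -> [fe80::1ff:fe23:4567:890a]:1234
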
[buildout]
extends =
../gnu-config/buildout.cfg
parts =
gdbm
......@@ -8,6 +10,7 @@ shared = true
version = 1.23
url = http://ftp.gnu.org/gnu/gdbm/gdbm-${:version}.tar.gz
md5sum = 8551961e36bf8c70b7500d255d3658ec
pre-configure = cp -f ${gnu-config:location}/config.sub ${gnu-config:location}/config.guess build-aux/
configure-options =
--disable-static
--enable-libgdbm-compat
......
......@@ -19,11 +19,18 @@ shared = true
configure-command = :
location = @@LOCATION@@
make-binary =
make-targets =
cd src &&
unset GOBIN &&
# build and test Go and its standard library
./all.bash &&
# clean intermediate cache before installing
# contrary to the build cache in gowork, the build cache that Go uses during its own build is not reused anywhere:
# https://github.com/golang/go/blob/go1.18.2-0-g8ed0e51b5e/src/cmd/dist/build.go#L239-L242
make-targets= cd src && unset GOBIN && ./all.bash && GOCACHE=`pwd`/../pkg/obj/go-build ../bin/go clean -cache && cp -alf .. ${:location}
GOCACHE=`pwd`/../pkg/obj/go-build ../bin/go clean -cache &&
# also clean the global cache, which is used during the tests.
../bin/go clean -cache &&
cp -alf .. ${:location}
# some testdata files have an issue with slapos.extension.strip.
post-install = ${findutils:location}/bin/find ${:location}/src -type d -name testdata -exec rm -rf {} \; || true
environment =
......
# simple, standalone, language-agnostic, RFC6455 compliant WebSocket Server, written in C. https://gwsocket.io
[buildout]
parts = gwsocket
[gwsocket]
recipe = slapos.recipe.cmmi
shared = true
url = https://tar.gwsocket.io/gwsocket-0.4.tar.gz
md5sum = 1367e77c47cb6379025e64deb85fb066
......@@ -79,7 +79,6 @@ tornado = 4.4.2
nbconvert = 4.1.0
pathlib2 = 2.2.1
patsy = 0.4.1
pexpect = 4.2.1
scandir = 1.5
wcwidth = 0.1.7
jupyter-console = 5.1.0
......@@ -7,8 +7,8 @@ parts =
[libexpat]
recipe = slapos.recipe.cmmi
shared = true
url = https://github.com/libexpat/libexpat/releases/download/R_2_5_0/expat-2.5.0.tar.lz
md5sum = 4add8675872d4b923d9b7871dc0f24d3
url = https://github.com/libexpat/libexpat/releases/download/R_2_6_2/expat-2.6.2.tar.lz
md5sum = 16ad24a204d5aee5fe8fb19e1a9b4700
configure-options =
--disable-static
--without-xmlwf
......
......@@ -35,9 +35,10 @@ shared = true
url = https://archive.mariadb.org//mariadb-${:version}/source/mariadb-${:version}.tar.gz
pcre-location = ${pcre2:location}
pre-configure =
set '\bSET(PLUGIN_AUTH_PAM YES CACHE BOOL "")' cmake/build_configurations/mysql_release.cmake
grep -q "$@"
sed -i "/$1/d" "$2"
d() { grep -q "$@"; sed -i "/$1/d" "$2"; }
d '\bSET(PLUGIN_AUTH_PAM YES CACHE BOOL "")' cmake/build_configurations/mysql_release.cmake
d 'ADD_SUBDIRECTORY(\(mysql-test\|tests\)\b' CMakeLists.txt
d '\bINSTALL_MYSQL_TEST\b' cmake/plugin.cmake
configure-command = ${cmake:location}/bin/cmake
configure-options =
-DCMAKE_INSTALL_PREFIX=@@LOCATION@@
......
......@@ -4,37 +4,56 @@ extends =
../curl/buildout.cfg
../git/buildout.cfg
../jsoncpp/buildout.cfg
../lxml-python/buildout.cfg
../macros/macro.pythonpath.eggs.cfg
../tinyxml2/buildout.cfg
../zlib/buildout.cfg
parts =
mavsdk
[c-astral-headers]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/c-astral-c-library
revision = v1.0
git-executable = ${git:location}/bin/git
[gcc]
min_version = 7.1
[c-astral-xml-definition]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/c-astral-c-library.git
revision = v2.1
git-executable = ${git:location}/bin/git
[mavsdk-source]
recipe = slapos.recipe.build:gitclone
repository = https://github.com/mavlink/MAVSDK.git
revision = v0.39.0
revision = v1.4.13
git-executable = ${git:location}/bin/git
ignore-cloning-submodules = true
[future]
recipe = zc.recipe.egg:custom
egg = future
[mavsdk-env]
CMAKE_INCLUDE_PATH=${curl:location}/include:${jsoncpp:location}/include:${tinyxml2:location}/include
CMAKE_LIBRARY_PATH=${curl:location}/lib:${jsoncpp:location}/lib:${tinyxml2:location}/lib:${zlib:location}/lib
CMAKE_PROGRAM_PATH=${cmake:location}/bin
PATH=${pkgconfig:location}/bin/:${git:location}/bin/:%(PATH)s
LDFLAGS=-L${curl:location}/lib -Wl,-rpath=${curl:location}/lib -L${jsoncpp:location}/lib -Wl,-rpath=${jsoncpp:location}/lib -L${tinyxml2:location}/lib -Wl,-rpath=${tinyxml2:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=@@LOCATION@@/lib
[mavsdk-pythonpath]
<= macro.pythonpath.eggs
environment = mavsdk-env
eggs =
${future:egg}
${lxml-python:egg}
[mavsdk]
recipe = slapos.recipe.cmmi
path = ${mavsdk-source:location}
cmake = ${cmake:location}/bin/cmake
depends = ${mavsdk-pythonpath:recipe}
pre-configure =
${git:location}/bin/git submodule update --init --recursive
cp -r ${c-astral-headers:location}/* ${mavsdk-source:location}/src/third_party/mavlink/include/mavlink/v2.0/
sed -i 's#common/mavlink.h#CAstral/mavlink.h#' ${mavsdk-source:location}/src/core/mavlink_include.h
sed -i 's#message_definitions/v1.0#${c-astral-xml-definition:location}#' ${mavsdk-source:location}/third_party/mavlink/CMakeLists.txt
configure-command =
${:cmake}
configure-options =
......@@ -42,19 +61,23 @@ configure-options =
-DCMAKE_C_FLAGS="${:CMAKE_CFLAGS}"
-DCMAKE_CXX_FLAGS="${:CMAKE_CFLAGS}"
-DCMAKE_INSTALL_PREFIX=@@LOCATION@@
-DCMAKE_INSTALL_RPATH=${:CMAKE_LIBRARY_PATH}:@@LOCATION@@/lib
-DCMAKE_INSTALL_RPATH=${mavsdk-env:CMAKE_LIBRARY_PATH}:@@LOCATION@@/lib
-DPKG_CONFIG_EXECUTABLE=${pkgconfig:location}/bin/pkg-config
-DSUPERBUILD=OFF
-Bbuild/default
-H.
-Wno-dev
make-binary =
${:cmake} --build build/default --target install
environment =
CMAKE_INCLUDE_PATH=${curl:location}/include:${jsoncpp:location}/include:${tinyxml2:location}/include
CMAKE_LIBRARY_PATH=${:CMAKE_LIBRARY_PATH}
CMAKE_PROGRAM_PATH=${cmake:location}/bin
PATH=${pkgconfig:location}/bin/:%(PATH)s
LDFLAGS=-L${curl:location}/lib -Wl,-rpath=${curl:location}/lib -L${jsoncpp:location}/lib -Wl,-rpath=${jsoncpp:location}/lib -L${tinyxml2:location}/lib -Wl,-rpath=${tinyxml2:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=@@LOCATION@@/lib
environment = mavsdk-env
CMAKE_CFLAGS=-I${tinyxml2:location}/include
CMAKE_LIBRARY_PATH=${curl:location}/lib:${jsoncpp:location}/lib:${tinyxml2:location}/lib:${zlib:location}/lib
[c-astral-wrapper]
recipe = slapos.recipe.cmmi
configure-command = true
url = https://lab.nexedi.com/nexedi/c-astral-wrapper/-/archive/v2.0/c-astral-wrapper-v2.0.tar.gz
md5sum = ee2d05d225a57d17318282ff595fd498
environment =
CPLUS_INCLUDE_PATH=${qjs-wrapper-source:location}/include:${mavsdk:location}/include:${mavsdk:location}/include/mavsdk
LDFLAGS=-L${mavsdk:location}/lib -Wl,-rpath=${mavsdk:location}/lib
# Implementation of OPC UA (OPC Unified Architecture). https://open62541.org/
[buildout]
parts = open62541
parts =
open62541
gcc-10.2
extends =
../cmake/buildout.cfg
../patch/buildout.cfg
../python3/buildout.cfg
../gcc/buildout.cfg
../defaults.cfg
[gcc]
......@@ -35,4 +38,4 @@ configure-options =
post-install =
cp src/pubsub/*.h deps/open62541_queue.h @@LOCATION@@/include
environment =
PATH=${python3:location}/bin:${patch:location}/bin:%(PATH)s
PATH=${gcc-10.2:location}/bin:${python3:location}/bin:${patch:location}/bin:%(PATH)s
......@@ -16,9 +16,9 @@ parts =
[openssh]
recipe = slapos.recipe.cmmi
shared = true
md5sum = 3d29a7394816deeb57186899d7f7662c
md5sum = 1100f170ca1bc669038ca3743e074094
location = @@LOCATION@@
url = https://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/openssh-9.5p1.tar.gz
url = https://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/openssh-9.7p1.tar.gz
patch-binary = ${patch:location}/bin/patch
patch-options = -p1
patches =
......
......@@ -52,10 +52,7 @@ inline =
ipython = 5.10.0
ipython-genutils = 0.2.0
simplegeneric = 0.8.1
Pygments = 2.5.2
prompt-toolkit = 1.0.18
pickleshare = 0.7.5
pexpect = 4.8.0
backports.shutil-get-terminal-size = 1.0.0
ptyprocess = 0.6.0
[buildout]
extends =
../cython/buildout.cfg
../libpcap/buildout.cfg
../macros/macro.pythonpath.eggs.cfg
../python-PyYAML/buildout.cfg
parts =
eggs
# this section is used to pass info from macro pythonpath.eggs to the python-pim-dm section
[pimdm-env]
[pimdm-pythonpath]
<= macro.pythonpath.eggs
environment = pimdm-env
eggs = ${cython:egg}
[python-pim-dm]
recipe = zc.recipe.egg:custom
egg = pim-dm
......@@ -15,6 +25,8 @@ library-dirs =
${libpcap:location}/lib
rpath =
${libpcap:location}/lib/
environment = pimdm-env
depends = ${pimdm-pythonpath:recipe}
[eggs]
recipe = zc.recipe.egg
......
......@@ -78,5 +78,5 @@ md5sum = 8847dc6458d1431d0ae0f55942deeb89
[python3.11]
<= python3-common
version = 3.11
package_version = 3.11.5
md5sum = 393856f1b7713aa8bba4b642ab9985d3
package_version = 3.11.8
md5sum = b353b8433e560e1af2b130f56dfbd973
[buildout]
extends =
../git/buildout.cfg
../mavsdk/buildout.cfg
../open62541/buildout.cfg
../quickjs/buildout.cfg
parts = qjs-wrapper
[qjs-wrapper-source]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/qjs-wrapper.git
revision = v2.0
git-executable = ${git:location}/bin/git
[qjs-wrapper]
recipe = slapos.recipe.cmmi
configure-command = true
url = https://lab.nexedi.com/nexedi/qjs-wrapper/-/archive/v1.3/qjs-wrapper-v1.3.tar.gz
md5sum = 5f63356c6a10bf227e2641ea4f78c7a2
path = ${qjs-wrapper-source:location}
environment =
C_INCLUDE_PATH=include:${open62541:location}/include:${open62541:location}/deps:${open62541:location}/src/pubsub:${quickjs:location}/include
CPLUS_INCLUDE_PATH=include:${mavsdk:location}/include:${mavsdk:location}/include/mavsdk
LDFLAGS=-L${open62541:location}/lib -Wl,-rpath=${open62541:location}/lib -L${mavsdk:location}/lib -Wl,-rpath=${mavsdk:location}/lib
LDFLAGS=-L${open62541:location}/lib -Wl,-rpath=${open62541:location}/lib -L${c-astral-wrapper:location}/lib -Wl,-rpath=${c-astral-wrapper:location}/lib
......@@ -43,7 +43,7 @@ eggs =
[versions]
setuptools = 44.1.1
zc.buildout = 2.7.1+slapos019
zc.buildout = 2.7.1+slapos020
zc.recipe.egg = 2.0.3+slapos003
EOF
......
......@@ -7,8 +7,8 @@ parts = tar
[tar]
recipe = slapos.recipe.cmmi
shared = true
url = http://ftp.gnu.org/gnu/tar/tar-1.29.tar.xz
md5sum = a1802fec550baaeecff6c381629653ef
url = http://ftp.gnu.org/gnu/tar/tar-1.35.tar.xz
md5sum = a2d8042658cfd8ea939e6d911eaf4152
environment =
FORCE_UNSAFE_CONFIGURE=1
PATH=${xz-utils:location}/bin:%(PATH)s
......@@ -30,4 +30,4 @@ eggs = ${zodbtools:eggs}
[versions]
zodbtools = 0.0.0.dev8
zodbtools = 0.0.0.dev9
......@@ -45,10 +45,11 @@ class CertificateAuthority:
os.unlink(f)
try:
# no CA, let us create new one
popenCommunicate([self.openssl_binary, 'req', '-nodes', '-config',
self.openssl_configuration, '-new', '-x509', '-extensions',
'v3_ca', '-keyout', self.key, '-out', self.certificate,
'-days', '10950'], 'Certificate Authority %s\n' % uuid.uuid1())
popenCommunicate([self.openssl_binary, 'req', '-utf8', '-nodes',
'-config', self.openssl_configuration, '-new', '-x509',
'-extensions', 'v3_ca', '-keyout', self.key, '-out',
self.certificate, '-days', '10950'],
'Certificate Authority %s\n' % uuid.uuid1())
except:
try:
for f in file_list:
......
......@@ -45,6 +45,10 @@ def createInstanceParameterSchemaValidatorTest(path):
"http://json-schema.org/draft-04/schema#": jsonschema.Draft4Validator,
"http://json-schema.org/draft-06/schema#": jsonschema.Draft6Validator,
"http://json-schema.org/draft-07/schema#": jsonschema.Draft7Validator,
"http://json-schema.org/draft/2019-09/schema": jsonschema.Draft201909Validator,
"http://json-schema.org/draft/2019-09/schema#": jsonschema.Draft201909Validator,
"http://json-schema.org/draft/2020-12/schema": jsonschema.Draft202012Validator,
"http://json-schema.org/draft/2020-12/schema#": jsonschema.Draft202012Validator,
}
def run(self, *args, **kwargs):
with open(path, "r") as json_file:
......@@ -55,7 +59,6 @@ def createInstanceParameterSchemaValidatorTest(path):
validator.check_schema(json_dict)
return run
def createSoftwareCfgValidatorTest(path, software_cfg_schema):
# Test that the software json follows the schema for software json files,
# which is defined in schema.json in this directory
......@@ -64,13 +67,20 @@ def createSoftwareCfgValidatorTest(path, software_cfg_schema):
schema = json.load(json_file)
jsonschema.validate(schema, software_cfg_schema)
_viewed_software_type = []
# also make sure request and response schemas can be resolved
schema.setdefault('$id', 'file://' + path)
resolver = jsonschema.RefResolver.from_schema(schema)
for software_type_definition in six.itervalues(schema['software-type']):
for key, software_type_definition in six.iteritems(schema['software-type']):
resolver.resolve(software_type_definition['request'])
resolver.resolve(software_type_definition['response'])
# Ensure there isn't a duplicated entry.
_software_type_tuple = (
software_type_definition.get("software-type", key),
software_type_definition.get("shared", False))
assert _software_type_tuple not in _viewed_software_type, \
"Duplicated software release on %s, shared: %s" % _software_type_tuple
_viewed_software_type.append(_software_type_tuple)
return run
......
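
As an aside, a hedged sketch of how such a draft-to-validator mapping is typically used to validate a schema file; the file name and variable names here are assumptions, not the project's exact code:

import json
import jsonschema

# Map "$schema" URIs to validator classes, as in the test helper above
# (Draft201909Validator and Draft202012Validator need jsonschema >= 4).
DRAFT_VALIDATORS = {
    "http://json-schema.org/draft-07/schema#": jsonschema.Draft7Validator,
    "http://json-schema.org/draft/2019-09/schema": jsonschema.Draft201909Validator,
    "http://json-schema.org/draft/2020-12/schema": jsonschema.Draft202012Validator,
}

with open("instance-input-schema.json") as f:  # hypothetical schema file
    schema = json.load(f)

validator_class = DRAFT_VALIDATORS[schema["$schema"]]
validator_class.check_schema(schema)  # raises SchemaError if the schema is invalid
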
[instance-profile]
filename = instance.cfg.in
md5sum = 17004b2adb98b545b16c6be60e8165e8
md5sum = 4b7e36bbb077f91cdde5a4a05502cf71
......@@ -53,9 +53,8 @@ recipe = slapos.cookbook:wrapper
# needed libraries and tools inside SlapOS context
environment =
BEREMIZPYTHONPATH = {{ buildout['bin-directory'] }}/pythonwitheggs
PATH=$PATH:/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin
LIBRARY_PATH=/usr/lib/gcc/x86_64-linux-gnu/9/:/usr/lib/x86_64-linux-gnu:/usr/lib:/lib/x86_64-linux-gnu/:/lib:/usr/lib/x86_64-linux-gnu/
COMPILER_PATH=/usr/lib/gcc/x86_64-linux-gnu/9/:/usr/lib/gcc/x86_64-linux-gnu/
PATH={{ gcc_location }}/bin
LIBRARY_PATH={{ openssl_location }}/lib
command-line =
{{ buildout['bin-directory'] }}/pythonwitheggs {{ buildout['directory'] }}/parts/beremiz-source/Beremiz_cli.py -k --project-home ${directory:home}/parts/download-plc/ build transfer run
......
......@@ -6,6 +6,8 @@ extends =
../../component/open62541/buildout.cfg
../../component/numpy/buildout.cfg
../../component/lxml-python/buildout.cfg
../../component/python-sslpsk/buildout.cfg
../../component/gcc/buildout.cfg
../../stack/monitor/buildout.cfg
../../stack/slapos.cfg
......@@ -16,10 +18,14 @@ parts =
python-interpreter
matiec
open62541
gcc-10.2
[python]
part = python2.7
[gcc]
part = gcc-10.2
[open62541]
configure-options =
-DBUILD_SHARED_LIBS=OFF
......@@ -78,7 +84,7 @@ eggs = click
opcua
${beremiz:egg}
${Twisted:egg}
sslpsk
${python-sslpsk:egg}
[instance-profile]
recipe = slapos.recipe.template:jinja2
......@@ -88,8 +94,8 @@ extensions = jinja2.ext.do
context =
section buildout buildout
raw template_monitor ${monitor2-template:output}
# md5sum is fetched from buildout.hash.cfg and can be recalculated automatically by
# calling update-hash
key openssl_location openssl:location
key gcc_location gcc-10.2:location
[versions]
Twisted = 20.3.0
......
......@@ -13,8 +13,8 @@ parts =
[dufs]
recipe = slapos.recipe.cmmi
shared = true
url = https://github.com/sigoden/dufs/archive/refs/tags/v0.39.0.tar.gz
md5sum = e0fca35530c043c5dff7fcbe415ed35d
url = https://github.com/sigoden/dufs/archive/refs/tags/v0.40.0.tar.gz
md5sum = 3b71b3d07af69d6ba92c054625dc0dd2
configure-command = :
make-binary = cargo install --root=%(location)s --path . --locked
make-targets =
......
import json
import time
import slapos.testing.e2e as e2e
from websocket import create_connection
class WebsocketTestClass(e2e.EndToEndTestCase):
@classmethod
def setUpClass(cls):
try:
super().setUpClass()
cls.enb_instance_name = time.strftime('e2e-cb003-enb-%Y-%B-%d-%H:%M:%S')
cls.cn_instance_name = time.strftime('e2e-cb003-core-network-%Y-%B-%d-%H:%M:%S')
cls.sim_instance_name = time.strftime('e2e-cb003-sim-%Y-%B-%d-%H:%M:%S')
cls.ue_instance_name = time.strftime('e2e-sb005-ue-%Y-%B-%d-%H:%M:%S')
cls.product = "/opt/e2e/slapos/software/ors-amarisoft/software-fdd-lopcomm.cfg"
cls.ue_product = "/opt/e2e/slapos/software/ors-amarisoft/software-fdd-lopcomm.cfg"
# Component GUIDs and configurations
cls.comp_enb ="COMP-3920"
cls.comp_cn = "COMP-3920"
cls.comp_ue = "COMP-3756"
cls.dl_earfcn = 300
# Retry configurations
cls.max_retries = 10
cls.retry_delay = 180 # seconds
# Setup instances
cls.setup_instances()
cls.waitUntilGreen(cls.enb_instance_name)
cls.waitUntilGreen(cls.cn_instance_name)
except Exception as e:
cls.logger.error("Error during setup: " + str(e))
# Ensure cleanup
cls.tearDownClass()
raise
@classmethod
def retry_request(cls, func, *args, **kwargs):
for attempt in range(cls.max_retries):
try:
result = func(*args, **kwargs)
if result:
return result
except Exception as e:
cls.logger.error(f"Error on attempt {attempt + 1}: {e}")
if attempt < cls.max_retries - 1:
time.sleep(cls.retry_delay)
return None
@classmethod
def setup_instances(cls):
cls.request_enb()
cls.request_core_network()
cls.setup_websocket_connection()
@classmethod
def request_enb(cls, custom_params=None):
cls.logger.info("Request "+ cls.enb_instance_name)
enb_parameters = {
"bandwidth": "20 MHz",
"n_antenna_dl": 1,
"n_antenna_ul": 1,
"cpri_mult": 16,
"cell_list": {
"RRH-B1": {
"cpri_rx_delay": 25.11,
"cpri_tx_delay": 13.77,
"cpri_tx_dbm": 56,
"ru_mac_addr": "00:0a:00:00:10:20",
"dl_earfcn": cls.dl_earfcn
}
},
"dnsmasq": True,
"txa0cc00_active": "ACTIVE",
"rxa0cc00_active": "ACTIVE",
"txa0cc00_center_frequency": 2140,
"rxa0cc00_center_frequency_earfcn": 18300,
"rxa0cc00_center_frequency": 1950,
"txa0cc00_gain": -20,
"user-authorized-key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDegkDlZaDJEoiXo5FZ5iJmYcVHyqd5G+YaWLmZ/Ae6wtY8Pp0e/+eCcARO67pwn73MAj9IELu3h5rdPuZvZx0xXWGOc3ceOQBsJh/h4eMpiBKvA5ELWVuXDIl98xgIIjiaO4QgZyw1OhpN5EB6EyUNKt/xCHuU37mZaFLbcNDW3h6JI5U5plIARY0e/dFPFywtKqCgnqhJubJh/kHcb4ZeJzQMnA33WGwVD/b+F015kHXfk4T259Z27yqMTokVjaiUnI2Wbac3e+Lc5bpecA68rlmhc6fs0bh5Geldy2Q8y8gJQUX3sihA9PjlDN+T8mNYHyk9QaCM/SQkwxB71D172nMoUcrppUZyf6JaLmB/cO0iVsIr8x2GnGT0EzL/y1hmvi1dD17E0DpgoRcjI3DxleTbUTpayT4ZHrtVnkp2Nf1LgEJmdTx0hqTb9HTqhXATTKLSETYAwIu0yWnlA9oK2MwsiPPQ/8IS5HzhN3XFEIdV+tQ7GZPVfv4sYpwt7us= root@root",
"plmn_list": {"Australia": {"plmn": "50501"}}
}
if custom_params:
enb_parameters.update(custom_params)
json_enb_parameters = json.dumps(enb_parameters)
cls.retry_request(cls.request, cls.product, cls.enb_instance_name,
filter_kw={"computer_guid": cls.comp_enb},
partition_parameter_kw={'_': json_enb_parameters},
software_type='enb')
@classmethod
def request_core_network(cls):
cls.logger.info("Request "+ cls.cn_instance_name)
core_network_parameters = json.dumps({"core_network_plmn": "50501"})
cls.retry_request(cls.request_core_network_with_guid, core_network_parameters)
@classmethod
def request_core_network_with_guid(cls, core_network_parameters):
core_network_instance = cls.request(cls.product, cls.cn_instance_name,
filter_kw={"computer_guid": cls.comp_cn},
partition_parameter_kw={'_': core_network_parameters},
software_type='core-network')
if core_network_instance:
instance_infos = cls.getInstanceInfos(cls.cn_instance_name)
cls.cn_instance_guid = instance_infos.news['instance'][0]['reference']
cls.request_demo_sim_cards()
return True
return False
@classmethod
def request_demo_sim_cards(cls):
cls.logger.info("Request "+ cls.sim_instance_name)
if cls.cn_instance_guid is None:
cls.logger.error("Core network instance GUID not set. Cannot request demo SIM cards.")
return
sim_card_parameters = json.dumps({
"sim_algo": "xor",
"imsi": "505010123456789",
"k": "00112233445566778899aabbccddeeff",
"imeisv": "8682430000000101",
"impi": "505010123456789@ims.mnc505.mcc001.3gppnetwork.org",
"impu": ["505010123456789", "tel:0600000000", "tel:600"]
})
cls.retry_request(cls.request, cls.product, cls.sim_instance_name,
partition_parameter_kw={'_': sim_card_parameters},
software_type='core-network',
filter_kw={"instance_guid": cls.cn_instance_guid},
shared=True, state='started')
@classmethod
def setup_websocket_connection(cls):
ue_instance = cls.retry_request(cls.request_ue)
cls.waitUntilGreen(cls.ue_instance_name)
cls.ue_com_addr = ue_instance.get('com_addr') if ue_instance else None
if not cls.ue_com_addr:
cls.logger.error("Failed to obtain UE com address.")
return
cls.ws_url = f"ws://{cls.ue_com_addr}"
cls.logger.info(f"Websocket URL: {cls.ws_url}")
for attempt in range(cls.max_retries):
try:
cls.ws = create_connection(cls.ws_url)
cls.logger.info("Websocket connection established.")
break
except Exception as e:
cls.logger.error(f"Websocket connection attempt {attempt + 1} failed: {e}")
if attempt < cls.max_retries - 1:
time.sleep(5)
@classmethod
def request_ue(cls):
cls.logger.info("Request "+ cls.ue_instance_name)
ue_parameters = json.dumps({
"n_antenna_dl": 2,
"n_antenna_ul": 2,
"dl_earfcn": cls.dl_earfcn,
"sim_algo": "xor",
"imsi": "505010123456789",
"k": "00112233445566778899aabbccddeeff",
"imeisv": "8682430000000101",
"impi": "505010123456789@ims.mnc505.mcc001.3gppnetwork.org",
"impu": ["505010123456789", "tel:0600000000", "tel:600"]
})
return cls.retry_request(cls.request, cls.ue_product, cls.ue_instance_name,
filter_kw={"computer_guid": cls.comp_ue},
partition_parameter_kw={'_': ue_parameters},
software_type='ue-lte')
@classmethod
def tearDownClass(cls):
if hasattr(cls, 'ws') and cls.ws is not None:
cls.ws.close()
super().tearDownClass()
def send(self, msg):
self.ws.send(json.dumps(msg))
def recv(self):
return json.loads(self.ws.recv())
def ue_get(self):
self.send({"message": "ue_get"})
result = self.recv()
if 'message' not in result:
raise ValueError(f"Unexpected response format: {result}")
if 'ue_list' in result:
if not result['ue_list']:
raise ValueError(f"No UE found in response: {result}")
return result['ue_list'][0]
else:
return result
def power_on(self, ue_id):
self.assertFalse(self.ue_get()['power_on'], "UE already powered on")
self.send({"message": "power_on", "ue_id": ue_id})
self.recv()
def power_off(self, ue_id):
self.assertTrue(self.ue_get()['power_on'], "UE already powered off")
self.send({"message": "power_off", "ue_id": ue_id})
self.recv()
class BBUTest(WebsocketTestClass):
def test_ue_has_ip(self):
result = self.recv()
result = self.ue_get()
ue_id = result['ue_id']
try:
self.power_on(ue_id)
time.sleep(5)
result = self.ue_get()
self.assertIn('pdn_list', result, "UE didn't connect")
self.assertIn('ipv4', result['pdn_list'][0], "UE didn't get IPv4")
self.logger.info("UE connected with ip: " + result['pdn_list'][0]['ipv4'])
finally:
self.power_off(ue_id)
def test_max_rx_sample_db(self):
pass
# check-rx-saturated is disabled for BBU+RU
# custom_params = {"max_rx_sample_db": -99}
# BBUTest.request_enb(custom_params)
# self.waitUntilPromises(BBUTest.enb_instance_name, promise_name="check-rx-saturated", expected=False)
def test_min_rxtx_delay(self):
custom_params = {"min_rxtx_delay": 99}
BBUTest.request_enb(custom_params)
self.waitUntilPromises(BBUTest.enb_instance_name, promise_name="check-baseband-latency", expected=False)
import json
import time
import slapos.testing.e2e as e2e
from websocket import create_connection
class WebsocketTestClass(e2e.EndToEndTestCase):
@classmethod
def setUpClass(cls):
try:
super().setUpClass()
cls.enb_instance_name = time.strftime('e2e-ors84-enb-%Y-%B-%d-%H:%M:%S')
cls.cn_instance_name = time.strftime('e2e-ors84-core-network-%Y-%B-%d-%H:%M:%S')
cls.sim_instance_name = time.strftime('e2e-ors84-sim-%Y-%B-%d-%H:%M:%S')
cls.ue_instance_name = time.strftime('e2e-simbox005-ue-%Y-%B-%d-%H:%M:%S')
cls.product = cls.product.get('ors-tdd')
cls.ue_product = "/opt/e2e/slapos/software/ors-amarisoft/software-fdd-lopcomm.cfg"
# Component GUIDs and configurations
cls.comp_enb = "COMP-4057"
cls.comp_cn = "COMP-4057"
cls.comp_ue = "COMP-3756"
cls.dl_earfcn = 38550
# Retry configurations
cls.max_retries = 10
cls.retry_delay = 180 # seconds
# Setup instances
cls.setup_instances()
cls.waitUntilGreen(cls.enb_instance_name)
cls.waitUntilGreen(cls.cn_instance_name)
except Exception as e:
cls.logger.error("Error during setup: " + str(e))
# Ensure cleanup
cls.tearDownClass()
raise
@classmethod
def retry_request(cls, func, *args, **kwargs):
for attempt in range(cls.max_retries):
try:
result = func(*args, **kwargs)
if result:
return result
except Exception as e:
cls.logger.error(f"Error on attempt {attempt + 1}: {e}")
if attempt < cls.max_retries - 1:
time.sleep(cls.retry_delay)
return None
@classmethod
def setup_instances(cls):
cls.request_enb()
cls.request_core_network()
cls.setup_websocket_connection()
@classmethod
def request_enb(cls, custom_params=None):
cls.logger.info("Request "+ cls.enb_instance_name)
enb_parameters = {
"dl_earfcn": cls.dl_earfcn,
"plmn_list": {"Australia": {"plmn": "50501"}}
}
if custom_params:
enb_parameters.update(custom_params)
json_enb_parameters = json.dumps(enb_parameters)
cls.retry_request(cls.request, cls.product, cls.enb_instance_name,
filter_kw={"computer_guid": cls.comp_enb},
partition_parameter_kw={'_': json_enb_parameters},
software_type='enb')
@classmethod
def request_core_network(cls):
core_network_parameters = json.dumps({"core_network_plmn": "50501"})
cls.retry_request(cls.request_core_network_with_guid, core_network_parameters)
@classmethod
def request_core_network_with_guid(cls, core_network_parameters):
cls.logger.info("Request "+ cls.cn_instance_name)
core_network_instance = cls.request(cls.product, cls.cn_instance_name,
filter_kw={"computer_guid": cls.comp_cn},
partition_parameter_kw={'_': core_network_parameters},
software_type='core-network')
if core_network_instance:
instance_infos = cls.getInstanceInfos(cls.cn_instance_name)
cls.cn_instance_guid = instance_infos.news['instance'][0]['reference']
cls.request_demo_sim_cards()
return True
return False
@classmethod
def request_demo_sim_cards(cls):
if cls.cn_instance_guid is None:
cls.logger.error("Core network instance GUID not set. Cannot request demo SIM cards.")
return
cls.logger.info("Request "+ cls.sim_instance_name)
sim_card_parameters = json.dumps({
"sim_algo": "xor",
"imsi": "505010123456789",
"k": "00112233445566778899aabbccddeeff",
"imeisv": "8682430000000101",
"impi": "505010123456789@ims.mnc505.mcc001.3gppnetwork.org",
"impu": ["505010123456789", "tel:0600000000", "tel:600"]
})
cls.retry_request(cls.request, cls.product, cls.sim_instance_name,
partition_parameter_kw={'_': sim_card_parameters},
software_type='core-network',
filter_kw={"instance_guid": cls.cn_instance_guid},
shared=True, state='started')
@classmethod
def setup_websocket_connection(cls):
ue_instance = cls.retry_request(cls.request_ue)
cls.waitUntilGreen(cls.ue_instance_name)
cls.ue_com_addr = ue_instance.get('com_addr') if ue_instance else None
if not cls.ue_com_addr:
cls.logger.error("Failed to obtain UE com address.")
return
cls.ws_url = f"ws://{cls.ue_com_addr}"
cls.logger.info(f"Websocket URL: {cls.ws_url}")
for attempt in range(cls.max_retries):
try:
cls.ws = create_connection(cls.ws_url)
cls.logger.info("Websocket connection established.")
break
except Exception as e:
cls.logger.error(f"Websocket connection attempt {attempt + 1} failed: {e}")
if attempt < cls.max_retries - 1:
time.sleep(5)
@classmethod
def request_ue(cls):
cls.logger.info("Request "+ cls.ue_instance_name)
ue_parameters = json.dumps({
"n_antenna_dl": 2,
"n_antenna_ul": 2,
"dl_earfcn": cls.dl_earfcn,
"sim_algo": "xor",
"imsi": "505010123456789",
"k": "00112233445566778899aabbccddeeff",
"imeisv": "8682430000000101",
"impi": "505010123456789@ims.mnc505.mcc001.3gppnetwork.org",
"impu": ["505010123456789", "tel:0600000000", "tel:600"]
})
return cls.retry_request(cls.request, cls.ue_product, cls.ue_instance_name,
filter_kw={"computer_guid": cls.comp_ue},
partition_parameter_kw={'_': ue_parameters},
software_type='ue-lte')
@classmethod
def tearDownClass(cls):
if hasattr(cls, 'ws') and cls.ws is not None:
cls.ws.close()
super().tearDownClass()
def send(self, msg):
self.ws.send(json.dumps(msg))
def recv(self):
return json.loads(self.ws.recv())
def ue_get(self):
self.send({"message": "ue_get"})
result = self.recv()
if 'message' not in result:
raise ValueError(f"Unexpected response format: {result}")
if 'ue_list' in result:
if not result['ue_list']:
raise ValueError(f"No UE found in response: {result}")
return result['ue_list'][0]
else:
return result
def power_on(self, ue_id):
self.assertFalse(self.ue_get()['power_on'], "UE already powered on")
self.send({"message": "power_on", "ue_id": ue_id})
self.recv()
def power_off(self, ue_id):
self.assertTrue(self.ue_get()['power_on'], "UE already powered off")
self.send({"message": "power_off", "ue_id": ue_id})
self.recv()
class ORSTest(WebsocketTestClass):
def test_ue_has_ip(self):
result = self.recv()
result = self.ue_get()
ue_id = result['ue_id']
try:
self.power_on(ue_id)
time.sleep(5)
result = self.ue_get()
self.assertIn('pdn_list', result, "UE didn't connect")
self.assertIn('ipv4', result['pdn_list'][0], "UE didn't get IPv4")
self.logger.info("UE connected with ip: " + result['pdn_list'][0]['ipv4'])
finally:
self.power_off(ue_id)
def test_max_rx_sample_db(self):
custom_params = {"max_rx_sample_db": -99}
ORSTest.request_enb(custom_params)
self.waitUntilPromises(ORSTest.enb_instance_name, promise_name="check-rx-saturated", expected=False)
def test_min_rxtx_delay(self):
# Fixed by 9798ef1e, change `expected` to False when released
custom_params = {"min_rxtx_delay": 99}
ORSTest.request_enb(custom_params)
self.waitUntilPromises(ORSTest.enb_instance_name, promise_name="check-baseband-latency", expected=True)
[buildout]
extends =
../../stack/erp5-zope2/buildout.cfg
[erp5]
branch = zope2
......@@ -90,7 +90,6 @@ This software release assigns the following port ranges by default:
balancer 2150-2199
zope 2200-*
jupyter 8888
caucase 8890,8891
==================== ==========
Non-zope partitions are unique in an ERP5 cluster, so you shouldn't have to
......
......@@ -5,7 +5,7 @@
"additionalProperties": false,
"definitions": {
"routing-rule-list": {
"description": "Maps the path received in requests to given zope path. Rules are applied in the order they are given. This requires the path received from the outside world (typically: frontend) to have its root correspond to Zope's root (for frontend: 'path' parameter must be empty), with the customary VirtualHostMonster construct (for frontend: 'type' must be 'zope').",
"description": "Maps the path received in requests to given zope path. Rules are applied in the order they are given, after 'internal-path' from 'frontend' parameter. This also supports legacy frontends, using Rapid CDN with \"zope\" type.",
"type": "array",
"default": [
[
......@@ -37,7 +37,7 @@
},
"properties": {
"sla-dict": {
"description": "Where to request instances. Each key is a query string for criterions (e.g. \"computer_guid=foo\"), and each value is a list of partition references (note: Zope partitions reference must be prefixed with \"zope-\").",
"description": "Where to request instances. Each key is a query string for criterions (e.g. \"computer_guid=foo\"), and each value is a list of partition references (notes: Zope partitions reference must be prefixed with \"zope-\", frontends must be prefixed with \"frontend-\").",
"additionalProperties": {
"type": "array",
"items": {
......@@ -107,11 +107,23 @@
"description": "Set open file descriptors soft limit to hard limit",
"type": "boolean"
},
"python-hash-seed": {
"description": "Sets the value of `PYTHONHASHSEED` environment variable for zope processes and test runner. If not provided, zope processes use python default (`0` for python2, `random` for python3) and test runner choose a different `PYTHONHASHSEED` for each execution.",
"oneOf": [
{
"type": "number"
},
{
"const": "random",
"type": "string"
}
]
},
"family-override": {
"description": "Family-wide options, possibly overriding global options",
"default": {},
"patternProperties": {
".*": {
"^[a-zA-Z0-9_-]+$": {
"default": {},
"properties": {
"webdav": {
......@@ -159,33 +171,41 @@
"type": "object"
},
"frontend": {
"description": "Front-end slave instance request parameters",
"properties": {
"software-url": {
"description": "Front-end's software type. If this parameter is empty, no front-end instance is requested. Else, sla-dict must specify 'frontend' which is a special value matching all frontends (e.g. {\"instance_guid=bar\": [\"frontend\"]}).",
"default": "",
"type": "string",
"format": "uri"
},
"domain": {
"description": "The domain name to request front-end to respond as.",
"default": "",
"type": "string"
},
"software-type": {
"description": "Request a front-end slave instance of this software type.",
"default": "RootSoftwareInstance",
"type": "string"
},
"virtualhostroot-http-port": {
"description": "Front-end slave http port. Port where http requests to frontend will be redirected.",
"default": 80,
"type": "integer"
},
"virtualhostroot-https-port": {
"description": "Front-end slave https port. Port where https requests to frontend will be redirected.",
"default": 443,
"type": "integer"
"description": "Frontend shared instances requests parameters. When this parameter is unset, the system defaults to requesting a frontend, but only when exactly one family exists in `zope-partition-dict`. For more complex zope partition layout, the frontend layout also have to be explicitly defined.",
"default": {
"default": {}
},
"patternProperties": {
"^[a-zA-Z0-9_-]+$": {
"required": [
"zope-family"
],
"properties": {
"zope-family": {
"description": "The zope family to which the requests will be routed.",
"type": "string"
},
"internal-path": {
"description": "Internal path from the backend. `%(site-id)s` is substituted by the site id.",
"type": "string",
"default": "/%(site-id)s"
},
"software-url": {
"description": "Software URL of the frontend shared instance.",
"type": "string",
"format": "uri",
"default": "http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg"
},
"software-type": {
"description": "Software type of the frontend shared instance.",
"type": "string"
},
"instance-parameters": {
"description": "Instance parameters for the frontend shared instance.",
"$ref": "../rapid-cdn/instance-slave-input-schema.json"
}
},
"type": "object"
}
},
"type": "object"
......@@ -196,13 +216,14 @@
"1": {}
},
"patternProperties": {
".*": {
"^[a-zA-Z0-9_-]+$": {
"additionalProperties": false,
"properties": {
"family": {
"description": "The family this partition is part of. For example: 'public', 'admin', 'backoffice', 'web-service'... Each family gets its own balancer entry. It has no special meaning for the system.",
"default": "default",
"type": "string"
"type": "string",
"pattern": "^[a-zA-Z0-9_-]+$"
},
"instance-count": {
"description": "Number of Zopes to setup on this partition",
......@@ -505,7 +526,7 @@
"properties": {
"url": {
"title": "Caucase URL",
"description": "URL of existing caucase instance to use. If empty, a new caucase instance will be deployed. If not empty, other properties in this section will be ignored.",
"description": "URL of existing caucase instance to use. If empty, caucase instances will be deployed inside partitions.",
"default": "",
"type": "string",
"format": "uri"
......@@ -712,15 +733,9 @@
},
"uniqueItems": true
},
"caucase-url": {
"title": "Caucase URL",
"description": "URL of caucase service to use. If not set, global setting will be used.",
"type": "string",
"format": "uri"
},
"csr": {
"title": "csr",
"description": "PEM-encoded certificate signature request to request server certificate with. If not provided, HTTPS will be disabled.",
"description": "PEM-encoded certificate signature request to request server certificate with.",
"type": "string"
},
"max-crl-update-delay": {
......
......@@ -69,7 +69,7 @@
"type": "string"
},
"caucase-http-url": {
"description": "Caucase url on HTTP. For HTTPS URL, uses https scheme, if port is explicitely specified in http URL, take that port and add 1 and use it as https port. If it is not specified.",
"description": "Caucase url on HTTP. For HTTPS URL, uses https scheme, if port is explicitely specified in http URL, take that port and add 1 and use it as https port.",
"pattern": "^http://",
"type": "string"
}
......@@ -79,6 +79,11 @@
"description": "Zope family access information",
"pattern": "^https://",
"type": "string"
},
"url-frontend-.*": {
"description": "Frontend URL, following `url-frontend-{frontend_name}` naming scheme",
"pattern": "^https://",
"type": "string"
}
},
"type": "object"
......
......@@ -314,7 +314,7 @@ class CaucaseCertificate(ManagedResource):
)
return os.path.join(software_release_root_path, 'bin', 'caucase')
def request(self, common_name: str, caucase: CaucaseService) -> None:
def request(self, common_name: str, caucase: CaucaseService, san: x509.SubjectAlternativeName=None) -> None:
"""Generate certificate and request signature to the caucase service.
This overwrites any previously requested certificate for this instance.
......@@ -345,11 +345,10 @@ class CaucaseCertificate(ManagedResource):
NameOID.COMMON_NAME,
common_name,
),
])).sign(
key,
hashes.SHA256(),
default_backend(),
)
]))
if san:
csr = csr.add_extension(san, critical=True)
csr = csr.sign(key, hashes.SHA256(), default_backend())
with open(self.csr_file, 'wb') as f:
f.write(csr.public_bytes(serialization.Encoding.PEM))
......
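
A standalone sketch, using the cryptography package with illustrative names, of building a CSR that carries a SubjectAlternativeName extension, which the request() change above now makes possible for managed certificates:

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend())
# Optional SAN, attached only when given, as in the patched request().
san = x509.SubjectAlternativeName([x509.DNSName(u"backend.example.com")])
csr = (
    x509.CertificateSigningRequestBuilder()
    .subject_name(x509.Name([
        x509.NameAttribute(NameOID.COMMON_NAME, u"backend"),
    ]))
    .add_extension(san, critical=True)
    .sign(key, hashes.SHA256(), default_backend())
)
with open("example.csr.pem", "wb") as f:
    f.write(csr.public_bytes(serialization.Encoding.PEM))
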
This diff is collapsed.
This diff is collapsed.
......@@ -145,8 +145,7 @@ bundle1.17.3 = ${buildout:parts-directory}/${:_buildout_section_name_}/lib/ruby/
# gitlab wants git to be really on path ( it uses git from abspath defined in
# gitlab.yml, but not all cases are like this, e.g. in
# https://gitlab.com/gitlab-org/gitlab_git/blob/2f0d3c1a/lib/gitlab_git/repository.rb#L259 )
# gitlab (via github-markup) wants to convert rst -> html via running: python2 (with docutils egg)
# (python-4gitlab puts interpreter into ${buildout:bin-directory})
# gitlab (via github-markup) wants to convert rst -> html via running: python (with docutils egg)
environment =
PATH = ${python-4gitlab:bin}:${yarn:location}/bin:${:ruby-location}/bin:${cmake:location}/bin:${pkgconfig:location}/bin:${nodejs:location}/bin:${postgresql10:location}/bin:${redis28:location}/bin:${git:location}/bin:${buildout:bin-directory}:%(PATH)s
......@@ -161,7 +160,7 @@ git-executable = ${git:location}/bin/git
[gitlab-repository]
<= git-repository
repository = https://lab.nexedi.com/nexedi/gitlab-ce.git
revision = v12.10.14-11-gdf89aa7e6bf
revision = v12.10.14-12-g7ce27b49193
location = ${buildout:parts-directory}/gitlab
[gitlab-shell-repository]
......@@ -239,7 +238,7 @@ environment =
[gowork.goinstall]
git2go = ${go_github.com_libgit2_git2go_prepare:path}/vendor/libgit2/install
command = bash -c ". ${gowork:env.sh} && CGO_CFLAGS=-I${:git2go}/include CGO_LDFLAGS='-L${:git2go}/lib -lgit2' go install ${gowork:buildflags} -v $(echo -n '${gowork:install}' |tr '\n' ' ')"
command = bash -c ". ${gowork:env.sh} && CGO_CFLAGS=-I${:git2go}/include CGO_LDFLAGS='-L${:git2go}/lib -lgit2' go install ${gowork:buildflags} -v $(echo -n '${gowork:install}' |tr '\n' ' ') && go test -v lab.nexedi.com/kirr/git-backup"
[gowork]
golang = ${golang1.13:location}
......@@ -264,6 +263,7 @@ make-targets =
[gitlab-backup]
recipe = plone.recipe.command
stop-on-error = true
command =
cp -a ${go_lab.nexedi.com_kirr_git-backup:location}/contrib/gitlab-backup ${gowork:bin}
update-command = ${:command}
......
# Javascript drone #
## Presentation ##
* Deploy `user.js` flight script on a drone swarm
* Deploy a GUI on subscribers
* Run the flight script or the GUI as a SlapOS service
* Compile all required libraries to run the flight script
## Parameters ##
* autopilotIp: IPv4 address to identify the autopilot from the companion board
* droneGuidList: List of computer id on which flight script must be deployed
* isASimulation: Must be set to 'true' to automatically take off during simulation
* multicastIp: IPv6 of the multicast group of the swarm
* netIf: Network interface used for multicast traffic
* flightScript: URL of user's script to execute to fly drone swarm
* subscriberGuidList: List of computer id on which a GUI must be deployed

## How it works ##

For each computer listed in `droneGuidList` and `subscriberGuidList` the `peer` SR type will be instantiated.

Each instance will return an `instance-path`. Under this path one will find the `quickjs` binary in the `bin` folder
and `scripts` in the `etc` folder. Subscribers also return a `httpd-url` (the GUI address) and a `websocket-url` (used by the
GUI).

`quickjs binary location` `scripts location`/main.js `scripts location`/user.js is run as a SlapOS service. This allows
each instance to communicate with the others through OPC-UA pub/sub. For the drones it also establishes a connection with
the UAV autopilot; for a subscriber it sends the pub/sub messages through the websocket.

## Web GUI (subscribers)

### Drone information

For each drone the following is displayed:
* the user script and autopilot logs
* the flight state (ready, flying, landing)
* the latitude in degrees
* the longitude in degrees
* the relative altitude in meters
* the yaw angle in degrees
* the speed (ground speed for multicopters, airspeed for fixed wings) in meters per second
* the climb rate in meters per second
### Buttons
* Start: sends a "start" message to the swarm and changes into a stop button
* Stop: sends a "stop" message to the swarm
* Switch leader: sends a "switch" message to the swarm; it is usually used to change the leader
* Quit: exits (closes websocket and stops pub/sub)
![GUI screenshot](images/js-drone_GUI_screenshot.png)
......@@ -12,26 +12,34 @@
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[index-html]
_update_hash_filename_ = web-gui/index.html.jinja2
md5sum = 1eedc017ecc9d1a6761dc2fff3bbab9b
[instance-profile]
filename = instance.cfg
md5sum = 360b58007c25727b7bd8a9154d5cafd4
filename = instance.cfg.in
md5sum = 80dae3e883663311d9814def78ee875a
[instance-default]
filename = instance-default.cfg
md5sum = 903939308701b11b1ff751784a9be110
filename = instance-default.cfg.jinja2
md5sum = 9db922cc0fcaa67006a2d6b9b95b95fe
[instance-drone]
filename = instance-drone.cfg
md5sum = 1ff50063f5a54712a0bc0ff38fa74630
[instance-peer]
filename = instance-peer.cfg.jinja2.in
md5sum = d12fbb134c587173ddff46ff1bc6ffe7
[main]
filename = main.js
md5sum = d0bfcc79cdd7c1e5b8f5d264cc59074e
_update_hash_filename_ = drone-scripts/main.js.jinja2
md5sum = 9a8ec8a2778f63789f39291795f47e98
[pubsub]
filename = pubsub.js
_update_hash_filename_ = drone-scripts/pubsub.js.jinja2
md5sum = 1555496ad591a31a845f33488d5c335d
[script-js]
_update_hash_filename_ = web-gui/script.js.jinja2
md5sum = e28492276416c2d84e770217ae97a88f
[worker]
filename = worker.js
md5sum = e4b4ca3bde1a21f1dbfc4ff7fa3b872c
_update_hash_filename_ = drone-scripts/worker.js.jinja2
md5sum = 48540afedd5437129196d84832d2ed40
/*jslint nomen: true, indent: 2, maxerr: 3, maxlen: 80 */
/*global arm, console, exit, open, scriptArgs, setTimeout, start, stop,
stopPubsub, takeOffAndWait, Worker*/
/*global arm, console, close, dup2, exit, open, scriptArgs, setTimeout, start,
stop, stopPubsub, takeOffAndWait, Worker, SIGINT, SIGTERM*/
import {
arm,
start,
......@@ -8,17 +8,27 @@ import {
stopPubsub,
takeOffAndWait
} from {{ json_module.dumps(qjs_wrapper) }};
import { setTimeout, Worker } from "os";
import { open, exit } from "std";
(function (arm, console, exit, open, scriptArgs, setTimeout, start, stop,
stopPubsub, takeOffAndWait, Worker) {
import {
Worker,
SIGTERM,
dup2,
setTimeout,
signal
} from "os";
import { err, exit, open, out } from "std";
(function (arm, console, dup2, err, exit, open, out, scriptArgs,
setTimeout, start, stop, stopPubsub, takeOffAndWait, Worker,
SIGTERM) {
"use strict";
var CONF_PATH = {{ json_module.dumps(configuration) }},
conf_file = open(CONF_PATH, "r"),
configuration = JSON.parse(conf_file.readAsString()),
LOG_FILE = "{{ log_dir }}/mavsdk-log",
MAVSDK_LOG_FILE_PATH =
"{{ log_dir }}/mavsdk_" + new Date().toISOString() + ".log",
LOG_FILE =
open("{{ log_dir }}/quickjs_" + new Date().toISOString() + ".log", "w"),
pubsubWorker,
worker,
user_script = scriptArgs[1],
......@@ -28,6 +38,10 @@ import { open, exit } from "std";
conf_file.close();
// redirect stdout and stderr
dup2(LOG_FILE.fileno(), out.fileno());
dup2(LOG_FILE.fileno(), err.fileno());
// Use a Worker to ensure the user script
// does not block the main script
// (preventing it from being stopped, for example)
......@@ -37,17 +51,28 @@ import { open, exit } from "std";
worker = new Worker("{{ worker_script }}");
function quit(is_a_drone, exit_code) {
worker.onmessage = null;
stopPubsub();
if (is_a_drone) {
stop();
}
LOG_FILE.close();
exit(exit_code);
}
function exitWorker(exit_code) {
worker.postMessage({
type: "exit",
code: exit_code
});
}
signal(SIGTERM, exitWorker.bind(null, 0));
function exitOnFail(ret, msg) {
if (ret) {
console.log(msg);
quit(1);
exitWorker(1);
}
}
......@@ -55,7 +80,12 @@ import { open, exit } from "std";
var address = configuration.autopilotIp + ":" + configuration.autopilotPort;
console.log("Will connect to", address);
exitOnFail(
start(configuration.autopilotIp, configuration.autopilotPort, LOG_FILE, 60),
start(
configuration.autopilotIp,
configuration.autopilotPort,
MAVSDK_LOG_FILE_PATH,
60
),
"Failed to connect to " + address
);
}
......@@ -87,7 +117,7 @@ import { open, exit } from "std";
// First argument must provide the user script path
if (user_script === undefined) {
console.log('Please provide the user_script path.');
quit(1);
exitWorker(1);
}
worker.postMessage({
......@@ -138,14 +168,16 @@ import { open, exit } from "std";
// Start the update loop
loop();
} else if (type === 'updated') {
err.flush();
out.flush();
can_update = true;
} else if (type === 'exited') {
worker.onmessage = null;
quit(configuration.isADrone, e.data.exit);
} else {
console.log('Unsupported message type', type);
quit(configuration.isADrone, 1);
exitWorker(1);
}
};
}(arm, console, exit, open, scriptArgs, setTimeout, start, stop, stopPubsub,
takeOffAndWait, Worker));
}(arm, console, dup2, err, exit, open, out, scriptArgs, setTimeout, start, stop,
stopPubsub, takeOffAndWait, Worker, SIGTERM));
/*jslint nomen: true, indent: 2, maxerr: 3, maxlen: 80 */
/*global console, getAltitude, getAltitudeRel, getInitialAltitude, gpsIsOk,
getLatitude, getLongitude, getYaw, execUserScript, initPubsub, loiter,
setAirSpeed, setMessage, setTargetCoordinates, std, triggerParachute,
updateLogAndProjection, Drone, Worker*/
import {
Drone,
triggerParachute,
getAirspeed,
getAltitude,
getClimbRate,
getInitialAltitude,
gpsIsOk,
getPosition,
getYaw,
initPubsub,
isLanding,
loiter,
setAirSpeed,
setMessage,
setTargetCoordinates,
updateLogAndProjection
} from {{ json_module.dumps(qjs_wrapper) }};
import {
SIGTERM,
WNOHANG,
Worker,
close,
exec,
kill,
pipe,
setReadHandler,
waitpid
} from "os";
import { evalScript, fdopen, loadFile, open } from "std";
(function (Drone, SIGTERM, WNOHANG, Worker, close, console, evalScript, exec,
fdopen, getAltitude, getInitialAltitude, gpsIsOk, getPosition,
getYaw, initPubsub, kill, isLanding, loadFile, loiter, open, pipe,
setAirSpeed, setMessage, setReadHandler, setTargetCoordinates,
triggerParachute, updateLogAndProjection, waitpid) {
// Every script is evaluated per drone
"use strict";
var CONF_PATH = {{ json_module.dumps(configuration) }},
conf_file = open(CONF_PATH, "r"),
configuration = JSON.parse(conf_file.readAsString()),
clientId,
drone_dict = {},
gwsocket_pid,
gwsocket_r_pipe_fd,
gwsocket_w_pipe_fd,
handleWebSocketMessage,
last_message_timestamp = 0,
last_log_timestamp = 0,
parent = Worker.parent,
peer_dict = {},
user_me = {
//required to fly
triggerParachute: triggerParachute,
exit: exitWorker,
getDroneDict: function () { return drone_dict; },
getAltitudeAbs: getAltitude,
getCurrentPosition: getPosition,
getInitialAltitude: getInitialAltitude,
gpsIsOk: gpsIsOk,
getYaw: getYaw,
getSpeed: getAirspeed,
getClimbRate: getClimbRate,
id: configuration.id,
isLanding: isLanding,
loiter: loiter,
sendMsg: function (msg, id) {
if (id === undefined) { id = -1; }
setMessage(JSON.stringify({
content: msg,
timestamp: Date.now(),
dest_id: id
}));
},
setAirSpeed: setAirSpeed,
setTargetCoordinates: setTargetCoordinates
};
conf_file.close();
function exitWorker(exit_code) {
if (user_me.hasOwnProperty("onWebSocketMessage")) {
stopGwsocket();
}
parent.postMessage({type: "exited", exit: exit_code});
parent.onmessage = null;
}
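// gwsocket --strict framing: each message read from or written to the
// gwsocket pipes is prefixed by three 4-byte big-endian integers
// (client id, message type, payload length), followed by the payload.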
function readMessage(rd) {
function read4() {
var b1, b2, b3, b4;
b1 = rd.getByte();
b2 = rd.getByte();
b3 = rd.getByte();
b4 = rd.getByte();
return (b1 << 24) | (b2 << 16) | (b3 << 8) | b4;
}
clientId = read4();
var type = read4();
var len = read4();
var data = new ArrayBuffer(len);
rd.read(data, 0, len);
return {
client: clientId,
type: type,
data: String.fromCharCode.apply(null, new Uint8Array(data)).trim()
};
}
function writeMessage(wr, m) {
function write4(v) {
wr.putByte((v >> 24) & 0xFF);
wr.putByte((v >> 16) & 0xFF);
wr.putByte((v >> 8) & 0xFF);
wr.putByte(v & 0xFF);
}
write4(m.client);
write4(m.type);
write4(m.data.byteLength);
wr.write(m.data, 0, m.data.byteLength);
wr.flush();
}
function runGwsocket(onMessage) {
var gwsocket_w_pipe = pipe(),
gwsocket_r_pipe = pipe();
gwsocket_pid = exec([
"gwsocket",
"--port=" + configuration.websocketPort,
"--addr=" + configuration.websocketIp,
"--std",
"--strict"
], {
block: false,
usePath: false,
file: {{ json_module.dumps(gwsocket_bin) }},
stdin: gwsocket_w_pipe[0],
stdout: gwsocket_r_pipe[1]
});
gwsocket_w_pipe_fd = fdopen(gwsocket_w_pipe[1], "w");
gwsocket_r_pipe_fd = fdopen(gwsocket_r_pipe[0], "r");
handleWebSocketMessage = function () {
var message = readMessage(gwsocket_r_pipe_fd).data;
if (message.includes(configuration.websocketIp)) {
return;
}
onMessage(message);
};
user_me.writeWebsocketMessage = function (message) {
var buf = new ArrayBuffer(message.length);
var bufView = new Uint8Array(buf);
for (var i=0; i<message.length; i++) {
bufView[i] = message.charCodeAt(i);
}
writeMessage(gwsocket_w_pipe_fd, {client: clientId, type: 1, data: buf});
}
setReadHandler(gwsocket_r_pipe[0], handleWebSocketMessage);
}
function stopGwsocket() {
handleWebSocketMessage = null;
close(gwsocket_w_pipe_fd);
close(gwsocket_r_pipe_fd);
kill(gwsocket_pid, SIGTERM);
waitpid(gwsocket_pid, WNOHANG);
}
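// The user script is wrapped into execUserScript(from, me) by loadUserScript
// below: it receives the flight API defined in user_me above and may attach
// onStart, onUpdate(timestamp), onGetMsg(content) and onWebSocketMessage
// callbacks on "me". A minimal hypothetical user script:
//   me.onStart = function () { me.setAirSpeed(16); };
//   me.onUpdate = function (timestamp) {
//     me.sendMsg({altitude: me.getAltitudeAbs()});
//   };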
function loadUserScript(path) {
var script_content = loadFile(path);
if (script_content === null) {
console.log("Failed to load user script " + path);
exitWorker(1);
}
try {
evalScript(
"function execUserScript(from, me) {" + script_content + "};"
);
} catch (e) {
console.log("Failed to evaluate user script", e);
exitWorker(1);
}
execUserScript(null, user_me);
if (user_me.hasOwnProperty("onWebSocketMessage")) {
runGwsocket(user_me.onWebSocketMessage);
}
// Call the drone onStart function
if (user_me.hasOwnProperty("onStart")) {
user_me.onStart();
}
}
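// Protocol with the main script: the parent posts {type: "initPubsub"},
// {type: "load", path: ...}, {type: "update", timestamp: ...} or
// {type: "exit", code: ...}; the worker answers with {type: "initialized"},
// {type: "loaded"}, {type: "updated"} or {type: "exited", exit: code}.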
function handleMainMessage(evt) {
var type = evt.data.type, message, peer_id;
switch (type) {
case "initPubsub":
initPubsub(configuration.numberOfDrone, configuration.numberOfSubscriber);
for (peer_id = 0; peer_id < configuration.numberOfDrone + configuration.numberOfSubscriber; peer_id++) {
peer_dict[peer_id] = new Drone(peer_id);
peer_dict[peer_id].init(peer_id);
if (peer_id < configuration.numberOfDrone) {
drone_dict[peer_id] = peer_dict[peer_id];
}
}
parent.postMessage({type: "initialized"});
break;
case "load":
loadUserScript(evt.data.path);
parent.postMessage({type: "loaded"});
break;
case "update":
Object.entries(peer_dict).forEach(function ([id, peer]) {
message = peer.message;
if (user_me.id !== Number(id) && message.length > 0) {
message = JSON.parse(message);
if (message.timestamp != last_message_timestamp &&
user_me.hasOwnProperty("onGetMsg") &&
[-1, user_me.id].includes(message.dest_id)) {
last_message_timestamp = message.timestamp;
user_me.onGetMsg(message.content);
}
}
});
// Call the drone onUpdate function
if (user_me.hasOwnProperty("onUpdate")) {
user_me.onUpdate(evt.data.timestamp);
}
if (evt.data.timestamp - last_log_timestamp >= 1000) {
updateLogAndProjection();
last_log_timestamp = evt.data.timestamp;
}
parent.postMessage({type: "updated"});
break;
case "exit":
exitWorker(evt.data.code);
break;
default:
throw new Error("Unsupported message type: " + type);
}
}
parent.onmessage = function (evt) {
try {
handleMainMessage(evt);
} catch (error) {
// Catch any potential bug so that the main process
// exits if one occurs
console.log(error);
exitWorker(1);
}
};
}(Drone, SIGTERM, WNOHANG, Worker, close, console, evalScript, exec,
fdopen, getAltitude, getInitialAltitude, gpsIsOk, getPosition, getYaw,
initPubsub, isLanding, kill, loadFile, loiter, open, pipe, setAirSpeed,
setMessage, setReadHandler, setTargetCoordinates, triggerParachute,
updateLogAndProjection, waitpid));
{% set autopilot_ip = slapparameter_dict.get('autopilotIp', '192.168.27.1') -%}
{% set autopilot_port = slapparameter_dict.get('autopilotPort', 7909) -%}
{% set flight_script = slapparameter_dict.get('flightScript', 'https://lab.nexedi.com/nexedi/flight-scripts/raw/master/default.js') -%}
{% set is_a_simulation = slapparameter_dict.get('isASimulation', False) -%}
{% set multicast_ip = slapparameter_dict.get('multicastIp', 'ff15::1111') -%}
{% set net_if = slapparameter_dict.get('netIf', 'eth0') -%}
{% set drone_guid_list = slapparameter_dict.get('droneGuidList', []) -%}
{% set subscriber_guid_list = slapparameter_dict.get('subscriberGuidList', []) -%}
{% set guid_list = drone_guid_list + subscriber_guid_list -%}
{% set nb_peer = len(guid_list) -%}
{% set parameter_dict = dict(default_parameter_dict, **parameter_dict) -%}
{% set guid_list = parameter_dict['droneGuidList'] + parameter_dict['subscriberGuidList'] -%}
{% set drone_id_list = [] -%}
{% set subscriber_id_list = [] -%}
{% set part_list = ['publish-connection-information'] -%}
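{#- One partition is requested per computer guid: drones come first in
    guid_list, so ids below the number of drones are flagged isADrone and the
    remaining ids become subscribers. -#}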
{% for id, guid in enumerate(guid_list) -%}
{% set request_drone_section_title = 'request-drone' ~ id -%}
{% do part_list.append(request_drone_section_title) %}
[{{ request_drone_section_title }}]
{% set request_peer_section_title = 'request-peer' ~ id -%}
{% do part_list.append(request_peer_section_title) %}
[{{ request_peer_section_title }}]
<= slap-connection
recipe = slapos.cookbook:request.serialised
name = Drone{{ id }}
software-url = $${:software-release-url}
software-type = drone
name = Peer{{ id }}
software-url = ${:software-release-url}
software-type = peer
return = instance-path
sla-computer_guid = {{ guid }}
config-autopilotIp = {{ autopilot_ip }}
config-autopilotPort = {{ dumps(autopilot_port) }}
config-numberOfPeers = {{ dumps(nb_peer) }}
config-autopilotIp = {{ parameter_dict['autopilotIp'] }}
config-autopilotPort = {{ dumps(parameter_dict['autopilotPort']) }}
config-numberOfDrone = {{ dumps(len(parameter_dict['droneGuidList'])) }}
config-numberOfSubscriber = {{ dumps(len(parameter_dict['subscriberGuidList'])) }}
config-id = {{ dumps(id) }}
config-isASimulation = {{ dumps(is_a_simulation) }}
{% if guid in drone_guid_list -%}
config-isASimulation = {{ dumps(parameter_dict['isASimulation']) }}
{% if id < len(parameter_dict['droneGuidList']) -%}
{% do drone_id_list.append(id) %}
config-isADrone = {{ dumps(True) }}
config-flightScript = {{ flight_script }}
config-flightScript = {{ parameter_dict['flightScript'] }}
{% else -%}
{% do subscriber_id_list.append(id) %}
config-isADrone = {{ dumps(False) }}
config-flightScript = https://lab.nexedi.com/nexedi/flight-scripts/raw/master/subscribe.js
config-flightScript = https://lab.nexedi.com/nexedi/flight-scripts/-/raw/v2.0/subscribe.js
{% endif -%}
config-multicastIp = {{ multicast_ip }}
config-netIf = {{ net_if }}
config-multicastIp = {{ parameter_dict['multicastIp'] }}
config-netIf = {{ parameter_dict['netIf'] }}
{% endfor %}
[publish-connection-information]
......@@ -50,6 +41,6 @@ subscriber-id-list = {{ dumps(subscriber_id_list) }}
[buildout]
parts =
{%- for part in part_list %}
{% for part in part_list %}
{{ part }}
{%- endfor -%}
{% endfor %}
......@@ -14,7 +14,7 @@
"title": "Port of the drone's autopilot",
"description": "Port on which autopilot service is running.",
"type": "integer",
"default": "7909"
"default": 7909
},
"droneGuidList": {
"title": "List of drones computer ID",
......@@ -44,7 +44,7 @@
"title": "Script's URL of the flight",
"description": "URL of the script which will be executed for the flight. This URL must be publicly accesible so that the drone can fetch the script.",
"type": "string",
"default": "https://lab.nexedi.com/nexedi/flight-scripts/raw/master/default.js"
"default": "https://lab.nexedi.com/nexedi/flight-scripts/-/raw/v2.0/default.js"
},
"subscriberGuidList": {
"title": "List of subscribers computer ID",
......
......@@ -14,9 +14,14 @@
"description": "Port on which autopilot service is running.",
"type": "integer"
},
"numberOfPeers": {
"title": "Number of Peers",
"description": "Number of drones and subscribers in the swarm",
"numberOfDrone": {
"title": "Number of drone",
"description": "Number of drone in the swarm",
"type": "integer"
},
"numberOfSubscriber": {
"title": "Number of subscriber",
"description": "Number of subscriber of the swarm",
"type": "integer"
},
"id": {
......
[buildout]
parts =
main
symlink-quickjs-binary
qjs-launcher
publish-connection-information
[directory]
recipe = slapos.cookbook:mkdirectory
home = $${buildout:directory}
bin = $${:home}/bin
etc = $${:home}/etc
srv = $${:home}/srv
var = $${:home}/var
log = $${:var}/log
public = $${:srv}/public
service = $${:etc}/service
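# In this template ${...} references are resolved when the software release is
# built, while the escaped $${...} form survives rendering and is resolved at
# instance time.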
[js-dynamic-template]
recipe = slapos.recipe.template:jinja2
rendered = $${directory:etc}/$${:_buildout_section_name_}.js
template = ${buildout:directory}/$${:_buildout_section_name_}.js
extra-context =
context =
import json_module json
raw gwsocket_bin ${gwsocket:location}/bin/gwsocket
raw qjs_wrapper ${qjs-wrapper:location}/lib/libqjswrapper.so
raw configuration {{ configuration }}
$${:extra-context}
[main]
<= js-dynamic-template
template = ${main:target}
extra-context =
key log_dir directory:log
key pubsub_script pubsub:rendered
......@@ -32,19 +37,58 @@ extra-context =
[pubsub]
<= js-dynamic-template
template = ${pubsub:target}
[worker]
<= js-dynamic-template
template = ${worker:target}
[user]
recipe = slapos.recipe.build:download
url = {{ parameter_dict['flightScript'] }}
destination = $${directory:etc}/user.js
offline = false
[qjs-launcher]
recipe = slapos.cookbook:wrapper
wrapper-path = $${directory:service}/qjs-launcher
command-line = ${quickjs:location}/bin/qjs $${main:rendered} $${user:target}
[script-js]
recipe = slapos.recipe.template:jinja2
template = ${script-js:target}
rendered = $${directory:public}/script.js
websocket-url = [{{ ipv6 }}]:{{ websocket_port }}
context =
raw websocket_url $${:websocket-url}
[index-html]
recipe = slapos.recipe.template:jinja2
template = ${index-html:target}
rendered = $${directory:public}/index.html
context =
raw nb_drones {{ parameter_dict['numberOfDrone'] }}
[httpd-port]
recipe = slapos.cookbook:free_port
minimum = 8080
maximum = 8090
ip = {{ ipv6 }}
[symlink-quickjs-binary]
recipe = slapos.recipe.build
binary-path = ${quickjs:location}/bin/qjs
target = $${directory:bin}/qjs
init =
import os
if not os.path.exists(options['target']):
os.symlink(options['binary-path'], options['target'])
[httpd]
recipe = slapos.cookbook:simplehttpserver
host = {{ ipv6 }}
port = $${httpd-port:port}
base-path = $${directory:public}
wrapper = $${directory:service}/http-server
log-file = $${directory:log}/httpd.log
use-hash-url = false
depends = $${index-html:rendered}
[publish-connection-information]
recipe = slapos.cookbook:publish.serialised
instance-path = $${directory:home}
{% if not parameter_dict['isADrone'] -%}
httpd-url = [$${httpd:host}]:$${httpd:port}
websocket-url = ws://$${script-js:websocket-url}
{% endif -%}
......@@ -9,7 +9,7 @@ offline = true
[switch-softwaretype]
recipe = slapos.cookbook:switch-softwaretype
default = instance-default:output
drone = instance-drone:output
peer = instance-peer:output
RootSoftwareInstance = $${:default}
[slap-configuration]
......@@ -22,36 +22,58 @@ cert = $${slap_connection:cert_file}
[dynamic-template-base]
recipe = slapos.recipe.template:jinja2
url = ${buildout:directory}/$${:_buildout_section_name_}.cfg
output = $${buildout:directory}/$${:_buildout_section_name_}
output = $${buildout:directory}/$${:_buildout_section_name_}.cfg
extra-context =
context =
jsonkey default_parameter_dict :default-parameters
key parameter_dict slap-configuration:configuration
$${:extra-context}
default-parameters =
{
"autopilotIp": "192.168.27.1",
"autopilotPort": 7909,
"flightScript": "https://lab.nexedi.com/nexedi/flight-scripts/-/raw/v2.0/default.js",
"isASimulation": false,
"multicastIp": "ff15::1111",
"netIf": "eth0",
"droneGuidList": [],
"subscriberGuidList":[]
}
[instance-default]
<= dynamic-template-base
url = ${instance-default:target}
extensions = jinja2.ext.do
context =
key slapparameter_dict slap-configuration:configuration
[instance-drone]
<= dynamic-template-base
context =
key configuration drone-configuration:output
key user-script user:destination
[directory]
recipe = slapos.cookbook:mkdirectory
home = $${buildout:directory}
etc = $${:home}/etc
[drone-configuration]
[gwsocket-port]
recipe = slapos.cookbook:free_port
minimum = 6789
maximum = 6799
ip = $${slap-configuration:ipv6-random}
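# [peer-configuration] renders the JSON file consumed by the qjs workers (it is
# passed as "configuration" to instance-peer below): the instance parameters
# plus the gwsocket ip/port allocated above.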
[peer-configuration]
recipe = slapos.recipe.template:jinja2
output = $${directory:etc}/configuration.json
extensions = jinja2.ext.do
context =
import json_module json
key slapparameter_dict slap-configuration:configuration
inline = {{ json_module.dumps(slapparameter_dict) }}
key websocket_ip gwsocket-port:ip
key websocket_port gwsocket-port:port
key parameter_dict slap-configuration:configuration
inline =
{% do parameter_dict.__setitem__('websocketIp', websocket_ip) -%}
{% do parameter_dict.__setitem__('websocketPort', websocket_port) -%}
{{ json_module.dumps(parameter_dict) }}
[user]
recipe = slapos.recipe.build:download
url = $${slap-configuration:configuration.flightScript}
destination = $${directory:etc}/user.js
offline = false
[directory]
recipe = slapos.cookbook:mkdirectory
home = $${buildout:directory}
etc = $${:home}/etc
[instance-peer]
<= dynamic-template-base
url = ${instance-peer:output}
extra-context =
key configuration peer-configuration:output
key ipv6 slap-configuration:ipv6-random
key websocket_port gwsocket-port:port
......@@ -5,16 +5,18 @@ extends =
[sqdr-source]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/slaposdrone/squadrone.git
revision = v1.0
revision = v2.0
git-executable = ${git:location}/bin/git
[qjs-wrapper]
[sqdr-wrapper]
recipe = slapos.recipe.cmmi
configure-command = true
url =
path = ${sqdr-source:location}
md5sum =
environment =
CPLUS_INCLUDE_PATH=include:${qjs-wrapper-source:location}/include
LDFLAGS=-L${sqdr-source:location}/lib -Wl,-rpath=${sqdr-source:location}/lib
[qjs-wrapper]
environment =
C_INCLUDE_PATH=include:${open62541:location}/include:${open62541:location}/deps:${open62541:location}/src/pubsub:${quickjs:location}/include
CPLUS_INCLUDE_PATH=include
LDFLAGS=-L${open62541:location}/lib -Wl,-rpath=${open62541:location}/lib -L${sqdr-source:location}/lib -Wl,-rpath=${sqdr-source:location}/lib
LDFLAGS=-L${open62541:location}/lib -Wl,-rpath=${open62541:location}/lib -L${sqdr-wrapper:location}/lib -Wl,-rpath=${sqdr-wrapper:location}/lib
......@@ -3,42 +3,43 @@ extends =
buildout.hash.cfg
../../stack/slapos.cfg
../../component/qjs-wrapper/buildout.cfg
../../component/gwsocket/buildout.cfg
parts =
instance-profile
instance-default
instance-drone
main
pubsub
worker
slapos-cookbook
[instance-profile]
recipe = slapos.recipe.template
[instance-default]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/template.cfg
[jinja-template-base]
[template-base]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:_buildout_section_name_}.cfg
output = ${buildout:directory}/${:_buildout_section_name_}.cfg
url = ${:_profile_base_location_}/${:filename}
[instance-default]
<= jinja-template-base
[instance-peer]
<= template-base
output = ${buildout:directory}/${:_buildout_section_name_}
[instance-drone]
<= jinja-template-base
[instance-profile]
<= template-base
output = ${buildout:directory}/template.cfg
[download-file-base]
[download]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:filename}
destination = ${buildout:directory}/${:filename}
url = ${:_profile_base_location_}/${:_update_hash_filename_}
[index-html]
<= download
[main]
<= download-file-base
<= download
[pubsub]
<= download-file-base
<= download
[script-js]
<= download
[worker]
<= download-file-base
<= download
......@@ -12,11 +12,11 @@
"index": 0
},
"drone": {
"title": "Drone",
"software-type": "drone",
"description": "Drone Instance",
"request": "instance-drone-input-schema.json",
"response": "instance-drone-output-schema.json",
"title": "Peer",
"software-type": "peer",
"description": "Peer Instance",
"request": "instance-peer-input-schema.json",
"response": "instance-peer-output-schema.json",
"index": 1
}
}
......
......@@ -43,7 +43,8 @@ setup(name=name,
install_requires=[
'slapos.core',
'slapos.libnetworkcache',
'erp5.util'
'erp5.util',
'websocket-client',
],
zip_safe=True,
test_suite='test',
......
......@@ -30,12 +30,11 @@ import json
import os
import socket
import struct
import subprocess
import time
import websocket
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
MAIN_SCRIPT_NAME = 'main.js'
'''
0. positionArray
0.1 latitude
......@@ -52,7 +51,6 @@ MONITORED_ITEM_NB = 3
OPC_UA_PORT = 4840
OPC_UA_NET_IF = 'lo'
MCAST_GRP = 'ff15::1111'
USER_SCRIPT_NAME = 'user.js'
# OPC UA Pub/Sub related constants
VERSION = 1
......@@ -98,63 +96,47 @@ UA_DATETIME_UNIX_EPOCH = 11644473600 * UA_DATETIME_SEC
CONFIG_VERSION_MAJOR_VERSION = 1690792766
CONFIG_VERSION_MINOR_VERSION = 1690781976
POSITION_ARRAY_TYPE = 11 #double
POSITION_ARRAY_VALUES = (45.64, 14.25, 686.61, 91.24)
POSITION_ARRAY_TYPE = 8 #int64
POSITION_ARRAY_INPUT_VALUES = (456400000, 142500000, 686000, 91000, 1697878907)
POSITION_ARRAY_OUTPUT_COEFS = (1e7, 1e7, 1000, 1000)
POSITION_ARRAY_OUTPUT_VALUES = tuple(value / coef for value, coef in zip(POSITION_ARRAY_INPUT_VALUES[:-1], POSITION_ARRAY_OUTPUT_COEFS))
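# Positions are now exchanged as int64 fixed-point values: the coefficients
# above scale latitude/longitude (1e7) and the altitude values (1000) back to
# decimal degrees and metres; the last input element, which has no
# coefficient, is a Unix timestamp.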
SPEED_ARRAY_TYPE = 10 #float
SPEED_ARRAY_VALUES = (-72.419998, 15.93, -0.015)
STRING_TYPE = 12
TEST_MESSAGE = b'{"content":"{\\"next_checkpoint\\":1}","dest_id":-1}'
MESSAGE_CONTENT = b'{\\"next_checkpoint\\":1}'
TEST_MESSAGE = b'{"content":"' + MESSAGE_CONTENT + b'","dest_id":-1}'
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'software.cfg')))
class JSDroneTestCase(SlapOSInstanceTestCase):
class SubscriberTestCase(SlapOSInstanceTestCase):
@classmethod
def getInstanceParameterDict(cls):
return {
'_': json.dumps({
'droneGuidList': [cls.slap._computer_id],
'netIf': OPC_UA_NET_IF,
'subscriberGuidList': [cls.slap._computer_id],
})
}
def get_partition(self, instance_type):
def get_partition(self, partition_id):
software_url = self.getSoftwareURL()
for computer_partition in self.slap.computer.getComputerPartitionList():
partition_url = computer_partition.getSoftwareRelease()._software_release
partition_type = computer_partition.getType()
if partition_url == software_url and partition_type == instance_type:
if computer_partition.getId() == partition_id:
return computer_partition
raise Exception("JS-drone %s partition not found" % instance_type)
raise Exception("Partition %s not found" % partition_id)
def setUp(self):
super().setUp()
subscriber_partition = self.get_partition('drone')
instance_path = json.loads(
subscriber_partition.getConnectionParameterDict()['_'])['instance-path']
quickjs_bin = os.path.join(instance_path, 'bin', 'qjs')
script_dir = os.path.join(instance_path, 'etc')
self.qjs_process = subprocess.Popen(
[
quickjs_bin,
os.path.join(script_dir, MAIN_SCRIPT_NAME),
os.path.join(script_dir, USER_SCRIPT_NAME),
],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
)
time.sleep(0.1)
def tearDown(self):
if self.qjs_process.returncode == None:
self.qjs_process.kill()
self.qjs_process.communicate()
super().tearDown()
subscriber_partition = self.get_partition('SubscriberTestCase-2')
self.websocket_server_address = json.loads(
subscriber_partition.getConnectionParameterDict()['_'])['websocket-url']
time.sleep(0.5)
def ua_networkMessage_encodeHeader(self):
ua_byte1 = int(VERSION)
......@@ -224,8 +206,8 @@ class JSDroneTestCase(SlapOSInstanceTestCase):
data_set_message += struct.pack('H', MONITORED_ITEM_NB)
data_set_message += self.ua_array_encode(
POSITION_ARRAY_TYPE,
'd',
POSITION_ARRAY_VALUES,
'q',
POSITION_ARRAY_INPUT_VALUES,
)
data_set_message += self.ua_array_encode(
SPEED_ARRAY_TYPE,
......@@ -244,25 +226,39 @@ class JSDroneTestCase(SlapOSInstanceTestCase):
s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
s.sendto(ua_message, ('::1', OPC_UA_PORT))
def test_process(self):
expected_process_name_list = [
'qjs-launcher-on-watch',
'http-server-on-watch',
]
with self.slap.instance_supervisor_rpc as supervisor:
process_names = [process['name']
for process in supervisor.getAllProcessInfo()]
for expected_process_name in expected_process_name_list:
self.assertIn(expected_process_name, process_names)
def test_requested_instances(self):
connection_parameter_dict = json.loads(
self.computer_partition.getConnectionParameterDict()['_'])
self.assertEqual(connection_parameter_dict['drone-id-list'], [])
self.assertEqual(connection_parameter_dict['subscriber-id-list'], [0])
self.assertEqual(connection_parameter_dict['drone-id-list'], [0])
self.assertEqual(connection_parameter_dict['subscriber-id-list'], [1])
def test_subscriber_instance_parameter_dict(self):
self.assertEqual(
json.loads(self.get_partition('drone').getInstanceParameterDict()['_']),
json.loads(self.get_partition('SubscriberTestCase-2').getInstanceParameterDict()['_']),
{
'autopilotIp': '192.168.27.1',
'autopilotPort': 7909,
'id': 0,
'numberOfDrone': 1,
'numberOfSubscriber': 1,
'id': 1,
'isASimulation': False,
'isADrone': False,
'flightScript': 'https://lab.nexedi.com/nexedi/flight-scripts/raw/master/subscribe.js',
'multicastIp': MCAST_GRP,
'numberOfPeers': 1,
'netIf': OPC_UA_NET_IF
'flightScript': 'https://lab.nexedi.com/nexedi/flight-scripts/-/raw/v2.0/subscribe.js',
'netIf': OPC_UA_NET_IF,
'multicastIp': MCAST_GRP
}
)
......@@ -281,14 +277,32 @@ class JSDroneTestCase(SlapOSInstanceTestCase):
self.assertIn(expected_string, f.readlines())
def test_pubsub_subscription(self):
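# End-to-end check of the subscriber: connect to the gwsocket websocket, skip
# the greeting and the initial all-zero telemetry frame, inject an OPC UA
# Pub/Sub NetworkMessage over UDP, then verify that the relayed message
# content and the decoded position/speed values show up on the websocket.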
ws = websocket.WebSocket()
ws.connect(self.websocket_server_address, timeout=5)
# Check if first message is 'Unknown instruction IP' where IP is client IPv6 address
self.assertIn(
b'Unknown instruction %s' % ws.sock.getsockname()[0].encode(),
ws.recv_frame().data
)
self.assertEqual(
ws.recv_frame().data,
b''.join((
b'{"drone_dict":{"0":{"latitude":',
b'"%.6f","longitude":"%.6f","altitude":"%.2f",' % (0, 0, 0),
b'"yaw":"%.2f","speed":"%.2f","climbRate":"%.2f",' % (0, 0, 0),
b'"timestamp":%d}}}' % 0,
))
)
self.send_ua_networkMessage()
time.sleep(0.1)
outs, _ = self.qjs_process.communicate(b'q\n', timeout=15)
decoded_out = outs.decode()
for line in (
'Subscription 0 | MonitoredItem %s' % MONITORED_ITEM_NB,
'Received position of drone 0: %f° %f° %fm %fm' % POSITION_ARRAY_VALUES,
'Received speed of drone 0: %f° %fm/s %fm/s' % SPEED_ARRAY_VALUES,
'Received message for drone 0: %s' % TEST_MESSAGE.decode(),
):
self.assertIn(line, decoded_out)
self.assertEqual(ws.recv_frame().data, MESSAGE_CONTENT.replace(b'\\', b''))
self.assertEqual(
ws.recv_frame().data,
b''.join((
b'{"drone_dict":{"0":{"latitude":',
b'"%.6f","longitude":"%.6f","altitude":"%.2f",' % POSITION_ARRAY_OUTPUT_VALUES[:-1],
b'"yaw":"%.2f","speed":"%.2f","climbRate":"%.2f",' % SPEED_ARRAY_VALUES,
b'"timestamp":%d}}}' % POSITION_ARRAY_INPUT_VALUES[-1],
))
)
ws.close()
<!DOCTYPE html>
<html lang="en-GB">
<head>
<meta charset="utf-8">
<title>JS-Drone GUI</title>
<script src="script.js"></script>
<style>
button {
padding: 0.5%;
font-size: 24px;
cursor: pointer;
border: none;
border-radius: 10px;
box-shadow: 0 4px #999;
}
button:active {
box-shadow: 0 2px #666;
transform: translateY(4px);
}
div > * {margin: 1%}
label {margin: 2%}
table {width: 30%}
th, td{
padding: 1%;
text-align: center;
vertical-align: middle;
}
.connected {color: green}
.container {
display: flex;
align-items: center;
justify-content: center;
}
.disconnected {color: red}
.gray-button {background-color: lightgray}
.gray-button:hover {background-color: gray}
.green-button {background-color: #4caf50}
.green-button:hover {background-color: #3e8e41}
.red-button {background-color: red}
.red-button {background-color: #e42828}
</style>
</head>
<body>
<header class="container">
<label for="web-socket-status">web socket status:</label>
<output class="disconnected" id="web-socket-status">Disconnected</output>
</header>
<div class="container">
<table>
<tr>
<th></th>
{% for i in range(int(nb_drones)) -%}
<th>Drone {{ i }}</th>
{% endfor %}
</tr>
<tr>
<th>Flight state</th>
{% for i in range(int(nb_drones)) -%}
<td class="disconnected" id="flight_state_{{ i }}">Unknown</td>
{% endfor %}
</tr>
<tr>
<th>Latitude (°)</th>
{% for i in range(int(nb_drones)) -%}
<td id="latitude_{{ i }}"></td>
{% endfor %}
</tr>
<tr>
<th>Longitude (°)</th>
{% for i in range(int(nb_drones)) -%}
<td id="longitude_{{ i }}"></td>
{% endfor %}
</tr>
<tr>
<th>Altitude (m)</th>
{% for i in range(int(nb_drones)) -%}
<td id="altitude_{{ i }}"></td>
{% endfor %}
</tr>
<tr>
<th>Yaw (°)</th>
{% for i in range(int(nb_drones)) -%}
<td id="yaw_{{ i }}"></td>
{% endfor %}
</tr>
<tr>
<th>Speed (m/s)</th>
{% for i in range(int(nb_drones)) -%}
<td id="speed_{{ i }}"></td>
{% endfor %}
</tr>
<tr>
<th>Climb rate (m/s)</th>
{% for i in range(int(nb_drones)) -%}
<td id="climb_rate_{{ i }}"></td>
{% endfor %}
</tr>
</table>
</div>
<div class="container">
<button id="flight-btn" class="green-button" type="button">
Start
</button>
<button id="switch-btn" class="gray-button" type="button">
Switch leader
</button>
<button id="quit-btn" class="red-button" type="button">
Quit
</button>
</div>
</body>
</html>
/*jslint nomen: true, indent: 2, maxerr: 3, maxlen: 80 */
(function () {
"use strict";
var ALTITUDE_BASE_ID = "altitude_",
SPEED_BASE_ID = "speed_",
CONNECTED_CLASS_NAME = "connected",
CLIMB_RATE_BASE_ID = "climb_rate_",
DISCONNECTED_CLASS_NAME = "disconnected",
FLIGHT_BTN_ID = "flight-btn",
FLIGHT_STATUS_BASE_ID = "flight_state_",
GREEN_BTN_CLASS_NAME = "green-button",
LATITUDE_BASE_ID = "latitude_",
LONGITUDE_BASE_ID = "longitude_",
QUIT_BTN_ID = "quit-btn",
RED_BTN_CLASS_NAME = "red-button",
SWITCH_BTN_ID = "switch-btn",
WEB_SOCKET_STATUS_OUTPUT_ID = "web-socket-status",
YAW_BASE_ID = "yaw_",
socket;
function updateConnexionClass(element, status) {
element.classList.remove(status ? DISCONNECTED_CLASS_NAME : CONNECTED_CLASS_NAME);
element.classList.add(status ? CONNECTED_CLASS_NAME : DISCONNECTED_CLASS_NAME);
}
function setWebSocketStatus(connected, status) {
var status_output = document.getElementById(WEB_SOCKET_STATUS_OUTPUT_ID);
updateConnexionClass(status_output, connected);
status_output.value = status;
}
function stopFlight(event) {
socket.send("stop");
event.target.removeEventListener('click', stopFlight);
}
function startFlight(event) {
var button = event.target;
socket.send("start");
button.removeEventListener('click', startFlight);
button.innerHTML = "Stop";
button.classList.remove(GREEN_BTN_CLASS_NAME);
button.classList.add(RED_BTN_CLASS_NAME);
button.addEventListener('click', stopFlight);
}
socket = new WebSocket('ws://{{ websocket_url }}');
socket.onopen = function(event) {
setWebSocketStatus(true, "Connected");
};
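// The server pushes two kinds of JSON messages: telemetry frames of the form
// {"drone_dict": {"<id>": {latitude, longitude, altitude, yaw, speed,
// climbRate, ...}}} and flight state updates of the form
// {"id": <drone id>, "state": "...", "inAir": <bool>}; anything else is just
// logged.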
socket.onmessage = function(event) {
var message = JSON.parse(event.data),
flight_state_cell;
if (message.hasOwnProperty("drone_dict")) {
Object.entries(message["drone_dict"]).forEach(function ([id, drone]) {
document.getElementById(LATITUDE_BASE_ID + id).innerHTML = drone["latitude"];
document.getElementById(LONGITUDE_BASE_ID + id).innerHTML = drone["longitude"];
document.getElementById(ALTITUDE_BASE_ID + id).innerHTML = drone["altitude"];
document.getElementById(YAW_BASE_ID + id).innerHTML = drone["yaw"];
document.getElementById(SPEED_BASE_ID + id).innerHTML = drone["speed"];
document.getElementById(CLIMB_RATE_BASE_ID + id).innerHTML = drone["climbRate"];
});
} else if (message.hasOwnProperty("state") && message.hasOwnProperty("id")) {
flight_state_cell = document.getElementById(FLIGHT_STATUS_BASE_ID + message['id']);
flight_state_cell.innerHTML = message['state'];
updateConnexionClass(flight_state_cell, message['inAir']);
} else {
console.info(message);
}
};
socket.onclose = function(event) {
setWebSocketStatus(false, "Closed");
};
socket.onerror = function(event) {
console.error(event);
};
document.addEventListener("DOMContentLoaded", () => {
document.getElementById(FLIGHT_BTN_ID).addEventListener('click', startFlight);
document.getElementById(SWITCH_BTN_ID).addEventListener('click', event => {
socket.send("switch");
});
document.getElementById(QUIT_BTN_ID).addEventListener('click', event => {
socket.send("quit");
});
});
}());
/*jslint nomen: true, indent: 2, maxerr: 3, maxlen: 80 */
/*global console, getAltitude, getAltitudeRel, getInitialAltitude, getLatitude,
getLongitude, getYaw, execUserScript, initPubsub, landed, loiter, setAirspeed,
setMessage, setTargetCoordinates, std, triggerParachute, Drone, Worker*/
import {
Drone,
triggerParachute,
getAirspeed,
getAltitude,
getAltitudeRel,
getClimbRate,
getInitialAltitude,
getLatitude,
getLongitude,
getYaw,
initPubsub,
landed,
loiter,
setAirspeed,
setMessage,
setTargetCoordinates
} from {{ json_module.dumps(qjs_wrapper) }};
import * as std from "std";
import { Worker } from "os";
(function (console, getAltitude, getAltitudeRel, getInitialAltitude,
getLatitude, getLongitude, getYaw, initPubsub, landed, loiter,
setAirspeed, setMessage, setTargetCoordinates, std, triggerParachute,
Drone, Worker) {
// Every script is evaluated per drone
"use strict";
var CONF_PATH = {{ json_module.dumps(configuration) }},
conf_file = std.open(CONF_PATH, "r"),
configuration = JSON.parse(conf_file.readAsString()),
parent = Worker.parent,
user_me = {
//for debugging purpose
fdopen: std.fdopen,
in: std.in,
//required to fly
triggerParachute: triggerParachute,
drone_dict: {},
exit: function (exit_code) {
parent.postMessage({type: "exited", exit: exit_code});
parent.onmessage = null;
},
getAltitudeAbs: getAltitude,
getCurrentPosition: function () {
return {
x: getLatitude(),
y: getLongitude(),
z: getAltitudeRel()
};
},
getInitialAltitude: getInitialAltitude,
getYaw: getYaw,
getSpeed: getAirspeed,
getClimbRate: getClimbRate,
id: configuration.id,
landed: landed,
loiter: loiter,
sendMsg: function (msg, id) {
if (id === undefined) { id = -1; }
setMessage(JSON.stringify({ content: msg, dest_id: id }));
},
setAirspeed: setAirspeed,
setTargetCoordinates: setTargetCoordinates
};
conf_file.close();
function loadUserScript(path) {
var script_content = std.loadFile(path);
if (script_content === null) {
console.log("Failed to load user script " + path);
std.exit(1);
}
try {
std.evalScript(
"function execUserScript(from, me) {" + script_content + "};"
);
} catch (e) {
console.log("Failed to evaluate user script", e);
std.exit(1);
}
execUserScript(null, user_me);
// Call the drone onStart function
if (user_me.hasOwnProperty("onStart")) {
user_me.onStart();
}
}
function handleMainMessage(evt) {
var type = evt.data.type, message, drone_id;
if (type === "initPubsub") {
initPubsub(configuration.numberOfPeers);
for (drone_id = 0; drone_id < configuration.numberOfPeers; drone_id++) {
user_me.drone_dict[drone_id] = new Drone(drone_id);
user_me.drone_dict[drone_id].init(drone_id);
}
parent.postMessage({type: "initialized"});
} else if (type === "load") {
loadUserScript(evt.data.path);
parent.postMessage({type: "loaded"});
} else if (type === "update") {
Object.entries(user_me.drone_dict).forEach(function ([id, drone]) {
message = drone.message;
if (user_me.id !== Number(id) && message.length > 0) {
message = JSON.parse(message);
if (user_me.hasOwnProperty("onGetMsg") &&
[-1, user_me.id].includes(message.dest_id)) {
user_me.onGetMsg(message.content);
}
}
});
// Call the drone onUpdate function
if (user_me.hasOwnProperty("onUpdate")) {
user_me.onUpdate(evt.data.timestamp);
}
parent.postMessage({type: "updated"});
} else {
throw new Error("Unsupported message type: " + type);
}
}
parent.onmessage = function (evt) {
try {
handleMainMessage(evt);
} catch (error) {
// Catch any potential bug so that the main process
// exits if one occurs
console.log(error);
std.exit(1);
}
};
}(console, getAltitude, getAltitudeRel, getInitialAltitude, getLatitude,
getLongitude, getYaw, initPubsub, landed, loiter, setAirspeed, setMessage,
setTargetCoordinates, std, triggerParachute, Drone, Worker));
......@@ -20,8 +20,8 @@ parts =
[metabase.jar]
recipe = slapos.recipe.build:download
url = https://downloads.metabase.com/v0.48.2/metabase.jar
md5sum = d708a85436da3d5751f0e48ebd10c142
url = https://downloads.metabase.com/v0.48.6/metabase.jar
md5sum = 7d75ee710f9518148999b69b7a7e9d79
[instance-profile]
recipe = slapos.recipe.template
......
......@@ -18,7 +18,7 @@ md5sum = e000e7134113b9d1c63d40861eaf0489
[root-common]
filename = root-common.cfg.in
md5sum = ae00507d9e69209a0babd725cf6be536
md5sum = 102a7f1c1bc46a9b3fa5bd9b9a628e1d
[instance-neo-admin]
filename = instance-neo-admin.cfg.in
......
......@@ -6,12 +6,12 @@
{% do sla_dict.update(dict.fromkeys(ref_list, sla)) -%}
{% endfor -%}
{% macro sla(name, required=False) -%}
{% macro sla(name, required=False, default_to_same_computer=True) -%}
{% if required or name in sla_dict -%}
{% for k, (v,) in six.iteritems(urllib_parse.parse_qs(sla_dict.pop(name), strict_parsing=1)) -%}
sla-{{ k }} = {{ v }}
{% endfor -%}
{% else -%}
{% elif default_to_same_computer -%}
sla-computer_guid = ${slap-connection:computer-id}
{% endif -%}
{% endmacro -%}
......@@ -107,12 +107,11 @@ config-autostart = {{ dumps(sum(storage_count)) }}
{%- if monitor or node.get('admin') == 0 %}
{%- do node.setdefault('monitor', 0) %}
{%- endif %}
{%- for x in 'admin', 'master', 'storage-count' if node.get(x, 1) %}
{%- if node.get('admin', 1) or node.get('master', 1) or node.get('storage-count', 1) %}
{%- do section_id_list.append(section_id) %}
[{{section_id}}]
<= {{ prefix }}request-common
name = {{ section_id }}
return =
master
admin
......@@ -138,8 +137,14 @@ config-{{ k }} = {{ dumps(v) }}
{%- endfor %}
{{ sla(section_id) }}
{%- break %}
{%- endfor %}
{%- else %}
[{{section(section_id)}}]
<= request-common-base
state = destroyed
{%- endif %}
name = {{ section_id }}
{%- endfor %}
{%- do assert(len(monitor) == 1, monitor) %}
......
......@@ -9,29 +9,6 @@ How to deploy from scratch
2. Install ors playbook
3. Deploy this SR
## Generated buildout configurations and json input schemas
Since there are multiple ors-amarisoft software releases and software types, the following files are
generated from jinja2 templates by the render-templates script before being pushed to gitlab:
* instance-tdd-enb-input-schema.json
* instance-fdd-enb-input-schema.json
* software-fdd.cfg
* software-tdd.cfg.json
* instance-fdd-ue-nr-input-schema.json
* instance-tdd-gnb-input-schema.json
* instance-tdd-ue-nr-input-schema.json
* test/testFDD.py
* test/testTDD.py
* software-tdd.cfg
* instance-tdd-ue-lte-input-schema.json
* instance-fdd-gnb-input-schema.json
* software-fdd.cfg.json
* instance-fdd-ue-lte-input-schema.json
These files should not be modified directly; instead the render-templates script should be run along
with update-hash before each commit.
## Services
We run 2 binaries from Amarisoft LTE stack:
......
......@@ -16,15 +16,19 @@
[template]
filename = instance.cfg
md5sum = f0c42061acf3f89c70d258c74c6af9fc
md5sum = acd9dd8dbe613e7101e62930a8380ef0
[template-ors]
filename = instance-ors.cfg
md5sum = f5c76c3443b75569eb18503dce38e783
[slaplte.jinja2]
_update_hash_filename_ = slaplte.jinja2
md5sum = 8d635cffcfc7985ac2550aed3b6eb44d
md5sum = 871ade334f445e22d6cb473e4d4e3522
[ru_amarisoft-stats.jinja2.py]
_update_hash_filename_ = ru/amarisoft-stats.jinja2.py
md5sum = c4d5e9fcf460d88bc2b4bcfbdfe554f7
md5sum = 674dcc250c0b6bb43d8546624552fc5d
[ru_amarisoft-rf-info.jinja2.py]
_update_hash_filename_ = ru/amarisoft-rf-info.jinja2.py
......@@ -32,19 +36,19 @@ md5sum = ab666fdfadbfc7d8a16ace38d295c883
[ru_libinstance.jinja2.cfg]
_update_hash_filename_ = ru/libinstance.jinja2.cfg
md5sum = 30c262a427de2132d6d66d9fb3597426
md5sum = 2dda7713832be83d94522c7abb4901f9
[ru_sdr_libinstance.jinja2.cfg]
_update_hash_filename_ = ru/sdr/libinstance.jinja2.cfg
md5sum = de71c63b8df940207409de7e948f7c8c
md5sum = b7906ca3a6b17963f78f680fc0842b74
[ru_lopcomm_libinstance.jinja2.cfg]
_update_hash_filename_ = ru/lopcomm/libinstance.jinja2.cfg
md5sum = 71508d94a47db493f30af30188c48d64
md5sum = 7d05f6a3980a79bfd35677dbb8b988ee
[ru_sunwave_libinstance.jinja2.cfg]
_update_hash_filename_ = ru/sunwave/libinstance.jinja2.cfg
md5sum = c855ee7a6132899eb53b8d80ec27701a
md5sum = bc5d82b8737b6990674b280ef2774be7
[ru_lopcomm_ncclient_common.py]
_update_hash_filename_ = ru/lopcomm/ncclient_common.py
......@@ -56,7 +60,7 @@ md5sum = b7ec0025a92e0947e4ac6abc4b06bf19
[ru_lopcomm_config.jinja2.py]
_update_hash_filename_ = ru/lopcomm/config.jinja2.py
md5sum = 167537a6aa2762355ee703d4c96351ea
md5sum = 122726666d147447171dcae9ebf8d093
[ru_lopcomm_reset-info.jinja2.py]
_update_hash_filename_ = ru/lopcomm/reset-info.jinja2.py
......@@ -72,7 +76,11 @@ md5sum = 2b08bb666c5f3ab287cdddbfdb4c9249
[ru_tapsplit]
_update_hash_filename_ = ru/tapsplit
md5sum = 2b8b57c5771b2a2203c0e7767e629e55
md5sum = 700aab566289619fb83ac6f3b085d983
[ru_xbuildout.py]
_update_hash_filename_ = ru/xbuildout.py
md5sum = a51171f926edd315a52841c2e7eb9fb7
[ru_capdo.c]
_update_hash_filename_ = ru/capdo.c
......@@ -80,19 +88,19 @@ md5sum = 52da9fe3a569199e35ad89ae1a44c30e
[template-enb]
_update_hash_filename_ = instance-enb.jinja2.cfg
md5sum = de0452a804b972a23ed96b99814fdaed
md5sum = 8b9301f26fc4ffbc7eda9c1ac8da1a46
[template-gnb]
_update_hash_filename_ = instance-gnb.jinja2.cfg
md5sum = 86f305a1daa83aa3b663388d12242764
[template-ors-enb]
_update_hash_filename_ = instance-ors-enb.jinja2.cfg
md5sum = 601d6237059fa665d3f3ffb6a78ad9ca
[template-core-network]
_update_hash_filename_ = instance-core-network.jinja2.cfg
md5sum = c807be73b9304f5a4c7483a3776bbc17
md5sum = 326e194e9c98d58d926f89521bb95df5
[template-ue]
_update_hash_filename_ = instance-ue.jinja2.cfg
md5sum = d8153dd5e0978afea018498b29f06fd5
md5sum = 812a43458c21f7d0cdb2141515a236ae
[template-obsolete]
_update_hash_filename_ = instance-obsolete.jinja2.cfg
......@@ -100,27 +108,27 @@ md5sum = c5f581ba01654b2aec46000abf8d0e35
[ue_db.jinja2.cfg]
filename = config/ue_db.jinja2.cfg
md5sum = dcaac06553a3222b14c0013a13f4a149
md5sum = 3b901e8733e6afff8940c6c318da4493
[enb.jinja2.cfg]
filename = config/enb.jinja2.cfg
md5sum = dc19f5362fe0e23ba814da39f7520c15
md5sum = e1c40827e30d6ddcd98be35ec8569af2
[drb_lte.jinja2.cfg]
filename = config/drb_lte.jinja2.cfg
md5sum = 6c8bdb0ce1d2bdd846a87aa6c5204a9c
md5sum = 01eb971e2ff580da52291138495a81ca
[drb_nr.jinja2.cfg]
filename = config/drb_nr.jinja2.cfg
md5sum = 84d3cef8fc7f1c2aed7c348d500f5636
md5sum = 282b11d7b72b01b8325df4632d82b84d
[sib23.jinja2.asn]
filename = config/sib23.jinja2.asn
md5sum = a1973ba6e43d40e510d61d461c2d13ac
md5sum = 959523597e29b048e45ebf58f7ea4c5b
[mme.jinja2.cfg]
filename = config/mme.jinja2.cfg
md5sum = 3d7833ddba3242cedcd74c7db52390c6
md5sum = 25ae6b1022548183293f0ef0c54532a7
[dnsmasq-core-network.jinja2.cfg]
filename = config/dnsmasq-core-network.jinja2.cfg
......@@ -128,7 +136,7 @@ md5sum = f167b4be5e327b276b42267e0678f577
[ru_dnsmasq.jinja2.cfg]
_update_hash_filename_ = ru/dnsmasq.jinja2.cfg
md5sum = 9bd5b08f23640f71ad109d186d060f2d
md5sum = 95f4f8fb85e0480eb3e9059b9db26540
[ims.jinja2.cfg]
filename = config/ims.jinja2.cfg
......@@ -136,7 +144,7 @@ md5sum = 36281b03597252cf75169417d02fc28c
[ue.jinja2.cfg]
filename = config/ue.jinja2.cfg
md5sum = 3831978f4070952f23dc92a4123a90c9
md5sum = 62291a11fd36a42464901cdc81338687
[ru_lopcomm_CreateProcessingEle.jinja2.xml]
_update_hash_filename_ = ru/lopcomm/CreateProcessingEle.jinja2.xml
......@@ -144,7 +152,11 @@ md5sum = e435990eb0a0d4be41efa9bd16dce09b
[ru_lopcomm_cu_config.jinja2.xml]
_update_hash_filename_ = ru/lopcomm/cu_config.jinja2.xml
md5sum = 09123ad68c6d8e7e4e201bcc2ab331c6
md5sum = 346c911e1ac5e5001a39c8926b44c91e
[ru_lopcomm_cu_inactive_config.jinja2.xml]
_update_hash_filename_ = ru/lopcomm/cu_inactive_config.jinja2.xml
md5sum = 9d48c35f9939446ce75ae9f85e44c26a
[software.cfg.html]
_update_hash_filename_ = gadget/software.cfg.html
......
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"title": "Cell. Common properties",
"type": "object",
"required": [
"cell_type",
"rf_mode",
"pci",
"cell_id",
"bandwidth",
"ru"
],
"properties": {
"cell_type": {
"type": "string",
"options": { "hidden": true }
},
"cell_kind": {
"type": "string",
"const": "enb",
"template": "enb",
"options": { "hidden": true }
},
"rf_mode": {
"title": "RF mode",
"description": "Mode for TX/RX radio multiplexing: Frequency- or Time- Domain Division",
"type": "string",
"enum": ["fdd", "tdd"],
"propertyOrder": 101
},
"pci": {
"title": "Physical Cell ID",
"description": "Physical Cell ID",
"type": "integer"
},
"cell_id": {
"title": "Cell ID",
"description": "Cell ID",
"type": "string"
},
"bandwidth": {
"title": "Bandwidth",
"description": "Downlink Bandwidth (in MHz)",
"type": "number"
},
"root_sequence_index": {
"title": "Root Sequence Index",
"type": "integer"
},
"inactivity_timer": {
"title": "Inactivity Timer",
"description": "Send RRC connection release after this time (in ms) of network inactivity.",
"type": "number",
"default": 10000
},
"ru": {
"$ref": "#/$defs/ru-of-cell",
"propertyOrder": 9999
}
},
"$defs": {
"ru-of-cell": {
"title": "Radio Unit",
"oneOf": [
{
"title": "Shared Radio Unit",
"description": "Use radio unit defined in separate shared instance",
"type": "object",
"required": ["ru_type", "ru_ref"],
"properties": {
"ru_type": {
"const": "ru_ref",
"template": "ru_ref",
"options": { "hidden": true }
},
"ru_ref": {
"title": "RU Reference",
"description": "Reference of shared radio unit instance",
"type": "string"
}
}
},
{
"title": "Shared Radio Unit of a Cell",
"description": "Use the same radio unit as referenced cell instance does",
"type": "object",
"required": ["ru_type", "ruincell_ref"],
"properties": {
"ru_type": {
"const": "ruincell_ref",
"template": "ruincell_ref",
"options": { "hidden": true }
},
"ruincell_ref": {
"title": "Cell Reference",
"description": "Reference of cell instance whose radio unit to share",
"type": "string"
}
}
},
{ "$ref": "../ru/input-schema.json" }
]
}
}
}
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"title": "Cell",
"type": "object",
"oneOf": [
{ "$ref": "../cell/lte/input-schema.json" },
{ "$ref": "../cell/nr/input-schema.json" }
]
}
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"title": "LTE Cell",
"type": "object",
"required": [
"cell_type",
"rf_mode",
"pci",
"cell_id",
"bandwidth",
"ru",
"dl_earfcn",
"tac"
],
"properties": {
"$ref": "../../cell/common.json#/properties",
"cell_type": {
"$ref": "#/properties/cell_type",
"const": "lte",
"template": "lte"
},
"tdd_ul_dl_config": {
"title": "TDD Configuration",
"type": "string",
"enum": [
"[Configuration 2] 5ms 2UL 6DL (default)",
"[Configuration 6] 5ms 5UL 3DL (maximum uplink)"
],
"default": "[Configuration 2] 5ms 2UL 6DL (default)",
"options": {
"dependencies": {
"rf_mode": "tdd"
}
}
},
"bandwidth": {
"$ref": "#/properties/bandwidth",
"enum": [
1.4,
3,
5,
10,
15,
20
]
},
"dl_earfcn": {
"title": "DL EARFCN",
"description": "Downlink E-UTRA Absolute Radio Frequency Channel Number of the cell",
"type": "integer"
},
"ul_earfcn": {
"title": "UL EARFCN",
"description": "Uplink E-UTRA Absolute Radio Frequency Channel Number of the cell. By default a frequency corresponding to dl_earfcn is chosen.",
"type": "integer"
},
"tac": {
"title": "Tracking Area Code",
"description": "Tracking Area Code in hexadecimal representation (range 0x0000 to 0xffff)",
"type": "string"
},
"root_sequence_index": {
"$ref": "#/properties/root_sequence_index",
"description": "Range: 0 to 837. Set the PRACH root sequence index (SIB2.rootSequenceIndex field). It must be different for each neighbour cell operating on the same frequency and sharing the same PRACH configuration.",
"default": 204
}
}
}
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"title": "NR Cell",
"type": "object",
"required": [
"cell_type",
"rf_mode",
"pci",
"cell_id",
"bandwidth",
"ru",
"dl_nr_arfcn",
"nr_band"
],
"properties": {
"$ref": "../../cell/common.json#/properties",
"cell_type": {
"$ref": "#/properties/cell_type",
"const": "nr",
"template": "nr"
},
"tdd_ul_dl_config": {
"title": "TDD Configuration",
"type": "string",
"enum": [
"5ms 2UL 7DL 4/6 (default)",
"2.5ms 1UL 3DL 2/10",
"5ms 8UL 1DL 2/10 (maximum uplink)"
],
"default": "5ms 2UL 7DL 4/6 (default)",
"options": {
"dependencies": {
"rf_mode": "tdd"
}
}
},
"bandwidth": {
"$ref": "#/properties/bandwidth"
},
"dl_nr_arfcn": {
"title": "DL NR ARFCN",
"description": "Downlink NR Absolute Radio Frequency Channel Number of the cell",
"type": "integer"
},
"nr_band": {
"title": "NR band",
"description": "NR band number",
"type": "integer"
},
"ul_nr_arfcn": {
"title": "UL NR ARFCN",
"description": "Uplink NR Absolute Radio Frequency Channel Number of the cell. By default a frequency corresponding to dl_nr_arfcn and nr_band is chosen.",
"type": "integer"
},
"ssb_nr_arfcn": {
"title": "SSB NR ARFCN",
"description": "SSB NR Absolute Radio Frequency Channel Number of the cell. If set it must be an element of global synchronization raster and be at offset from center DL frequency that aligns with SSB subcarrier spacing of selected band. By default a valid frequency nearby dl_nr_arfcn is chosen.",
"type": "integer"
},
"ssb_pos_bitmap": {
"title": "SSB Position Bitmap",
"description": "SSB position bitmap in bits (4, 8 or 64 bits depending on the DL frequency).",
"type": "string",
"default": "10000000"
},
"root_sequence_index": {
"$ref": "#/properties/root_sequence_index",
"description": "Range 0 to 837 for PRACH format up to 3, 0 to 137 otherwise. prach-RootSequenceIndex parameter. It must be different for each neighbour cell operating on the same frequency and sharing the same PRACH configuration.",
"default": 1
}
},
"$defs": {
"tac": {
"title": "Tracking Area Code",
"description": "Integer (range 0 to 16777215)",
"type": "number"
}
}
}
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"description": "Values returned by Cell instantiation (stub)",
"type": "object",
"properties": {}
}
// DRB configuration for LTE cell.
{%- set B = xbuildout.encode -%}
// DRB configuration for LTE cell {{ B(cell_ref) }} @ {{ B(ru_ref) }}.
// DRB configuration varies between FDD and TDD modes.
{% set T_REORDERING = {'fdd': 35, 'tdd': 65} [rf_mode] %}
// {{ rf_mode | upper }} T_REORDERING={{ T_REORDERING }}
{% set T_REORDERING = {'fdd': 35, 'tdd': 65} [cell.rf_mode] %}
// {{ cell.rf_mode | upper }} T_REORDERING={{ T_REORDERING }}
[
{
......
// DRB configuration for NR cell.
{%- set B = xbuildout.encode -%}
// DRB configuration for NR cell {{ B(cell_ref) }} @ {{ B(ru_ref) }}.
[
{
......
{
log_options: "all.level=error,all.max_size=0,nas.level=debug,nas.max_size=1,s1ap.level=debug,s1ap.max_size=1,ngap.level=debug,ngap.max_size=1,file.rotate=1G,file.path=/dev/null",
log_filename: "{{ directory['log'] }}/mme.log",
{% if slapparameter_dict.get('external_enb_gnb', '') %}
{% if slapparameter_dict.get('use_ipv4', False) %}
gtp_addr: "{{ gtp_addr_v4 }}",
......@@ -16,11 +16,11 @@
mme_group_id: 32769,
mme_code: 1,
ims_vops_eps: true,
ims_vops_5gs_3gpp: true,
ims_vops_5gs_n3gpp: true,
ims_vops_eps: true,
ims_vops_5gs_3gpp: true,
ims_vops_5gs_n3gpp: true,
emergency_number_list: [
{ category: 0x1f, digits: "911" },
{ category: 0x1f, digits: "112" },
],
......@@ -29,16 +29,16 @@
qci: {audio: 1, video: 2},
},
network_name: "{{ slapparameter_dict.get('network_name', 'RAPIDSPACE') }}",
network_short_name: "{{ slapparameter_dict.get('network_short_name', 'RAPIDSPACE') }}",
cp_ciot_opt: true,
nr_support: true,
eps_5gs_interworking: "with_n26",
fifteen_bearers: false,
ims_list: [
......@@ -47,7 +47,7 @@
bind_addr: "{{ slap_configuration['configuration.ims_bind'] }}"
}
],
pdn_list: [
{
......@@ -71,7 +71,7 @@
first_ip_addr: "{{ netaddr.IPAddress(netaddr.IPNetwork(slap_configuration.get('tun-ipv4-network', '')).first) + 2 }}",
last_ip_addr: "{{ netaddr.IPAddress(netaddr.IPNetwork(slap_configuration.get('tun-ipv4-network', '')).last) - 1 }}",
{% endif %}
ip_addr_shift: 2,
p_cscf_addr: ["{{ slap_configuration.get('tun-ipv4-addr', '') }}"],
erabs: [
......
{%- set B = xbuildout.encode -%}
/* SIB2/SIB3 for {{ cell.cell_type | upper }} cell {{ B(cell_ref) }} @ {{ B(ru_ref) }}. */
{
message c1: systemInformation: {
criticalExtensions systemInformation-r8: {
......@@ -36,11 +38,11 @@
}
},
pdsch-ConfigCommon {
{% if bbu == "ors" %}
{%- if one_watt == "True" %}
referenceSignalPower {{ (tx_gain | int) - 54 }}, /* patched by eNB */
{% if ors %}
{%- if ors['one-watt'] %}
referenceSignalPower {{ (ru.tx_gain | int) - 54 }}, /* patched by eNB */
{%- else %}
referenceSignalPower {{ (tx_gain | int) - 35 }}, /* patched by eNB */
referenceSignalPower {{ (ru.tx_gain | int) - 35 }}, /* patched by eNB */
{%- endif %}
{% else %}
referenceSignalPower -8, /* patched by eNB */
......
{#- do_lte/do_nr indicate whether we have LTE or NR UE/cells #}
{%- do assert(do_lte or do_nr) %}
{%- do assert(not (do_lte and do_nr)) %}
{%- import 'slaplte.jinja2' as slaplte with context %}
{%- set B = slaplte.B %}
{%- set J = slaplte.J %}
{%- set jcell_ru_ref = slaplte.jcell_ru_ref %}
{#- for standalone testing via slapos-render-config.py
NOTE: keep in sync with instance-ue.jinja2.cfg and ru/libinstance.jinja2.cfg #}
{%- if _standalone is defined %}
{%- set iru_dict = {} %}
{%- set icell_dict = {} %}
{%- set iue_dict = {} %}
{%- do slaplte.load_iru_and_icell(iru_dict, icell_dict, icell_kind='ue') %}
{%- do slaplte.load_iue(iue_dict) %}
{%- do slaplte.check_loaded_everything() %}
{%- endif %}
{#- start of the config -#}
{
log_options: "all.level=error,all.max_size=0,nas.level=debug,nas.max_size=1,rrc.level=debug,rrc.max_size=1,phy.level=info,file.rotate=1G,file.path=/dev/null",
log_filename: "{{ directory['log'] }}/ue.log",
......@@ -9,70 +23,98 @@
rue_bind_addr: "{{ pub_info['rue_bind_addr'] }}",
com_addr: "{{ pub_info['com_addr'] }}",
rf_driver: {
name: "sdr",
args: "dev0=/dev/sdr0",
rx_antenna:"tx_rx",
},
tx_gain: {{ slapparameter_dict.get('tx_gain', 60) }},
rx_gain: {{ slapparameter_dict.get('rx_gain', 40) }},
cell_groups: [
{%- if do_lte %}
{
{# instantiate radio units #}
{{ slaplte.ru_config(iru_dict, slapparameter_dict) }}
cell_groups: [{
// LTE cells
group_type: "lte",
multi_ue: true,
cells: [
{
bandwidth: {{ slapparameter_dict.get('bandwidth', slap_configuration['configuration.default_lte_bandwidth']) .removesuffix(' MHz') }},
dl_earfcn: {{ slapparameter_dict.get('dl_earfcn', 0) }},
n_antenna_dl: {{ slapparameter_dict.get('n_antenna_dl', slap_configuration['configuration.default_n_antenna_dl']) }},
n_antenna_ul: {{ slapparameter_dict.get('n_antenna_ul', slap_configuration['configuration.default_n_antenna_ul']) }},
global_timing_advance: -1,
}
{%- for cell_ref, icell in icell_dict|dictsort %}
{%- set cell = icell['_'] %}
{%- if cell.cell_type == 'lte' %}
{%- set ru_ref = J(jcell_ru_ref(icell)) %}
{%- set iru = iru_dict[ru_ref] %}
{%- set ru = iru['_'] %}
// {{ B(cell_ref) }}
{
rf_port: {{ ru._rf_port }},
n_antenna_dl: {{ ru.n_antenna_dl }},
n_antenna_ul: {{ ru.n_antenna_ul }},
dl_earfcn: {{ cell.dl_earfcn }},
ul_earfcn: {{ cell.ul_earfcn }},
bandwidth: {{ cell.bandwidth }},
global_timing_advance: -1,
},
{%- endif %}
{%- endfor %}
],
pdcch_decode_opt: false,
pdcch_decode_opt_threshold: 0.1,
},
{%- endif %}
{%- if do_nr %}
{
}, {
// NR cells
group_type: "nr",
multi_ue: false,
cells: [{
rf_port: 0,
bandwidth: {{ slapparameter_dict.get('bandwidth', slap_configuration['configuration.default_nr_bandwidth']) }},
band: {{ slapparameter_dict.get('nr_band', 0) }},
dl_nr_arfcn: {{ slapparameter_dict.get('dl_nr_arfcn', 0) }},
ssb_nr_arfcn: {{ slapparameter_dict.get('ssb_nr_arfcn', 0) }},
subcarrier_spacing: 30,
n_antenna_dl: {{ slapparameter_dict.get('n_antenna_dl', slap_configuration['configuration.default_n_antenna_dl']) }},
n_antenna_ul: {{ slapparameter_dict.get('n_antenna_ul', slap_configuration['configuration.default_n_antenna_ul']) }},
}
],
},
{%- endif %}
],
multi_ue: true,
cells: [
{%- for cell_ref, icell in icell_dict|dictsort %}
{%- set cell = icell['_'] %}
{%- if cell.cell_type == 'nr' %}
{%- set ru_ref = J(jcell_ru_ref(icell)) %}
{%- set iru = iru_dict[ru_ref] %}
{%- set ru = iru['_'] %}
// {{ B(cell_ref) }}
{
rf_port: {{ ru._rf_port }},
n_antenna_dl: {{ ru.n_antenna_dl }},
n_antenna_ul: {{ ru.n_antenna_ul }},
band: {{ cell.nr_band }},
dl_nr_arfcn: {{ cell.dl_nr_arfcn }},
ul_nr_arfcn: {{ cell.ul_nr_arfcn }},
ssb_nr_arfcn: {{ cell.ssb_nr_arfcn }},
bandwidth: {{ cell.bandwidth }},
subcarrier_spacing: {{ cell.subcarrier_spacing }},
},
{%- endif %}
{%- endfor %}
]
}],
ue_list: [
{%- for ue_ref, iue in iue_dict|dictsort %}
{%- set ue = iue['_'] %}
// {{ B(ue_ref) }}
{
sim_algo: "{{ slapparameter_dict.get('sim_algo', 'milenage') }}",
opc: "{{ slapparameter_dict.get('opc', '') }}",
amf: {{ slapparameter_dict.get('amf', '0x9001') }},
sqn: "{{ slapparameter_dict.get('sqn', '000000000000') }}",
impu: "{{ slapparameter_dict.get('impu', '') }}",
impi: "{{ slapparameter_dict.get('impi', '') }}",
{%- if do_lte %}
imsi: "{{ slapparameter_dict.get('imsi', slap_configuration['configuration.default_lte_imsi']) }}",
K: "{{ slapparameter_dict.get('k', slap_configuration['configuration.default_lte_k']) }}",
ue_category: 12,
{%- endif %}
{%- if do_nr %}
imsi: "{{ slapparameter_dict.get('imsi', slap_configuration['configuration.default_nr_imsi']) }}",
K: "{{ slapparameter_dict.get('k', slap_configuration['configuration.default_nr_k']) }}",
as_release: 15,
ue_category: "nr",
{%- endif %}
rue_addr: "{{ slapparameter_dict.get('rue_addr', '') }}",
tun_setup_script: "ue-ifup",
apn: "internet",
}
sim_algo: "{{ ue.sim_algo }}",
opc: "{{ ue.opc }}",
amf: {{ ue.amf }},
sqn: "{{ ue.sqn }}",
impu: "{{ ue.impu }}",
impi: "{{ ue.impi }}",
imsi: "{{ ue.imsi }}",
K: "{{ ue.k }}",
rue_addr: "{{ ue.rue_addr }}",
{%- if ue.ue_type == 'lte' %}
as_release: 13,
ue_category: 13,
{%- elif ue.ue_type == 'nr' %}
as_release: 15,
ue_category: "nr",
{%- else %}
{%- do bug('unreachable') %}
{%- endif %}
tun_setup_script: "ue-ifup",
apn: "internet",
},
{%- endfor %}
],
}
{%- set filtered_slave_instance_list = [] %}
{%- for slave_instance in slave_instance_list %}
{%- if slave_instance.get('_', '') != '' %}
{%- set slave = json_module.loads(slave_instance.pop('_')) %}
{%- else %}
{%- set slave = slave_instance %}
{%- endif %}
{%- if slave.get('imsi', '') != '' %}
{%- do filtered_slave_instance_list.append(slave) %}
{%- endif %}
{%- endfor -%}
ue_db: [
{%- for i, slave in enumerate(filtered_slave_instance_list) %}
{%- for i, slave in enumerate(slap_configuration['sim_list']) %}
{%- set s = json_module.loads(slave.pop('_')) %}
{%- if i == 0 -%}
{
{%- else -%}
, {
{%- endif %}
sim_algo: "{{ slave.get('sim_algo', 'milenage') }}",
imsi: "{{ slave.get('imsi', '') }}",
opc: "{{ slave.get('opc', '') }}",
amf: {{ slave.get('amf', '0x9001') }},
sqn: "{{ slave.get('sqn', '000000000000') }}",
K: "{{ slave.get('k', '') }}",
impu: "{{ slave.get('impu', '') }}",
impi: "{{ slave.get('impi', '') }}",
sim_algo: "{{ s.get('sim_algo', 'milenage') }}",
imsi: "{{ s.get('imsi', '') }}",
opc: "{{ s.get('opc', '') }}",
amf: {{ s.get('amf', '0x9001') }},
sqn: "{{ s.get('sqn', '000000000000') }}",
K: "{{ s.get('k', '') }}",
impu: "{{ s.get('impu', '') }}",
impi: "{{ s.get('impi', '') }}",
{%- if "ip" in s %}
pdn_list:[{
access_point_name: "internet",
default: true,
ipv4_addr: "{{ s['ip'] }}"
}]
{%- endif %}
}
{%- endfor -%}
]
......
......@@ -44,6 +44,12 @@
"title": "Use IPv4",
"description": "Set to true to use IPv4 for AMF / MME addresses",
"type": "boolean"
},
"fixed_ips": {
"default": false,
"title": "Fixed IP for the UE",
"description": "Set to true to force a static IPv4 for each UE. If true, the number of UE is limited.",
"type": "boolean"
}
}
}
{
"$schema": "http://json-schema.org/draft-04/schema",
"description": "Values returned by Core Network instantiation (stub)",
"type": "object",
"properties": {}
}
{%- set dns_slave_instance_list = [] %}
{%- set sim_slave_instance_list = [] %}
{%- set fixed_ip = slapparameter_dict.get("fixed_ips", False) %}
{%- for slave in slave_instance_list %}
{%- set slave_parameters = json_module.loads(slave['_']) %}
{%- if slave_parameters.get('subdomain', '') != '' %}
......@@ -19,8 +20,38 @@
recipe = slapos.cookbook:publish.serialised
-slave-reference = {{ slave_reference }}
info = Your SIM card with IMSI {{ slave_parameters.get('imsi', '') }} has been attached to service ${slap-configuration:instance-title}.
{%- if fixed_ip %}
ipv4 = ${sim-ip-configuration:{{slave_reference}}}
{%- endif %}
{%- endfor %}
[sim-ip-configuration]
recipe = slapos.recipe.build
sim-slave-instance-list = {{ dumps(sim_slave_instance_list) }}
ipv4-network = {{ slap_configuration.get('tun-ipv4-network', '') }}
init =
import netaddr
import json
network = netaddr.IPNetwork(options['ipv4-network'])
slave_list = options['sim-slave-instance-list']
# if we don't have enough IPv4 addresses in the network, don't force it
# should we make a promise fail ?
if len(slave_list) + 2 > network.size:
for s in slave_list:
options[s['slave_reference']] = "Too many SIM for the IPv4 network"
else:
# calculate the IP addresses of each SIM
sim_list = []
first_addr = netaddr.IPAddress(network.first)
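# the first two addresses of the network are skipped (matching first_ip_addr
# and ip_addr_shift in the MME configuration) and SIMs are sorted by IMSI so
# that the allocation stays stable across runs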
for i, s in enumerate(sorted(slave_list, key=lambda x: json.loads(x['_'])['imsi'])):
ip = str(first_addr + 2 + i)
options[s['slave_reference']] = ip
slave_parameters = json.loads(s['_'])
slave_parameters['ip'] = ip
s['_'] = json.dumps(slave_parameters)
options['sim-with-ip-list'] = slave_list
{%- for slave in dns_slave_instance_list %}
{%- set slave_parameters = json_module.loads(slave['_']) %}
{% set slave_reference = slave.get('slave_reference', '') %}
......@@ -69,7 +100,12 @@ cert = {{ slap_connection['cert-file'] }}
configuration.gtp_addr = 127.0.1.100
configuration.ims_addr = 127.0.0.1
configuration.ims_bind = 127.0.0.2
ue_db_path = {{ ue_db_path }}
ue_db_path = ${ue-db-config:output}
{%- if fixed_ip %}
sim_list = ${sim-ip-configuration:sim-with-ip-list}
{%- else %}
sim_list = {{ dumps(sim_slave_instance_list) }}
{%- endif %}
[monitor-httpd-conf-parameter]
httpd-include-file = {{ buildout_directory }}/etc/httpd-include-file.conf
......@@ -116,7 +152,7 @@ mode = 0775
pidfile = ${directory:run}/ims.pid
hash-files =
${ims-config:output}
{{ ue_db_path }}
${ue-db-config:output}
environment = AMARISOFT_PATH=/opt/amarisoft/.amarisoft
[mme-sh-wrapper]
......@@ -144,7 +180,7 @@ mode = 0775
pidfile = ${directory:run}/mme.pid
hash-files =
${mme-config:output}
{{ ue_db_path }}
${ue-db-config:output}
${mme-sh-wrapper:output}
environment =
LD_LIBRARY_PATH={{ openssl_location }}/lib:{{ nghttp2_location }}/lib
......@@ -183,7 +219,7 @@ context =
section directory directory
section slap_configuration slap-configuration
key slapparameter_dict slap-configuration:configuration
key gtp_addr_v6 slap-configuration:ipv6-random
raw gtp_addr_v6 {{ my_ipv6 }}
raw gtp_addr_v4 {{ lan_ipv4 }}
import netaddr netaddr
key ifup_empty mme-ifup-empty:wrapper-path
......@@ -193,6 +229,14 @@ context =
url = {{ ims_template }}
output = ${directory:etc}/ims.cfg
[ue-db-config]
<= config-base
url = {{ ue_db_template }}
output = ${directory:etc}/ue_db.cfg
context =
section slap_configuration slap-configuration
import json_module json
[mme-config]
<= config-base
{% if slapparameter_dict.get("mme_config_link", None) %}
......@@ -245,7 +289,7 @@ password = {{ slapparameter_dict['monitor-password'] | string }}
[publish-connection-information]
<= monitor-publish
recipe = slapos.cookbook:publish.serialised
core-network-ipv6 = ${slap-configuration:ipv6-random}
core-network-ipv6 = {{ my_ipv6 }}
core-network-ipv4 = {{ lan_ipv4 }}
amarisoft-version = {{ lte_version }}
license-expiration = {{ lte_expiration }}
......
{
"$schema": "http://json-schema.org/draft-04/schema",
"description": "Values returned by eNB/gNB instantiation (stub)",
"type": "object",
"properties": {}
}
# instance-enb implements the eNB/gNB service.
{#- Defaults for global eNB/gNB parameters.
    TODO: automatically load the eNB defaults from the JSON schema. #}
{%- set enb_defaults = {
'com_ws_port': 9001,
'com_addr': '127.0.1.2',
'use_ipv4': False,
'gnb_id_bits': 28,
'nssai': {'1': {'sst': 1}},
} %}
{%- set gtp_addr_lo = '127.0.1.1' %}
{%- for k,v in enb_defaults|dictsort %}
{%- do slapparameter_dict.setdefault(k, v) %}
{%- endfor %}
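The loop above only fills in parameters the requester did not provide; the same dict.setdefault behaviour in plain Python (the requester input below is invented):

# Illustration of the defaults merge performed by the Jinja loop above.
enb_defaults = {
    'com_ws_port': 9001,
    'com_addr': '127.0.1.2',
    'use_ipv4': False,
    'gnb_id_bits': 28,
    'nssai': {'1': {'sst': 1}},
}
slapparameter_dict = {'use_ipv4': True}      # hypothetical requester input
for k, v in sorted(enb_defaults.items()):    # dictsort equivalent
    slapparameter_dict.setdefault(k, v)
print(slapparameter_dict['use_ipv4'])        # True: the requester value is kept
print(slapparameter_dict['gnb_id_bits'])     # 28: the missing default is filled in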
[buildout]
parts =
directory
......@@ -17,10 +34,23 @@ eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
offline = true
{%- set icell_kind='enb' %}
{%- import 'slaplte.jinja2' as slaplte with context %}
{%- import 'ru_libinstance.jinja2.cfg' as rulib with context %}
{%- set ipeer_dict = {} %}
{%- set ipeercell_dict = {} %}
{%- do slaplte.load_ipeer(ipeer_dict) %}
{%- do slaplte.load_ipeercell(ipeercell_dict) %}
{%- do slaplte.check_loaded_everything() %}
{{ rulib.buildout() }}
[myslap]
# NOTE: we don't query slapos.cookbook:slapconfiguration a second time because
# slapparameter_dict may already have been modified with defaults.
parameter_dict = {{ dumps(slapparameter_dict) }}
configuration = {{ dumps(slap_configuration) }}
[monitor-httpd-conf-parameter]
httpd-include-file = {{ buildout_directory }}/etc/httpd-include-file.conf
......@@ -36,22 +66,6 @@ minimum = 8035
maximum = 8055
ip = ${monitor-instance-parameter:monitor-httpd-ipv6}
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = {{ slap_connection['computer-id'] }}
partition = {{ slap_connection['partition-id'] }}
url = {{ slap_connection['server-url'] }}
key = {{ slap_connection['key-file'] }}
cert = {{ slap_connection['cert-file'] }}
configuration.com_ws_port = 9001
configuration.com_addr = 127.0.1.2
configuration.mme_addr = 127.0.1.100
configuration.gtp_addr = 127.0.1.1
configuration.default_lte_bandwidth = {{ default_lte_bandwidth }}
configuration.default_lte_inactivity_timer = {{ default_lte_inactivity_timer }}
configuration.default_n_antenna_dl = {{ default_n_antenna_dl }}
configuration.default_n_antenna_ul = {{ default_n_antenna_ul }}
[directory]
recipe = slapos.cookbook:mkdirectory
......@@ -118,7 +132,7 @@ drb_stats_logspec =
rotatespec = 100MB.9
logspec = ${:stats_logspec} ${:drb_stats_logspec}
{%- if slapparameter_dict.get("websocket_password", "") %}
websock = ws://[${slap-configuration:ipv6-random}]:9001
websock = ws://[{{my_ipv6}}]:9001
{%- else %}
websock = ws://127.0.1.2:9001
{%- endif %}
......@@ -142,7 +156,7 @@ logfile = ${xamari-xlog-script:logfile}
forward-host = {{ slapparameter_dict.get('xlog_fluentbit_forward_host', '') }}
forward-port = {{ slapparameter_dict.get('xlog_fluentbit_forward_port', '') }}
forward-shared-key = {{ slapparameter_dict.get('xlog_fluentbit_forward_shared_key', '') }}
forward-self-hostname = {{ ors_id['ors-id'] }}
forward-self-hostname = {{ comp_id['comp-id'] }}
inline =
[SERVICE]
flush 5
......@@ -157,7 +171,9 @@ inline =
{%- if slapparameter_dict.get('xlog_fluentbit_forward_port') %}
Port ${:forward-port}
{%- endif %}
{%- if slapparameter_dict.get('xlog_fluentbit_forward_shared_key') %}
Shared_Key ${:forward-shared-key}
{%- endif %}
Self_Hostname ${:forward-self-hostname}
tls on
tls.verify off
......@@ -176,35 +192,17 @@ recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do
extra-context =
context =
json ors false
section directory directory
section slap_configuration slap-configuration
key slapparameter_dict slap-configuration:configuration
key gtp_addr_v6 slap-configuration:ipv6-random
key slap_configuration myslap:configuration
key slapparameter_dict myslap:parameter_dict
raw gtp_addr_v6 {{ my_ipv6 }}
raw gtp_addr_v4 {{ lan_ipv4 }}
raw one_watt {{ ors_version['one-watt'] }}
raw tx_gain {{ ors_version['current-tx-gain'] }}
raw rx_gain {{ ors_version['current-rx-gain'] }}
raw earfcn {{ ors_version['current-earfcn'] }}
raw software_name {{ software_name }}
raw rf_mode {{ rf_mode }}
raw trx {{ trx }}
raw bbu {{ bbu }}
raw ru_type {{ ru }}
json do_lte true
json do_nr false
raw gtp_addr_lo {{ gtp_addr_lo }}
import xbuildout xbuildout
import netaddr netaddr
${:extra-context}
[sib-config]
<= config-base
url = {{ sib23_template }}
output = ${directory:etc}/sib23.cfg
[drb-config]
<= config-base
url = {{ drb_lte_template }}
output = ${directory:etc}/drb.cfg
[enb-config]
<= config-base
{% if slapparameter_dict.get("enb_config_link", None) %}
......@@ -213,34 +211,36 @@ url = ${enb-config-dl:target}
url = {{ enb_template }}
{% endif %}
output = ${directory:etc}/enb.cfg
extra-context =
import json_module json
json cell_list {{ rulib.cell_list | tojson }}
key sib23_file sib-config:output
key drb_file drb-config:output
import-list =
rawfile slaplte.jinja2 {{ slaplte_template }}
extra-context =
import json_module json
key iru_dict :iru_dict
key icell_dict :icell_dict
key ipeer_dict :ipeer_dict
key ipeercell_dict :ipeercell_dict
iru_dict = {{ dumps(rulib.iru_dict) }}
icell_dict = {{ dumps(rulib.icell_dict) }}
ipeer_dict = {{ dumps(ipeer_dict) }}
ipeercell_dict = {{ dumps(ipeercell_dict) }}
[publish-connection-information]
<= monitor-publish
recipe = slapos.cookbook:publish.serialised
{%- if slapparameter_dict.get("websocket_password", "") %}
websocket_url = ws://[${slap-configuration:ipv6-random}]:9001
websocket_url = ws://[{{my_ipv6}}]:9001
{%- endif %}
enb-ipv6 = ${slap-configuration:ipv6-random}
enb-ipv6 = {{ my_ipv6 }}
enb-ipv4 = {{ lan_ipv4 }}
{% if bbu == "ors" %}
ors-version = {{ ors_version['ors-version'] }}
frequency-range-rating = {{ ors_version['range'] }}
current-tx-power-estimate = {{ ors_version['power-estimate'] }}
current-tx-gain = {{ ors_version['current-tx-gain'] }}
current-rx-gain = {{ ors_version['current-rx-gain'] }}
{% endif %}
current-earfcn = {{ ors_version['current-earfcn'] }}
amarisoft-version = {{ lte_version }}
license-expiration = {{ lte_expiration }}
monitor-gadget-url = ${:monitor-base-url}/gadget/software.cfg.html
ru-list = {{ dumps(rulib.iru_dict.keys() | sort) }}
cell-list = {{ dumps(rulib.icell_dict.keys() | sort) }}
peer-list = {{ dumps(ipeer_dict.keys() | sort) }}
peer-cell-list = {{ dumps(ipeercell_dict.keys() | sort) }}
[monitor-instance-parameter]
{% if slapparameter_dict.get("name", None) %}
......
{
"$schema": "http://json-schema.org/draft-04/schema",
"description": "Values returned by ORS eNB instantiation (stub)",
"type": "object",
"properties": {}
}
{
"$schema": "http://json-schema.org/draft-04/schema",
"description": "Values returned by ORS gNB instantiation (stub)",
"type": "object",
"properties": {}
}