Commit b696fd3d authored by Jérome Perrin

Use python3 only for software release tests

See merge request nexedi/slapos!1247
parents 405f2b7e d3aed928
......@@ -45,7 +45,7 @@ class TestBackupServer(InstanceTestCase):
# Check that there is a RSS feed
self.assertTrue('rss' in parameter_dict)
self.assertTrue(parameter_dict['rss'].startswith(
'https://[%s]:9443/' % (self._ipv6_address, )
f'https://[{self._ipv6_address}]:9443/'
))
result = requests.get(
......
......@@ -24,7 +24,6 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from __future__ import unicode_literals
import os
import requests
......
......@@ -51,7 +51,6 @@ setup(name=name,
# caucase needed to connect to the KeDiFa caucase
'caucase',
'cryptography',
'backports.lzma',
],
zip_safe=True,
test_suite='test',
......
......@@ -51,18 +51,13 @@ import urllib.parse
import socket
import sys
import logging
import lzma
import random
import string
from slapos.slap.standalone import SlapOSNodeInstanceError
import caucase.client
import caucase.utils
try:
import lzma
except ImportError:
from backports import lzma
import datetime
from cryptography import x509
......
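For context, the try/except around the lzma import above was a Python 2 shim: on Python 3 lzma is part of the standard library, so backports.lzma and the fallback import can be dropped. A minimal sketch of reading an xz-compressed file with the stdlib module (the file name is only an example):

    import lzma

    # read a .xz file as text; any xz-compressed log would do
    with lzma.open('example.log.xz', 'rt') as f:
        for line in f:
            print(line.rstrip())
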
......@@ -45,7 +45,6 @@ setup(name=name,
'slapos.cookbook',
'slapos.libnetworkcache',
'requests',
'six',
'PyPDF2',
],
zip_safe=True,
......
##############################################################################
# coding: utf-8
#
# Copyright (c) 2020 Nexedi SA and Contributors. All Rights Reserved.
#
......@@ -31,8 +30,8 @@ import csv
import multiprocessing
import os
import json
import six.moves.xmlrpc_client as xmlrpclib
import six.moves.urllib.parse as urllib_parse
import xmlrpc.client as xmlrpclib
import urllib.parse as urllib_parse
import ssl
import base64
import io
......@@ -63,6 +62,7 @@ class CloudOooTestCase(_CloudOooTestCase):
context=ssl_context,
allow_none=True,
)
self.addCleanup(self.server('close'))
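For context, xmlrpc.client.ServerProxy reserves the call form proxy('close') to return the method that closes the underlying transport (a workaround so attribute access stays available for remote method names), which is why self.server('close') can be handed directly to addCleanup. A small sketch of the same pattern outside a test case (the URL is an assumption):

    import xmlrpc.client

    server = xmlrpc.client.ServerProxy('http://localhost:8008/', allow_none=True)
    close = server('close')  # returns the transport-closing method without calling it
    try:
        pass  # ... call remote methods on server here ...
    finally:
        close()
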
def normalizeFontName(font_name):
......@@ -111,16 +111,16 @@ class HTMLtoPDFConversionFontTestMixin:
def test(self):
actual_font_mapping_mapping = {}
for font in self.expected_font_mapping:
src_html = '''
src_html = f'''
<style>
p {{ font-family: "{font}"; font-size: 20pt; }}
</style>
<p>the quick brown fox jumps over the lazy dog.</p>
<p>THE QUICK BROWN FOX JUMPS OVER THE LAZY DOG.</p>
'''.format(**locals())
'''
pdf_data = self._convert_html_to_pdf(src_html)
pdf_reader = PyPDF2.PdfFileReader(io.BytesIO((pdf_data)))
pdf_reader = PyPDF2.PdfFileReader(io.BytesIO(pdf_data))
self.assertEqual(
self.pdf_producer,
pdf_reader.getDocumentInfo()['/Producer'])
......@@ -165,7 +165,7 @@ class TestWkhtmlToPDF(HTMLtoPDFConversionFontTestMixin, CloudOooTestCase):
'Liberation Sans Narrow': 'LiberationSansNarrow',
'Liberation Serif': 'LiberationSerif',
'Linux LibertineG': 'LiberationSans',
'OpenSymbol': set(['DejaVuSans', 'OpenSymbol']),
'OpenSymbol': {'DejaVuSans', 'OpenSymbol'},
'Palatino': 'LiberationSans',
'Roboto Black': 'LiberationSans',
'Roboto Condensed Light': 'LiberationSans',
......@@ -180,9 +180,9 @@ class TestWkhtmlToPDF(HTMLtoPDFConversionFontTestMixin, CloudOooTestCase):
}
def _convert_html_to_pdf(self, src_html):
return base64.decodestring(
return base64.decodebytes(
self.server.convertFile(
base64.encodestring(src_html.encode()).decode(),
base64.encodebytes(src_html.encode()).decode(),
'html',
'pdf',
False,
......@@ -238,9 +238,9 @@ class TestLibreoffice(HTMLtoPDFConversionFontTestMixin, CloudOooTestCase):
}
def _convert_html_to_pdf(self, src_html):
return base64.decodestring(
return base64.decodebytes(
self.server.convertFile(
base64.encodestring(src_html.encode()).decode(),
base64.encodebytes(src_html.encode()).decode(),
'html',
'pdf',
).encode())
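For context, base64.encodestring and base64.decodestring were Python 2 era aliases, deprecated since Python 3.1 and removed in 3.9; encodebytes and decodebytes are the surviving names and behave identically. A quick round-trip sketch:

    import base64

    payload = '<html>héhé</html>'.encode()
    encoded = base64.encodebytes(payload).decode()  # str, convenient to pass over XML-RPC
    assert base64.decodebytes(encoded.encode()) == payload
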
......@@ -251,10 +251,10 @@ class TestLibreOfficeTextConversion(CloudOooTestCase):
def test_html_to_text(self):
self.assertEqual(
base64.decodestring(
base64.decodebytes(
self.server.convertFile(
base64.encodestring(
u'<html>héhé</html>'.encode('utf-8')).decode(),
base64.encodebytes(
'<html>héhé</html>'.encode()).decode(),
'html',
'txt',
).encode()),
......@@ -274,19 +274,18 @@ class TestLibreOfficeCluster(CloudOooTestCase):
global _convert_html_to_text
def _convert_html_to_text(src_html):
return base64.decodestring(
return base64.decodebytes(
self.server.convertFile(
base64.encodestring(src_html.encode()).decode(),
base64.encodebytes(src_html.encode()).decode(),
'html',
'txt',
).encode())
pool = multiprocessing.Pool(5)
# TODO py3: use with pool
converted = pool.map(_convert_html_to_text,
['<html><body>hello</body></html>'] * 100)
pool.terminate()
pool.join()
with pool:
converted = pool.map(
_convert_html_to_text,
['<html><body>hello</body></html>'] * 100)
self.assertEqual(converted, [codecs.BOM_UTF8 + b'hello\n'] * 100)
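For context, the change above relies on multiprocessing.Pool being usable as a context manager on Python 3: __exit__ calls terminate(), which replaces the explicit terminate()/join() pair the old code needed. A standalone sketch of the pattern:

    import multiprocessing

    def square(x):
        return x * x

    if __name__ == '__main__':
        with multiprocessing.Pool(5) as pool:
            print(pool.map(square, range(10)))
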
......@@ -294,9 +293,8 @@ class TestLibreOfficeCluster(CloudOooTestCase):
res = requests.get(
urllib_parse.urljoin(self.url, '/haproxy;csv'),
verify=False,
stream=True,
)
reader = csv.DictReader(res.raw)
reader = csv.DictReader(io.StringIO(res.text))
line_list = list(reader)
# requests have been balanced
total_hrsp_2xx = {
......@@ -309,8 +307,8 @@ class TestLibreOfficeCluster(CloudOooTestCase):
# ideally there should be 25% of requests on each backend, because we use
# round robin scheduling, but it can happen that some backends take longer
# to start, so we are tolerant here and just check that each backend
# processes at least 15% of requests.
self.assertGreater(total_hrsp_2xx[backend], 15)
# processes at least one request.
self.assertGreater(total_hrsp_2xx[backend], 0)
# no errors
total_eresp = {
line['svname']: int(line['eresp'] or 0)
......
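For context, HAProxy's ';csv' statistics endpoint returns plain-text CSV with one row per frontend, backend and server, and csv.DictReader wants text lines, hence io.StringIO(res.text) instead of the raw byte stream. A hedged sketch of tallying 2xx responses per server, assuming a reachable stats URL:

    import csv
    import io
    import requests

    # the URL is an assumption; the test reads it from the instance parameters
    res = requests.get('https://example.invalid/haproxy;csv', verify=False)
    reader = csv.DictReader(io.StringIO(res.text))
    total_hrsp_2xx = {line['svname']: int(line['hrsp_2xx'] or 0) for line in reader}
    print(total_hrsp_2xx)
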
##############################################################################
# coding: utf-8
#
# Copyright (c) 2018 Nexedi SA and Contributors. All Rights Reserved.
#
......
......@@ -24,7 +24,6 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from __future__ import absolute_import
from setuptools import setup, find_packages
version = '0.0.1.dev0'
......@@ -49,7 +48,6 @@ setup(name=name,
'psutil',
'requests',
'mysqlclient',
'backports.lzma',
'cryptography',
'pexpect',
'pyOpenSSL',
......
......@@ -25,7 +25,6 @@
#
##############################################################################
from __future__ import absolute_import
import json
import os
......
from __future__ import absolute_import
import glob
import hashlib
import json
......@@ -9,10 +8,10 @@ import shutil
import subprocess
import tempfile
import time
import six.moves.urllib.request, six.moves.urllib.parse, six.moves.urllib.error
from six.moves.BaseHTTPServer import BaseHTTPRequestHandler
import urllib.parse
from http.server import BaseHTTPRequestHandler
import mock
from unittest import mock
import OpenSSL.SSL
import pexpect
import psutil
......@@ -27,7 +26,6 @@ from slapos.testing.utils import (CrontabMixin, ManagedHTTPServer,
findFreeTCPPort)
from . import ERP5InstanceTestCase, setUpModule
from six.moves import range
setUpModule # pyflakes
......@@ -100,7 +98,7 @@ class CaucaseService(ManagedResource):
os.mkdir(os.path.join(caucased_dir, 'user'))
os.mkdir(os.path.join(caucased_dir, 'service'))
backend_caucased_netloc = '%s:%s' % (self._cls._ipv4_address, findFreeTCPPort(self._cls._ipv4_address))
backend_caucased_netloc = f'{self._cls._ipv4_address}:{findFreeTCPPort(self._cls._ipv4_address)}'
self.url = 'http://' + backend_caucased_netloc
self._caucased_process = subprocess.Popen(
[
......@@ -213,7 +211,7 @@ class TestTimeout(BalancerTestCase, CrontabMixin):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> dict
parameter_dict = super(TestTimeout, cls)._getInstanceParameterDict()
parameter_dict = super()._getInstanceParameterDict()
# use a slow server instead
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("slow_web_server", SlowHTTPServer).netloc, 1, False]]
# and set timeout of 1 second
......@@ -224,12 +222,12 @@ class TestTimeout(BalancerTestCase, CrontabMixin):
# type: () -> None
self.assertEqual(
requests.get(
six.moves.urllib.parse.urljoin(self.default_balancer_url, '/1'),
urllib.parse.urljoin(self.default_balancer_url, '/1'),
verify=False).status_code,
requests.codes.ok)
self.assertEqual(
requests.get(
six.moves.urllib.parse.urljoin(self.default_balancer_url, '/5'),
urllib.parse.urljoin(self.default_balancer_url, '/5'),
verify=False).status_code,
requests.codes.gateway_timeout)
......@@ -241,7 +239,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> dict
parameter_dict = super(TestLog, cls)._getInstanceParameterDict()
parameter_dict = super()._getInstanceParameterDict()
# use a slow server instead
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("slow_web_server", SlowHTTPServer).netloc, 1, False]]
return parameter_dict
......@@ -249,7 +247,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
def test_access_log_format(self):
# type: () -> None
requests.get(
six.moves.urllib.parse.urljoin(self.default_balancer_url, '/url_path'),
urllib.parse.urljoin(self.default_balancer_url, '/url_path'),
verify=False,
)
time.sleep(.5) # wait a bit more until access is logged
......@@ -288,7 +286,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
'apachedex',
'ApacheDex-*.html',
))
with open(apachedex_report, 'r') as f:
with open(apachedex_report) as f:
report_text = f.read()
self.assertIn('APacheDEX', report_text)
# having this table means that apachedex could parse some lines.
......@@ -336,7 +334,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
error_line = error_log_file.read().splitlines()[-1]
self.assertIn('proxy family_default has no server available!', error_line)
# this log also includes a timestamp
self.assertRegexpMatches(error_line, r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}')
self.assertRegex(error_line, r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}')
class BalancerCookieHTTPServer(ManagedHTTPServer):
......@@ -377,7 +375,7 @@ class TestBalancer(BalancerTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> dict
parameter_dict = super(TestBalancer, cls)._getInstanceParameterDict()
parameter_dict = super()._getInstanceParameterDict()
# use two backend servers
parameter_dict['dummy_http_server'] = [
......@@ -409,7 +407,7 @@ class TestBalancer(BalancerTestCase):
# if backend provides a "SERVERID" cookie, balancer will overwrite it with the
# backend selected by balancing algorithm
self.assertIn(
requests.get(six.moves.urllib.parse.urljoin(self.default_balancer_url, '/set_cookie'), verify=False).cookies['SERVERID'],
requests.get(urllib.parse.urljoin(self.default_balancer_url, '/set_cookie'), verify=False).cookies['SERVERID'],
('default-0', 'default-1'),
)
......@@ -457,10 +455,7 @@ class TestTestRunnerEntryPoints(BalancerTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> dict
parameter_dict = super(
TestTestRunnerEntryPoints,
cls,
)._getInstanceParameterDict()
parameter_dict = super()._getInstanceParameterDict()
parameter_dict['dummy_http_server-test-runner-address-list'] = [
[
......@@ -485,18 +480,18 @@ class TestTestRunnerEntryPoints(BalancerTestCase):
)['default-test-runner-url-list']
url_0, url_1, url_2 = test_runner_url_list
self.assertEqual(
six.moves.urllib.parse.urlparse(url_0).netloc,
six.moves.urllib.parse.urlparse(url_1).netloc)
urllib.parse.urlparse(url_0).netloc,
urllib.parse.urlparse(url_1).netloc)
self.assertEqual(
six.moves.urllib.parse.urlparse(url_0).netloc,
six.moves.urllib.parse.urlparse(url_2).netloc)
urllib.parse.urlparse(url_0).netloc,
urllib.parse.urlparse(url_2).netloc)
path_0 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_0/something'.format(
netloc=six.moves.urllib.parse.urlparse(url_0).netloc)
netloc=urllib.parse.urlparse(url_0).netloc)
path_1 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_1/something'.format(
netloc=six.moves.urllib.parse.urlparse(url_0).netloc)
netloc=urllib.parse.urlparse(url_0).netloc)
path_2 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_2/something'.format(
netloc=six.moves.urllib.parse.urlparse(url_0).netloc)
netloc=urllib.parse.urlparse(url_0).netloc)
self.assertEqual(
{
......@@ -534,7 +529,7 @@ class TestHTTP(BalancerTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> dict
parameter_dict = super(TestHTTP, cls)._getInstanceParameterDict()
parameter_dict = super()._getInstanceParameterDict()
# use a HTTP/1.1 server instead
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("HTTP/1.1 Server", EchoHTTP11Server).netloc, 1, False]]
return parameter_dict
......@@ -574,7 +569,7 @@ class TestHTTP(BalancerTestCase):
session.get(self.default_balancer_url).raise_for_status()
new_conn.assert_not_called()
parsed_url = six.moves.urllib.parse.urlparse(self.default_balancer_url)
parsed_url = urllib.parse.urlparse(self.default_balancer_url)
# check that we have an open file for the ip connection
self.assertTrue([
......@@ -617,7 +612,7 @@ class TestContentEncoding(BalancerTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> dict
parameter_dict = super(TestContentEncoding, cls)._getInstanceParameterDict()
parameter_dict = super()._getInstanceParameterDict()
parameter_dict['dummy_http_server'] = [
[cls.getManagedResource("content_type_server", ContentTypeHTTPServer).netloc, 1, False],
]
......@@ -644,17 +639,17 @@ class TestContentEncoding(BalancerTestCase):
'application/font-woff2',
'application/x-font-opentype',
'application/wasm',):
resp = requests.get(six.moves.urllib.parse.urljoin(self.default_balancer_url, content_type), verify=False)
resp = requests.get(urllib.parse.urljoin(self.default_balancer_url, content_type), verify=False)
self.assertEqual(resp.headers['Content-Type'], content_type)
self.assertEqual(
resp.headers.get('Content-Encoding'),
'gzip',
'%s uses wrong encoding: %s' % (content_type, resp.headers.get('Content-Encoding')))
'{} uses wrong encoding: {}'.format(content_type, resp.headers.get('Content-Encoding')))
self.assertEqual(resp.text, 'OK')
def test_no_gzip_encoding(self):
# type: () -> None
resp = requests.get(six.moves.urllib.parse.urljoin(self.default_balancer_url, '/image/png'), verify=False)
resp = requests.get(urllib.parse.urljoin(self.default_balancer_url, '/image/png'), verify=False)
self.assertNotIn('Content-Encoding', resp.headers)
self.assertEqual(resp.text, 'OK')
......@@ -777,9 +772,9 @@ class TestFrontendXForwardedFor(BalancerTestCase):
# type: () -> dict
frontend_caucase = cls.getManagedResource('frontend_caucase', CaucaseService)
certificate = cls.getManagedResource('client_certificate', CaucaseCertificate)
certificate.request(u'shared frontend', frontend_caucase)
certificate.request('shared frontend', frontend_caucase)
parameter_dict = super(TestFrontendXForwardedFor, cls)._getInstanceParameterDict()
parameter_dict = super()._getInstanceParameterDict()
# add another "-auth" backend, that will have ssl-authentication enabled
parameter_dict['zope-family-dict']['default-auth'] = ['dummy_http_server']
parameter_dict['backend-path-dict']['default-auth'] = '/'
......@@ -815,7 +810,7 @@ class TestFrontendXForwardedFor(BalancerTestCase):
).json()
self.assertNotEqual(result['Incoming Headers'].get('x-forwarded-for', '').split(', ')[0], '1.2.3.4')
balancer_url = json.loads(self.computer_partition.getConnectionParameterDict()['_'])['default-auth']
with self.assertRaisesRegexp(Exception, "certificate required"):
with self.assertRaisesRegex(Exception, "certificate required"):
requests.get(
balancer_url,
headers={'X-Forwarded-For': '1.2.3.4'},
......@@ -833,8 +828,8 @@ class TestServerTLSProvidedCertificate(BalancerTestCase):
# type: () -> dict
server_caucase = cls.getManagedResource('server_caucase', CaucaseService)
server_certificate = cls.getManagedResource('server_certificate', CaucaseCertificate)
server_certificate.request(six.ensure_text(cls._ipv4_address), server_caucase)
parameter_dict = super(TestServerTLSProvidedCertificate, cls)._getInstanceParameterDict()
server_certificate.request(cls._ipv4_address, server_caucase)
parameter_dict = super()._getInstanceParameterDict()
with open(server_certificate.cert_file) as f:
parameter_dict['ssl']['cert'] = f.read()
with open(server_certificate.key_file) as f:
......@@ -855,13 +850,13 @@ class TestClientTLS(BalancerTestCase):
# type: () -> dict
frontend_caucase1 = cls.getManagedResource('frontend_caucase1', CaucaseService)
certificate1 = cls.getManagedResource('client_certificate1', CaucaseCertificate)
certificate1.request(u'client_certificate1', frontend_caucase1)
certificate1.request('client_certificate1', frontend_caucase1)
frontend_caucase2 = cls.getManagedResource('frontend_caucase2', CaucaseService)
certificate2 = cls.getManagedResource('client_certificate2', CaucaseCertificate)
certificate2.request(u'client_certificate2', frontend_caucase2)
certificate2.request('client_certificate2', frontend_caucase2)
parameter_dict = super(TestClientTLS, cls)._getInstanceParameterDict()
parameter_dict = super()._getInstanceParameterDict()
parameter_dict['ssl-authentication-dict'] = {
'default': True,
}
......@@ -936,11 +931,11 @@ class TestClientTLS(BalancerTestCase):
process = pexpect.spawnu("faketime +1day %s" % caucase_updater)
process.logfile = DebugLogFile()
process.expect(u"Got new CRL.*Next wake-up at.*")
process.expect("Got new CRL.*Next wake-up at.*")
process.terminate()
process.wait()
with self.assertRaisesRegexp(Exception, 'certificate revoked'):
with self.assertRaisesRegex(Exception, 'certificate revoked'):
_make_request()
......@@ -952,10 +947,7 @@ class TestPathBasedRouting(BalancerTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> dict
parameter_dict = super(
TestPathBasedRouting,
cls,
)._getInstanceParameterDict()
parameter_dict = super()._getInstanceParameterDict()
parameter_dict['zope-family-dict'][
'second'
] = parameter_dict['zope-family-dict'][
......@@ -985,7 +977,7 @@ class TestPathBasedRouting(BalancerTestCase):
published_dict = json.loads(self.computer_partition.getConnectionParameterDict()['_'])
scheme = 'scheme'
netloc = 'example.com:8080'
prefix = '/VirtualHostBase/' + scheme + '//' + six.moves.urllib.parse.quote(
prefix = '/VirtualHostBase/' + scheme + '//' + urllib.parse.quote(
netloc,
safe='',
)
......@@ -1009,7 +1001,7 @@ class TestPathBasedRouting(BalancerTestCase):
# test will need to be updated accordingly.
self.assertEqual(
requests.get(
six.moves.urllib.parse.urljoin(published_dict[family], prefix + vhr + path),
urllib.parse.urljoin(published_dict[family], prefix + vhr + path),
verify=False,
).json()['Path'],
expected_path,
......
......@@ -25,7 +25,6 @@
#
##############################################################################
from __future__ import absolute_import
import contextlib
import glob
......@@ -42,9 +41,8 @@ import unittest
import psutil
import requests
import six
import six.moves.urllib.parse
import six.moves.xmlrpc_client
import urllib.parse
import xmlrpc.client
import urllib3
from slapos.testing.utils import CrontabMixin
......@@ -53,7 +51,7 @@ from . import ERP5InstanceTestCase, setUpModule
setUpModule # pyflakes
class TestPublishedURLIsReachableMixin(object):
class TestPublishedURLIsReachableMixin:
"""Mixin that checks that default page of ERP5 is reachable.
"""
......@@ -61,7 +59,7 @@ class TestPublishedURLIsReachableMixin(object):
# We access ERP5 trough a "virtual host", which should make
# ERP5 produce URLs using https://virtual-host-name:1234/virtual_host_root
# as base.
virtual_host_url = six.moves.urllib.parse.urljoin(
virtual_host_url = urllib.parse.urljoin(
base_url,
'/VirtualHostBase/https/virtual-host-name:1234/{}/VirtualHostRoot/_vh_virtual_host_root/'
.format(site_id))
......@@ -89,7 +87,7 @@ class TestPublishedURLIsReachableMixin(object):
# login page can be rendered and contain the text "ERP5"
r = session.get(
six.moves.urllib.parse.urljoin(base_url, '{}/login_form'.format(site_id)),
urllib.parse.urljoin(base_url, f'{site_id}/login_form'),
verify=verify,
allow_redirects=False,
)
......@@ -184,7 +182,7 @@ class TestBalancerPorts(ERP5InstanceTestCase):
}
def checkValidHTTPSURL(self, url):
parsed = six.moves.urllib.parse.urlparse(url)
parsed = urllib.parse.urlparse(url)
self.assertEqual(parsed.scheme, 'https')
self.assertTrue(parsed.hostname)
self.assertTrue(parsed.port)
......@@ -194,16 +192,16 @@ class TestBalancerPorts(ERP5InstanceTestCase):
param_dict = self.getRootPartitionConnectionParameterDict()
for family_name in ('family1', 'family2'):
self.checkValidHTTPSURL(
param_dict['family-{family_name}'.format(family_name=family_name)])
param_dict[f'family-{family_name}'])
self.checkValidHTTPSURL(
param_dict['family-{family_name}-v6'.format(family_name=family_name)])
param_dict[f'family-{family_name}-v6'])
def test_published_test_runner_url(self):
# each family also has a list of test runner URLs, by default 3 per family
param_dict = self.getRootPartitionConnectionParameterDict()
for family_name in ('family1', 'family2'):
family_test_runner_url_list = param_dict[
'{family_name}-test-runner-url-list'.format(family_name=family_name)]
f'{family_name}-test-runner-url-list']
self.assertEqual(3, len(family_test_runner_url_list))
for url in family_test_runner_url_list:
self.checkValidHTTPSURL(url)
......@@ -221,16 +219,16 @@ class TestBalancerPorts(ERP5InstanceTestCase):
# normal access on ipv4 and ipv6 and test runner access on ipv4 only
with self.slap.instance_supervisor_rpc as supervisor:
all_process_info = supervisor.getAllProcessInfo()
process_info, = [p for p in all_process_info if p['name'].startswith('haproxy-')]
process_info, = (p for p in all_process_info if p['name'].startswith('haproxy-'))
haproxy_master_process = psutil.Process(process_info['pid'])
haproxy_worker_process, = haproxy_master_process.children()
self.assertEqual(
sorted([socket.AF_INET] * 4 + [socket.AF_INET6] * 2),
sorted([
sorted(
c.family
for c in haproxy_worker_process.connections()
if c.status == 'LISTEN'
]))
))
class TestSeleniumTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
......@@ -296,7 +294,7 @@ class TestDisableTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMix
# Haproxy only listens on two ports, there are no haproxy ports allocated for the test runner
with self.slap.instance_supervisor_rpc as supervisor:
all_process_info = supervisor.getAllProcessInfo()
process_info, = [p for p in all_process_info if p['name'].startswith('haproxy')]
process_info, = (p for p in all_process_info if p['name'].startswith('haproxy'))
haproxy_master_process = psutil.Process(process_info['pid'])
haproxy_worker_process, = haproxy_master_process.children()
self.assertEqual(
......@@ -366,7 +364,7 @@ class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReac
zodb["pool-timeout"] = "10m"
storage["storage"] = "root"
storage["server"] = zeo_addr
with open('%s/etc/zope-%s.conf' % (partition, zope)) as f:
with open(f'{partition}/etc/zope-{zope}.conf') as f:
conf = list(map(str.strip, f.readlines()))
i = conf.index("<zodb_db root>") + 1
conf = iter(conf[i:conf.index("</zodb_db>", i)])
......@@ -376,12 +374,12 @@ class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReac
if line == '</zeoclient>':
break
checkParameter(line, storage)
for k, v in six.iteritems(storage):
for k, v in storage.items():
self.assertIsNone(v, k)
del storage
else:
checkParameter(line, zodb)
for k, v in six.iteritems(zodb):
for k, v in zodb.items():
self.assertIsNone(v, k)
partition = self.getComputerPartitionPath('zope-a')
......@@ -436,19 +434,19 @@ class TestWatchActivities(ERP5InstanceTestCase):
env=dict(os.environ,
PATH=os.pathsep.join([tmpdir, os.environ['PATH']])),
stderr=subprocess.STDOUT,
universal_newlines=True,
text=True,
)
except subprocess.CalledProcessError as e:
self.fail(e.output)
self.assertIn(' dict ', output)
class ZopeSkinsMixin(object):
class ZopeSkinsMixin:
"""Mixins with utility methods to test zope behaviors.
"""
@classmethod
def _setUpClass(cls):
super(ZopeSkinsMixin, cls)._setUpClass()
super()._setUpClass()
param_dict = cls.getRootPartitionConnectionParameterDict()
with cls.getXMLRPCClient() as erp5_xmlrpc_client:
# wait for ERP5 to be ready (TODO: this should probably be a promise)
......@@ -456,8 +454,8 @@ class ZopeSkinsMixin(object):
time.sleep(1)
try:
erp5_xmlrpc_client.getTitle()
except (six.moves.xmlrpc_client.ProtocolError,
six.moves.xmlrpc_client.Fault):
except (xmlrpc.client.ProtocolError,
xmlrpc.client.Fault):
pass
else:
break
......@@ -470,7 +468,7 @@ class ZopeSkinsMixin(object):
path is joined with urllib.parse.urljoin to the URL of the portal.
"""
param_dict = cls.getRootPartitionConnectionParameterDict()
parsed = six.moves.urllib.parse.urlparse(param_dict['family-' + family_name])
parsed = urllib.parse.urlparse(param_dict['family-' + family_name])
base_url = parsed._replace(
netloc='{}:{}@{}:{}'.format(
param_dict['inituser-login'],
......@@ -480,7 +478,7 @@ class ZopeSkinsMixin(object):
),
path=param_dict['site-id'] + '/',
).geturl()
return six.moves.urllib_parse.urljoin(base_url, path)
return urllib.parse.urljoin(base_url, path)
@classmethod
@contextlib.contextmanager
......@@ -489,16 +487,12 @@ class ZopeSkinsMixin(object):
ssl_context = ssl.create_default_context()
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
erp5_xmlrpc_client = six.moves.xmlrpc_client.ServerProxy(
erp5_xmlrpc_client = xmlrpc.client.ServerProxy(
cls._getAuthenticatedZopeUrl(''),
context=ssl_context,
)
# BBB use as a context manager only on python3
if sys.version_info < (3, ):
with erp5_xmlrpc_client:
yield erp5_xmlrpc_client
else:
with erp5_xmlrpc_client:
yield erp5_xmlrpc_client
@classmethod
def _addPythonScript(cls, script_id, params, body):
......@@ -507,7 +501,7 @@ class ZopeSkinsMixin(object):
try:
custom.manage_addProduct.PythonScripts.manage_addPythonScript(
script_id)
except six.moves.xmlrpc_client.ProtocolError as e:
except xmlrpc.client.ProtocolError as e:
if e.errcode != 302:
raise
getattr(custom, script_id).ZPythonScriptHTML_editAction(
......@@ -542,7 +536,7 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin):
@classmethod
def _setUpClass(cls):
super(ZopeTestMixin, cls)._setUpClass()
super()._setUpClass()
cls.zope_base_url = cls._getAuthenticatedZopeUrl('')
param_dict = cls.getRootPartitionConnectionParameterDict()
cls.zope_deadlock_debugger_url = cls._getAuthenticatedZopeUrl(
......@@ -565,7 +559,7 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin):
raise ValueError("Unknown mode: %s" % mode)
''',
)
cls.zope_verify_activity_processing_url = six.moves.urllib_parse.urljoin(
cls.zope_verify_activity_processing_url = urllib.parse.urljoin(
cls.zope_base_url,
'ERP5Site_verifyActivityProcessing',
)
......@@ -578,7 +572,7 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin):
return log("hello %s" % name)
''',
)
cls.zope_log_message_url = six.moves.urllib_parse.urljoin(
cls.zope_log_message_url = urllib.parse.urljoin(
cls.zope_base_url,
'ERP5Site_logMessage',
)
......@@ -593,18 +587,18 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin):
return "done"
''',
)
cls.zope_long_request_url = six.moves.urllib_parse.urljoin(
cls.zope_long_request_url = urllib.parse.urljoin(
cls.zope_base_url,
'ERP5Site_executeLongRequest',
)
def setUp(self):
super(ZopeTestMixin, self).setUp()
super().setUp()
# run logrotate a first time so that it creates state files
self._executeCrontabAtDate('logrotate', '2000-01-01')
def tearDown(self):
super(ZopeTestMixin, self).tearDown()
super().tearDown()
# reset logrotate status
logrotate_status = os.path.join(
self.getComputerPartitionPath('zope-default'),
......@@ -859,11 +853,11 @@ class TestZopeWSGI(ZopeTestMixin, ERP5InstanceTestCase):
@unittest.expectedFailure
def test_long_request_log_rotation(self):
super(TestZopeWSGI, self).test_long_request_log_rotation(self)
super().test_long_request_log_rotation()
@unittest.expectedFailure
def test_basic_authentication_user_in_access_log(self):
super(TestZopeWSGI, self).test_basic_authentication_user_in_access_log(self)
super().test_basic_authentication_user_in_access_log()
class TestZopePublisherTimeout(ZopeSkinsMixin, ERP5InstanceTestCase):
......@@ -902,7 +896,7 @@ class TestZopePublisherTimeout(ZopeSkinsMixin, ERP5InstanceTestCase):
@classmethod
def _setUpClass(cls):
super(TestZopePublisherTimeout, cls)._setUpClass()
super()._setUpClass()
cls._addPythonScript(
'ERP5Site_doSlowRequest',
'',
......
##############################################################################
# coding: utf-8
#
# Copyright (c) 2018 Nexedi SA and Contributors. All Rights Reserved.
#
......@@ -26,11 +25,10 @@
#
##############################################################################
from __future__ import absolute_import
import os
import json
import glob
import six.moves.urllib.parse
import urllib.parse
import socket
import sys
import time
......@@ -39,7 +37,7 @@ import datetime
import subprocess
import gzip
from backports import lzma
import lzma
import MySQLdb
from slapos.testing.utils import CrontabMixin
......@@ -84,7 +82,7 @@ class MariaDBTestCase(ERP5InstanceTestCase):
# type: () -> MySQLdb.connections.Connection
connection_parameter_dict = json.loads(
self.computer_partition.getConnectionParameterDict()['_'])
db_url = six.moves.urllib.parse.urlparse(connection_parameter_dict['database-list'][0])
db_url = urllib.parse.urlparse(connection_parameter_dict['database-list'][0])
self.assertEqual('mysql', db_url.scheme)
self.assertTrue(db_url.path.startswith('/'))
......@@ -208,7 +206,7 @@ class TestMariaDB(MariaDBTestCase):
"""
select * from test_utf8_collation where col1 = "a"
""")
self.assertEqual(((u'à',),), cnx.store_result().fetch_row(maxrows=2))
self.assertEqual((('à',),), cnx.store_result().fetch_row(maxrows=2))
class TestMroonga(MariaDBTestCase):
......@@ -232,7 +230,7 @@ class TestMroonga(MariaDBTestCase):
SELECT mroonga_normalize("ABCDあぃうぇ㍑")
""")
# XXX this is returned as bytes by mroonga/mariadb (this might be a bug)
self.assertEqual(((u'abcdあぃうぇリットル'.encode('utf-8'),),),
self.assertEqual((('abcdあぃうぇリットル'.encode(),),),
cnx.store_result().fetch_row(maxrows=2))
if 0:
......@@ -245,7 +243,7 @@ class TestMroonga(MariaDBTestCase):
"""
SELECT mroonga_normalize("aBcDあぃウェ㍑", "NormalizerMySQLUnicodeCIExceptKanaCIKanaWithVoicedSoundMark")
""")
self.assertEqual(((u'ABCDあぃうぇ㍑'.encode('utf-8'),),),
self.assertEqual((('ABCDあぃうぇ㍑'.encode(),),),
cnx.store_result().fetch_row(maxrows=2))
def test_mroonga_full_text_normalizer(self):
......@@ -282,7 +280,7 @@ class TestMroonga(MariaDBTestCase):
WHERE MATCH (content) AGAINST ("+ブラック" IN BOOLEAN MODE)
""")
self.assertEqual(
((datetime.date(2013, 4, 23), u'ブラックコーヒーを飲んだ。'),),
((datetime.date(2013, 4, 23), 'ブラックコーヒーを飲んだ。'),),
cnx.store_result().fetch_row(maxrows=2),
)
......
......@@ -16,7 +16,6 @@
# See COPYING file for full licensing terms.
# See https://www.nexedi.com/licensing for rationale and options.
from __future__ import absolute_import
import json
import os.path
import unittest
......@@ -76,5 +75,5 @@ def lookupMount(zurl):
# readfile returns content of file @path.
def readfile(path):
with open(path, 'r') as f:
with open(path) as f:
return f.read()
......@@ -44,7 +44,6 @@ setup(name=name,
'slapos.core',
'supervisor',
'slapos.libnetworkcache',
'typing; python_version<"3"',
],
test_suite='test',
)
......@@ -35,8 +35,8 @@ import tempfile
import time
import requests
import six.moves.urllib as urllib
import six.moves.xmlrpc_client
import urllib.parse
import xmlrpc.client
import urllib3
from slapos.grid.utils import md5digest
......@@ -83,8 +83,8 @@ class ERP5UpgradeTestCase(SlapOSInstanceTestCase):
@classmethod
def setUpClass(cls):
# request and instanciate with old software url
super(ERP5UpgradeTestCase, cls).setUpClass()
# request and instantiate with old software url
super().setUpClass()
cls.setUpOldInstance()
......@@ -155,7 +155,7 @@ class TestERP5Upgrade(ERP5UpgradeTestCase):
# wait for old site creation
cls.session.get(
'{zope_base_url}/person_module'.format(zope_base_url=cls.zope_base_url),
f'{cls.zope_base_url}/person_module',
auth=requests.auth.HTTPBasicAuth(
username=param_dict['inituser-login'],
password=param_dict['inituser-password'],
......@@ -171,16 +171,12 @@ class TestERP5Upgrade(ERP5UpgradeTestCase):
ssl_context = ssl.create_default_context()
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
erp5_xmlrpc_client = six.moves.xmlrpc_client.ServerProxy(
erp5_xmlrpc_client = xmlrpc.client.ServerProxy(
cls.authenticated_zope_base_url,
context=ssl_context,
)
# BBB use as a context manager only on python3
if sys.version_info < (3, ):
with erp5_xmlrpc_client:
yield erp5_xmlrpc_client
else:
with erp5_xmlrpc_client:
yield erp5_xmlrpc_client
def addPythonScript(script_id, params, body):
with getXMLRPCClient() as erp5_xmlrpc_client:
......@@ -188,7 +184,7 @@ class TestERP5Upgrade(ERP5UpgradeTestCase):
try:
custom.manage_addProduct.PythonScripts.manage_addPythonScript(
script_id)
except six.moves.xmlrpc_client.ProtocolError as e:
except xmlrpc.client.ProtocolError as e:
if e.errcode != 302:
raise
getattr(custom, script_id).ZPythonScriptHTML_editAction(
......
[buildout]
extends =
software.cfg
[python]
part = python2.7
......@@ -46,7 +46,6 @@ setup(name=name,
'slapos.libnetworkcache',
'erp5.util',
'supervisor',
'six',
],
zip_safe=True,
test_suite='test',
......
......@@ -34,11 +34,10 @@ import struct
import subprocess
import tempfile
import time
import six
import sys
from six.moves.SimpleHTTPServer import SimpleHTTPRequestHandler
from six.moves.socketserver import StreamRequestHandler, TCPServer
from http.server import SimpleHTTPRequestHandler
from socketserver import StreamRequestHandler, TCPServer
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
from slapos.testing.utils import findFreeTCPPort
......@@ -49,8 +48,8 @@ FLUSH_INTERVAL = 1
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath(
os.path.join(os.path.dirname(__file__), '..',
'software%s.cfg' % ("-py2" if six.PY2 else ""))))
os.path.join(os.path.dirname(__file__), '..', 'software.cfg')))
class FluentdTestCase(SlapOSInstanceTestCase):
__partition_reference__ = 'fluentd'
......@@ -135,11 +134,11 @@ class WendelinTutorialTestCase(FluentdTestCase):
return subprocess.check_output(
[self._fluentd_bin, '-c', conf_path, '--dry-run'],
env={'GEM_PATH': self._gem_path},
universal_newlines=True,
text=True,
)
def _test_configuration(self, expected_str):
self.assertRegexpMatches(
self.assertRegex(
self.read_fluentd_conf(self._conf),
expected_str,
)
......@@ -168,12 +167,12 @@ class SensorConfTestCase(WendelinTutorialTestCase):
@classmethod
def sensor_conf(cls, script_path):
return '''\
return f'''\
<source>
@type exec
tag tag.name
command %s %s
run_interval %ss
command {sys.executable} {script_path}
run_interval {FLUSH_INTERVAL}s
<parse>
keys pressure, humidity, temperature
</parse>
......@@ -182,25 +181,25 @@ class SensorConfTestCase(WendelinTutorialTestCase):
@type forward
<server>
name myserver1
host %s
host {cls._ipv6_address}
</server>
<buffer>
flush_mode immediate
</buffer>
</match>''' % (sys.executable, script_path, FLUSH_INTERVAL, cls._ipv6_address)
</match>'''
@classmethod
def sensor_script(cls, measurementList):
return '''\
#!/usr/bin/python
measurement_text = "\t".join(measurementList)
return f'''\
#!{sys.executable}
# -*- coding: utf-8 -*-
print("%s")''' % "\t".join(measurementList)
print("{measurement_text}")'''
def test_configuration(self):
self._test_configuration(
r'adding forwarding server \'myserver1\' host="%s" port=%s weight=60'
% (self._ipv6_address, FLUENTD_PORT)
fr'adding forwarding server \'myserver1\' host="{self._ipv6_address}" port={FLUENTD_PORT} weight=60'
)
def test_send_data(self):
......@@ -229,25 +228,24 @@ class GatewayConfTestCase(WendelinTutorialTestCase):
@classmethod
def gateway_conf(cls, fluentd_port, wendelin_port):
return '''\
return f'''\
<source>
@type forward
port %s
bind %s
port {fluentd_port}
bind {cls._ipv6_address}
</source>
<match tag.name>
@type wendelin
streamtool_uri http://[%s]:%s/erp5/portal_ingestion_policies/default
streamtool_uri http://[{cls._ipv6_address}]:{wendelin_port}/erp5/portal_ingestion_policies/default
user foo
password bar
<buffer>
flush_mode interval
@type file
path fluentd-buffer-file/
flush_interval %ss
flush_interval {FLUSH_INTERVAL}s
</buffer>
</match>''' % (fluentd_port, cls._ipv6_address, cls._ipv6_address,
wendelin_port, FLUSH_INTERVAL)
</match>'''
@classmethod
def get_configuration(cls):
......
......@@ -27,7 +27,7 @@
import os
import logging
from six.moves.urllib.parse import urlparse
from urllib.parse import urlparse
import requests
......
......@@ -48,7 +48,6 @@ setup(name=name,
'erp5.util',
'supervisor',
'psutil',
'six',
],
zip_safe=True,
test_suite='test',
......
......@@ -25,7 +25,8 @@
#
##############################################################################
import six.moves.http_client as httplib
import http.client as httplib
import http.server
import json
import os
import glob
......@@ -33,15 +34,13 @@ import hashlib
import psutil
import re
import requests
import six
import slapos.util
import sqlite3
from six.moves.urllib.parse import parse_qs, urlparse
from urllib.parse import parse_qs, urlparse
import unittest
import subprocess
import tempfile
import six.moves.socketserver as SocketServer
from six.moves import SimpleHTTPServer
import socketserver as SocketServer
import multiprocessing
import time
import shutil
......@@ -117,7 +116,7 @@ bootstrap_machine_param_dict = {
}
class KvmMixin(object):
class KvmMixin:
def getConnectionParameterDictJson(self):
return json.loads(
self.computer_partition.getConnectionParameterDict()['_'])
......@@ -142,10 +141,10 @@ class KvmMixin(object):
for hash_file in hash_file_list
])
with self.slap.instance_supervisor_rpc as supervisor:
running_process_info = '\n'.join(sorted([
running_process_info = '\n'.join(sorted(
'%(group)s:%(name)s %(statename)s' % q for q
in supervisor.getAllProcessInfo()
if q['name'] != 'watchdog' and q['group'] != 'watchdog']))
if q['name'] != 'watchdog' and q['group'] != 'watchdog'))
return running_process_info.replace(
hash_value, '{hash}').replace(kvm_hash_value, '{kvm-hash-value}')
......@@ -164,14 +163,14 @@ class KvmMixin(object):
state=state)
class KvmMixinJson(object):
class KvmMixinJson:
@classmethod
def getInstanceParameterDict(cls):
return {
'_': json.dumps(super(KvmMixinJson, cls).getInstanceParameterDict())}
'_': json.dumps(super().getInstanceParameterDict())}
def rerequestInstance(self, parameter_dict, *args, **kwargs):
return super(KvmMixinJson, self).rerequestInstance(
return super().rerequestInstance(
parameter_dict={'_': json.dumps(parameter_dict)},
*args, **kwargs
)
......@@ -196,10 +195,10 @@ class TestInstance(InstanceTestCase, KvmMixin):
{
'ipv6': self._ipv6_address,
'maximum-extra-disk-amount': '0',
'monitor-base-url': 'https://[%s]:8026' % (self._ipv6_address,),
'nat-rule-port-tcp-22': '%s : 10022' % (self._ipv6_address,),
'nat-rule-port-tcp-443': '%s : 10443' % (self._ipv6_address,),
'nat-rule-port-tcp-80': '%s : 10080' % (self._ipv6_address,),
'monitor-base-url': f'https://[{self._ipv6_address}]:8026',
'nat-rule-port-tcp-22': f'{self._ipv6_address} : 10022',
'nat-rule-port-tcp-443': f'{self._ipv6_address} : 10443',
'nat-rule-port-tcp-80': f'{self._ipv6_address} : 10080',
}
)
self.assertEqual(set(present_key_list), set(assert_key_list))
......@@ -275,8 +274,8 @@ class TestMemoryManagement(InstanceTestCase, KvmMixin):
qemu_wrapper = QemuQMPWrapper(os.path.join(
self.computer_partition_root_path, 'var', 'qmp_socket'))
ram_mb = sum(
[q['size']
for q in qemu_wrapper.getMemoryInfo()['hotplugged']]) / 1024 / 1024
q['size']
for q in qemu_wrapper.getMemoryInfo()['hotplugged']) / 1024 / 1024
cpu_count = len(
[q['CPU'] for q in qemu_wrapper.getCPUInfo()['hotplugged']])
return {'cpu_count': cpu_count, 'ram_mb': ram_mb}
......@@ -374,8 +373,7 @@ class MonitorAccessMixin(KvmMixin):
connection_xml = partition_information.get('connection_xml')
if not connection_xml:
continue
connection_dict = json.loads(slapos.util.xml2dict(
connection_xml if six.PY3 else connection_xml.encode('utf-8'))['_'])
connection_dict = json.loads(slapos.util.xml2dict(connection_xml)['_'])
monitor_base_url = connection_dict.get('monitor-base-url')
if not monitor_base_url:
continue
......@@ -660,22 +658,22 @@ class TestInstanceResilient(InstanceTestCase, KvmMixin):
connection_parameter_dict.pop(k)
self.assertIn('feed-url-kvm-1-pull', connection_parameter_dict)
feed_pull = connection_parameter_dict.pop('feed-url-kvm-1-pull')
self.assertRegexpMatches(
self.assertRegex(
feed_pull,
'http://\\[%s\\]:[0-9][0-9][0-9][0-9]/get/local-ir0-kvm-1-pull' % (
self._ipv6_address,))
'http://\\[{}\\]:[0-9][0-9][0-9][0-9]/get/local-ir0-kvm-1-pull'.format(
self._ipv6_address))
feed_push = connection_parameter_dict.pop('feed-url-kvm-1-push')
self.assertRegexpMatches(
self.assertRegex(
feed_push,
'http://\\[%s\\]:[0-9][0-9][0-9][0-9]/get/local-ir0-kvm-1-push' % (
self._ipv6_address,))
'http://\\[{}\\]:[0-9][0-9][0-9][0-9]/get/local-ir0-kvm-1-push'.format(
self._ipv6_address))
self.assertEqual(
connection_parameter_dict,
{
'ipv6': self._ipv6_address,
'monitor-base-url': 'https://[%s]:8160' % (self._ipv6_address,),
'monitor-base-url': f'https://[{self._ipv6_address}]:8160',
'monitor-user': 'admin',
'takeover-kvm-1-url': 'http://[%s]:9263/' % (self._ipv6_address,),
'takeover-kvm-1-url': f'http://[{self._ipv6_address}]:9263/',
}
)
self.assertEqual(set(present_key_list), set(assert_key_list))
......@@ -823,10 +821,10 @@ class TestInstanceNbdServerJson(
pass
class HttpHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
class HttpHandler(http.server.SimpleHTTPRequestHandler):
def log_message(self, *args):
if os.environ.get('SLAPOS_TEST_DEBUG'):
return SimpleHTTPServer.SimpleHTTPRequestHandler.log_message(self, *args)
return http.server.SimpleHTTPRequestHandler.log_message(self, *args)
else:
return
......@@ -873,18 +871,19 @@ class FakeImageServerMixin(KvmMixin):
cls.server_process = multiprocessing.Process(
target=server.serve_forever, name='FakeImageHttpServer')
cls.server_process.start()
server.socket.close()
finally:
os.chdir(old_dir)
@classmethod
def stopImageHttpServer(cls):
cls.logger.debug('Stopping process %s' % (cls.server_process,))
cls.logger.debug('Stopping process %s', cls.server_process)
cls.server_process.join(10)
cls.server_process.terminate()
time.sleep(0.1)
if cls.server_process.is_alive():
cls.logger.warning(
'Process %s still alive' % (cls.server_process, ))
'Process %s still alive', cls.server_process)
shutil.rmtree(cls.image_source_directory)
......@@ -932,11 +931,11 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
@classmethod
def setUpClass(cls):
cls.startImageHttpServer()
super(TestBootImageUrlList, cls).setUpClass()
super().setUpClass()
@classmethod
def tearDownClass(cls):
super(TestBootImageUrlList, cls).tearDownClass()
super().tearDownClass()
cls.stopImageHttpServer()
def tearDown(self):
......@@ -947,7 +946,7 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
# 2nd ...move instance to "default" state
self.rerequestInstance({})
self.slap.waitForInstance(max_retry=10)
super(TestBootImageUrlList, self).tearDown()
super().tearDown()
def getRunningImageList(
self, kvm_instance_partition,
......@@ -993,8 +992,8 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
self.assertEqual(
[
'${inst}/srv/%s/%s' % (self.image_directory, self.fake_image_md5sum),
'${inst}/srv/%s/%s' % (self.image_directory, self.fake_image2_md5sum),
f'${{inst}}/srv/{self.image_directory}/{self.fake_image_md5sum}',
f'${{inst}}/srv/{self.image_directory}/{self.fake_image2_md5sum}',
'${shared}/debian-${ver}-amd64-netinst.iso',
],
self.getRunningImageList(kvm_instance_partition)
......@@ -1014,8 +1013,8 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
self.assertEqual(
[
'${inst}/srv/%s/%s' % (self.image_directory, self.fake_image3_md5sum),
'${inst}/srv/%s/%s' % (self.image_directory, self.fake_image2_md5sum),
f'${{inst}}/srv/{self.image_directory}/{self.fake_image3_md5sum}',
f'${{inst}}/srv/{self.image_directory}/{self.fake_image2_md5sum}',
'${shared}/debian-${ver}-amd64-netinst.iso',
],
self.getRunningImageList(kvm_instance_partition)
......@@ -1159,9 +1158,9 @@ class TestBootImageUrlSelect(TestBootImageUrlList):
def test_together(self):
partition_parameter_kw = {
'boot-image-url-list': "%s#%s" % (
'boot-image-url-list': "{}#{}".format(
self.fake_image, self.fake_image_md5sum),
'boot-image-url-select': '["%s#%s"]' % (
'boot-image-url-select': '["{}#{}"]'.format(
self.fake_image, self.fake_image_md5sum)
}
self.rerequestInstance(partition_parameter_kw)
......@@ -1183,10 +1182,10 @@ class TestBootImageUrlSelect(TestBootImageUrlList):
self.assertEqual(
[
'${inst}/srv/boot-image-url-select-repository/%s' % (
self.fake_image_md5sum,),
'${inst}/srv/boot-image-url-list-repository/%s' % (
self.fake_image_md5sum,),
'${{inst}}/srv/boot-image-url-select-repository/{}'.format(
self.fake_image_md5sum),
'${{inst}}/srv/boot-image-url-list-repository/{}'.format(
self.fake_image_md5sum),
'${shared}/debian-${ver}-amd64-netinst.iso',
],
self.getRunningImageList(kvm_instance_partition)
......@@ -1258,12 +1257,12 @@ class TestBootImageUrlListKvmCluster(InstanceTestCase, FakeImageServerMixin):
config_file_name = 'boot-image-url-list.conf'
def setUp(self):
super(TestBootImageUrlListKvmCluster, self).setUp()
super().setUp()
self.startImageHttpServer()
def tearDown(self):
self.stopImageHttpServer()
super(TestBootImageUrlListKvmCluster, self).tearDown()
super().tearDown()
@classmethod
def getInstanceParameterDict(cls):
......@@ -1302,12 +1301,12 @@ class TestBootImageUrlListKvmCluster(InstanceTestCase, FakeImageServerMixin):
KVM1_config = os.path.join(
self.slap.instance_directory, self.__partition_reference__ + '2', 'etc',
self.config_file_name)
with open(KVM0_config, 'r') as fh:
with open(KVM0_config) as fh:
self.assertEqual(
self.input_value % (self.fake_image, self.fake_image_md5sum),
fh.read().strip()
)
with open(KVM1_config, 'r') as fh:
with open(KVM1_config) as fh:
self.assertEqual(
self.input_value % (self.fake_image2, self.fake_image2_md5sum),
fh.read().strip()
......@@ -1340,11 +1339,11 @@ class TestNatRules(KvmMixin, InstanceTestCase):
self.assertIn('nat-rule-port-tcp-200', connection_parameter_dict)
self.assertEqual(
'%s : 10100' % (self._ipv6_address,),
f'{self._ipv6_address} : 10100',
connection_parameter_dict['nat-rule-port-tcp-100']
)
self.assertEqual(
'%s : 10200' % (self._ipv6_address,),
f'{self._ipv6_address} : 10200',
connection_parameter_dict['nat-rule-port-tcp-200']
)
......@@ -1388,13 +1387,13 @@ class TestNatRulesKvmCluster(InstanceTestCase):
def test(self):
host_fwd_entry = self.getRunningHostFwd()
self.assertIn(
'hostfwd=tcp:%s:10100-:100' % (self._ipv4_address,),
f'hostfwd=tcp:{self._ipv4_address}:10100-:100',
host_fwd_entry)
self.assertIn(
'hostfwd=tcp:%s:10200-:200' % (self._ipv4_address,),
f'hostfwd=tcp:{self._ipv4_address}:10200-:200',
host_fwd_entry)
self.assertIn(
'hostfwd=tcp:%s:10300-:300' % (self._ipv4_address,),
f'hostfwd=tcp:{self._ipv4_address}:10300-:300',
host_fwd_entry)
......@@ -1419,10 +1418,10 @@ class TestWhitelistFirewall(InstanceTestCase):
try:
self.content_json = json.loads(content)
except ValueError:
self.fail('Failed to parse json of %r' % (content,))
self.fail(f'Failed to parse json of {content!r}')
self.assertTrue(isinstance(self.content_json, list))
# check /etc/resolv.conf
with open('/etc/resolv.conf', 'r') as f:
with open('/etc/resolv.conf') as f:
resolv_conf_ip_list = []
for line in f.readlines():
if line.startswith('nameserver'):
......@@ -1451,7 +1450,7 @@ class TestWhitelistFirewallRequest(TestWhitelistFirewall):
}
def test(self):
super(TestWhitelistFirewallRequest, self).test()
super().test()
self.assertIn('2.2.2.2', self.content_json)
self.assertIn('3.3.3.3', self.content_json)
self.assertIn('4.4.4.4', self.content_json)
......@@ -1573,7 +1572,7 @@ class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin):
maxDiff = None
def setUp(self):
super(TestImageDownloadController, self).setUp()
super().setUp()
self.working_directory = tempfile.mkdtemp()
self.destination_directory = os.path.join(
self.working_directory, 'destination')
......@@ -1595,7 +1594,7 @@ class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin):
def tearDown(self):
self.stopImageHttpServer()
shutil.rmtree(self.working_directory)
super(TestImageDownloadController, self).tearDown()
super().tearDown()
def callImageDownloadController(self, *args):
call_list = [sys.executable, self.image_download_controller] + list(args)
......@@ -1618,7 +1617,7 @@ class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin):
def assertFileContent(self, path, content):
self.assertTrue(os.path.exists, path)
with open(path, 'r') as fh:
with open(path) as fh:
self.assertEqual(
fh.read(),
content)
......@@ -1788,7 +1787,7 @@ class TestParameterDefault(InstanceTestCase, KvmMixin):
self.slap.instance_directory, '*', 'bin', 'kvm_raw'))
self.assertEqual(len(kvm_raw), 1)
kvm_raw = kvm_raw[0]
with open(kvm_raw, 'r') as fh:
with open(kvm_raw) as fh:
kvm_raw = fh.read()
self.assertIn(expected, kvm_raw)
......@@ -1929,7 +1928,7 @@ class TestExternalDisk(InstanceTestCase, ExternalDiskMixin):
continue
partition_store_list = []
for number in range(10):
storage = os.path.join(external_storage_path, 'data%s' % (number,))
storage = os.path.join(external_storage_path, f'data{number}')
if not os.path.exists(storage):
os.mkdir(storage)
partition_store = os.path.join(storage, partition)
......@@ -1949,7 +1948,7 @@ class TestExternalDisk(InstanceTestCase, ExternalDiskMixin):
if line.strip() == '[slapos]':
slapos_config.append('[slapos]\n')
slapos_config.append(
'instance_storage_home = %s\n' % (external_storage_path,))
f'instance_storage_home = {external_storage_path}\n')
else:
slapos_config.append(line)
with open(cls.slap._slapos_config, 'w') as fh:
......@@ -1968,7 +1967,7 @@ class TestExternalDisk(InstanceTestCase, ExternalDiskMixin):
@classmethod
def _setUpClass(cls):
super(TestExternalDisk, cls)._setUpClass()
super()._setUpClass()
cls.working_directory = tempfile.mkdtemp()
# setup the external_storage_list, to mimic part of slapformat
cls._prepareExternalStorageList()
......@@ -1978,7 +1977,7 @@ class TestExternalDisk(InstanceTestCase, ExternalDiskMixin):
@classmethod
def tearDownClass(cls):
cls._dropExternalStorageList()
super(TestExternalDisk, cls).tearDownClass()
super().tearDownClass()
shutil.rmtree(cls.working_directory)
def test(self):
......@@ -2026,7 +2025,7 @@ class TestExternalDiskModern(InstanceTestCase, ExternalDiskMixin):
@classmethod
def setUpClass(cls):
super(TestExternalDiskModern, cls).setUpClass()
super().setUpClass()
def getExternalDiskInstanceParameterDict(
self, first, second, third, update_dict=None):
......@@ -2090,11 +2089,11 @@ class TestExternalDiskModern(InstanceTestCase, ExternalDiskMixin):
[
'file=${partition}/srv/virtual.qcow2,if=virtio,discard=on,'
'format=qcow2',
'file=%s/first_disk,if=virtio,cache=writeback,format=qcow' % (
self.working_directory,),
'file={}/first_disk,if=virtio,cache=writeback,format=qcow'.format(
self.working_directory),
'file=${partition}/second_disk,if=virtio,cache=writeback',
'file=%s/third_disk,if=virtio,cache=none' % (
self.working_directory,)
'file={}/third_disk,if=virtio,cache=none'.format(
self.working_directory)
]
)
update_dict = {
......@@ -2128,9 +2127,7 @@ class TestExternalDiskModernCluster(TestExternalDiskModern):
return 'kvm-cluster'
def getExternalDiskInstanceParameterDict(self, *args, **kwargs):
partition_dict = super(
TestExternalDiskModernCluster, self
).getExternalDiskInstanceParameterDict(*args, **kwargs)
partition_dict = super().getExternalDiskInstanceParameterDict(*args, **kwargs)
partition_dict.update({"disable-ansible-promise": True})
return {
"kvm-partition-dict": {
......@@ -2150,7 +2147,7 @@ class TestExternalDiskModernIndexRequired(InstanceTestCase, ExternalDiskMixin):
@classmethod
def setUpClass(cls):
super(TestExternalDiskModernIndexRequired, cls).setUpClass()
super().setUpClass()
def getExternalDiskInstanceParameterDict(self, first, second, third):
return {
......@@ -2234,31 +2231,31 @@ class TestInstanceHttpServer(InstanceTestCase, KvmMixin):
@classmethod
def stopHttpServer(cls):
cls.logger.debug('Stopping process %s' % (cls.server_process,))
cls.logger.debug(f'Stopping process {cls.server_process}')
cls.server_process.join(10)
cls.server_process.terminate()
time.sleep(0.1)
if cls.server_process.is_alive():
cls.logger.warning(
'Process %s still alive' % (cls.server_process, ))
f'Process {cls.server_process} still alive')
shutil.rmtree(cls.http_directory)
@classmethod
def setUpClass(cls):
cls.startHttpServer()
super(TestInstanceHttpServer, cls).setUpClass()
super().setUpClass()
@classmethod
def tearDownClass(cls):
super(TestInstanceHttpServer, cls).tearDownClass()
super().tearDownClass()
cls.stopHttpServer()
@classmethod
def getInstanceParameterDict(cls):
return {
'enable-http-server': True,
'bootstrap-script-url': '%s#%s' % (
'bootstrap-script-url': '{}#{}'.format(
cls.bootstrap_script_url, cls.bootstrap_script_md5sum),
'data-to-vm': """data
to
......@@ -2280,10 +2277,10 @@ vm""",
{
'ipv6': self._ipv6_address,
'maximum-extra-disk-amount': '0',
'monitor-base-url': 'https://[%s]:8026' % (self._ipv6_address,),
'nat-rule-port-tcp-22': '%s : 10022' % (self._ipv6_address,),
'nat-rule-port-tcp-443': '%s : 10443' % (self._ipv6_address,),
'nat-rule-port-tcp-80': '%s : 10080' % (self._ipv6_address,),
'monitor-base-url': f'https://[{self._ipv6_address}]:8026',
'nat-rule-port-tcp-22': f'{self._ipv6_address} : 10022',
'nat-rule-port-tcp-443': f'{self._ipv6_address} : 10443',
'nat-rule-port-tcp-80': f'{self._ipv6_address} : 10080',
}
)
self.assertEqual(set(present_key_list), set(assert_key_list))
......@@ -2311,11 +2308,11 @@ ihs0:whitelist-firewall-{hash} RUNNING""",
'ipv6_config.sh', 'netmask', 'network', 'vm-bootstrap'],
sorted(os.listdir(public_dir))
)
with open(os.path.join(public_dir, 'data'), 'r') as fh:
with open(os.path.join(public_dir, 'data')) as fh:
self.assertEqual("""data
to
vm""", fh.read())
with open(os.path.join(public_dir, 'vm-bootstrap'), 'r') as fh:
with open(os.path.join(public_dir, 'vm-bootstrap')) as fh:
self.assertEqual('bootstrap_script', fh.read())
......
......@@ -45,7 +45,6 @@ setup(name=name,
'slapos.cookbook',
'slapos.libnetworkcache',
'supervisor',
'six',
'requests'
],
zip_safe=True,
......
##############################################################################
# coding: utf-8
#
# Copyright (c) 2020 Nexedi SA and Contributors. All Rights Reserved.
#
......@@ -28,7 +27,7 @@
import os
import json
from six.moves.urllib import parse
from urllib import parse
import requests
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
......
......@@ -8,12 +8,11 @@ import shutil
import subprocess
import tempfile
import time
import urllib
import urlparse
from BaseHTTPServer import BaseHTTPRequestHandler
import urllib.parse
from http.server import BaseHTTPRequestHandler
from typing import Dict
import mock
from unittest import mock
import OpenSSL.SSL
import pexpect
import psutil
......@@ -44,10 +43,10 @@ class EchoHTTPServer(ManagedHTTPServer):
response = json.dumps(
{
'Path': self.path,
'Incoming Headers': self.headers.dict
'Incoming Headers': dict(self.headers.items()),
},
indent=2,
)
).encode('utf-8')
self.end_headers()
self.wfile.write(response)
......@@ -67,11 +66,11 @@ class EchoHTTP11Server(ManagedHTTPServer):
response = json.dumps(
{
'Path': self.path,
'Incoming Headers': self.headers.dict
'Incoming Headers': dict(self.headers.items()),
},
indent=2,
)
self.send_header("Content-Length", len(response))
).encode('utf-8')
self.send_header("Content-Length", str(len(response)))
self.end_headers()
self.wfile.write(response)
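For context, the handler changes above follow from http.server on Python 3 being byte-oriented on the wire: wfile.write() expects bytes and send_header() expects str, hence the .encode('utf-8') on the JSON body and the str() around its length. A self-contained handler using the same pattern (host and port are arbitrary):

    import json
    from http.server import BaseHTTPRequestHandler, HTTPServer

    class EchoHandler(BaseHTTPRequestHandler):
        def do_GET(self):
            body = json.dumps({
                'Path': self.path,
                'Incoming Headers': dict(self.headers.items()),
            }, indent=2).encode('utf-8')
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.send_header('Content-Length', str(len(body)))
            self.end_headers()
            self.wfile.write(body)

    if __name__ == '__main__':
        HTTPServer(('127.0.0.1', 8000), EchoHandler).serve_forever()
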
......@@ -100,7 +99,7 @@ class CaucaseService(ManagedResource):
os.mkdir(os.path.join(caucased_dir, 'user'))
os.mkdir(os.path.join(caucased_dir, 'service'))
backend_caucased_netloc = '%s:%s' % (self._cls._ipv4_address, findFreeTCPPort(self._cls._ipv4_address))
backend_caucased_netloc = f'{self._cls._ipv4_address}:{findFreeTCPPort(self._cls._ipv4_address)}'
self.url = 'http://' + backend_caucased_netloc
self._caucased_process = subprocess.Popen(
[
......@@ -110,6 +109,7 @@ class CaucaseService(ManagedResource):
'--netloc', backend_caucased_netloc,
'--service-auto-approve-count', '1',
],
# capture subprocess output not to pollute test's own stdout
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
......@@ -127,6 +127,7 @@ class CaucaseService(ManagedResource):
# type: () -> None
self._caucased_process.terminate()
self._caucased_process.wait()
self._caucased_process.stdout.close()
shutil.rmtree(self.directory)
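The added `stdout.close()` pairs with the `stdout=subprocess.PIPE` above: once the process has been terminated and waited on, the captured pipe should be closed explicitly, otherwise Python 3 emits a `ResourceWarning`. A minimal sketch of the start/stop pattern, with a placeholder command standing in for the caucased command line:

```python
# Hedged sketch of the subprocess start/stop pattern used for caucased above.
import subprocess

proc = subprocess.Popen(
    ['sleep', '60'],             # placeholder for the real caucased invocation
    stdout=subprocess.PIPE,      # capture output so it does not pollute the test's own stdout
    stderr=subprocess.STDOUT,
)
# ... use the service ...
proc.terminate()
proc.wait()
proc.stdout.close()              # close the captured pipe to avoid a ResourceWarning
```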
......@@ -166,10 +167,11 @@ class BalancerTestCase(ERP5InstanceTestCase):
'backend-path-dict': {
'default': '',
},
'ssl-authentication-dict': {},
'ssl-authentication-dict': {'default': False},
'ssl': {
'caucase-url': cls.getManagedResource("caucase", CaucaseService).url,
},
'timeout-dict': {'default': None},
'family-path-routing-dict': {},
'path-routing-list': [],
}
......@@ -185,18 +187,27 @@ class BalancerTestCase(ERP5InstanceTestCase):
class SlowHTTPServer(ManagedHTTPServer):
"""An HTTP Server which reply after 2 seconds.
"""An HTTP Server which reply after a timeout.
Timeout is 2 seconds by default, and can be specified in the path of the URL
"""
class RequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
# type: () -> None
self.send_response(200)
self.send_header("Content-Type", "text/plain")
time.sleep(2)
timeout = 2
try:
timeout = int(self.path[1:])
except ValueError:
pass
time.sleep(timeout)
self.end_headers()
self.wfile.write("OK\n")
self.wfile.write(b"OK\n")
log_message = logging.getLogger(__name__ + '.SlowHTTPServer').info
log_message = logging.getLogger(__name__ + '.SlowHandler').info
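Since the handler parses the delay from the request path (`int(self.path[1:])`), a test can choose the sleep per request. A rough usage sketch, assuming `slow_server_url` points at an instance of this server (the URL below is a placeholder, the real tests use the managed resource's netloc):

```python
# Hedged usage sketch: ask SlowHTTPServer for a 5 second delay by encoding it in the path.
import urllib.parse
import requests

slow_server_url = 'http://127.0.0.1:8000/'  # placeholder

# "/5" -> the handler does time.sleep(5) before answering "OK\n";
# an unparsable path falls back to the 2 second default.
resp = requests.get(urllib.parse.urljoin(slow_server_url, '/5'), timeout=10)
assert resp.text == 'OK\n'
```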
class TestLog(BalancerTestCase, CrontabMixin):
......@@ -206,7 +217,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
parameter_dict = super(TestLog, cls)._getInstanceParameterDict()
parameter_dict = super()._getInstanceParameterDict()
# use a slow server instead
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("slow_web_server", SlowHTTPServer).netloc, 1, False]]
return parameter_dict
......@@ -214,7 +225,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
def test_access_log_format(self):
# type: () -> None
requests.get(
urlparse.urljoin(self.default_balancer_url, '/url_path'),
urllib.parse.urljoin(self.default_balancer_url, '/url_path'),
verify=False,
)
time.sleep(.5) # wait a bit more until access is logged
......@@ -254,7 +265,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
'apachedex',
'ApacheDex-*.html',
))
with open(apachedex_report, 'r') as f:
with open(apachedex_report) as f:
report_text = f.read()
self.assertIn('APacheDEX', report_text)
# having this table means that apachedex could parse some lines.
......@@ -301,7 +312,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
error_line = error_log_file.read().splitlines()[-1]
self.assertIn('apache.conf -D FOREGROUND', error_line)
# this log also include a timestamp
# This regex is for haproxy mostly, so keep it commented for now, until we can
# This regex is for haproxy mostly, so keep it commented for now, until we can
# Merge the slapos-master setup and erp5.
# self.assertRegexpMatches(error_line, r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}')
......@@ -331,7 +342,7 @@ class BalancerCookieHTTPServer(ManagedHTTPServer):
# The name of this cookie is SERVERID
assert self.headers['X-Balancer-Current-Cookie'] == 'SERVERID'
self.end_headers()
self.wfile.write(server._name)
self.wfile.write(server._name.encode('utf-8'))
log_message = logging.getLogger(__name__ + '.BalancerCookieHTTPServer').info
return RequestHandler
......@@ -344,7 +355,7 @@ class TestBalancer(BalancerTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
parameter_dict = super(TestBalancer, cls)._getInstanceParameterDict()
parameter_dict = super()._getInstanceParameterDict()
# use two backend servers
parameter_dict['dummy_http_server'] = [
......@@ -373,7 +384,7 @@ class TestBalancer(BalancerTestCase):
# if backend provides a "SERVERID" cookie, balancer will overwrite it with the
# backend selected by balancing algorithm
self.assertIn(
requests.get(urlparse.urljoin(self.default_balancer_url, '/set_cookie'), verify=False).cookies['SERVERID'],
requests.get(urllib.parse.urljoin(self.default_balancer_url, '/set_cookie'), verify=False).cookies['SERVERID'],
('default-0', 'default-1'),
)
......@@ -400,10 +411,7 @@ class TestTestRunnerEntryPoints(BalancerTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
parameter_dict = super(
TestTestRunnerEntryPoints,
cls,
)._getInstanceParameterDict()
parameter_dict = super()._getInstanceParameterDict()
parameter_dict['dummy_http_server-test-runner-address-list'] = [
[
......@@ -427,18 +435,18 @@ class TestTestRunnerEntryPoints(BalancerTestCase):
)['default-test-runner-url-list']
url_0, url_1, url_2 = test_runner_url_list
self.assertEqual(
urlparse.urlparse(url_0).netloc,
urlparse.urlparse(url_1).netloc)
urllib.parse.urlparse(url_0).netloc,
urllib.parse.urlparse(url_1).netloc)
self.assertEqual(
urlparse.urlparse(url_0).netloc,
urlparse.urlparse(url_2).netloc)
urllib.parse.urlparse(url_0).netloc,
urllib.parse.urlparse(url_2).netloc)
path_0 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_0/something'.format(
netloc=urlparse.urlparse(url_0).netloc)
netloc=urllib.parse.urlparse(url_0).netloc)
path_1 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_1/something'.format(
netloc=urlparse.urlparse(url_0).netloc)
netloc=urllib.parse.urlparse(url_0).netloc)
path_2 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_2/something'.format(
netloc=urlparse.urlparse(url_0).netloc)
netloc=urllib.parse.urlparse(url_0).netloc)
self.assertEqual(
{
......@@ -476,7 +484,7 @@ class TestHTTP(BalancerTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
parameter_dict = super(TestHTTP, cls)._getInstanceParameterDict()
parameter_dict = super()._getInstanceParameterDict()
# use a HTTP/1.1 server instead
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("HTTP/1.1 Server", EchoHTTP11Server).netloc, 1, False]]
return parameter_dict
......@@ -497,32 +505,33 @@ class TestHTTP(BalancerTestCase):
'%{http_version}',
self.default_balancer_url,
]),
'1.1',
b'1.1',
)
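The expected value becomes `b'1.1'` because `subprocess.check_output` returns `bytes` on Python 3. A tiny illustration of the options (the command is a placeholder, not the curl call used in the test):

```python
# Hedged illustration: subprocess output is bytes on Python 3.
import subprocess

out = subprocess.check_output(['echo', '-n', '1.1'])
assert out == b'1.1'          # compare against bytes, as the test above now does
assert out.decode() == '1.1'  # or decode explicitly
assert subprocess.check_output(['echo', '-n', '1.1'], text=True) == '1.1'  # or ask for str
```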
def test_keep_alive(self):
# type: () -> None
# when doing two requests, connection is established only once
session = requests.Session()
session.verify = False
with requests.Session() as session:
session.verify = False
# do a first request, which establish a first connection
session.get(self.default_balancer_url).raise_for_status()
# "break" new connection method and check we can make another request
with mock.patch(
"requests.packages.urllib3.connectionpool.HTTPSConnectionPool._new_conn",
) as new_conn:
# do a first request, which establish a first connection
session.get(self.default_balancer_url).raise_for_status()
new_conn.assert_not_called()
parsed_url = urlparse.urlparse(self.default_balancer_url)
# check that we have an open file for the ip connection
self.assertTrue([
c for c in psutil.Process(os.getpid()).connections()
if c.status == 'ESTABLISHED' and c.raddr.ip == parsed_url.hostname
and c.raddr.port == parsed_url.port
])
# "break" new connection method and check we can make another request
with mock.patch(
"requests.packages.urllib3.connectionpool.HTTPSConnectionPool._new_conn",
) as new_conn:
session.get(self.default_balancer_url).raise_for_status()
new_conn.assert_not_called()
parsed_url = urllib.parse.urlparse(self.default_balancer_url)
# check that we have an open file for the ip connection
self.assertTrue([
c for c in psutil.Process(os.getpid()).connections()
if c.status == 'ESTABLISHED' and c.raddr.ip == parsed_url.hostname
and c.raddr.port == parsed_url.port
])
class ContentTypeHTTPServer(ManagedHTTPServer):
......@@ -539,12 +548,12 @@ class ContentTypeHTTPServer(ManagedHTTPServer):
# type: () -> None
self.send_response(200)
if self.path == '/':
self.send_header("Content-Length", 0)
self.send_header("Content-Length", '0')
return self.end_headers()
content_type = self.path[1:]
body = "OK"
body = b"OK"
self.send_header("Content-Type", content_type)
self.send_header("Content-Length", len(body))
self.send_header("Content-Length", str(len(body)))
self.end_headers()
self.wfile.write(body)
......@@ -558,7 +567,7 @@ class TestContentEncoding(BalancerTestCase):
@classmethod
def _getInstanceParameterDict(cls):
# type: () -> Dict
parameter_dict = super(TestContentEncoding, cls)._getInstanceParameterDict()
parameter_dict = super()._getInstanceParameterDict()
parameter_dict['dummy_http_server'] = [
[cls.getManagedResource("content_type_server", ContentTypeHTTPServer).netloc, 1, False],
]
......@@ -588,19 +597,19 @@ class TestContentEncoding(BalancerTestCase):
'application/x-font-opentype',
'application/wasm',):
resp = requests.get(
urlparse.urljoin(self.default_balancer_url, content_type),
urllib.parse.urljoin(self.default_balancer_url, content_type),
verify=False,
headers={"Accept-Encoding": "gzip, deflate",})
self.assertEqual(resp.headers['Content-Type'], content_type)
self.assertEqual(
resp.headers.get('Content-Encoding'),
'gzip',
'%s uses wrong encoding: %s' % (content_type, resp.headers.get('Content-Encoding')))
'{} uses wrong encoding: {}'.format(content_type, resp.headers.get('Content-Encoding')))
self.assertEqual(resp.text, 'OK')
def test_no_gzip_encoding(self):
# type: () -> None
resp = requests.get(urlparse.urljoin(self.default_balancer_url, '/image/png'), verify=False)
resp = requests.get(urllib.parse.urljoin(self.default_balancer_url, '/image/png'), verify=False)
self.assertNotIn('Content-Encoding', resp.headers)
self.assertEqual(resp.text, 'OK')
......@@ -683,7 +692,7 @@ class CaucaseCertificate(ManagedResource):
cas_args + [
'--send-csr', self.csr_file,
],
).split()[0]
).split()[0].decode()
assert csr_id
for _ in range(30):
......@@ -699,8 +708,8 @@ class CaucaseCertificate(ManagedResource):
time.sleep(1)
else:
raise RuntimeError('getting service certificate failed.')
with open(self.cert_file) as f:
assert 'BEGIN CERTIFICATE' in f.read()
with open(self.cert_file) as cert_file:
assert 'BEGIN CERTIFICATE' in cert_file.read()
def revoke(self, caucase):
# type: (str, CaucaseService) -> None
......@@ -724,8 +733,8 @@ class TestServerTLSProvidedCertificate(BalancerTestCase):
# type: () -> Dict
server_caucase = cls.getManagedResource('server_caucase', CaucaseService)
server_certificate = cls.getManagedResource('server_certificate', CaucaseCertificate)
server_certificate.request(cls._ipv4_address.decode(), server_caucase)
parameter_dict = super(TestServerTLSProvidedCertificate, cls)._getInstanceParameterDict()
server_certificate.request(cls._ipv4_address, server_caucase)
parameter_dict = super()._getInstanceParameterDict()
with open(server_certificate.cert_file) as f:
parameter_dict['ssl']['cert'] = f.read()
with open(server_certificate.key_file) as f:
......
......@@ -28,11 +28,11 @@
import os
import json
import glob
import urlparse
import urllib.parse
import socket
import time
import re
import BaseHTTPServer
import http.server
import multiprocessing
import subprocess
......@@ -44,7 +44,7 @@ from . import setUpModule
setUpModule # pyflakes
class TestPublishedURLIsReachableMixin(object):
class TestPublishedURLIsReachableMixin:
"""Mixin that checks that default page of ERP5 is reachable.
"""
......@@ -52,7 +52,7 @@ class TestPublishedURLIsReachableMixin(object):
# We access ERP5 trough a "virtual host", which should make
# ERP5 produce URLs using https://virtual-host-name:1234/virtual_host_root
# as base.
virtual_host_url = urlparse.urljoin(
virtual_host_url = urllib.parse.urljoin(
base_url,
'/VirtualHostBase/https/virtual-host-name:1234/{}/VirtualHostRoot/_vh_virtual_host_root/'
.format(site_id))
......@@ -72,20 +72,20 @@ class TestPublishedURLIsReachableMixin(object):
total=60,
backoff_factor=.5,
status_forcelist=(404, 500, 503))))
r = session.get(virtual_host_url, verify=verify, allow_redirects=False)
self.assertEqual(r.status_code, requests.codes.found)
# access on / are redirected to login form, with virtual host preserved
self.assertEqual(r.headers.get('location'), 'https://virtual-host-name:1234/virtual_host_root/login_form')
# login page can be rendered and contain the text "ERP5"
r = session.get(
urlparse.urljoin(base_url, '{}/login_form'.format(site_id)),
verify=verify,
allow_redirects=False,
)
self.assertEqual(r.status_code, requests.codes.ok)
self.assertIn("ERP5", r.text)
with session:
r = session.get(virtual_host_url, verify=verify, allow_redirects=False)
self.assertEqual(r.status_code, requests.codes.found)
# access on / are redirected to login form, with virtual host preserved
self.assertEqual(r.headers.get('location'), 'https://virtual-host-name:1234/virtual_host_root/login_form')
# login page can be rendered and contain the text "ERP5"
r = session.get(
urllib.parse.urljoin(base_url, f'{site_id}/login_form'),
verify=verify,
allow_redirects=False,
)
self.assertEqual(r.status_code, requests.codes.ok)
self.assertIn("ERP5", r.text)
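The requests here are now wrapped in `with session:` so the pooled connections are closed when the block exits. A minimal sketch of that idiom, with a placeholder URL and the same `verify=False` the test uses for its self-signed certificates:

```python
# Hedged sketch: requests.Session as a context manager, as used above.
import requests

with requests.Session() as session:
    session.verify = False   # the test disables verification for self-signed certificates
    resp = session.get('https://example.com/', allow_redirects=False)  # placeholder URL
    resp.raise_for_status()
# session.close() has run here, so the test leaves no open sockets behind.
```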
def test_published_family_default_v6_is_reachable(self):
"""Tests the IPv6 URL published by the root partition is reachable.
......@@ -134,7 +134,7 @@ class TestJupyter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
param_dict = self.getRootPartitionConnectionParameterDict()
self.assertEqual(
'https://[%s]:8888/tree' % self._ipv6_address,
f'https://[{self._ipv6_address}]:8888/tree',
param_dict['jupyter-url']
)
......@@ -172,7 +172,7 @@ class TestApacheBalancerPorts(ERP5InstanceTestCase):
}
def checkValidHTTPSURL(self, url):
parsed = urlparse.urlparse(url)
parsed = urllib.parse.urlparse(url)
self.assertEqual(parsed.scheme, 'https')
self.assertTrue(parsed.hostname)
self.assertTrue(parsed.port)
......@@ -182,16 +182,16 @@ class TestApacheBalancerPorts(ERP5InstanceTestCase):
param_dict = self.getRootPartitionConnectionParameterDict()
for family_name in ('family1', 'family2'):
self.checkValidHTTPSURL(
param_dict['family-{family_name}'.format(family_name=family_name)])
param_dict[f'family-{family_name}'])
self.checkValidHTTPSURL(
param_dict['family-{family_name}-v6'.format(family_name=family_name)])
param_dict[f'family-{family_name}-v6'])
def test_published_test_runner_url(self):
# each family's also a list of test test runner URLs, by default 3 per family
param_dict = self.getRootPartitionConnectionParameterDict()
for family_name in ('family1', 'family2'):
family_test_runner_url_list = param_dict[
'{family_name}-test-runner-url-list'.format(family_name=family_name)]
f'{family_name}-test-runner-url-list']
self.assertEqual(3, len(family_test_runner_url_list))
for url in family_test_runner_url_list:
self.checkValidHTTPSURL(url)
......@@ -209,23 +209,23 @@ class TestApacheBalancerPorts(ERP5InstanceTestCase):
# normal access on ipv4 and ipv6 and test runner access on ipv4 only
with self.slap.instance_supervisor_rpc as supervisor:
all_process_info = supervisor.getAllProcessInfo()
process_info, = [p for p in all_process_info if p['name'] == 'apache']
process_info, = (p for p in all_process_info if p['name'] == 'apache')
apache_process = psutil.Process(process_info['pid'])
self.assertEqual(
sorted([socket.AF_INET] * 4 + [socket.AF_INET6] * 2),
sorted([
sorted(
c.family
for c in apache_process.connections()
if c.status == 'LISTEN'
]))
))
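These two tests also switch the single-element unpacking from a list comprehension to a generator expression; the `x, = iterable` form behaves the same for both and still raises if the match is not unique. A tiny illustration with made-up process data:

```python
# Hedged illustration: single-item unpacking accepts any iterable,
# so the list comprehension can become a generator expression.
all_process_info = [{'name': 'apache', 'pid': 1}, {'name': 'haproxy-1', 'pid': 2}]

process_info, = (p for p in all_process_info if p['name'] == 'apache')
assert process_info['pid'] == 1
# Raises ValueError if zero or more than one element matches, exactly as with a list.
```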
def test_haproxy_listen(self):
# There is one haproxy per family
with self.slap.instance_supervisor_rpc as supervisor:
all_process_info = supervisor.getAllProcessInfo()
process_info, = [
process_info, = (
p for p in all_process_info if p['name'].startswith('haproxy-')
]
)
haproxy_process = psutil.Process(process_info['pid'])
self.assertEqual([socket.AF_INET, socket.AF_INET], [
c.family for c in haproxy_process.connections() if c.status == 'LISTEN'
......@@ -290,8 +290,8 @@ class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReac
zodb["pool-timeout"] = "10m"
storage["storage"] = "root"
storage["server"] = zeo_addr
with open('%s/etc/zope-%s.conf' % (partition, zope)) as f:
conf = map(str.strip, f.readlines())
with open(f'{partition}/etc/zope-{zope}.conf') as f:
conf = list(map(str.strip, f.readlines()))
i = conf.index("<zodb_db root>") + 1
conf = iter(conf[i:conf.index("</zodb_db>", i)])
for line in conf:
......@@ -300,23 +300,23 @@ class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReac
if line == '</zeoclient>':
break
checkParameter(line, storage)
for k, v in storage.iteritems():
for k, v in storage.items():
self.assertIsNone(v, k)
del storage
else:
checkParameter(line, zodb)
for k, v in zodb.iteritems():
for k, v in zodb.items():
self.assertIsNone(v, k)
partition = self.getComputerPartitionPath('zope-a')
for zope in xrange(3):
for zope in range(3):
checkConf({
"cache-size-bytes": "20MB",
}, {
"cache-size": "50MB",
})
partition = self.getComputerPartitionPath('zope-bb')
for zope in xrange(5):
for zope in range(5):
checkConf({
"cache-size-bytes": "500MB" if zope else 1<<20,
}, {
......@@ -332,19 +332,20 @@ def popenCommunicate(command_list, input_=None, **kwargs):
popen.kill()
if popen.returncode != 0:
raise ValueError(
'Issue during calling %r, result was:\n%s' % (command_list, result))
f'Issue during calling {command_list!r}, result was:\n{result}')
return result
class TestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
class TestHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
response = {
'Path': self.path,
'Incoming Headers': self.headers.dict
}
response = json.dumps(response, indent=2)
response = json.dumps(
{
'Path': self.path,
'Incoming Headers': {k.lower(): v for k, v in self.headers.items()},
},
indent=2,
).encode('utf-8')
self.end_headers()
self.wfile.write(response)
......@@ -352,7 +353,7 @@ class TestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
class TestDeploymentScriptInstantiation(ERP5InstanceTestCase):
"""This check deployment script like instantiation
Low level assertions are done here in roder to assure that
Low level assertions are done here in order to assure that
https://lab.nexedi.com/nexedi/slapos.package/blob/master/playbook/
slapos-master-standalone.yml
works correctly
......@@ -426,8 +427,8 @@ class TestDeploymentScriptInstantiation(ERP5InstanceTestCase):
1,
len(backend_apache_configuration_list)
)
backend_apache_configuration = open(
backend_apache_configuration_list[0]).read()
with open(backend_apache_configuration_list[0]) as f:
backend_apache_configuration = f.read()
self.assertIn(
'SSLVerifyClient require',
backend_apache_configuration
......@@ -452,7 +453,7 @@ class TestDeploymentScriptInstantiation(ERP5InstanceTestCase):
common_name = 'TEST-SSL-AUTH'
popenCommunicate([
'openssl', 'req', '-utf8', '-nodes', '-config', openssl_config, '-new',
'-keyout', key, '-out', csr, '-days', '3650'], '%s\n' % (common_name,),
'-keyout', key, '-out', csr, '-days', '3650'], f'{common_name}\n'.encode(),
stdin=subprocess.PIPE)
popenCommunicate([
'openssl', 'ca', '-utf8', '-days', '3650', '-batch', '-config',
......@@ -464,24 +465,24 @@ class TestDeploymentScriptInstantiation(ERP5InstanceTestCase):
ip, port = re.search(
r'.*http:\/\/(.*):(\d*)\/.*', portal_slap_line).groups()
port = int(port)
server = BaseHTTPServer.HTTPServer((ip, port), TestHandler)
server = http.server.HTTPServer((ip, port), TestHandler)
server_process = multiprocessing.Process(
target=server.serve_forever, name='HTTPServer')
server_process.start()
try:
# assert that accessing the service endpoint results with certificate
# authentication and proper information extraction
result_json = requests.get(
self.getRootPartitionConnectionParameterDict()['family-service'],
verify=False, cert=(cert, key)).json()
self.assertEqual(
common_name,
result_json['Incoming Headers']['remote-user']
)
self.assertEqual(
'/erp5/portal_slap/',
result_json['Path']
)
finally:
server_process.join(10)
server_process.terminate()
self.addCleanup(server_process.terminate)
self.addCleanup(server_process.join, 10)
server.socket.close()
# assert that accessing the service endpoint results with certificate
# authentication and proper information extraction
result_json = requests.get(
self.getRootPartitionConnectionParameterDict()['family-service'],
verify=False, cert=(cert, key)).json()
self.assertEqual(
common_name,
result_json['Incoming Headers']['remote-user']
)
self.assertEqual(
'/erp5/portal_slap/',
result_json['Path']
)
##############################################################################
# coding: utf-8
#
# Copyright (c) 2018 Nexedi SA and Contributors. All Rights Reserved.
#
......@@ -29,7 +28,7 @@
import os
import json
import glob
import urlparse
import urllib.parse
import socket
import sys
import time
......@@ -38,7 +37,7 @@ import datetime
import subprocess
import gzip
from backports import lzma
import lzma
import MySQLdb
from slapos.testing.utils import CrontabMixin
......@@ -80,7 +79,7 @@ class MariaDBTestCase(ERP5InstanceTestCase):
def getDatabaseConnection(self):
connection_parameter_dict = json.loads(
self.computer_partition.getConnectionParameterDict()['_'])
db_url = urlparse.urlparse(connection_parameter_dict['database-list'][0])
db_url = urllib.parse.urlparse(connection_parameter_dict['database-list'][0])
self.assertEqual('mysql', db_url.scheme)
self.assertTrue(db_url.path.startswith('/'))
......@@ -91,6 +90,8 @@ class MariaDBTestCase(ERP5InstanceTestCase):
host=db_url.hostname,
port=db_url.port,
db=database_name,
use_unicode=True,
charset='utf8mb4'
)
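For reference, a minimal sketch of the connection call after this change, isolating the two keyword arguments being added; host, credentials and database name are placeholders, not values from the test:

```python
# Hedged sketch: the MySQLdb connection options added in this hunk.
import MySQLdb

cnx = MySQLdb.connect(
    user='user',
    passwd='secret',
    host='127.0.0.1',
    port=2099,
    db='erp5',
    use_unicode=True,   # return text columns as str where the API decodes them
    charset='utf8mb4',  # full UTF-8 character set
)
try:
    cnx.query("SELECT 1")
    print(cnx.store_result().fetch_row(maxrows=1))
finally:
    cnx.close()
```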
......@@ -106,7 +107,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin):
'mariadb-full',
'20500101000000.sql.gz',
),
'r') as dump:
'rt') as dump:
self.assertIn('CREATE TABLE', dump.read())
def test_logrotate_and_slow_query_digest(self):
......@@ -148,7 +149,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin):
'slowquery_digest',
'slowquery_digest.txt-2050-01-01.xz',
)
with lzma.open(slow_query_report, 'r') as f:
with lzma.open(slow_query_report, 'rt') as f:
# this is the hash for our "select sleep(n)" slow query
self.assertIn("ID 0xF9A57DD5A41825CA", f.read())
......@@ -170,7 +171,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin):
subprocess.check_output('faketime 2050-01-01 %s' % check_slow_query_promise_plugin['command'], shell=True)
self.assertEqual(
error_context.exception.output,
"""\
b"""\
Threshold is lower than expected:
Expected total queries : 1.0 and current is: 2
Expected slowest query : 0.1 and current is: 3
......@@ -220,7 +221,7 @@ class TestMroonga(MariaDBTestCase):
"""
SELECT mroonga_normalize("ABCDあぃうぇ㍑")
""")
self.assertEqual((('abcdあぃうぇリットル',),),
self.assertEqual((('abcdあぃうぇリットル'.encode(),),),
cnx.store_result().fetch_row(maxrows=2))
if 0:
......@@ -233,7 +234,7 @@ class TestMroonga(MariaDBTestCase):
"""
SELECT mroonga_normalize("aBcDあぃウェ㍑", "NormalizerMySQLUnicodeCIExceptKanaCIKanaWithVoicedSoundMark")
""")
self.assertEqual((('ABCDあぃうぇ㍑',),),
self.assertEqual((('ABCDあぃうぇ㍑'.encode(),),),
cnx.store_result().fetch_row(maxrows=2))
def test_mroonga_full_text_normalizer(self):
......@@ -321,7 +322,7 @@ class TestMroonga(MariaDBTestCase):
cnx = self.getDatabaseConnection()
with contextlib.closing(cnx):
cnx.query("SELECT mroonga_command('register token_filters/stem')")
self.assertEqual((('true',),), cnx.store_result().fetch_row(maxrows=2))
self.assertEqual(((b'true',),), cnx.store_result().fetch_row(maxrows=2))
cnx.query(
"""
CREATE TABLE memos (
......
......@@ -75,5 +75,5 @@ def lookupMount(zurl):
# readfile returns content of file @path.
def readfile(path):
with open(path, 'r') as f:
with open(path) as f:
return f.read()
......@@ -17,7 +17,6 @@ changes to the code, run tests and publish changes.
```bash
# install this software release and request an instance
# use software-py3.cfg instead of software.cfg if the SR you want to test is written in Python 3
SR=https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/slapos-sr-testing/software.cfg
COMP=slaprunner
INSTANCE_NAME=$COMP
......
[buildout]
extends =
software.cfg
[python]
part = python3
[python-interpreter]
extra-eggs +=
# plantuml 0.3.0 is only available for Python 3
${slapos.test.plantuml-setup:egg}
[template]
extra =
# The following list is for SR whose buildout runs only with Python 3.
caddy-frontend ${slapos.test.caddy-frontend-setup:setup}
caucase ${slapos.test.caucase-setup:setup}
erp5testnode ${slapos.test.erp5testnode-setup:setup}
galene ${slapos.test.galene-setup:setup}
grafana ${slapos.test.grafana-setup:setup}
headless-chromium ${slapos.test.headless-chromium-setup:setup}
helloworld ${slapos.test.helloworld-setup:setup}
html5as ${slapos.test.html5as-setup:setup}
html5as-base ${slapos.test.html5as-base-setup:setup}
htmlvalidatorserver ${slapos.test.htmlvalidatorserver-setup:setup}
hugo ${slapos.test.hugo-setup:setup}
jscrawler ${slapos.test.jscrawler-setup:setup}
jstestnode ${slapos.test.jstestnode-setup:setup}
jupyter ${slapos.test.jupyter-setup:setup}
kvm ${slapos.test.kvm-setup:setup}
matomo ${slapos.test.matomo-setup:setup}
metabase ${slapos.test.metabase-setup:setup}
monitor ${slapos.test.monitor-setup:setup}
nextcloud ${slapos.test.nextcloud-setup:setup}
nginx-push-stream ${slapos.test.nginx-push-stream-setup:setup}
ors-amarisoft ${slapos.test.ors-amarisoft-setup:setup}
plantuml ${slapos.test.plantuml-setup:setup}
powerdns ${slapos.test.powerdns-setup:setup}
proftpd ${slapos.test.proftpd-setup:setup}
repman ${slapos.test.repman-setup:setup}
restic-rest-server ${slapos.test.restic_rest_server-setup:setup}
seleniumserver ${slapos.test.seleniumserver-setup:setup}
theia ${slapos.test.theia-setup:setup}
turnserver ${slapos.test.turnserver-setup:setup}
......@@ -27,9 +27,6 @@ parts =
shared-part-list =
[python]
part = python2.7
[setup-develop-egg]
recipe = zc.recipe.egg:develop
......@@ -250,8 +247,7 @@ egg = slapos.core
setup = ${slapos.core-repository:location}
[python-interpreter]
eggs += ${:extra-eggs}
extra-eggs =
eggs +=
${lxml-python:egg}
${python-PyYAML:egg}
${slapos.core-setup:egg}
......@@ -289,10 +285,11 @@ extra-eggs =
${slapos.test.kvm-setup:egg}
${slapos.test.matomo-setup:egg}
${slapos.test.metabase-setup:egg}
${slapos.test.ors-amarisoft-setup:egg}
${slapos.test.monitor-setup:egg}
${slapos.test.nextcloud-setup:egg}
${slapos.test.nginx-push-stream-setup:egg}
${slapos.test.ors-amarisoft-setup:egg}
${slapos.test.plantuml-setup:egg}
${slapos.test.powerdns-setup:egg}
${slapos.test.proftpd-setup:egg}
${slapos.test.re6stnet-setup:egg}
......@@ -352,27 +349,46 @@ context =
tests =
json-schemas ${slapos.cookbook-setup:setup}
# The following list is for SR that work with either Python 2 and 3
# (as main Python). The test egg must supply a URL which depends on
# the version of Python that is used to run the test.
# Due to a bug in the way promises are run, we may also list some Py3-only SR
# here, to check there's no promise issue when slapos node runs with Python 2.
erp5 ${slapos.test.erp5-setup:setup}
fluentd ${slapos.test.fluentd-setup:setup}
###
${:extra}
extra =
# WARNING: This is for SR that only support Python 2.
# You should not add more lines here.
backupserver ${slapos.test.backupserver-setup:setup}
beremiz-ide ${slapos.test.beremiz-ide-setup:setup}
caddy-frontend ${slapos.test.caddy-frontend-setup:setup}
caucase ${slapos.test.caucase-setup:setup}
cloudooo ${slapos.test.cloudooo-setup:setup}
dream ${slapos.test.dream-setup:setup}
erp5 ${slapos.test.erp5-setup:setup}
erp5testnode ${slapos.test.erp5testnode-setup:setup}
fluentd ${slapos.test.fluentd-setup:setup}
galene ${slapos.test.galene-setup:setup}
gitlab ${slapos.test.gitlab-setup:setup}
grafana ${slapos.test.grafana-setup:setup}
headless-chromium ${slapos.test.headless-chromium-setup:setup}
helloworld ${slapos.test.helloworld-setup:setup}
html5as ${slapos.test.html5as-setup:setup}
html5as-base ${slapos.test.html5as-base-setup:setup}
htmlvalidatorserver ${slapos.test.htmlvalidatorserver-setup:setup}
hugo ${slapos.test.hugo-setup:setup}
jscrawler ${slapos.test.jscrawler-setup:setup}
jstestnode ${slapos.test.jstestnode-setup:setup}
jupyter ${slapos.test.jupyter-setup:setup}
kvm ${slapos.test.kvm-setup:setup}
matomo ${slapos.test.matomo-setup:setup}
metabase ${slapos.test.metabase-setup:setup}
monitor ${slapos.test.monitor-setup:setup}
nextcloud ${slapos.test.nextcloud-setup:setup}
nginx-push-stream ${slapos.test.nginx-push-stream-setup:setup}
ors-amarisoft ${slapos.test.ors-amarisoft-setup:setup}
plantuml ${slapos.test.plantuml-setup:setup}
powerdns ${slapos.test.powerdns-setup:setup}
proftpd ${slapos.test.proftpd-setup:setup}
re6stnet ${slapos.test.re6stnet-setup:setup}
repman ${slapos.test.repman-setup:setup}
restic-rest-server ${slapos.test.restic_rest_server-setup:setup}
seleniumserver ${slapos.test.seleniumserver-setup:setup}
slapos-master ${slapos.test.slapos-master-setup:setup}
slaprunner ${slapos.test.slaprunner-setup:setup}
theia ${slapos.test.theia-setup:setup}
turnserver ${slapos.test.turnserver-setup:setup}
upgrade_erp5 ${slapos.test.upgrade_erp5-setup:setup}
[versions]
......@@ -397,11 +413,9 @@ PyPDF2 = 1.26.0+SlapOSPatched001
# Django 1.11 is python 2 compatible
Django = 1.11
mock = 2.0.0:whl
testfixtures = 6.11.0
funcsigs = 1.0.2
mysqlclient = 1.3.12
pexpect = 4.8.0
ptyprocess = 0.6.0
typing = 3.7.4.3
psycopg2 = 2.8.6
......@@ -48,7 +48,6 @@ setup(name=name,
'supervisor',
'psutil',
'paramiko',
'six',
'requests',
],
zip_safe=True,
......
......@@ -35,12 +35,11 @@ import subprocess
import json
import time
from six.moves.urllib.parse import urlparse
from six.moves.urllib.parse import quote
from six.moves.urllib.parse import urljoin
from six.moves.configparser import ConfigParser
from urllib.parse import urlparse
from urllib.parse import quote
from urllib.parse import urljoin
from configparser import ConfigParser
import requests
import six
from slapos.recipe.librecipe import generateHashFromFiles
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
......@@ -61,7 +60,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase):
data = {
"path": "workspace/slapos/software/%s" % software_release,
}
resp = self._postToSlaprunner(url, data)
resp = self._postToSlaprunner(url, data)
self.assertEqual(requests.codes.ok, resp.status_code)
self.assertNotEqual(json.loads(resp.text)['code'], 0,
'Unexpecting result in call to setCurrentProject: %s' % resp.text)
......@@ -69,7 +68,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase):
def _buildSoftwareRelease(self):
parameter_dict = self.computer_partition.getConnectionParameterDict()
url = "%s/runSoftwareProfile" % parameter_dict['url']
resp = self._postToSlaprunner(url, {})
resp = self._postToSlaprunner(url, {})
self.assertEqual(requests.codes.ok, resp.status_code)
self.assertEqual(json.loads(resp.text)['result'], True,
'Unexpecting result in call to runSoftwareProfile: %s' % resp.text)
......@@ -77,7 +76,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase):
def _deployInstance(self):
parameter_dict = self.computer_partition.getConnectionParameterDict()
url = "%s/runInstanceProfile" % parameter_dict['url']
resp = self._postToSlaprunner(url, {})
resp = self._postToSlaprunner(url, {})
self.assertEqual(requests.codes.ok, resp.status_code)
self.assertEqual(json.loads(resp.text)['result'], True,
'Unexpecting result in call to runSoftwareProfile: %s' % resp.text)
......@@ -100,7 +99,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase):
def _isSoftwareReleaseReady(self):
parameter_dict = self.computer_partition.getConnectionParameterDict()
url = "%s/isSRReady" % parameter_dict['url']
resp = self._getFromSlaprunner(url)
resp = self._getFromSlaprunner(url)
if requests.codes.ok != resp.status_code:
return -1
return resp.text
......@@ -125,7 +124,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase):
}
while True:
time.sleep(25)
resp = self._postToSlaprunner(url, data)
resp = self._postToSlaprunner(url, data)
if requests.codes.ok != resp.status_code:
continue
if json.loads(resp.text)["instance"]["state"] is False:
......@@ -153,9 +152,9 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase):
url = "%s/getFileContent" % parameter_dict['url']
data = {
"file": relative_path
"file": relative_path
}
resp = self._postToSlaprunner(url, data)
resp = self._postToSlaprunner(url, data)
self.assertEqual(requests.codes.ok, resp.status_code)
self.assertNotEqual(json.loads(resp.text)['code'], 0,
'Unexpecting result in call to getFileContent: %s' % resp.text)
......@@ -189,9 +188,9 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase):
parameter_dict = self.computer_partition.getConnectionParameterDict()
takeover_url = parameter_dict["takeover-%s-url" % scope]
takeover_password = parameter_dict["takeover-%s-password" % scope]
resp = requests.get(
"%s?password=%s" % (takeover_url, takeover_password),
f"{takeover_url}?password={takeover_password}",
verify=True)
self.assertEqual(requests.codes.ok, resp.status_code)
self.assertNotIn("Error", resp.text,
......@@ -236,7 +235,7 @@ class TestWebRunnerAutorun(SlaprunnerTestCase):
@classmethod
def getInstanceParameterDict(cls):
return {
# Auto deploy is required for the isSRReady works.
# Auto deploy is required for isSRReady to work.
'auto-deploy': 'true',
'autorun': 'true',
'software-root': os.path.join(cls.slap._instance_root, "..", "soft"),
......@@ -363,7 +362,7 @@ class TestSSH(SlaprunnerTestCase):
self.assertTrue(fingerprint_from_url.startswith('ssh-rsa-'), fingerprint_from_url)
fingerprint_from_url = fingerprint_from_url[len('ssh-rsa-'):]
class KeyPolicy(object):
class KeyPolicy:
"""Accept server key and keep it in self.key for inspection
"""
def missing_host_key(self, client, hostname, key):
......@@ -507,7 +506,7 @@ class TestResilientInstance(SlaprunnerTestCase):
# just check that keys returned on requested partition are for resilient
self.assertSetEqual(
set(self.computer_partition.getConnectionParameterDict().keys()),
set([
{
'backend-url',
'feed-url-runner-1-pull',
'feed-url-runner-1-push',
......@@ -520,7 +519,7 @@ class TestResilientInstance(SlaprunnerTestCase):
'takeover-runner-1-password',
'takeover-runner-1-url',
'url',
'webdav-url']))
'webdav-url'})
class TestResilientCustomFrontend(TestCustomFrontend):
instance_max_retry = 20
......@@ -589,7 +588,7 @@ class TestResilientDummyInstance(SlaprunnerTestCase):
self._waitForCloneToBeReadyForTakeover()
self._doTakeover()
self.slap.waitForInstance(20)
self.slap.waitForInstance(20)
previous_computer_partition = self.computer_partition
self.computer_partition = self.requestDefaultInstance()
......@@ -601,5 +600,5 @@ class TestResilientDummyInstance(SlaprunnerTestCase):
self.assertTrue(result_after.startswith("Hello"), result_after)
self.assertIn(result, result_after,
"%s not in %s" % (result, result_after))
f"{result} not in {result_after}")
......@@ -391,52 +391,49 @@ class TestTheiaEnv(TheiaTestCase):
# Start a theia shell that inherits the environment of the theia process
# This simulates the environment of a shell launched from the browser application
theia_shell_process = pexpect.spawnu('{}/bin/theia-shell'.format(self.getPath()), env=theia_env)
try:
theia_shell_process.expect_exact('Standalone SlapOS for computer `slaprunner` activated')
# Launch slapos node software from theia shell
theia_shell_process.sendline('slapos node software')
theia_shell_process.expect('Installing software release %s' % self.dummy_software_path)
theia_shell_process.expect('Finished software releases.')
# Get the theia shell environment
with open(env_json_path) as f:
theia_shell_env = json.load(f)
# Remove the env.json file to later be sure that a new one has been generated
os.remove(env_json_path)
# Launch slapos node software service from the embedded supervisord.
# Note that we have two services, slapos-node-software and slapos-node-software-all
# The latter uses --all which is what we want to use here, because the software
# is already installed and we want to install it again, this time from supervisor
embedded_run_path = self.getPath('srv', 'runner', 'var', 'run')
embedded_supervisord_socket_path = _getSupervisordSocketPath(embedded_run_path, self.logger)
with getSupervisorRPC(embedded_supervisord_socket_path) as embedded_supervisor:
previous_stop_time = embedded_supervisor.getProcessInfo('slapos-node-software-all')['stop']
embedded_supervisor.startProcess('slapos-node-software-all')
for _retries in range(20):
time.sleep(1)
if embedded_supervisor.getProcessInfo('slapos-node-software-all')['stop'] != previous_stop_time:
break
else:
self.fail("the supervisord service 'slapos-node-software-all' takes too long to finish")
# Get the supervisord environment
with open(env_json_path) as f:
supervisord_env = json.load(f)
# Compare relevant variables from both environments
self.maxDiff = None
self.assertEqual(theia_shell_env['PATH'].split(':'), supervisord_env['PATH'].split(':'))
self.assertEqual(theia_shell_env['SLAPOS_CONFIGURATION'], supervisord_env['SLAPOS_CONFIGURATION'])
self.assertEqual(theia_shell_env['SLAPOS_CLIENT_CONFIGURATION'], supervisord_env['SLAPOS_CLIENT_CONFIGURATION'])
self.assertEqual(theia_shell_env['HOME'], supervisord_env['HOME'])
finally:
# Cleanup the theia shell process
theia_shell_process.terminate()
theia_shell_process.wait()
self.addCleanup(theia_shell_process.wait)
self.addCleanup(theia_shell_process.terminate)
theia_shell_process.expect_exact('Standalone SlapOS for computer `slaprunner` activated')
# Launch slapos node software from theia shell
theia_shell_process.sendline('slapos node software')
theia_shell_process.expect('Installing software release %s' % self.dummy_software_path)
theia_shell_process.expect('Finished software releases.')
# Get the theia shell environment
with open(env_json_path) as f:
theia_shell_env = json.load(f)
# Remove the env.json file to later be sure that a new one has been generated
os.remove(env_json_path)
# Launch slapos node software service from the embedded supervisord.
# Note that we have two services, slapos-node-software and slapos-node-software-all
# The latter uses --all which is what we want to use here, because the software
# is already installed and we want to install it again, this time from supervisor
embedded_run_path = self.getPath('srv', 'runner', 'var', 'run')
embedded_supervisord_socket_path = _getSupervisordSocketPath(embedded_run_path, self.logger)
with getSupervisorRPC(embedded_supervisord_socket_path) as embedded_supervisor:
previous_stop_time = embedded_supervisor.getProcessInfo('slapos-node-software-all')['stop']
embedded_supervisor.startProcess('slapos-node-software-all')
for _retries in range(20):
time.sleep(1)
if embedded_supervisor.getProcessInfo('slapos-node-software-all')['stop'] != previous_stop_time:
break
else:
self.fail("the supervisord service 'slapos-node-software-all' takes too long to finish")
# Get the supervisord environment
with open(env_json_path) as f:
supervisord_env = json.load(f)
# Compare relevant variables from both environments
self.maxDiff = None
self.assertEqual(theia_shell_env['PATH'].split(':'), supervisord_env['PATH'].split(':'))
self.assertEqual(theia_shell_env['SLAPOS_CONFIGURATION'], supervisord_env['SLAPOS_CONFIGURATION'])
self.assertEqual(theia_shell_env['SLAPOS_CLIENT_CONFIGURATION'], supervisord_env['SLAPOS_CLIENT_CONFIGURATION'])
self.assertEqual(theia_shell_env['HOME'], supervisord_env['HOME'])
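The refactoring in this test replaces a `try/finally` wrapped around the whole body with `unittest`'s `addCleanup`, registered right after the process is spawned, so the rest of the test loses one indentation level and cleanup still runs on failure. A minimal sketch of the pattern, using `subprocess.Popen` on a placeholder command rather than the pexpect shell above:

```python
# Hedged sketch: addCleanup replacing try/finally for process cleanup in unittest.
import subprocess
import unittest

class ExampleTest(unittest.TestCase):
    def test_with_cleanup(self):
        proc = subprocess.Popen(['sleep', '60'])   # placeholder long-running process
        # Cleanups run after the test body (even if it fails), in reverse registration
        # order: terminate() registered last runs first, then wait().
        self.addCleanup(proc.wait)
        self.addCleanup(proc.terminate)
        self.assertIsNone(proc.poll())             # the process is still running here

if __name__ == '__main__':
    unittest.main()
```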
class ResilientTheiaMixin(object):
......