Commit 7467ef4c authored by Bryton Lacquement's avatar Bryton Lacquement 🚪 Committed by Julien Muchembled

Add Python 3 support

parent ae15060e
...@@ -45,6 +45,6 @@ def do_bang(configp, message): ...@@ -45,6 +45,6 @@ def do_bang(configp, message):
slap = slapos.slap.slap() slap = slapos.slap.slap()
slap.initializeConnection(master_url, key_file=key_file, cert_file=cert_file) slap.initializeConnection(master_url, key_file=key_file, cert_file=cert_file)
computer = slap.registerComputer(computer_id) computer = slap.registerComputer(computer_id)
print 'Banging to %r' % master_url print('Banging to %r' % master_url)
computer.bang(message) computer.bang(message)
print 'Bang with message %r' % message print('Bang with message %r' % message)
...@@ -27,8 +27,10 @@ ...@@ -27,8 +27,10 @@
# #
############################################################################## ##############################################################################
from __future__ import print_function
import subprocess import subprocess
import urlparse from six.moves.urllib.parse import urlparse
from time import sleep from time import sleep
import glob import glob
import os import os
...@@ -46,14 +48,14 @@ def _removeTimestamp(instancehome): ...@@ -46,14 +48,14 @@ def _removeTimestamp(instancehome):
""" """
timestamp_glob_path = "%s/slappart*/.timestamp" % instancehome timestamp_glob_path = "%s/slappart*/.timestamp" % instancehome
for timestamp_path in glob.glob(timestamp_glob_path): for timestamp_path in glob.glob(timestamp_glob_path):
print "Removing %s" % timestamp_path print("Removing %s" % timestamp_path)
os.remove(timestamp_path) os.remove(timestamp_path)
def _runBang(app): def _runBang(app):
""" """
Launch slapos node format. Launch slapos node format.
""" """
print "[BOOT] Invoking slapos node bang..." print("[BOOT] Invoking slapos node bang...")
result = app.run(['node', 'bang', '-m', 'Reboot']) result = app.run(['node', 'bang', '-m', 'Reboot'])
if result == 1: if result == 1:
return 0 return 0
...@@ -63,7 +65,7 @@ def _runFormat(app): ...@@ -63,7 +65,7 @@ def _runFormat(app):
""" """
Launch slapos node format. Launch slapos node format.
""" """
print "[BOOT] Invoking slapos node format..." print("[BOOT] Invoking slapos node format...")
result = app.run(['node', 'format', '--now', '--verbose']) result = app.run(['node', 'format', '--now', '--verbose'])
if result == 1: if result == 1:
return 0 return 0
...@@ -73,30 +75,30 @@ def _ping(hostname): ...@@ -73,30 +75,30 @@ def _ping(hostname):
""" """
Ping a hostname Ping a hostname
""" """
print "[BOOT] Invoking ipv4 ping to %s..." % hostname print("[BOOT] Invoking ipv4 ping to %s..." % hostname)
p = subprocess.Popen( p = subprocess.Popen(
["ping", "-c", "2", hostname], ["ping", "-c", "2", hostname],
stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate() stdout, stderr = p.communicate()
if p.returncode == 0: if p.returncode == 0:
print "[BOOT] IPv4 network reachable..." print("[BOOT] IPv4 network reachable...")
return 1 return 1
print "[BOOT] [ERROR] IPv4 network unreachable..." print("[BOOT] [ERROR] IPv4 network unreachable...")
return 0 return 0
def _ping6(hostname): def _ping6(hostname):
""" """
Ping an ipv6 address Ping an ipv6 address
""" """
print "[BOOT] Invoking ipv6 ping to %s..." % hostname print("[BOOT] Invoking ipv6 ping to %s..." % hostname)
p = subprocess.Popen( p = subprocess.Popen(
["ping6", "-c", "2", hostname], ["ping6", "-c", "2", hostname],
stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate() stdout, stderr = p.communicate()
if p.returncode == 0: if p.returncode == 0:
print "[BOOT] IPv6 network reachable..." print("[BOOT] IPv6 network reachable...")
return 1 return 1
print "[BOOT] [ERROR] IPv6 network unreachable..." print("[BOOT] [ERROR] IPv6 network unreachable...")
return 0 return 0
def _test_ping(hostname): def _test_ping(hostname):
...@@ -126,7 +128,7 @@ def _waitIpv6Ready(ipv6_interface): ...@@ -126,7 +128,7 @@ def _waitIpv6Ready(ipv6_interface):
test if ipv6 is ready on ipv6_interface test if ipv6 is ready on ipv6_interface
""" """
ipv6_address = "" ipv6_address = ""
print "[BOOT] Checking if %r has IPv6..." % ipv6_interface print("[BOOT] Checking if %r has IPv6..." % ipv6_interface)
while ipv6_address == "": while ipv6_address == "":
for inet_dict in netifaces.ifaddresses(ipv6_interface)[socket.AF_INET6]: for inet_dict in netifaces.ifaddresses(ipv6_interface)[socket.AF_INET6]:
ipv6_address = inet_dict['addr'].split('%')[0] ipv6_address = inet_dict['addr'].split('%')[0]
...@@ -134,8 +136,8 @@ def _waitIpv6Ready(ipv6_interface): ...@@ -134,8 +136,8 @@ def _waitIpv6Ready(ipv6_interface):
break break
else: else:
ipv6_address = "" ipv6_address = ""
print "[BOOT] [ERROR] No IPv6 found on interface %r, " \ print("[BOOT] [ERROR] No IPv6 found on interface %r, "
"try again in 5 seconds..." % ipv6_interface "try again in 5 seconds..." % ipv6_interface))
sleep(5) sleep(5)
class BootCommand(ConfigCommand): class BootCommand(ConfigCommand):
...@@ -155,7 +157,7 @@ class BootCommand(ConfigCommand): ...@@ -155,7 +157,7 @@ class BootCommand(ConfigCommand):
def take_action(self, args): def take_action(self, args):
configp = self.fetch_config(args) configp = self.fetch_config(args)
instance_root = configp.get('slapos','instance_root') instance_root = configp.get('slapos','instance_root')
master_url = urlparse.urlparse(configp.get('slapos','master_url')) master_url = urlparse(configp.get('slapos','master_url'))
master_hostname = master_url.hostname master_hostname = master_url.hostname
# Check that we have IPv6 ready # Check that we have IPv6 ready
...@@ -177,12 +179,12 @@ class BootCommand(ConfigCommand): ...@@ -177,12 +179,12 @@ class BootCommand(ConfigCommand):
app = SlapOSApp() app = SlapOSApp()
# Make sure slapos node format returns ok # Make sure slapos node format returns ok
while not _runFormat(app): while not _runFormat(app):
print "[BOOT] [ERROR] Fail to format, try again in 15 seconds..." print("[BOOT] [ERROR] Fail to format, try again in 15 seconds...")
sleep(15) sleep(15)
# Make sure slapos node bang returns ok # Make sure slapos node bang returns ok
while not _runBang(app): while not _runBang(app):
print "[BOOT] [ERROR] Fail to bang, try again in 15 seconds..." print("[BOOT] [ERROR] Fail to bang, try again in 15 seconds...")
sleep(15) sleep(15)
_removeTimestamp(instance_root) _removeTimestamp(instance_root)
...@@ -27,7 +27,7 @@ ...@@ -27,7 +27,7 @@
# #
############################################################################## ##############################################################################
import ConfigParser from six.moves import configparser
import os import os
from slapos.cli.command import Command from slapos.cli.command import Command
...@@ -77,7 +77,7 @@ class ConfigCommand(Command): ...@@ -77,7 +77,7 @@ class ConfigCommand(Command):
if not os.path.exists(cfg_path): if not os.path.exists(cfg_path):
raise ConfigError('Configuration file does not exist: %s' % cfg_path) raise ConfigError('Configuration file does not exist: %s' % cfg_path)
configp = ConfigParser.SafeConfigParser() configp = configparser.SafeConfigParser()
if configp.read(cfg_path) != [cfg_path]: if configp.read(cfg_path) != [cfg_path]:
# bad permission, etc. # bad permission, etc.
raise ConfigError('Cannot parse configuration file: %s' % cfg_path) raise ConfigError('Cannot parse configuration file: %s' % cfg_path)
......
...@@ -31,6 +31,7 @@ import re ...@@ -31,6 +31,7 @@ import re
import os import os
import sys import sys
import json import json
from six.moves import input
import requests import requests
...@@ -94,7 +95,7 @@ def fetch_configuration_template(): ...@@ -94,7 +95,7 @@ def fetch_configuration_template():
def do_configure_client(logger, master_url_web, token, config_path, master_url): def do_configure_client(logger, master_url_web, token, config_path, master_url):
while not token: while not token:
token = raw_input('Credential security token: ').strip() token = input('Credential security token: ').strip()
# Check for existence of previous configuration, certificate or key files # Check for existence of previous configuration, certificate or key files
# where we expect to create them. If so, ask the user to manually remove them. # where we expect to create them. If so, ask the user to manually remove them.
......
...@@ -39,6 +39,7 @@ from slapos.cli.config import ConfigCommand ...@@ -39,6 +39,7 @@ from slapos.cli.config import ConfigCommand
from slapos.grid.slapgrid import create_slapgrid_object from slapos.grid.slapgrid import create_slapgrid_object
from slapos.grid.utils import updateFile, createPrivateDirectory from slapos.grid.utils import updateFile, createPrivateDirectory
from slapos.grid.svcbackend import launchSupervisord from slapos.grid.svcbackend import launchSupervisord
from slapos.util import bytes2str
DEFAULT_COMPUTER_ID = 'local_computer' DEFAULT_COMPUTER_ID = 'local_computer'
...@@ -126,8 +127,8 @@ def _replaceParameterValue(original_content, to_replace): ...@@ -126,8 +127,8 @@ def _replaceParameterValue(original_content, to_replace):
def _generateSlaposNodeConfigurationFile(slapos_node_config_path, args): def _generateSlaposNodeConfigurationFile(slapos_node_config_path, args):
template_arg_list = (__name__, '../../slapos.cfg.example') template_arg_list = (__name__, '../../slapos.cfg.example')
with pkg_resources.resource_stream(*template_arg_list) as fout: slapos_node_configuration_template = \
slapos_node_configuration_template = fout.read() bytes2str(pkg_resources.resource_string(*template_arg_list))
master_url = 'http://%s:%s' % (args.daemon_listen_ip, args.daemon_listen_port) master_url = 'http://%s:%s' % (args.daemon_listen_ip, args.daemon_listen_port)
slapos_home = args.slapos_buildout_directory slapos_home = args.slapos_buildout_directory
to_replace = [ to_replace = [
...@@ -149,12 +150,12 @@ def _generateSlaposNodeConfigurationFile(slapos_node_config_path, args): ...@@ -149,12 +150,12 @@ def _generateSlaposNodeConfigurationFile(slapos_node_config_path, args):
'(key_file|cert_file|certificate_repository_path).*=.*\n', '(key_file|cert_file|certificate_repository_path).*=.*\n',
'', slapos_node_configuration_content) '', slapos_node_configuration_content)
with open(slapos_node_config_path, 'w') as fout: with open(slapos_node_config_path, 'w') as fout:
fout.write(slapos_node_configuration_content.encode('utf8')) fout.write(slapos_node_configuration_content)
def _generateSlaposProxyConfigurationFile(conf): def _generateSlaposProxyConfigurationFile(conf):
template_arg_list = (__name__, '../../slapos-proxy.cfg.example') template_arg_list = (__name__, '../../slapos-proxy.cfg.example')
with pkg_resources.resource_stream(*template_arg_list) as fout: slapos_proxy_configuration_template = \
slapos_proxy_configuration_template = fout.read() bytes2str(pkg_resources.resource_string(*template_arg_list))
slapos_proxy_configuration_path = os.path.join( slapos_proxy_configuration_path = os.path.join(
conf.slapos_configuration_directory, 'slapos-proxy.cfg') conf.slapos_configuration_directory, 'slapos-proxy.cfg')
listening_ip, listening_port = \ listening_ip, listening_port = \
...@@ -172,7 +173,7 @@ def _generateSlaposProxyConfigurationFile(conf): ...@@ -172,7 +173,7 @@ def _generateSlaposProxyConfigurationFile(conf):
slapos_proxy_configuration_template, to_replace) slapos_proxy_configuration_template, to_replace)
with open(slapos_proxy_configuration_path, 'w') as fout: with open(slapos_proxy_configuration_path, 'w') as fout:
fout.write(slapos_proxy_configuration_content.encode('utf8')) fout.write(slapos_proxy_configuration_content)
return slapos_proxy_configuration_path return slapos_proxy_configuration_path
......
...@@ -32,6 +32,8 @@ import textwrap ...@@ -32,6 +32,8 @@ import textwrap
from slapos.cli.config import ClientConfigCommand from slapos.cli.config import ClientConfigCommand
from slapos.client import init, do_console, ClientConfig from slapos.client import init, do_console, ClientConfig
from six import exec_
class ShellNotFound(Exception): class ShellNotFound(Exception):
pass pass
...@@ -90,7 +92,9 @@ class ConsoleCommand(ClientConfigCommand): ...@@ -90,7 +92,9 @@ class ConsoleCommand(ClientConfigCommand):
local = init(conf, self.app.log) local = init(conf, self.app.log)
if args.script_file: if args.script_file:
return execfile(args.script_file, globals(), local) with open(args.script_file) as f:
code = compile(f.read(), args.script_file, 'exec')
return exec_(code, globals(), local)
if not any([args.python, args.ipython, args.bpython]): if not any([args.python, args.ipython, args.bpython]):
args.ipython = True args.ipython = True
......
...@@ -247,7 +247,7 @@ class SlapOSApp(App): ...@@ -247,7 +247,7 @@ class SlapOSApp(App):
return return
if self.options.log_color: if self.options.log_color:
import coloredlogs from slapos.cli import coloredlogs
console = coloredlogs.ColoredStreamHandler(show_name=True, # logger name (slapos) and PID console = coloredlogs.ColoredStreamHandler(show_name=True, # logger name (slapos) and PID
show_severity=True, show_severity=True,
show_timestamps=self.options.log_time, show_timestamps=self.options.log_time,
......
...@@ -29,6 +29,7 @@ ...@@ -29,6 +29,7 @@
import logging import logging
import sys import sys
import six
from slapos.cli.config import ClientConfigCommand from slapos.cli.config import ClientConfigCommand
from slapos.client import init, ClientConfig from slapos.client import init, ClientConfig
...@@ -64,5 +65,5 @@ def do_list(logger, conf, local): ...@@ -64,5 +65,5 @@ def do_list(logger, conf, local):
logger.info('No existing service.') logger.info('No existing service.')
return return
logger.info('List of services:') logger.info('List of services:')
for title, instance in instance_dict.iteritems(): for title, instance in six.iteritems(instance_dict):
logger.info('%s %s', title, instance._software_release_url) logger.info('%s %s', title, instance._software_release_url)
...@@ -34,7 +34,6 @@ import logging ...@@ -34,7 +34,6 @@ import logging
import sys import sys
import os import os
import subprocess import subprocess
import StringIO
import lxml.etree import lxml.etree
import prettytable import prettytable
...@@ -43,8 +42,17 @@ import sqlite3 ...@@ -43,8 +42,17 @@ import sqlite3
from slapos.cli.config import ConfigCommand from slapos.cli.config import ConfigCommand
from slapos.proxy import ProxyConfig from slapos.proxy import ProxyConfig
from slapos.proxy.db_version import DB_VERSION from slapos.proxy.db_version import DB_VERSION
from slapos.util import sqlite_connect from slapos.util import sqlite_connect, str2bytes
if bytes is str:
from io import BytesIO
class StringIO(BytesIO):
# Something between strict io.BytesIO and laxist/slow StringIO.StringIO
# (which starts returning unicode once unicode is written) for logging.
def write(self, b):
return BytesIO.write(self, b.encode('utf-8'))
else:
from io import StringIO
class ProxyShowCommand(ConfigCommand): class ProxyShowCommand(ConfigCommand):
""" """
...@@ -139,7 +147,7 @@ def log_params(logger, conn): ...@@ -139,7 +147,7 @@ def log_params(logger, conn):
if not row['connection_xml']: if not row['connection_xml']:
continue continue
xml = str(row['connection_xml']) xml = str2bytes(row['connection_xml'])
logger.info('%s: %s (type %s)', row['reference'], row['partition_reference'], row['software_type']) logger.info('%s: %s (type %s)', row['reference'], row['partition_reference'], row['software_type'])
instance = lxml.etree.fromstring(xml) instance = lxml.etree.fromstring(xml)
for parameter in list(instance): for parameter in list(instance):
...@@ -150,9 +158,12 @@ def log_params(logger, conn): ...@@ -150,9 +158,12 @@ def log_params(logger, conn):
# _ is usually json encoded - re-format to make it easier to read # _ is usually json encoded - re-format to make it easier to read
if name == '_': if name == '_':
try: try:
text = json.dumps(json.loads(text), indent=2) text = json.dumps(json.loads(text),
indent=2, sort_keys=True)
except ValueError: except ValueError:
pass pass
else: # to avoid differences between Py2 and Py3 in unit tests
text = '\n'.join(map(str.rstrip, text.splitlines()))
logger.info(' %s = %s', name, text) logger.info(' %s = %s', name, text)
...@@ -208,7 +219,7 @@ def do_show(conf): ...@@ -208,7 +219,7 @@ def do_show(conf):
# to paginate input, honoring $PAGER. # to paginate input, honoring $PAGER.
output = sys.stdout output = sys.stdout
if output.isatty(): if output.isatty():
output = StringIO.StringIO() output = StringIO()
proxy_show_logger = logging.getLogger(__name__) proxy_show_logger = logging.getLogger(__name__)
handler = logging.StreamHandler(output) handler = logging.StreamHandler(output)
handler.setLevel(logging.DEBUG) handler.setLevel(logging.DEBUG)
...@@ -221,7 +232,8 @@ def do_show(conf): ...@@ -221,7 +232,8 @@ def do_show(conf):
conn = sqlite_connect(conf.database_uri) conn = sqlite_connect(conf.database_uri)
conn.row_factory = sqlite3.Row conn.row_factory = sqlite3.Row
conn.create_function('md5', 1, lambda s: hashlib.md5(s).hexdigest()) conn.create_function('md5', 1,
lambda s: hashlib.md5(str2bytes(s)).hexdigest())
call_table = [ call_table = [
(conf.computers, log_computer_table), (conf.computers, log_computer_table),
...@@ -248,4 +260,4 @@ def do_show(conf): ...@@ -248,4 +260,4 @@ def do_show(conf):
close_fds=True, close_fds=True,
shell=True, shell=True,
stdin=subprocess.PIPE,) stdin=subprocess.PIPE,)
pager.communicate(output.getvalue().encode('utf-8')) pager.communicate(str2bytes(output.getvalue()))
...@@ -36,6 +36,7 @@ import sys ...@@ -36,6 +36,7 @@ import sys
import pkg_resources import pkg_resources
import requests import requests
import json import json
from six.moves import input
from slapos.cli.command import Command, must_be_root from slapos.cli.command import Command, must_be_root
...@@ -327,7 +328,7 @@ def gen_auth(conf): ...@@ -327,7 +328,7 @@ def gen_auth(conf):
else: else:
yield conf.login, getpass.getpass() yield conf.login, getpass.getpass()
while ask: while ask:
yield raw_input('SlapOS Master Login: '), getpass.getpass() yield input('SlapOS Master Login: '), getpass.getpass()
def do_register(conf): def do_register(conf):
...@@ -348,7 +349,7 @@ def do_register(conf): ...@@ -348,7 +349,7 @@ def do_register(conf):
password=password) password=password)
else: else:
while not conf.token: while not conf.token:
conf.token = raw_input('Computer security token: ').strip() conf.token = input('Computer security token: ').strip()
certificate, key = get_certificate_key_pair(conf.logger, certificate, key = get_certificate_key_pair(conf.logger,
conf.master_url_web, conf.master_url_web,
......
...@@ -28,7 +28,7 @@ ...@@ -28,7 +28,7 @@
############################################################################## ##############################################################################
import atexit import atexit
import ConfigParser from six.moves import configparser
import os import os
import sys import sys
...@@ -52,7 +52,7 @@ class ClientConfig(object): ...@@ -52,7 +52,7 @@ class ClientConfig(object):
# Merges the arguments and configuration # Merges the arguments and configuration
try: try:
configuration_dict = dict(configp.items('slapconsole')) configuration_dict = dict(configp.items('slapconsole'))
except ConfigParser.NoSectionError: except configparser.NoSectionError:
pass pass
else: else:
for key in configuration_dict: for key in configuration_dict:
...@@ -119,7 +119,7 @@ def _getSoftwareReleaseFromSoftwareString(logger, software_string, product): ...@@ -119,7 +119,7 @@ def _getSoftwareReleaseFromSoftwareString(logger, software_string, product):
try: try:
return product.__getattr__(software_string[len(SOFTWARE_PRODUCT_NAMESPACE):]) return product.__getattr__(software_string[len(SOFTWARE_PRODUCT_NAMESPACE):])
except AttributeError as e: except AttributeError as e:
logger.error('Error: %s Exiting now.' % e.message) logger.error('Error: %s Exiting now.', e)
sys.exit(1) sys.exit(1)
def do_console(local): def do_console(local):
......
...@@ -27,6 +27,7 @@ ...@@ -27,6 +27,7 @@
# #
############################################################################## ##############################################################################
from __future__ import print_function
from psutil import process_iter, NoSuchProcess, AccessDenied from psutil import process_iter, NoSuchProcess, AccessDenied
from time import strftime from time import strftime
import shutil import shutil
...@@ -42,7 +43,7 @@ from slapos.collect.reporter import RawCSVDumper, \ ...@@ -42,7 +43,7 @@ from slapos.collect.reporter import RawCSVDumper, \
compressLogFolder, \ compressLogFolder, \
ConsumptionReport ConsumptionReport
from entity import get_user_list, Computer from .entity import get_user_list, Computer
def _get_time(): def _get_time():
return strftime("%Y-%m-%d -- %H:%M:%S").split(" -- ") return strftime("%Y-%m-%d -- %H:%M:%S").split(" -- ")
...@@ -157,5 +158,5 @@ def do_collect(conf): ...@@ -157,5 +158,5 @@ def do_collect(conf):
database.garbageCollect() database.garbageCollect()
except AccessDenied: except AccessDenied:
print "You HAVE TO execute this script with root permission." print("You HAVE TO execute this script with root permission.")
...@@ -74,19 +74,15 @@ class User(object): ...@@ -74,19 +74,15 @@ class User(object):
time_cycle = self.disk_snapshot_params.get('time_cycle', 0) time_cycle = self.disk_snapshot_params.get('time_cycle', 0)
database.connect() database.connect()
if time_cycle: if time_cycle:
order = 'date DESC, time DESC' for date_time in database.select(table="folder", columns="date, time",
limit = 1 order='date DESC, time DESC', limit=1,
query = database.select(table="folder", columns="date, time", where="partition='%s'" % self.name):
order=order, limit=limit, latest_date = datetime.strptime('%s %s' % date_time,
where="partition='%s'" % self.name) "%Y-%m-%d %H:%M:%S")
query_result = zip(*query)
if len(query_result):
date, time = (query_result[0][0], query_result[1][0])
latest_date = datetime.strptime('%s %s' % (date, time),
"%Y-%m-%d %H:%M:%S")
if (datetime.now() - latest_date).seconds < time_cycle: if (datetime.now() - latest_date).seconds < time_cycle:
# wait the time cycle # wait the time cycle
return return
break
pid_file = self.disk_snapshot_params.get('pid_folder', None) pid_file = self.disk_snapshot_params.get('pid_folder', None)
if pid_file is not None: if pid_file is not None:
pid_file = os.path.join(pid_file, '%s_disk_size.pid' % self.name) pid_file = os.path.join(pid_file, '%s_disk_size.pid' % self.name)
......
...@@ -40,6 +40,8 @@ import tarfile ...@@ -40,6 +40,8 @@ import tarfile
import time import time
import psutil import psutil
import six
log_file = False log_file = False
class Dumper(object): class Dumper(object):
...@@ -59,10 +61,10 @@ class SystemReporter(Dumper): ...@@ -59,10 +61,10 @@ class SystemReporter(Dumper):
""" Dump data """ """ Dump data """
_date = time.strftime("%Y-%m-%d") _date = time.strftime("%Y-%m-%d")
self.db.connect() self.db.connect()
for item, collected_item_list in self.db.exportSystemAsDict(_date).iteritems(): for item, collected_item_list in six.iteritems(self.db.exportSystemAsDict(_date)):
self.writeFile(item, folder, collected_item_list) self.writeFile(item, folder, collected_item_list)
for partition, collected_item_list in self.db.exportDiskAsDict(_date).iteritems(): for partition, collected_item_list in six.iteritems(self.db.exportDiskAsDict(_date)):
partition_id = "_".join(partition.split("-")[:-1]).replace("/", "_") partition_id = "_".join(partition.split("-")[:-1]).replace("/", "_")
item = "memory_%s" % partition.split("-")[-1] item = "memory_%s" % partition.split("-")[-1]
self.writeFile("disk_%s_%s" % (item, partition_id), folder, collected_item_list) self.writeFile("disk_%s_%s" % (item, partition_id), folder, collected_item_list)
...@@ -135,62 +137,53 @@ class ConsumptionReportBase(object): ...@@ -135,62 +137,53 @@ class ConsumptionReportBase(object):
def getPartitionCPULoadAverage(self, partition_id, date_scope): def getPartitionCPULoadAverage(self, partition_id, date_scope):
self.db.connect() self.db.connect()
query_result_cursor = self.db.select("user", date_scope, (cpu_percent_sum,), = self.db.select("user", date_scope,
columns="SUM(cpu_percent)", columns="SUM(cpu_percent)",
where="partition = '%s'" % partition_id) where="partition = '%s'" % partition_id)
cpu_percent_sum = zip(*query_result_cursor) if cpu_percent_sum is None:
if len(cpu_percent_sum) and cpu_percent_sum[0][0] is None:
return return
query_result_cursor = self.db.select("user", date_scope, (sample_amount,), = self.db.select("user", date_scope,
columns="COUNT(DISTINCT time)", columns="COUNT(DISTINCT time)",
where="partition = '%s'" % partition_id) where="partition = '%s'" % partition_id)
sample_amount = zip(*query_result_cursor)
self.db.close() self.db.close()
if len(sample_amount) and len(cpu_percent_sum): return cpu_percent_sum/sample_amount
return cpu_percent_sum[0][0]/sample_amount[0][0]
def getPartitionUsedMemoryAverage(self, partition_id, date_scope): def getPartitionUsedMemoryAverage(self, partition_id, date_scope):
self.db.connect() self.db.connect()
query_result_cursor = self.db.select("user", date_scope, (memory_sum,), = self.db.select("user", date_scope,
columns="SUM(memory_rss)", columns="SUM(memory_rss)",
where="partition = '%s'" % partition_id) where="partition = '%s'" % partition_id)
memory_sum = zip(*query_result_cursor) if memory_sum is None:
if len(memory_sum) and memory_sum[0][0] is None:
return return
query_result_cursor = self.db.select("user", date_scope, (sample_amount,), = self.db.select("user", date_scope,
columns="COUNT(DISTINCT time)", columns="COUNT(DISTINCT time)",
where="partition = '%s'" % partition_id) where="partition = '%s'" % partition_id)
sample_amount = zip(*query_result_cursor)
self.db.close() self.db.close()
if len(sample_amount) and len(memory_sum): return memory_sum/sample_amount
return memory_sum[0][0]/sample_amount[0][0]
def getPartitionDiskUsedAverage(self, partition_id, date_scope): def getPartitionDiskUsedAverage(self, partition_id, date_scope):
self.db.connect() self.db.connect()
query_result_cursor = self.db.select("folder", date_scope, (disk_used_sum,), = self.db.select("folder", date_scope,
columns="SUM(disk_used)", columns="SUM(disk_used)",
where="partition = '%s'" % partition_id) where="partition = '%s'" % partition_id)
disk_used_sum = zip(*query_result_cursor) if disk_used_sum is None:
if len(disk_used_sum) and disk_used_sum[0][0] is None:
return return
query_result_cursor = self.db.select("folder", date_scope, (collect_amount,), = self.db.select("folder", date_scope,
columns="COUNT(DISTINCT time)", columns="COUNT(DISTINCT time)",
where="partition = '%s'" % partition_id) where="partition = '%s'" % partition_id)
collect_amount = zip(*query_result_cursor)
self.db.close() self.db.close()
if len(collect_amount) and len(disk_used_sum): return disk_used_sum/collect_amount
return disk_used_sum[0][0]/collect_amount[0][0]
class ConsumptionReport(ConsumptionReportBase): class ConsumptionReport(ConsumptionReportBase):
...@@ -287,7 +280,7 @@ class ConsumptionReport(ConsumptionReportBase): ...@@ -287,7 +280,7 @@ class ConsumptionReport(ConsumptionReportBase):
reference=user, reference=user,
category="") category="")
with open(xml_report_path, 'w') as f: with open(xml_report_path, 'wb') as f:
f.write(journal.getXML()) f.write(journal.getXML())
f.close() f.close()
...@@ -298,23 +291,19 @@ class ConsumptionReport(ConsumptionReportBase): ...@@ -298,23 +291,19 @@ class ConsumptionReport(ConsumptionReportBase):
def _getCpuLoadAverageConsumption(self, date_scope): def _getCpuLoadAverageConsumption(self, date_scope):
self.db.connect() self.db.connect()
query_result_cursor = self.db.select("system", date_scope, (cpu_load_percent_list,), = self.db.select("system", date_scope,
columns="SUM(cpu_percent)/COUNT(cpu_percent)") columns="SUM(cpu_percent)/COUNT(cpu_percent)")
cpu_load_percent_list = zip(*query_result_cursor)
self.db.close() self.db.close()
if len(cpu_load_percent_list): return cpu_load_percent_list
return cpu_load_percent_list[0][0]
def _getMemoryAverageConsumption(self, date_scope): def _getMemoryAverageConsumption(self, date_scope):
self.db.connect() self.db.connect()
query_result_cursor = self.db.select("system", date_scope, (memory_used_list,), = self.db.select("system", date_scope,
columns="SUM(memory_used)/COUNT(memory_used)") columns="SUM(memory_used)/COUNT(memory_used)")
memory_used_list = zip(*query_result_cursor)
self.db.close() self.db.close()
if len(memory_used_list): return memory_used_list
return memory_used_list[0][0]
def _getZeroEmissionContribution(self): def _getZeroEmissionContribution(self):
self.db.connect() self.db.connect()
...@@ -329,7 +318,7 @@ class Journal(object): ...@@ -329,7 +318,7 @@ class Journal(object):
def getXML(self): def getXML(self):
report = ElementTree.tostring(self.root) report = ElementTree.tostring(self.root)
return "<?xml version='1.0' encoding='utf-8'?>%s" % report return b"<?xml version='1.0' encoding='utf-8'?>%s" % report
def newTransaction(self, portal_type="Sale Packing List"): def newTransaction(self, portal_type="Sale Packing List"):
transaction = ElementTree.SubElement(self.root, "transaction") transaction = ElementTree.SubElement(self.root, "transaction")
......
...@@ -27,13 +27,15 @@ ...@@ -27,13 +27,15 @@
# #
############################################################################## ##############################################################################
from __future__ import print_function
import psutil import psutil
import os import os
import subprocess import subprocess
from temperature import collectComputerTemperature, \ from .temperature import collectComputerTemperature, launchTemperatureTest
launchTemperatureTest
from temperature.heating import get_contribution_ratio from .temperature.heating import get_contribution_ratio
import six
MEASURE_INTERVAL = 5 MEASURE_INTERVAL = 5
...@@ -159,7 +161,7 @@ class HeatingContributionSnapshot(_Snapshot): ...@@ -159,7 +161,7 @@ class HeatingContributionSnapshot(_Snapshot):
result = launchTemperatureTest(sensor_id) result = launchTemperatureTest(sensor_id)
if result is None: if result is None:
print "Impossible to test sensor: %s " % sensor_id print("Impossible to test sensor: %s " % sensor_id)
initial_temperature, final_temperature, duration = result initial_temperature, final_temperature, duration = result
...@@ -215,8 +217,7 @@ class ComputerSnapshot(_Snapshot): ...@@ -215,8 +217,7 @@ class ComputerSnapshot(_Snapshot):
# #
self.system_snapshot = SystemSnapshot() self.system_snapshot = SystemSnapshot()
self.temperature_snapshot_list = self._get_temperature_snapshot_list() self.temperature_snapshot_list = self._get_temperature_snapshot_list()
self.disk_snapshot_list = [] self._get_physical_disk_info()
self.partition_list = self._get_physical_disk_info()
if test_heating and model_id is not None \ if test_heating and model_id is not None \
and sensor_id is not None: and sensor_id is not None:
...@@ -231,16 +232,16 @@ class ComputerSnapshot(_Snapshot): ...@@ -231,16 +232,16 @@ class ComputerSnapshot(_Snapshot):
return temperature_snapshot_list return temperature_snapshot_list
def _get_physical_disk_info(self): def _get_physical_disk_info(self):
partition_dict = {} # XXX: merge the following 2 to avoid calling disk_usage() twice
self.disk_snapshot_list = []
self.partition_list = []
partition_set = set()
for partition in psutil.disk_partitions(): for partition in psutil.disk_partitions():
if partition.device not in partition_dict: dev = partition.device
if dev not in partition_set: # XXX: useful ?
partition_set.add(dev)
usage = psutil.disk_usage(partition.mountpoint) usage = psutil.disk_usage(partition.mountpoint)
partition_dict[partition.device] = usage.total self.partition_list.append((dev, usage.total))
self.disk_snapshot_list.append( self.disk_snapshot_list.append(
DiskPartitionSnapshot(partition.device, DiskPartitionSnapshot(dev, partition.mountpoint))
partition.mountpoint))
return [(k, v) for k, v in partition_dict.iteritems()]
from __future__ import print_function
from multiprocessing import Process, active_children, cpu_count, Pipe from multiprocessing import Process, active_children, cpu_count, Pipe
import subprocess import subprocess
...@@ -14,28 +15,21 @@ except NotImplementedError: ...@@ -14,28 +15,21 @@ except NotImplementedError:
DEFAULT_CPU = 1 DEFAULT_CPU = 1
def collectComputerTemperature(sensor_bin="sensors"): def collectComputerTemperature(sensor_bin="sensors"):
cmd = ["%s -u" % sensor_bin] stdout = subprocess.check_output((sensor_bin, '-u'), universal_newlines=True)
  • This seems to be a change in behavior: before this change, if the sensors command was not available, the test failed — which I believe is a good thing; the new behavior hides errors.

    Isn't the missing part that we need to include component/lmsensors/buildout.cfg in the PATH of the test here ? similar to what we do for socat for example ?

    /cc @rafael @tomo

  • Apparently this is also something we can get from psutil https://psutil.readthedocs.io/en/latest/index.html#psutil.sensors_temperatures

  • Tests can fail because the command is not available (like on my development environment), but some tests also fail because the command exits with a non-zero status, for example:

    test_computer_snapshot (slapos.tests.collect.TestCollectSnapshot) ... No sensors found!
    Make sure you loaded all the kernel drivers you need.
    Try sensors-detect to find out which these are.
    ERROR

    I noticed this on rapidspace-testnode-003-3Nodes-ERP5PROJECT3 , also on RAPIDCLOUD-VM-COMP-2867-3Nodes-ERP5PROJECT6

  • it should fail if sensor is not available.... and it should get sensors from the component like you said.

  • Thanks. So if sensors command is not available it should fail. OK, let's change the software profile to add the command from component in $PATH -> slapos!479 (merged)

    But what should we do when the sensors command is available but does not find any sensors (the "No sensors found!" error message from above)? slapos!479 (merged) won't solve this case.

    Also according to https://github.com/lm-sensors/lm-sensors , this is linux only, psutil version seems a bit more portable but it's not something that's universally available, so slapos should maybe consider this as something optional - only required on linux for example

  • From my understanding of the discussions in slapos!479 (comment 71803), here we should not use check_output but an approach that tolerates a non-zero exit code.

    In !73 (merged) we discuss adding subprocess32, so we can maybe switch to subprocess.run here ?

  • for the subprocess.run approach -> !81 (merged)

Please register or sign in to reply
sp = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=True)
stdout, stderr = sp.communicate()
sensor_output_list = stdout.splitlines() sensor_output_list = stdout.splitlines()
adapter_name = "" adapter_name = ""
sensor_temperature_list = [] sensor_temperature_list = []
for line_number in range(len(sensor_output_list)): for line_number, sensor_output in enumerate(sensor_output_list):
found_sensor = None stripped_line = sensor_output.strip()
stripped_line = sensor_output_list[line_number].strip()
if stripped_line.startswith("Adapter:"): if stripped_line.startswith("Adapter:"):
adapter_name = sensor_output_list[line_number-1] adapter_name = sensor_output_list[line_number-1]
elif stripped_line.startswith("temp") and "_input" in stripped_line: elif stripped_line.startswith("temp") and "_input" in stripped_line:
temperature = sensor_output_list[line_number].split()[-1] temperature = sensor_output.split()[-1]
found_sensor = ["%s %s" % (adapter_name, sensor_output_list[line_number-1]), float(temperature)] found_sensor = ["%s %s" % (adapter_name, sensor_output_list[line_number-1]), float(temperature)]
if found_sensor is not None:
critical = '1000' critical = '1000'
maximal = '1000' maximal = '1000'
for next_line in sensor_output_list[line_number+1:line_number+3]: for next_line in sensor_output_list[line_number+1:line_number+3]:
...@@ -120,7 +114,7 @@ def launchTemperatureTest(sensor_id, sensor_bin="sensors", timeout=600, interval ...@@ -120,7 +114,7 @@ def launchTemperatureTest(sensor_id, sensor_bin="sensors", timeout=600, interval
for connection in process_connection_list: for connection in process_connection_list:
try: try:
print connection.recv() print(connection.recv())
except EOFError: except EOFError:
continue continue
......
...@@ -28,7 +28,7 @@ ...@@ -28,7 +28,7 @@
# #
############################################################################## ##############################################################################
import ConfigParser from six.moves import configparser
import errno import errno
import fcntl import fcntl
import grp import grp
...@@ -51,12 +51,13 @@ import time ...@@ -51,12 +51,13 @@ import time
import traceback import traceback
import zipfile import zipfile
import platform import platform
from urllib2 import urlopen from six.moves.urllib.request import urlopen
import six
import lxml.etree import lxml.etree
import xml_marshaller.xml_marshaller import xml_marshaller.xml_marshaller
from slapos.util import mkdir_p, ipv6FromBin, binFromIpv6, lenNetmaskIpv6 from slapos.util import dumps, mkdir_p, ipv6FromBin, binFromIpv6, lenNetmaskIpv6
import slapos.slap as slap import slapos.slap as slap
from slapos import version from slapos import version
from slapos import manager as slapmanager from slapos import manager as slapmanager
...@@ -87,7 +88,7 @@ class OS(object): ...@@ -87,7 +88,7 @@ class OS(object):
def _addWrapper(self, name): def _addWrapper(self, name):
def wrapper(*args, **kw): def wrapper(*args, **kw):
arg_list = [repr(x) for x in args] + [ arg_list = [repr(x) for x in args] + [
'%s=%r' % (x, y) for x, y in kw.iteritems() '%s=%r' % (x, y) for x, y in six.iteritems(kw)
] ]
self._logger.debug('%s(%s)' % (name, ', '.join(arg_list))) self._logger.debug('%s(%s)' % (name, ', '.join(arg_list)))
if not self._dry_run: if not self._dry_run:
...@@ -232,8 +233,8 @@ def _getDict(obj): ...@@ -232,8 +233,8 @@ def _getDict(obj):
return obj return obj
return { return {
key: _getDict(value) \ key: _getDict(value)
for key, value in dikt.iteritems() \ for key, value in six.iteritems(dikt)
# do not attempt to serialize logger: it is both useless and recursive. # do not attempt to serialize logger: it is both useless and recursive.
# do not serialize attributes starting with "_", let the classes have some privacy # do not serialize attributes starting with "_", let the classes have some privacy
if not key.startswith("_") if not key.startswith("_")
...@@ -336,7 +337,7 @@ class Computer(object): ...@@ -336,7 +337,7 @@ class Computer(object):
if conf.dry_run: if conf.dry_run:
return return
try: try:
slap_computer.updateConfiguration(xml_marshaller.xml_marshaller.dumps(_getDict(self))) slap_computer.updateConfiguration(dumps(_getDict(self)))
except slap.NotFoundError as error: except slap.NotFoundError as error:
raise slap.NotFoundError("%s\nERROR: This SlapOS node is not recognised by " raise slap.NotFoundError("%s\nERROR: This SlapOS node is not recognised by "
"SlapOS Master and/or computer_id and certificates don't match. " "SlapOS Master and/or computer_id and certificates don't match. "
...@@ -358,7 +359,7 @@ class Computer(object): ...@@ -358,7 +359,7 @@ class Computer(object):
with open(path_to_json, 'wb') as fout: with open(path_to_json, 'wb') as fout:
fout.write(json.dumps(computer_dict, sort_keys=True, indent=2)) fout.write(json.dumps(computer_dict, sort_keys=True, indent=2))
new_xml = xml_marshaller.xml_marshaller.dumps(computer_dict) new_xml = dumps(computer_dict)
new_pretty_xml = prettify_xml(new_xml) new_pretty_xml = prettify_xml(new_xml)
path_to_archive = path_to_xml + '.zip' path_to_archive = path_to_xml + '.zip'
...@@ -1197,7 +1198,7 @@ class Interface(object): ...@@ -1197,7 +1198,7 @@ class Interface(object):
def parse_computer_definition(conf, definition_path): def parse_computer_definition(conf, definition_path):
conf.logger.info('Using definition file %r' % definition_path) conf.logger.info('Using definition file %r' % definition_path)
computer_definition = ConfigParser.RawConfigParser({ computer_definition = configparser.RawConfigParser({
'software_user': 'slapsoft', 'software_user': 'slapsoft',
}) })
computer_definition.read(definition_path) computer_definition.read(definition_path)
...@@ -1308,7 +1309,7 @@ def parse_computer_xml(conf, xml_path): ...@@ -1308,7 +1309,7 @@ def parse_computer_xml(conf, xml_path):
def write_computer_definition(conf, computer): def write_computer_definition(conf, computer):
computer_definition = ConfigParser.RawConfigParser() computer_definition = configparser.RawConfigParser()
computer_definition.add_section('computer') computer_definition.add_section('computer')
if computer.address is not None and computer.netmask is not None: if computer.address is not None and computer.netmask is not None:
computer_definition.set('computer', 'address', '/'.join( computer_definition.set('computer', 'address', '/'.join(
......
...@@ -38,7 +38,7 @@ import subprocess ...@@ -38,7 +38,7 @@ import subprocess
import tarfile import tarfile
import tempfile import tempfile
import time import time
import xmlrpclib from six.moves import xmlrpc_client as xmlrpclib, range
from supervisor import xmlrpc from supervisor import xmlrpc
...@@ -51,6 +51,7 @@ from slapos.grid.exception import (BuildoutFailedError, WrongPermissionError, ...@@ -51,6 +51,7 @@ from slapos.grid.exception import (BuildoutFailedError, WrongPermissionError,
PathDoesNotExistError, DiskSpaceError) PathDoesNotExistError, DiskSpaceError)
from slapos.grid.networkcache import download_network_cached, upload_network_cached from slapos.grid.networkcache import download_network_cached, upload_network_cached
from slapos.human import bytes2human from slapos.human import bytes2human
from slapos.util import bytes2str
WATCHDOG_MARK = '-on-watch' WATCHDOG_MARK = '-on-watch'
...@@ -60,8 +61,8 @@ REQUIRED_COMPUTER_PARTITION_PERMISSION = 0o750 ...@@ -60,8 +61,8 @@ REQUIRED_COMPUTER_PARTITION_PERMISSION = 0o750
CP_STORAGE_FOLDER_NAME = 'DATA' CP_STORAGE_FOLDER_NAME = 'DATA'
# XXX not very clean. this is changed when testing # XXX not very clean. this is changed when testing
PROGRAM_PARTITION_TEMPLATE = pkg_resources.resource_stream(__name__, PROGRAM_PARTITION_TEMPLATE = bytes2str(pkg_resources.resource_string(__name__,
'templates/program_partition_supervisord.conf.in').read() 'templates/program_partition_supervisord.conf.in'))
def free_space(path, fn): def free_space(path, fn):
...@@ -150,7 +151,7 @@ class Software(object): ...@@ -150,7 +151,7 @@ class Software(object):
self.software_min_free_space = software_min_free_space self.software_min_free_space = software_min_free_space
def check_free_space(self): def check_free_space(self):
required = self.software_min_free_space required = self.software_min_free_space or 0
available = free_space_nonroot(self.software_path) available = free_space_nonroot(self.software_path)
if available < required: if available < required:
...@@ -292,7 +293,7 @@ class Software(object): ...@@ -292,7 +293,7 @@ class Software(object):
f.close() f.close()
def _create_buildout_profile(self, buildout_cfg, url): def _create_buildout_profile(self, buildout_cfg, url):
with open(buildout_cfg, 'wb') as fout: with open(buildout_cfg, 'w') as fout:
fout.write('[buildout]\nextends = ' + url + '\n') fout.write('[buildout]\nextends = ' + url + '\n')
self._set_ownership(buildout_cfg) self._set_ownership(buildout_cfg)
...@@ -419,7 +420,7 @@ class Partition(object): ...@@ -419,7 +420,7 @@ class Partition(object):
def check_free_space(self): def check_free_space(self):
required = self.instance_min_free_space required = self.instance_min_free_space or 0
available = free_space_nonroot(self.instance_path) available = free_space_nonroot(self.instance_path)
if available < required: if available < required:
...@@ -481,8 +482,8 @@ class Partition(object): ...@@ -481,8 +482,8 @@ class Partition(object):
} }
def addCustomGroup(self, group_suffix, partition_id, program_list): def addCustomGroup(self, group_suffix, partition_id, program_list):
group_partition_template = pkg_resources.resource_stream(__name__, group_partition_template = bytes2str(pkg_resources.resource_string(__name__,
'templates/group_partition_supervisord.conf.in').read() 'templates/group_partition_supervisord.conf.in'))
group_id = '{}-{}'.format(partition_id, group_suffix) group_id = '{}-{}'.format(partition_id, group_suffix)
self.supervisor_configuration_group += group_partition_template % { self.supervisor_configuration_group += group_partition_template % {
...@@ -568,8 +569,8 @@ class Partition(object): ...@@ -568,8 +569,8 @@ class Partition(object):
# fill generated buildout with additional information # fill generated buildout with additional information
buildout_text = open(config_location).read() buildout_text = open(config_location).read()
buildout_text += '\n\n' + pkg_resources.resource_string(__name__, buildout_text += '\n\n' + bytes2str(pkg_resources.resource_string(__name__,
'templates/buildout-tail.cfg.in') % { 'templates/buildout-tail.cfg.in')) % {
'computer_id': self.computer_id, 'computer_id': self.computer_id,
'partition_id': self.partition_id, 'partition_id': self.partition_id,
'server_url': self.server_url, 'server_url': self.server_url,
...@@ -671,8 +672,8 @@ class Partition(object): ...@@ -671,8 +672,8 @@ class Partition(object):
os.unlink(self.supervisord_partition_configuration_path) os.unlink(self.supervisord_partition_configuration_path)
else: else:
partition_id = self.computer_partition.getId() partition_id = self.computer_partition.getId()
group_partition_template = pkg_resources.resource_stream(__name__, group_partition_template = bytes2str(pkg_resources.resource_string(__name__,
'templates/group_partition_supervisord.conf.in').read() 'templates/group_partition_supervisord.conf.in'))
self.supervisor_configuration_group = group_partition_template % { self.supervisor_configuration_group = group_partition_template % {
'instance_id': partition_id, 'instance_id': partition_id,
'program_list': ','.join(['_'.join([partition_id, runner]) 'program_list': ','.join(['_'.join([partition_id, runner])
...@@ -842,7 +843,7 @@ class Partition(object): ...@@ -842,7 +843,7 @@ class Partition(object):
self.logger.warning('Problem while stopping process %r, will try later' % gname) self.logger.warning('Problem while stopping process %r, will try later' % gname)
else: else:
self.logger.info('Stopped %r' % gname) self.logger.info('Stopped %r' % gname)
for i in xrange(0, 10): for i in range(0, 10):
# Some process may be still running, be nice and wait for them to be stopped. # Some process may be still running, be nice and wait for them to be stopped.
try: try:
supervisor.removeProcessGroup(gname) supervisor.removeProcessGroup(gname)
......
...@@ -11,6 +11,7 @@ ...@@ -11,6 +11,7 @@
# FOR A PARTICULAR PURPOSE. # FOR A PARTICULAR PURPOSE.
# #
############################################################################## ##############################################################################
from __future__ import print_function
import ast import ast
import json import json
...@@ -29,10 +30,10 @@ try: ...@@ -29,10 +30,10 @@ try:
else: else:
LIBNETWORKCACHE_ENABLED = True LIBNETWORKCACHE_ENABLED = True
except: except:
print 'There was problem while trying to import slapos.libnetworkcache:'\ print('There was problem while trying to import slapos.libnetworkcache:\n%s'
'\n%s' % traceback.format_exc() % traceback.format_exc())
LIBNETWORKCACHE_ENABLED = False LIBNETWORKCACHE_ENABLED = False
print 'Networkcache forced to be disabled.' print('Networkcache forced to be disabled.')
...@@ -45,8 +46,8 @@ def fallback_call(function): ...@@ -45,8 +46,8 @@ def fallback_call(function):
try: try:
return function(self, *args, **kwd) return function(self, *args, **kwd)
except: # indeed, *any* exception is swallowed except: # indeed, *any* exception is swallowed
print 'There was problem while calling method %r:\n%s' % ( print('There was problem while calling method %r:\n%s' % (
function.__name__, traceback.format_exc()) function.__name__, traceback.format_exc()))
return False return False
wrapper.__doc__ = function.__doc__ wrapper.__doc__ = function.__doc__
return wrapper return wrapper
...@@ -107,7 +108,7 @@ def download_network_cached(cache_url, dir_url, software_url, software_root, ...@@ -107,7 +108,7 @@ def download_network_cached(cache_url, dir_url, software_url, software_root,
f.close() f.close()
file_descriptor.close() file_descriptor.close()
return True return True
except (IOError, DirectoryNotFound), e: except (IOError, DirectoryNotFound) as e:
logger.info('Failed to download from network cache %s: %s' % \ logger.info('Failed to download from network cache %s: %s' % \
(software_url, str(e))) (software_url, str(e)))
return False return False
...@@ -169,7 +170,7 @@ def upload_network_cached(software_root, software_url, cached_key, ...@@ -169,7 +170,7 @@ def upload_network_cached(software_root, software_url, cached_key,
try: try:
return nc.upload_generic(f, cached_key, **kw) return nc.upload_generic(f, cached_key, **kw)
except (IOError, UploadError), e: except (IOError, UploadError) as e:
logger.info('Failed to upload file. %s' % (str(e))) logger.info('Failed to upload file. %s' % (str(e)))
return False return False
finally: finally:
......
...@@ -38,7 +38,7 @@ import importlib ...@@ -38,7 +38,7 @@ import importlib
import traceback import traceback
import psutil import psutil
from multiprocessing import Process, Queue as MQueue from multiprocessing import Process, Queue as MQueue
import Queue from six.moves import queue, reload_module
from slapos.util import mkdir_p, chownDirectory from slapos.util import mkdir_p, chownDirectory
from slapos.grid.utils import dropPrivileges, killProcessTree from slapos.grid.utils import dropPrivileges, killProcessTree
from slapos.grid.promise import interface from slapos.grid.promise import interface
...@@ -168,7 +168,7 @@ class PromiseProcess(Process): ...@@ -168,7 +168,7 @@ class PromiseProcess(Process):
if not os.path.exists(init_file): if not os.path.exists(init_file):
with open(init_file, 'w') as f: with open(init_file, 'w') as f:
f.write("") f.write("")
os.chmod(init_file, 0644) os.chmod(init_file, 0o644)
# add promise folder to sys.path so we can import promise script # add promise folder to sys.path so we can import promise script
if sys.path[0] != promise_folder: if sys.path[0] != promise_folder:
sys.path[0:0] = [promise_folder] sys.path[0:0] = [promise_folder]
...@@ -184,9 +184,8 @@ class PromiseProcess(Process): ...@@ -184,9 +184,8 @@ class PromiseProcess(Process):
raise AttributeError("Class RunPromise not found in promise" \ raise AttributeError("Class RunPromise not found in promise" \
"%s" % self.name) "%s" % self.name)
if not interface.IPromise.implementedBy(promise_module.RunPromise): if not interface.IPromise.implementedBy(promise_module.RunPromise):
raise RuntimeError("RunPromise class in %s must implements 'IPromise'" \ raise RuntimeError("RunPromise class in %s must implement 'IPromise'"
" interface. zope_interface.implements(interface.IPromise) is" \ " interface. @implementer(interface.IPromise) is missing ?" % self.name)
" missing ?" % self.name)
from slapos.grid.promise.generic import GenericPromise from slapos.grid.promise.generic import GenericPromise
if not issubclass(promise_module.RunPromise, GenericPromise): if not issubclass(promise_module.RunPromise, GenericPromise):
...@@ -195,7 +194,7 @@ class PromiseProcess(Process): ...@@ -195,7 +194,7 @@ class PromiseProcess(Process):
if promise_module.__file__ != self.promise_path: if promise_module.__file__ != self.promise_path:
# cached module need to be updated # cached module need to be updated
promise_module = reload(promise_module) promise_module = reload_module(promise_module)
# load extra parameters # load extra parameters
self._loadPromiseParameterDict(promise_module) self._loadPromiseParameterDict(promise_module)
...@@ -208,7 +207,7 @@ class PromiseProcess(Process): ...@@ -208,7 +207,7 @@ class PromiseProcess(Process):
if not isinstance(extra_dict, dict): if not isinstance(extra_dict, dict):
raise ValueError("Extra parameter is not a dict") raise ValueError("Extra parameter is not a dict")
for key in extra_dict: for key in extra_dict:
if self.argument_dict.has_key(key): if key in self.argument_dict:
raise ValueError("Extra parameter name %r cannot be used.\n%s" % ( raise ValueError("Extra parameter name %r cannot be used.\n%s" % (
key, extra_dict)) key, extra_dict))
self.argument_dict[key] = extra_dict[key] self.argument_dict[key] = extra_dict[key]
...@@ -362,7 +361,7 @@ class PromiseLauncher(object): ...@@ -362,7 +361,7 @@ class PromiseLauncher(object):
try: try:
result = PromiseQueueResult() result = PromiseQueueResult()
result.load(json.loads(f.read())) result.load(json.loads(f.read()))
except ValueError, e: except ValueError as e:
result = None result = None
self.logger.warn('Bad promise JSON result at %r: %s' % ( self.logger.warn('Bad promise JSON result at %r: %s' % (
promise_output_file, promise_output_file,
...@@ -375,7 +374,7 @@ class PromiseLauncher(object): ...@@ -375,7 +374,7 @@ class PromiseLauncher(object):
while True: while True:
try: try:
self.queue_result.get_nowait() self.queue_result.get_nowait()
except Queue.Empty: except queue.Empty:
return return
def _updateFolderOwner(self, folder_path=None): def _updateFolderOwner(self, folder_path=None):
...@@ -443,7 +442,7 @@ class PromiseLauncher(object): ...@@ -443,7 +442,7 @@ class PromiseLauncher(object):
if not promise_process.is_alive(): if not promise_process.is_alive():
try: try:
queue_item = self.queue_result.get(True, 1) queue_item = self.queue_result.get(True, 1)
except Queue.Empty: except queue.Empty:
# no result found in process result Queue # no result found in process result Queue
pass pass
else: else:
......
...@@ -35,8 +35,10 @@ import time ...@@ -35,8 +35,10 @@ import time
import random import random
import traceback import traceback
import slapos.slap import slapos.slap
from slapos.util import mkdir_p from slapos.util import bytes2str, mkdir_p
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import six
from six import PY3, with_metaclass
from datetime import datetime, timedelta from datetime import datetime, timedelta
PROMISE_STATE_FOLDER_NAME = '.slapgrid/promise' PROMISE_STATE_FOLDER_NAME = '.slapgrid/promise'
...@@ -46,6 +48,10 @@ PROMISE_LOG_FOLDER_NAME = '.slapgrid/promise/log' ...@@ -46,6 +48,10 @@ PROMISE_LOG_FOLDER_NAME = '.slapgrid/promise/log'
PROMISE_PARAMETER_NAME = 'extra_config_dict' PROMISE_PARAMETER_NAME = 'extra_config_dict'
PROMISE_PERIOD_FILE_NAME = '%s.periodicity' PROMISE_PERIOD_FILE_NAME = '%s.periodicity'
LOGLINE_RE = r"(\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2})\s+\-?\s*(\w{4,7})\s+\-?\s+(\d+\-\d{3})\s+\-?\s*(.*)"
matchLogStr = re.compile(LOGLINE_RE).match
matchLogBytes = re.compile(LOGLINE_RE.encode()).match if PY3 else matchLogStr
class BaseResult(object): class BaseResult(object):
def __init__(self, problem=False, message=None, date=None): def __init__(self, problem=False, message=None, date=None):
self.__problem = problem self.__problem = problem
...@@ -129,10 +135,7 @@ class PromiseQueueResult(object): ...@@ -129,10 +135,7 @@ class PromiseQueueResult(object):
self.path = data['path'] self.path = data['path']
self.execution_time = data['execution-time'] self.execution_time = data['execution-time']
class GenericPromise(object): class GenericPromise(with_metaclass(ABCMeta, object)):
# Abstract class
__metaclass__ = ABCMeta
def __init__(self, config): def __init__(self, config):
self.__config = config self.__config = config
...@@ -160,9 +163,7 @@ class GenericPromise(object): ...@@ -160,9 +163,7 @@ class GenericPromise(object):
for handler in self.logger.handlers: for handler in self.logger.handlers:
self.logger.removeHandler(handler) self.logger.removeHandler(handler)
if self.__log_folder is None: if self.__log_folder is None:
# configure logger with StringIO self.__logger_buffer = six.StringIO()
import cStringIO
self.__logger_buffer = cStringIO.StringIO()
logger_handler = logging.StreamHandler(self.__logger_buffer) logger_handler = logging.StreamHandler(self.__logger_buffer)
self.__log_file = None self.__log_file = None
else: else:
...@@ -230,9 +231,9 @@ class GenericPromise(object): ...@@ -230,9 +231,9 @@ class GenericPromise(object):
""" """
Call bang if requested Call bang if requested
""" """
if self.__config.has_key('master-url') and \ if 'master-url' in self.__config and \
self.__config.has_key('partition-id') and \ 'partition-in' in self.__config and \
self.__config.has_key('computer-id'): 'computer-id' in self.__config:
slap = slapos.slap.slap() slap = slapos.slap.slap()
slap.initializeConnection( slap.initializeConnection(
...@@ -247,18 +248,14 @@ class GenericPromise(object): ...@@ -247,18 +248,14 @@ class GenericPromise(object):
computer_partition.bang(message) computer_partition.bang(message)
self.logger.info("Bang with message %r." % message) self.logger.info("Bang with message %r." % message)
def __getLogRegex(self):
return re.compile(r"(\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2})\s+\-?\s*(\w{4,7})\s+\-?\s+(\d+\-\d{3})\s+\-?\s*(.*)")
def __getResultFromString(self, result_string, only_failure=False): def __getResultFromString(self, result_string, only_failure=False):
line_list = result_string.split('\n') line_list = result_string.split('\n')
result_list = [] result_list = []
line_part = "" line_part = ""
regex = self.__getLogRegex()
for line in line_list: for line in line_list:
if not line: if not line:
continue continue
match = regex.match(line) match = matchLogStr(line)
if match is not None: if match is not None:
if not only_failure or (only_failure and match.groups()[1] == 'ERROR'): if not only_failure or (only_failure and match.groups()[1] == 'ERROR'):
result_list.append({ result_list.append({
...@@ -297,37 +294,36 @@ class GenericPromise(object): ...@@ -297,37 +294,36 @@ class GenericPromise(object):
if not os.path.exists(self.__log_file): if not os.path.exists(self.__log_file):
return [] return []
regex = self.__getLogRegex()
max_date_string = ""
if latest_minute > 0: if latest_minute > 0:
date = datetime.now() - timedelta(minutes=latest_minute) date = datetime.now() - timedelta(minutes=latest_minute)
max_date_string = date.strftime('%Y-%m-%d %H:%M:%S') max_date_string = date.strftime('%Y-%m-%d %H:%M:%S')
else:
max_date_string = ""
line_list = [] line_list = []
result_list = [] result_list = []
transaction_id = None transaction_id = None
transaction_count = 0 transaction_count = 0
with open(self.__log_file, 'r') as f: with open(self.__log_file, 'rb') as f:
offset = 0
f.seek(0, 2) f.seek(0, 2)
size = f.tell() * -1 offset = f.tell()
line = line_part = "" line = b""
while offset > size: line_part = ""
while offset:
offset -= 1 offset -= 1
f.seek(offset, 2) f.seek(offset)
char = f.read(1) char = f.read(1)
if char != '\n': if char != b'\n':
line = char + line line = char + line
if char == '\n' or offset == size: if offset:
# Add new line
if offset == -1:
continue continue
if line != "": if line:
result = regex.match(line) result = matchLogBytes(line)
if result is not None: if result is not None:
if max_date_string and result.groups()[0] <= max_date_string: date, level, tid, msg = map(bytes2str, result.groups())
if max_date_string and date <= max_date_string:
break break
if transaction_id != result.groups()[2]: if transaction_id != tid:
if transaction_id is not None: if transaction_id is not None:
# append new result # append new result
result_list.append(line_list) result_list.append(line_list)
...@@ -335,20 +331,18 @@ class GenericPromise(object): ...@@ -335,20 +331,18 @@ class GenericPromise(object):
transaction_count += 1 transaction_count += 1
if transaction_count > result_count: if transaction_count > result_count:
break break
transaction_id = result.groups()[2] transaction_id = tid
if not only_failure or \ if not only_failure or level == 'ERROR':
(only_failure and result.groups()[1] == 'ERROR'):
line_list.insert(0, { line_list.insert(0, {
'date': datetime.strptime(result.groups()[0], 'date': datetime.strptime(date,
'%Y-%m-%d %H:%M:%S'), '%Y-%m-%d %H:%M:%S'),
'status': result.groups()[1], 'status': level,
'message': (result.groups()[3] + line_part).strip(), 'message': (msg + line_part).strip(),
}) })
line_part = ""
else: else:
line_part = '\n' + line + line_part line_part = '\n' + bytes2str(line) + line_part
line = "" line = b""
continue
line = line_part = ""
if len(line_list): if len(line_list):
result_list.append(line_list) result_list.append(line_list)
...@@ -410,7 +404,7 @@ class GenericPromise(object): ...@@ -410,7 +404,7 @@ class GenericPromise(object):
try: try:
self.__queue.put_nowait(result_item) self.__queue.put_nowait(result_item)
break break
except Queue.Full, e: except Queue.Full as e:
error = e error = e
time.sleep(0.5) time.sleep(0.5)
if error: if error:
...@@ -459,7 +453,7 @@ class GenericPromise(object): ...@@ -459,7 +453,7 @@ class GenericPromise(object):
""" """
try: try:
self.sense() self.sense()
except Exception, e: except Exception as e:
# log the result # log the result
self.logger.error(str(e)) self.logger.error(str(e))
if check_anomaly: if check_anomaly:
...@@ -468,7 +462,7 @@ class GenericPromise(object): ...@@ -468,7 +462,7 @@ class GenericPromise(object):
result = self.anomaly() result = self.anomaly()
if result is None: if result is None:
raise ValueError("Promise anomaly method returned 'None'") raise ValueError("Promise anomaly method returned 'None'")
except Exception, e: except Exception as e:
result = AnomalyResult(problem=True, message=str(e)) result = AnomalyResult(problem=True, message=str(e))
else: else:
if isinstance(result, AnomalyResult) and result.hasFailed() and can_bang: if isinstance(result, AnomalyResult) and result.hasFailed() and can_bang:
...@@ -482,7 +476,7 @@ class GenericPromise(object): ...@@ -482,7 +476,7 @@ class GenericPromise(object):
result = self.test() result = self.test()
if result is None: if result is None:
raise ValueError("Promise test method returned 'None'") raise ValueError("Promise test method returned 'None'")
except Exception, e: except Exception as e:
result = TestResult(problem=True, message=str(e)) result = TestResult(problem=True, message=str(e))
if self.__logger_buffer is not None: if self.__logger_buffer is not None:
......
...@@ -32,17 +32,16 @@ import subprocess ...@@ -32,17 +32,16 @@ import subprocess
import functools import functools
import signal import signal
import traceback import traceback
from zope import interface as zope_interface from zope.interface import implementer
from slapos.grid.promise import interface from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise from slapos.grid.promise.generic import GenericPromise
@implementer(interface.IPromise)
class WrapPromise(GenericPromise): class WrapPromise(GenericPromise):
""" """
A wrapper promise used to run old promises style and bash promises A wrapper promise used to run old promises style and bash promises
""" """
zope_interface.implements(interface.IPromise)
def __init__(self, config): def __init__(self, config):
GenericPromise.__init__(self, config) GenericPromise.__init__(self, config)
self.setPeriodicity(minute=2) self.setPeriodicity(minute=2)
...@@ -62,21 +61,19 @@ class WrapPromise(GenericPromise): ...@@ -62,21 +61,19 @@ class WrapPromise(GenericPromise):
[self.getPromiseFile()], [self.getPromiseFile()],
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, stderr=subprocess.STDOUT,
cwd=self.getPartitionFolder() cwd=self.getPartitionFolder(),
universal_newlines=True,
) )
handler = functools.partial(self.terminate, self.getName(), self.logger, handler = functools.partial(self.terminate, self.getName(), self.logger,
promise_process) promise_process)
signal.signal(signal.SIGINT, handler) signal.signal(signal.SIGINT, handler)
signal.signal(signal.SIGTERM, handler) signal.signal(signal.SIGTERM, handler)
output, error = promise_process.communicate() message = promise_process.communicate()[0].strip()
message = output or "" if promise_process.returncode:
if error: self.logger.error(message)
message += "\n" + error
if promise_process.returncode != 0:
self.logger.error(message.strip())
else: else:
self.logger.info(message.strip()) self.logger.info(message)
def test(self): def test(self):
# Fail if the latest promise result failed # Fail if the latest promise result failed
......
...@@ -32,7 +32,7 @@ import os ...@@ -32,7 +32,7 @@ import os
import pkg_resources import pkg_resources
import random import random
import socket import socket
import StringIO from io import BytesIO
import subprocess import subprocess
import sys import sys
import tempfile import tempfile
...@@ -42,6 +42,7 @@ import warnings ...@@ -42,6 +42,7 @@ import warnings
import logging import logging
import json import json
import shutil import shutil
import six
if sys.version_info < (2, 6): if sys.version_info < (2, 6):
warnings.warn('Used python version (%s) is old and has problems with' warnings.warn('Used python version (%s) is old and has problems with'
...@@ -167,7 +168,7 @@ def merged_options(args, configp): ...@@ -167,7 +168,7 @@ def merged_options(args, configp):
if configp.has_section('networkcache'): if configp.has_section('networkcache'):
options.update(dict(configp.items('networkcache'))) options.update(dict(configp.items('networkcache')))
for key, value in vars(args).iteritems(): for key, value in six.iteritems(vars(args)):
if value is not None: if value is not None:
options[key] = value options[key] = value
...@@ -672,7 +673,7 @@ stderr_logfile_backups=1 ...@@ -672,7 +673,7 @@ stderr_logfile_backups=1
computer_partition.setComputerPartitionRelatedInstanceList( computer_partition.setComputerPartitionRelatedInstanceList(
[reference for reference in tf.read().split('\n') if reference] [reference for reference in tf.read().split('\n') if reference]
) )
except NotFoundError, e: except NotFoundError as e:
# Master doesn't implement this feature ? # Master doesn't implement this feature ?
self.logger.warning("NotFoundError: %s. \nCannot send requested instance "\ self.logger.warning("NotFoundError: %s. \nCannot send requested instance "\
"list to master. Please check if this feature is"\ "list to master. Please check if this feature is"\
...@@ -682,12 +683,12 @@ stderr_logfile_backups=1 ...@@ -682,12 +683,12 @@ stderr_logfile_backups=1
""" """
""" """
query_cmd = rule_command.replace('--add-rule', '--query-rule') query_cmd = rule_command.replace('--add-rule', '--query-rule')
process = FPopen(query_cmd) process = FPopen(query_cmd, universal_newlines=True)
result, stderr = process.communicate() result, stderr = process.communicate()
if result.strip() == 'no': if result.strip() == 'no':
# rule doesn't exist add to firewall # rule doesn't exist add to firewall
self.logger.debug(rule_command) self.logger.debug(rule_command)
process = FPopen(rule_command) process = FPopen(rule_command, universal_newlines=True)
rule_result, stderr = process.communicate() rule_result, stderr = process.communicate()
if process.returncode == 0: if process.returncode == 0:
if rule_result.strip() != 'success': if rule_result.strip() != 'success':
...@@ -705,13 +706,13 @@ stderr_logfile_backups=1 ...@@ -705,13 +706,13 @@ stderr_logfile_backups=1
""" """
""" """
query_cmd = rule_command.replace('--add-rule', '--query-rule') query_cmd = rule_command.replace('--add-rule', '--query-rule')
process = FPopen(query_cmd) process = FPopen(query_cmd, universal_newlines=True)
result, stderr = process.communicate() result, stderr = process.communicate()
if result.strip() == 'yes': if result.strip() == 'yes':
# The rule really exist, remove it # The rule really exist, remove it
remove_command = rule_command.replace('--add-rule', '--remove-rule') remove_command = rule_command.replace('--add-rule', '--remove-rule')
self.logger.debug(remove_command) self.logger.debug(remove_command)
process = FPopen(remove_command) process = FPopen(remove_command, universal_newlines=True)
rule_result, stderr = process.communicate() rule_result, stderr = process.communicate()
if process.returncode == 0: if process.returncode == 0:
if rule_result.strip() != 'success': if rule_result.strip() != 'success':
...@@ -764,7 +765,7 @@ stderr_logfile_backups=1 ...@@ -764,7 +765,7 @@ stderr_logfile_backups=1
# XXX - need to check firewalld reload instead of restart # XXX - need to check firewalld reload instead of restart
self.logger.info("Reloading firewall configuration...") self.logger.info("Reloading firewall configuration...")
reload_cmd = self.firewall_conf['reload_config_cmd'] reload_cmd = self.firewall_conf['reload_config_cmd']
reload_process = FPopen(reload_cmd) reload_process = FPopen(reload_cmd, universal_newlines=True)
stdout, stderr = reload_process.communicate() stdout, stderr = reload_process.communicate()
if reload_process.returncode != 0: if reload_process.returncode != 0:
raise Exception("Failed to load firewalld rules with command %s.\n%" % ( raise Exception("Failed to load firewalld rules with command %s.\n%" % (
...@@ -922,7 +923,7 @@ stderr_logfile_backups=1 ...@@ -922,7 +923,7 @@ stderr_logfile_backups=1
self._checkPromiseList(local_partition, self._checkPromiseList(local_partition,
check_anomaly=True, check_anomaly=True,
force=False) force=False)
except PromiseError, e: except PromiseError as e:
self.logger.error(e) self.logger.error(e)
if partition_access_status is None or not status_error: if partition_access_status is None or not status_error:
computer_partition.error(e, logger=self.logger) computer_partition.error(e, logger=self.logger)
...@@ -1140,7 +1141,7 @@ stderr_logfile_backups=1 ...@@ -1140,7 +1141,7 @@ stderr_logfile_backups=1
(computer_partition_id, computer_partition_state) (computer_partition_id, computer_partition_state)
computer_partition.error(error_string, logger=self.logger) computer_partition.error(error_string, logger=self.logger)
raise NotImplementedError(error_string) raise NotImplementedError(error_string)
except Exception, e: except Exception as e:
if not isinstance(e, PromiseError): if not isinstance(e, PromiseError):
with open(error_output_file, 'w') as error_file: with open(error_output_file, 'w') as error_file:
# Write error message in a log file assible to computer partition user # Write error message in a log file assible to computer partition user
...@@ -1309,7 +1310,7 @@ stderr_logfile_backups=1 ...@@ -1309,7 +1310,7 @@ stderr_logfile_backups=1
def validateXML(self, to_be_validated, xsd_model): def validateXML(self, to_be_validated, xsd_model):
"""Validates a given xml file""" """Validates a given xml file"""
#We retrieve the xsd model #We retrieve the xsd model
xsd_model = StringIO.StringIO(xsd_model) xsd_model = BytesIO(xsd_model)
xmlschema_doc = etree.parse(xsd_model) xmlschema_doc = etree.parse(xsd_model)
xmlschema = etree.XMLSchema(xmlschema_doc) xmlschema = etree.XMLSchema(xmlschema_doc)
......
...@@ -35,9 +35,10 @@ import subprocess ...@@ -35,9 +35,10 @@ import subprocess
import stat import stat
import sys import sys
import time import time
import xmlrpclib from six.moves import xmlrpc_client as xmlrpclib
from slapos.grid.utils import (createPrivateDirectory, SlapPopen, updateFile) from slapos.grid.utils import (createPrivateDirectory, SlapPopen, updateFile)
from slapos.util import bytes2str
from supervisor import xmlrpc, states from supervisor import xmlrpc, states
...@@ -89,8 +90,8 @@ def createSupervisordConfiguration(instance_root, watchdog_command=''): ...@@ -89,8 +90,8 @@ def createSupervisordConfiguration(instance_root, watchdog_command=''):
# Creates supervisord configuration # Creates supervisord configuration
updateFile(supervisord_configuration_file_path, updateFile(supervisord_configuration_file_path,
pkg_resources.resource_stream(__name__, bytes2str(pkg_resources.resource_string(__name__,
'templates/supervisord.conf.in').read() % { 'templates/supervisord.conf.in')) % {
'supervisord_configuration_directory': supervisord_configuration_directory, 'supervisord_configuration_directory': supervisord_configuration_directory,
'supervisord_socket': os.path.abspath(supervisord_socket), 'supervisord_socket': os.path.abspath(supervisord_socket),
'supervisord_loglevel': 'info', 'supervisord_loglevel': 'info',
......
...@@ -40,6 +40,8 @@ import logging ...@@ -40,6 +40,8 @@ import logging
import psutil import psutil
import time import time
import six
from slapos.grid.exception import BuildoutFailedError, WrongPermissionError from slapos.grid.exception import BuildoutFailedError, WrongPermissionError
# Such umask by default will create paths with full permission # Such umask by default will create paths with full permission
...@@ -123,20 +125,18 @@ class SlapPopen(subprocess.Popen): ...@@ -123,20 +125,18 @@ class SlapPopen(subprocess.Popen):
self.stdin.close() self.stdin.close()
self.stdin = None self.stdin = None
# XXX-Cedric: this algorithm looks overkill for simple logging.
output_lines = [] output_lines = []
while True: for line in self.stdout:
line = self.stdout.readline() if type(line) is not str:
if line == '' and self.poll() is not None: line = line.decode(errors='replace')
break output_lines.append(line)
if line: logger.info(line.rstrip('\n'))
output_lines.append(line) self.wait()
logger.info(line.rstrip('\n'))
self.output = ''.join(output_lines) self.output = ''.join(output_lines)
def md5digest(url): def md5digest(url):
return hashlib.md5(url).hexdigest() return hashlib.md5(url.encode('utf-8')).hexdigest()
def getCleanEnvironment(logger, home_path='/tmp'): def getCleanEnvironment(logger, home_path='/tmp'):
...@@ -150,7 +150,7 @@ def getCleanEnvironment(logger, home_path='/tmp'): ...@@ -150,7 +150,7 @@ def getCleanEnvironment(logger, home_path='/tmp'):
if old is not None: if old is not None:
removed_env.append(k) removed_env.append(k)
changed_env['HOME'] = env['HOME'] = home_path changed_env['HOME'] = env['HOME'] = home_path
for k in sorted(changed_env.iterkeys()): for k in sorted(six.iterkeys(changed_env)):
logger.debug('Overridden %s = %r' % (k, changed_env[k])) logger.debug('Overridden %s = %r' % (k, changed_env[k]))
if removed_env: if removed_env:
logger.debug('Removed from environment: %s' % ', '.join(sorted(removed_env))) logger.debug('Removed from environment: %s' % ', '.join(sorted(removed_env)))
...@@ -351,18 +351,20 @@ def launchBuildout(path, buildout_binary, logger, ...@@ -351,18 +351,20 @@ def launchBuildout(path, buildout_binary, logger,
def updateFile(file_path, content, mode=0o600): def updateFile(file_path, content, mode=0o600):
"""Creates or updates a file with "content" as content.""" """Creates or updates a file with "content" as content."""
altered = False content = content.encode('utf-8')
if not (os.path.isfile(file_path)) or \ try:
not (hashlib.md5(open(file_path).read()).digest() == with open(file_path, 'rb') as f:
hashlib.md5(content).digest()): if f.read(len(content) + 1) == content:
with open(file_path, 'w') as fout: if stat.S_IMODE(os.fstat(f.fileno()).st_mode) == mode:
fout.write(content) return False
altered = True os.fchmod(f.fileno(), mode)
os.chmod(file_path, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) return True
if stat.S_IMODE(os.stat(file_path).st_mode) != mode: except IOError:
os.chmod(file_path, mode) pass
altered = True with open(file_path, 'wb') as f:
return altered os.fchmod(f.fileno(), mode)
f.write(content)
return True
def updateExecutable(executable_path, content): def updateExecutable(executable_path, content):
...@@ -399,7 +401,7 @@ def killProcessTree(pid, logger): ...@@ -399,7 +401,7 @@ def killProcessTree(pid, logger):
for child in running_process_list: for child in running_process_list:
try: try:
child.suspend() child.suspend()
except psutil.Error, e: except psutil.Error as e:
logger.debug(str(e)) logger.debug(str(e))
time.sleep(0.2) time.sleep(0.2)
...@@ -408,5 +410,5 @@ def killProcessTree(pid, logger): ...@@ -408,5 +410,5 @@ def killProcessTree(pid, logger):
for process in process_list: for process in process_list:
try: try:
process.kill() process.kill()
except psutil.Error, e: except psutil.Error as e:
logger.debug("Process kill: %s" % e) logger.debug("Process kill: %s" % e)
...@@ -30,6 +30,7 @@ ...@@ -30,6 +30,7 @@
import argparse import argparse
import os.path import os.path
import sys import sys
import six
import slapos.slap.slap import slapos.slap.slap
from slapos.grid.slapgrid import COMPUTER_PARTITION_TIMESTAMP_FILENAME, \ from slapos.grid.slapgrid import COMPUTER_PARTITION_TIMESTAMP_FILENAME, \
...@@ -56,7 +57,7 @@ def parseArgumentTuple(): ...@@ -56,7 +57,7 @@ def parseArgumentTuple():
# Build option_dict # Build option_dict
option_dict = {} option_dict = {}
for argument_key, argument_value in vars(option).iteritems(): for argument_key, argument_value in six.iteritems(vars(option)):
option_dict.update({argument_key: argument_value}) option_dict.update({argument_key: argument_value})
return option_dict return option_dict
......
...@@ -5,12 +5,12 @@ import os.path ...@@ -5,12 +5,12 @@ import os.path
import pwd import pwd
import time import time
from zope import interface as zope_interface from zope.interface import implementer
from slapos.manager import interface from slapos.manager import interface
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@implementer(interface.IManager)
class Manager(object): class Manager(object):
"""Manage cgroup's cpuset in terms on initializing and runtime operations. """Manage cgroup's cpuset in terms on initializing and runtime operations.
...@@ -21,8 +21,6 @@ class Manager(object): ...@@ -21,8 +21,6 @@ class Manager(object):
TODO: there is no limit on number of reserved cores per user. TODO: there is no limit on number of reserved cores per user.
""" """
zope_interface.implements(interface.IManager)
cpu_exclusive_file = ".slapos-cpu-exclusive" cpu_exclusive_file = ".slapos-cpu-exclusive"
cpuset_path = "/sys/fs/cgroup/cpuset/" cpuset_path = "/sys/fs/cgroup/cpuset/"
task_write_mode = "wt" task_write_mode = "wt"
......
...@@ -5,7 +5,6 @@ import os ...@@ -5,7 +5,6 @@ import os
import pwd import pwd
import grp import grp
from .interface import IManager from .interface import IManager
from itertools import ifilter
from zope import interface from zope import interface
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
......
...@@ -5,8 +5,8 @@ import netaddr ...@@ -5,8 +5,8 @@ import netaddr
import os import os
from .interface import IManager from .interface import IManager
from itertools import ifilter from six.moves import filter
from zope import interface from zope.interface import implementer
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
...@@ -23,9 +23,8 @@ def which(exename): ...@@ -23,9 +23,8 @@ def which(exename):
return full_path return full_path
return None return None
@implementer(IManager)
class Manager(object): class Manager(object):
interface.implements(IManager)
port_redirect_filename = '.slapos-port-redirect' port_redirect_filename = '.slapos-port-redirect'
def __init__(self, config): def __init__(self, config):
...@@ -89,7 +88,7 @@ class Manager(object): ...@@ -89,7 +88,7 @@ class Manager(object):
'full_ip_list', []) 'full_ip_list', [])
partition_ip_list = [tup[1] for tup in partition_ip_list] partition_ip_list = [tup[1] for tup in partition_ip_list]
partition_ipv6 = next(ifilter(lambda ip_addr: ':' in ip_addr, partition_ipv6 = next(filter(lambda ip_addr: ':' in ip_addr,
partition_ip_list), partition_ip_list),
None) None)
......
...@@ -4,17 +4,16 @@ import os ...@@ -4,17 +4,16 @@ import os
import sys import sys
import subprocess import subprocess
from zope import interface as zope_interface from zope.interface import implementer
from slapos.manager import interface from slapos.manager import interface
from slapos.grid.slapgrid import COMPUTER_PARTITION_WAIT_LIST_FILENAME from slapos.grid.slapgrid import COMPUTER_PARTITION_WAIT_LIST_FILENAME
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@implementer(interface.IManager)
class Manager(object): class Manager(object):
"""Manager is called in every step of preparation of the computer.""" """Manager is called in every step of preparation of the computer."""
zope_interface.implements(interface.IManager)
def __init__(self, config): def __init__(self, config):
"""Manager needs to know config for its functioning. """Manager needs to know config for its functioning.
""" """
......
...@@ -33,6 +33,8 @@ import logging ...@@ -33,6 +33,8 @@ import logging
from slapos.proxy.views import app from slapos.proxy.views import app
from slapos.util import sqlite_connect from slapos.util import sqlite_connect
import six
def _generateSoftwareProductListFromString(software_product_list_string): def _generateSoftwareProductListFromString(software_product_list_string):
""" """
Take a string as argument (which usually comes from the software_product_list Take a string as argument (which usually comes from the software_product_list
...@@ -72,7 +74,7 @@ class ProxyConfig(object): ...@@ -72,7 +74,7 @@ class ProxyConfig(object):
elif section.startswith('multimaster/'): elif section.startswith('multimaster/'):
# Merge multimaster configuration if any # Merge multimaster configuration if any
# XXX: check for duplicate SR entries # XXX: check for duplicate SR entries
for key, value in configuration_dict.iteritems(): for key, value in six.iteritems(configuration_dict):
if key == 'software_release_list': if key == 'software_release_list':
# Split multi-lines values # Split multi-lines values
configuration_dict[key] = [line.strip() for line in value.strip().split('\n')] configuration_dict[key] = [line.strip() for line in value.strip().split('\n')]
......
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import pkg_resources import pkg_resources
from slapos.util import bytes2str
DB_VERSION = pkg_resources.resource_stream('slapos.proxy', 'schema.sql').readline().strip().split(':')[1] DB_VERSION = bytes2str(pkg_resources.resource_stream('slapos.proxy', 'schema.sql').readline()).strip().split(':')[1]
This diff is collapsed.
...@@ -30,4 +30,4 @@ if sys.version_info < (2, 6): ...@@ -30,4 +30,4 @@ if sys.version_info < (2, 6):
import warnings import warnings
warnings.warn('Used python version (%s) is old and has problems with' warnings.warn('Used python version (%s) is old and has problems with'
' IPv6 connections' % '.'.join([str(q) for q in sys.version_info[:3]])) ' IPv6 connections' % '.'.join([str(q) for q in sys.version_info[:3]]))
from slap import * from .slap import *
This diff is collapsed.
...@@ -29,23 +29,21 @@ import logging ...@@ -29,23 +29,21 @@ import logging
import pprint import pprint
import unittest import unittest
import tempfile import tempfile
import StringIO
import sys import sys
import os import os
import sqlite3
import pkg_resources import pkg_resources
from contextlib import contextmanager
from mock import patch, create_autospec from mock import patch, create_autospec
import mock import mock
from slapos.util import sqlite_connect from slapos.util import sqlite_connect, bytes2str
import slapos.cli.console import slapos.cli.console
import slapos.cli.entry import slapos.cli.entry
import slapos.cli.info import slapos.cli.info
import slapos.cli.list import slapos.cli.list
import slapos.cli.supervisorctl import slapos.cli.supervisorctl
import slapos.cli.proxy_show from slapos.cli.proxy_show import do_show, StringIO
from slapos.client import ClientConfig from slapos.client import ClientConfig
import slapos.grid.svcbackend import slapos.grid.svcbackend
import slapos.proxy import slapos.proxy
...@@ -99,9 +97,9 @@ class TestCliProxyShow(CliMixin): ...@@ -99,9 +97,9 @@ class TestCliProxyShow(CliMixin):
self.conf.logger = self.logger self.conf.logger = self.logger
# load database # load database
schema = pkg_resources.resource_stream('slapos.tests.slapproxy', 'database_dump_version_11.sql') schema = bytes2str(pkg_resources.resource_string('slapos.tests.slapproxy', 'database_dump_version_11.sql'))
db = sqlite_connect(self.db_file.name) db = sqlite_connect(self.db_file.name)
db.cursor().executescript(schema.read()) db.cursor().executescript(schema)
db.commit() db.commit()
# by default we simulate being invoked with "show all" arguments # by default we simulate being invoked with "show all" arguments
...@@ -121,7 +119,7 @@ class TestCliProxyShow(CliMixin): ...@@ -121,7 +119,7 @@ class TestCliProxyShow(CliMixin):
with mock.patch( with mock.patch(
'slapos.cli.proxy_show.logging.getLogger', 'slapos.cli.proxy_show.logging.getLogger',
return_value=logger): return_value=logger):
slapos.cli.proxy_show.do_show(self.conf) do_show(self.conf)
# installed softwares are listed # installed softwares are listed
logger.info.assert_any_call( logger.info.assert_any_call(
...@@ -132,7 +130,7 @@ class TestCliProxyShow(CliMixin): ...@@ -132,7 +130,7 @@ class TestCliProxyShow(CliMixin):
logger.info.assert_any_call( logger.info.assert_any_call(
' %s = %s', ' %s = %s',
'_', '_',
'{\n "url": "memcached://10.0.30.235:2003/", \n "monitor-base-url": ""\n}') '{\n "monitor-base-url": "",\n "url": "memcached://10.0.30.235:2003/"\n}')
# other parameters are displayed as simple string # other parameters are displayed as simple string
logger.info.assert_any_call( logger.info.assert_any_call(
...@@ -152,10 +150,10 @@ class TestCliProxyShow(CliMixin): ...@@ -152,10 +150,10 @@ class TestCliProxyShow(CliMixin):
def test_proxy_show_displays_on_stdout(self): def test_proxy_show_displays_on_stdout(self):
saved_stderr = sys.stderr saved_stderr = sys.stderr
saved_stdout = sys.stdout saved_stdout = sys.stdout
sys.stderr = stderr = StringIO.StringIO() sys.stderr = stderr = StringIO()
sys.stdout = stdout = StringIO.StringIO() sys.stdout = stdout = StringIO()
try: try:
slapos.cli.proxy_show.do_show(self.conf) do_show(self.conf)
finally: finally:
sys.stderr = saved_stderr sys.stderr = saved_stderr
sys.stdout = saved_stdout sys.stdout = saved_stdout
...@@ -169,8 +167,8 @@ class TestCliProxyShow(CliMixin): ...@@ -169,8 +167,8 @@ class TestCliProxyShow(CliMixin):
def test_proxy_show_use_pager(self): def test_proxy_show_use_pager(self):
saved_stderr = sys.stderr saved_stderr = sys.stderr
saved_stdout = sys.stdout saved_stdout = sys.stdout
sys.stderr = stderr = StringIO.StringIO() sys.stderr = stderr = StringIO()
sys.stdout = stdout = StringIO.StringIO() sys.stdout = stdout = StringIO()
stdout.isatty = lambda *args: True stdout.isatty = lambda *args: True
# use a pager that just output to a file. # use a pager that just output to a file.
...@@ -179,7 +177,7 @@ class TestCliProxyShow(CliMixin): ...@@ -179,7 +177,7 @@ class TestCliProxyShow(CliMixin):
os.environ['PAGER'] = 'cat > {}'.format(tmp.name) os.environ['PAGER'] = 'cat > {}'.format(tmp.name)
try: try:
slapos.cli.proxy_show.do_show(self.conf) do_show(self.conf)
finally: finally:
sys.stderr = saved_stderr sys.stderr = saved_stderr
sys.stdout = saved_stdout sys.stdout = saved_stdout
...@@ -327,53 +325,34 @@ class TestCliSupervisorctl(CliMixin): ...@@ -327,53 +325,34 @@ class TestCliSupervisorctl(CliMixin):
class TestCliConsole(unittest.TestCase): class TestCliConsole(unittest.TestCase):
def setUp(self):
cp = slapos.slap.ComputerPartition('computer_id', 'partition_id')
cp._parameter_dict = {'parameter_name': 'parameter_value'}
request_patch = patch.object(slapos.slap.OpenOrder, 'request', return_value = cp) script = """\
self.mock_request = request_patch.start() print(request('software_release', 'instance').getInstanceParameterDict()['parameter_name'])
"""
self.config_file = tempfile.NamedTemporaryFile() @contextmanager
self.config_file.write('''[slapos] def _test_console(self):
master_url=null cp = slapos.slap.ComputerPartition('computer_id', 'partition_id')
''') cp._parameter_dict = {'parameter_name': 'parameter_value'}
self.config_file.flush() with patch.object(slapos.slap.OpenOrder, 'request',
return_value = cp) as mock_request, \
def tearDown(self): patch.object(sys, 'stdout', StringIO()) as app_stdout, \
self.mock_request.stop() tempfile.NamedTemporaryFile() as config_file:
self.config_file.close() config_file.write(b'[slapos]\nmaster_url=null\n')
config_file.flush()
yield slapos.cli.entry.SlapOSApp(), config_file.name
mock_request.assert_called_once_with(
'software_release', 'instance')
self.assertIn('parameter_value', app_stdout.getvalue())
def test_console_interactive(self): def test_console_interactive(self):
app = slapos.cli.entry.SlapOSApp() with self._test_console() as (app, config_file), \
saved_stdin = sys.stdin patch.object(sys, 'stdin', StringIO(self.script)):
saved_stdout = sys.stdout app.run(('console', '--cfg', config_file))
try:
sys.stdin = app_stdin = StringIO.StringIO(
"""print request('software_release', 'instance').getInstanceParameterDict()['parameter_name']\n""")
sys.stdout = app_stdout = StringIO.StringIO()
app.run(('console', '--cfg', self.config_file.name))
finally:
sys.stdin = saved_stdin
sys.stdout = saved_stdout
self.mock_request.assert_called_once_with('software_release', 'instance')
self.assertIn('parameter_value', app_stdout.getvalue())
def test_console_script(self): def test_console_script(self):
with tempfile.NamedTemporaryFile() as script: with self._test_console() as (app, config_file), \
script.write( tempfile.NamedTemporaryFile('w') as script:
"""print request('software_release', 'instance').getInstanceParameterDict()['parameter_name']\n""") script.write(self.script)
script.flush() script.flush()
app.run(('console', '--cfg', config_file, script.name))
app = slapos.cli.entry.SlapOSApp()
saved_stdout = sys.stdout
try:
sys.stdout = app_stdout = StringIO.StringIO()
app.run(('console', '--cfg', self.config_file.name, script.name))
finally:
sys.stdout = saved_stdout
self.mock_request.assert_called_once_with('software_release', 'instance')
self.assertIn('parameter_value', app_stdout.getvalue())
...@@ -36,7 +36,7 @@ import psutil ...@@ -36,7 +36,7 @@ import psutil
from time import strftime from time import strftime
from slapos.collect import entity, snapshot, db, reporter from slapos.collect import entity, snapshot, db, reporter
from slapos.cli.entry import SlapOSApp from slapos.cli.entry import SlapOSApp
from ConfigParser import ConfigParser from six.moves.configparser import ConfigParser
class FakeDatabase(object): class FakeDatabase(object):
def __init__(self): def __init__(self):
...@@ -364,7 +364,7 @@ class TestCollectReport(unittest.TestCase): ...@@ -364,7 +364,7 @@ class TestCollectReport(unittest.TestCase):
with tarfile.open("%s.tar.gz" % dump_folder) as tf: with tarfile.open("%s.tar.gz" % dump_folder) as tf:
self.assertEqual(tf.getmembers()[0].name, "1990-01-01") self.assertEqual(tf.getmembers()[0].name, "1990-01-01")
self.assertEqual(tf.getmembers()[1].name, "1990-01-01/test.txt") self.assertEqual(tf.getmembers()[1].name, "1990-01-01/test.txt")
self.assertEqual(tf.extractfile(tf.getmembers()[1]).read(), 'hi') self.assertEqual(tf.extractfile(tf.getmembers()[1]).read(), b'hi')
class TestCollectSnapshot(unittest.TestCase): class TestCollectSnapshot(unittest.TestCase):
...@@ -482,10 +482,10 @@ class TestCollectEntity(unittest.TestCase): ...@@ -482,10 +482,10 @@ class TestCollectEntity(unittest.TestCase):
config.set('slapos', 'instance_root', self.instance_root) config.set('slapos', 'instance_root', self.instance_root)
user_dict = entity.get_user_list(config) user_dict = entity.get_user_list(config)
username_list = ['slapuser0', 'slapuser1', 'slapuser2'] username_set = {'slapuser0', 'slapuser1', 'slapuser2'}
self.assertEqual(username_list, user_dict.keys()) self.assertEquals(username_set, set(user_dict))
for name in username_list: for name in username_set:
self.assertEqual(user_dict[name].name, name) self.assertEqual(user_dict[name].name, name)
self.assertEqual(user_dict[name].snapshot_list, []) self.assertEqual(user_dict[name].snapshot_list, [])
expected_path = "%s/slappart%s" % (self.instance_root, name.strip("slapuser")) expected_path = "%s/slappart%s" % (self.instance_root, name.strip("slapuser"))
...@@ -508,11 +508,11 @@ class TestCollectEntity(unittest.TestCase): ...@@ -508,11 +508,11 @@ class TestCollectEntity(unittest.TestCase):
self.assertEqual(database.invoked_method_list[1][0], "insertUserSnapshot") self.assertEqual(database.invoked_method_list[1][0], "insertUserSnapshot")
self.assertEqual(database.invoked_method_list[1][1][0], ("fakeuser0",)) self.assertEqual(database.invoked_method_list[1][1][0], ("fakeuser0",))
self.assertEqual(database.invoked_method_list[1][1][1].keys(), self.assertEqual(set(database.invoked_method_list[1][1][1]),
['cpu_time', 'cpu_percent', 'process', {'cpu_time', 'cpu_percent', 'process',
'memory_rss', 'pid', 'memory_percent', 'memory_rss', 'pid', 'memory_percent',
'io_rw_counter', 'insertion_date', 'insertion_time', 'io_rw_counter', 'insertion_date', 'insertion_time',
'io_cycles_counter', 'cpu_num_threads']) 'io_cycles_counter', 'cpu_num_threads'})
self.assertEqual(database.invoked_method_list[2], ("commit", "")) self.assertEqual(database.invoked_method_list[2], ("commit", ""))
self.assertEqual(database.invoked_method_list[3], ("close", "")) self.assertEqual(database.invoked_method_list[3], ("close", ""))
...@@ -527,19 +527,19 @@ class TestCollectEntity(unittest.TestCase): ...@@ -527,19 +527,19 @@ class TestCollectEntity(unittest.TestCase):
self.assertEqual(database.invoked_method_list[1][0], "insertUserSnapshot") self.assertEqual(database.invoked_method_list[1][0], "insertUserSnapshot")
self.assertEqual(database.invoked_method_list[1][1][0], ("fakeuser0",)) self.assertEqual(database.invoked_method_list[1][1][0], ("fakeuser0",))
self.assertEqual(database.invoked_method_list[1][1][1].keys(), self.assertEqual(set(database.invoked_method_list[1][1][1]),
['cpu_time', 'cpu_percent', 'process', {'cpu_time', 'cpu_percent', 'process',
'memory_rss', 'pid', 'memory_percent', 'memory_rss', 'pid', 'memory_percent',
'io_rw_counter', 'insertion_date', 'insertion_time', 'io_rw_counter', 'insertion_date', 'insertion_time',
'io_cycles_counter', 'cpu_num_threads']) 'io_cycles_counter', 'cpu_num_threads'})
self.assertEqual(database.invoked_method_list[2], ("commit", "")) self.assertEquals(database.invoked_method_list[2], ("commit", ""))
self.assertEqual(database.invoked_method_list[3], ("close", "")) self.assertEquals(database.invoked_method_list[3], ("close", ""))
self.assertEqual(database.invoked_method_list[4], ("connect", "")) self.assertEqual(database.invoked_method_list[4], ("connect", ""))
self.assertEqual(database.invoked_method_list[5][0], "inserFolderSnapshot") self.assertEqual(database.invoked_method_list[5][0], "inserFolderSnapshot")
self.assertEqual(database.invoked_method_list[5][1][0], ("fakeuser0",)) self.assertEqual(database.invoked_method_list[5][1][0], ("fakeuser0",))
self.assertEqual(database.invoked_method_list[5][1][1].keys(), self.assertEqual(set(database.invoked_method_list[5][1][1]),
['insertion_date', 'disk_usage', 'insertion_time']) {'insertion_date', 'disk_usage', 'insertion_time'})
self.assertEqual(database.invoked_method_list[6], ("commit", "")) self.assertEqual(database.invoked_method_list[6], ("commit", ""))
self.assertEqual(database.invoked_method_list[7], ("close", "")) self.assertEqual(database.invoked_method_list[7], ("close", ""))
...@@ -554,23 +554,23 @@ class TestCollectEntity(unittest.TestCase): ...@@ -554,23 +554,23 @@ class TestCollectEntity(unittest.TestCase):
self.assertEqual(database.invoked_method_list[1][0], "insertUserSnapshot") self.assertEqual(database.invoked_method_list[1][0], "insertUserSnapshot")
self.assertEqual(database.invoked_method_list[1][1][0], ("fakeuser0",)) self.assertEqual(database.invoked_method_list[1][1][0], ("fakeuser0",))
self.assertEqual(database.invoked_method_list[1][1][1].keys(), self.assertEqual(set(database.invoked_method_list[1][1][1]),
['cpu_time', 'cpu_percent', 'process', {'cpu_time', 'cpu_percent', 'process',
'memory_rss', 'pid', 'memory_percent', 'memory_rss', 'pid', 'memory_percent',
'io_rw_counter', 'insertion_date', 'insertion_time', 'io_rw_counter', 'insertion_date', 'insertion_time',
'io_cycles_counter', 'cpu_num_threads']) 'io_cycles_counter', 'cpu_num_threads'})
self.assertEqual(database.invoked_method_list[2], ("commit", "")) self.assertEqual(database.invoked_method_list[2], ("commit", ""))
self.assertEqual(database.invoked_method_list[3], ("close", "")) self.assertEqual(database.invoked_method_list[3], ("close", ""))
self.assertEqual(database.invoked_method_list[4], ("connect", "")) self.assertEqual(database.invoked_method_list[4], ("connect", ""))
self.assertEqual(database.invoked_method_list[5][0], "select") self.assertEqual(database.invoked_method_list[5][0], "select")
self.assertEqual(database.invoked_method_list[5][1][0], ()) self.assertEqual(database.invoked_method_list[5][1][0], ())
self.assertEqual(database.invoked_method_list[5][1][1].keys(), self.assertEqual(set(database.invoked_method_list[5][1][1]),
['table', 'where', 'limit', 'order', 'columns']) {'table', 'where', 'limit', 'order', 'columns'})
self.assertEqual(database.invoked_method_list[6][0], "inserFolderSnapshot") self.assertEqual(database.invoked_method_list[6][0], "inserFolderSnapshot")
self.assertEqual(database.invoked_method_list[6][1][0], ("fakeuser0",)) self.assertEqual(database.invoked_method_list[6][1][0], ("fakeuser0",))
self.assertEqual(database.invoked_method_list[6][1][1].keys(), self.assertEqual(set(database.invoked_method_list[6][1][1]),
['insertion_date', 'disk_usage', 'insertion_time']) {'insertion_date', 'disk_usage', 'insertion_time'})
self.assertEqual(database.invoked_method_list[7], ("commit", "")) self.assertEqual(database.invoked_method_list[7], ("commit", ""))
self.assertEqual(database.invoked_method_list[8], ("close", "")) self.assertEqual(database.invoked_method_list[8], ("close", ""))
...@@ -583,14 +583,14 @@ class TestCollectEntity(unittest.TestCase): ...@@ -583,14 +583,14 @@ class TestCollectEntity(unittest.TestCase):
self.assertEqual(database.invoked_method_list[1][0], "insertComputerSnapshot") self.assertEqual(database.invoked_method_list[1][0], "insertComputerSnapshot")
self.assertEqual(database.invoked_method_list[1][1][0], ()) self.assertEqual(database.invoked_method_list[1][1][0], ())
self.assertEqual(database.invoked_method_list[1][1][1].keys(), self.assertEqual(set(database.invoked_method_list[1][1][1]),
['insertion_time', 'insertion_date', 'cpu_num_core', {'insertion_time', 'insertion_date', 'cpu_num_core',
'partition_list', 'cpu_frequency', 'memory_size', 'partition_list', 'cpu_frequency', 'memory_size',
'cpu_type', 'memory_type']) 'cpu_type', 'memory_type'})
self.assertEqual(database.invoked_method_list[2][0], "insertSystemSnapshot") self.assertEqual(database.invoked_method_list[2][0], "insertSystemSnapshot")
self.assertEqual(database.invoked_method_list[2][1][0], ()) self.assertEqual(database.invoked_method_list[2][1][0], ())
self.assertEqual(set(database.invoked_method_list[2][1][1].keys()), self.assertEqual(set(database.invoked_method_list[2][1][1]),
set([ 'memory_used', 'cpu_percent', 'insertion_date', 'insertion_time', set([ 'memory_used', 'cpu_percent', 'insertion_date', 'insertion_time',
'loadavg', 'memory_free', 'net_in_bytes', 'net_in_dropped', 'loadavg', 'memory_free', 'net_in_bytes', 'net_in_dropped',
'net_in_errors', 'net_out_bytes', 'net_out_dropped', 'net_in_errors', 'net_out_bytes', 'net_out_dropped',
...@@ -598,7 +598,7 @@ class TestCollectEntity(unittest.TestCase): ...@@ -598,7 +598,7 @@ class TestCollectEntity(unittest.TestCase):
self.assertEqual(database.invoked_method_list[3][0], "insertDiskPartitionSnapshot") self.assertEqual(database.invoked_method_list[3][0], "insertDiskPartitionSnapshot")
self.assertEqual(database.invoked_method_list[3][1][0], ()) self.assertEqual(database.invoked_method_list[3][1][0], ())
self.assertEqual(set(database.invoked_method_list[3][1][1].keys()), self.assertEqual(set(database.invoked_method_list[3][1][1]),
set([ 'used', 'insertion_date', 'partition', 'free', set([ 'used', 'insertion_date', 'partition', 'free',
'mountpoint', 'insertion_time' ])) 'mountpoint', 'insertion_time' ]))
......
...@@ -34,7 +34,7 @@ import slapos.cli.configure_local ...@@ -34,7 +34,7 @@ import slapos.cli.configure_local
from slapos.cli.configure_local import ConfigureLocalCommand, _createConfigurationDirectory from slapos.cli.configure_local import ConfigureLocalCommand, _createConfigurationDirectory
from slapos.cli.entry import SlapOSApp from slapos.cli.entry import SlapOSApp
from argparse import Namespace from argparse import Namespace
from ConfigParser import ConfigParser from six.moves.configparser import ConfigParser
# Disable any command to launch slapformat and supervisor # Disable any command to launch slapformat and supervisor
slapos.cli.configure_local._runFormat = lambda x: "Do nothing" slapos.cli.configure_local._runFormat = lambda x: "Do nothing"
......
...@@ -47,7 +47,7 @@ class SlapPopenTestCase(unittest.TestCase): ...@@ -47,7 +47,7 @@ class SlapPopenTestCase(unittest.TestCase):
def test_exec(self): def test_exec(self):
"""Test command execution with SlapPopen. """Test command execution with SlapPopen.
""" """
self.script.write('#!/bin/sh\necho "hello"\nexit 123') self.script.write(b'#!/bin/sh\necho "hello"\nexit 123')
self.script.close() self.script.close()
logger = mock.MagicMock() logger = mock.MagicMock()
...@@ -65,7 +65,7 @@ class SlapPopenTestCase(unittest.TestCase): ...@@ -65,7 +65,7 @@ class SlapPopenTestCase(unittest.TestCase):
def test_debug(self): def test_debug(self):
"""Test debug=True, which keeps interactive. """Test debug=True, which keeps interactive.
""" """
self.script.write('#!/bin/sh\necho "exit code?"\nread rc\nexit $rc') self.script.write(b'#!/bin/sh\necho "exit code?"\nread rc\nexit $rc')
self.script.close() self.script.close()
# keep a reference to stdin and stdout to restore them later # keep a reference to stdin and stdout to restore them later
...@@ -74,7 +74,7 @@ class SlapPopenTestCase(unittest.TestCase): ...@@ -74,7 +74,7 @@ class SlapPopenTestCase(unittest.TestCase):
# replace stdin with a pipe that will write 123 # replace stdin with a pipe that will write 123
child_stdin_r, child_stdin_w = os.pipe() child_stdin_r, child_stdin_w = os.pipe()
os.write(child_stdin_w, "123") os.write(child_stdin_w, b"123")
os.close(child_stdin_w) os.close(child_stdin_w)
os.dup2(child_stdin_r, sys.stdin.fileno()) os.dup2(child_stdin_r, sys.stdin.fileno())
...@@ -88,7 +88,7 @@ class SlapPopenTestCase(unittest.TestCase): ...@@ -88,7 +88,7 @@ class SlapPopenTestCase(unittest.TestCase):
debug=True, debug=True,
logger=logging.getLogger()) logger=logging.getLogger())
# program output # program output
self.assertEqual('exit code?\n', os.read(child_stdout_r, 1024)) self.assertEqual(b'exit code?\n', os.read(child_stdout_r, 1024))
self.assertEqual(123, program.returncode) self.assertEqual(123, program.returncode)
self.assertEqual('(output not captured in debug mode)', program.output) self.assertEqual('(output not captured in debug mode)', program.output)
......
...@@ -28,17 +28,15 @@ import unittest ...@@ -28,17 +28,15 @@ import unittest
from zope.interface.verify import verifyClass from zope.interface.verify import verifyClass
import zope.interface import zope.interface
import types from six import class_types
from slapos import slap from slapos import slap
def getOnlyImplementationAssertionMethod(klass, method_list): def getOnlyImplementationAssertionMethod(klass, method_list):
"""Returns method which verifies if a klass only implements its interfaces""" """Returns method which verifies if a klass only implements its interfaces"""
def testMethod(self): def testMethod(self):
implemented_method_list = [x for x in dir(klass) \ implemented_method_list = {x for x in dir(klass)
if ((not x.startswith('_')) and callable(getattr(klass, x)))] if not x.startswith('_') and callable(getattr(klass, x))}
for interface_method in method_list: implemented_method_list.difference_update(method_list)
if interface_method in implemented_method_list:
implemented_method_list.remove(interface_method)
if implemented_method_list: if implemented_method_list:
raise AssertionError("Unexpected methods %s" % implemented_method_list) raise AssertionError("Unexpected methods %s" % implemented_method_list)
...@@ -61,7 +59,7 @@ def generateTestMethodListOnClass(klass, module): ...@@ -61,7 +59,7 @@ def generateTestMethodListOnClass(klass, module):
"""Generate test method on klass""" """Generate test method on klass"""
for class_id in dir(module): for class_id in dir(module):
implementing_class = getattr(module, class_id) implementing_class = getattr(module, class_id)
if type(implementing_class) not in (types.ClassType, types.TypeType): if not isinstance(implementing_class, class_types):
continue continue
# add methods to assert that publicly available classes are defining # add methods to assert that publicly available classes are defining
# interfaces # interfaces
...@@ -69,7 +67,7 @@ def generateTestMethodListOnClass(klass, module): ...@@ -69,7 +67,7 @@ def generateTestMethodListOnClass(klass, module):
setattr(klass, method_name, getDeclarationAssertionMethod( setattr(klass, method_name, getDeclarationAssertionMethod(
implementing_class)) implementing_class))
implemented_method_list = [] implemented_method_list = ['with_traceback']
for interface in list(zope.interface.implementedBy(implementing_class)): for interface in list(zope.interface.implementedBy(implementing_class)):
# for each interface which class declares add a method which verify # for each interface which class declares add a method which verify
# implementation # implementation
......
This diff is collapsed.
This diff is collapsed.
...@@ -26,6 +26,8 @@ ...@@ -26,6 +26,8 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# #
############################################################################## ##############################################################################
from __future__ import print_function
import glob import glob
import logging import logging
import slapos.format import slapos.format
...@@ -49,6 +51,8 @@ import mock ...@@ -49,6 +51,8 @@ import mock
from .slapgrid import DummyManager from .slapgrid import DummyManager
import six
USER_LIST = [] USER_LIST = []
GROUP_LIST = [] GROUP_LIST = []
INTERFACE_DICT = {} INTERFACE_DICT = {}
...@@ -89,7 +93,7 @@ class FakeCallAndRead: ...@@ -89,7 +93,7 @@ class FakeCallAndRead:
retval = 0, 'UP' retval = 0, 'UP'
global INTERFACE_DICT global INTERFACE_DICT
if 'useradd' in argument_list: if 'useradd' in argument_list:
print argument_list print(argument_list)
global USER_LIST global USER_LIST
username = argument_list[-1] username = argument_list[-1]
if username == '-r': if username == '-r':
...@@ -130,7 +134,7 @@ class LoggableWrapper: ...@@ -130,7 +134,7 @@ class LoggableWrapper:
def __call__(self, *args, **kwargs): def __call__(self, *args, **kwargs):
arg_list = [repr(x) for x in args] + [ arg_list = [repr(x) for x in args] + [
'%s=%r' % (x, y) for x, y in kwargs.iteritems()] '%s=%r' % (x, y) for x, y in six.iteritems(kwargs)]
self.__logger.debug('%s(%s)' % (self.__name, ', '.join(arg_list))) self.__logger.debug('%s(%s)' % (self.__name, ', '.join(arg_list)))
...@@ -197,6 +201,8 @@ class SlapformatMixin(unittest.TestCase): ...@@ -197,6 +201,8 @@ class SlapformatMixin(unittest.TestCase):
self.netifaces = NetifacesMock() self.netifaces = NetifacesMock()
self.saved_netifaces = {} self.saved_netifaces = {}
for fake in vars(NetifacesMock): for fake in vars(NetifacesMock):
if fake.startswith("__"):
continue
self.saved_netifaces[fake] = getattr(netifaces, fake, None) self.saved_netifaces[fake] = getattr(netifaces, fake, None)
setattr(netifaces, fake, getattr(self.netifaces, fake)) setattr(netifaces, fake, getattr(self.netifaces, fake))
...@@ -208,6 +214,8 @@ class SlapformatMixin(unittest.TestCase): ...@@ -208,6 +214,8 @@ class SlapformatMixin(unittest.TestCase):
def patchPwd(self): def patchPwd(self):
self.saved_pwd = {} self.saved_pwd = {}
for fake in vars(PwdMock): for fake in vars(PwdMock):
if fake.startswith("__"):
continue
self.saved_pwd[fake] = getattr(pwd, fake, None) self.saved_pwd[fake] = getattr(pwd, fake, None)
setattr(pwd, fake, getattr(PwdMock, fake)) setattr(pwd, fake, getattr(PwdMock, fake))
...@@ -219,6 +227,8 @@ class SlapformatMixin(unittest.TestCase): ...@@ -219,6 +227,8 @@ class SlapformatMixin(unittest.TestCase):
def patchTime(self): def patchTime(self):
self.saved_time = {} self.saved_time = {}
for fake in vars(TimeMock): for fake in vars(TimeMock):
if fake.startswith("__"):
continue
self.saved_time[fake] = getattr(time, fake, None) self.saved_time[fake] = getattr(time, fake, None)
setattr(time, fake, getattr(TimeMock, fake)) setattr(time, fake, getattr(TimeMock, fake))
...@@ -230,6 +240,8 @@ class SlapformatMixin(unittest.TestCase): ...@@ -230,6 +240,8 @@ class SlapformatMixin(unittest.TestCase):
def patchGrp(self): def patchGrp(self):
self.saved_grp = {} self.saved_grp = {}
for fake in vars(GrpMock): for fake in vars(GrpMock):
if fake.startswith("__"):
continue
self.saved_grp[fake] = getattr(grp, fake, None) self.saved_grp[fake] = getattr(grp, fake, None)
setattr(grp, fake, getattr(GrpMock, fake)) setattr(grp, fake, getattr(GrpMock, fake))
......
This diff is collapsed.
This diff is collapsed.
...@@ -65,9 +65,9 @@ class TestUtil(unittest.TestCase): ...@@ -65,9 +65,9 @@ class TestUtil(unittest.TestCase):
wanted_directory0 = os.path.join(root_slaptest, 'slap-write0') wanted_directory0 = os.path.join(root_slaptest, 'slap-write0')
wanted_directory1 = os.path.join(root_slaptest, 'slap-write0', 'write-slap1') wanted_directory1 = os.path.join(root_slaptest, 'slap-write0', 'write-slap1')
wanted_directory2 = os.path.join(root_slaptest, 'slap-write0', 'write-slap1', 'write-teste2') wanted_directory2 = os.path.join(root_slaptest, 'slap-write0', 'write-slap1', 'write-teste2')
wanted_directory_mkdir0 = os.makedirs(wanted_directory0, mode=0777) wanted_directory_mkdir0 = os.makedirs(wanted_directory0, mode=0o777)
wanted_directory_mkdir1 = os.makedirs(wanted_directory1, mode=0777) wanted_directory_mkdir1 = os.makedirs(wanted_directory1, mode=0o777)
wanted_directory_mkdir2 = os.makedirs(wanted_directory2, mode=0777) wanted_directory_mkdir2 = os.makedirs(wanted_directory2, mode=0o777)
create_file_txt = tempfile.mkstemp(suffix='.txt', prefix='tmp', dir=wanted_directory2, text=True) create_file_txt = tempfile.mkstemp(suffix='.txt', prefix='tmp', dir=wanted_directory2, text=True)
user = 'nobody' user = 'nobody'
try: try:
...@@ -109,23 +109,14 @@ class TestUtil(unittest.TestCase): ...@@ -109,23 +109,14 @@ class TestUtil(unittest.TestCase):
shutil.rmtree(root_slaptest) shutil.rmtree(root_slaptest)
def test_string_to_boolean_with_true_values(self): def test_string_to_boolean_with_true_values(self):
"""
Check that mkdir_p doesn't raise if directory already exist.
"""
for value in ['true', 'True', 'TRUE']: for value in ['true', 'True', 'TRUE']:
self.assertTrue(string_to_boolean(value)) self.assertTrue(string_to_boolean(value))
def test_string_to_boolean_with_false_values(self): def test_string_to_boolean_with_false_values(self):
"""
Check that mkdir_p doesn't raise if directory already exist.
"""
for value in ['false', 'False', 'False']: for value in ['false', 'False', 'False']:
self.assertFalse(string_to_boolean(value)) self.assertFalse(string_to_boolean(value))
def test_string_to_boolean_with_incorrect_values(self): def test_string_to_boolean_with_incorrect_values(self):
"""
Check that mkdir_p doesn't raise if directory already exist.
"""
for value in [True, False, 1, '1', 't', 'tru', 'truelle', 'f', 'fals', 'falsey']: for value in [True, False, 1, '1', 't', 'tru', 'truelle', 'f', 'fals', 'falsey']:
self.assertRaises(ValueError, string_to_boolean, value) self.assertRaises(ValueError, string_to_boolean, value)
......
...@@ -33,6 +33,7 @@ import socket ...@@ -33,6 +33,7 @@ import socket
import struct import struct
import subprocess import subprocess
import sqlite3 import sqlite3
from xml_marshaller.xml_marshaller import dumps, loads
def mkdir_p(path, mode=0o700): def mkdir_p(path, mode=0o700):
...@@ -86,16 +87,9 @@ def string_to_boolean(string): ...@@ -86,16 +87,9 @@ def string_to_boolean(string):
The parser is completely arbitrary, see code for actual implementation. The parser is completely arbitrary, see code for actual implementation.
""" """
if not isinstance(string, str) and not isinstance(string, unicode): try:
raise ValueError('Given value is not a string.') return ('false', 'true').index(string.lower())
acceptable_true_values = ['true'] except Exception:
acceptable_false_values = ['false']
string = string.lower()
if string in acceptable_true_values:
return True
if string in acceptable_false_values:
return False
else:
raise ValueError('%s is neither True nor False.' % string) raise ValueError('%s is neither True nor False.' % string)
...@@ -138,3 +132,15 @@ def ipv6FromBin(ip, suffix=''): ...@@ -138,3 +132,15 @@ def ipv6FromBin(ip, suffix=''):
def lenNetmaskIpv6(netmask): def lenNetmaskIpv6(netmask):
return len(binFromIpv6(netmask).rstrip('0')) return len(binFromIpv6(netmask).rstrip('0'))
# Used for Python 2-3 compatibility
if str is bytes:
bytes2str = str2bytes = lambda s: s
def unicode2str(s):
return s.encode('utf-8')
else:
def bytes2str(s):
return s.decode()
def str2bytes(s):
return s.encode()
def unicode2str(s):
return s
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment