Commit 049c42b1 authored by Bryton Lacquement, committed by Julien Muchembled

fixup! Add support for Python 3

parent 31532fac
@@ -345,8 +345,8 @@ def main():
       # Select an unused computer to run the test.
       group = test_mapping.getNextGroup(
-        ignore_list = [group for _, _, group in \
-                       running_test_dict.itervalues()])
+        ignore_list = [group for _, _, group in
+                       six.itervalues(running_test_dict)])
       # Select a test
       test_line = test_result.start(
@@ -455,7 +455,7 @@ def main():
         logger.info('Sleeping %is...', to_sleep)
         time.sleep(to_sleep)
       if not test_result.isAlive():
-        for _, tester, computer_id in running_test_dict.itervalues():
+        for _, tester, computer_id in six.itervalues(running_test_dict):
          tester.teardown()
   time.sleep(300)
...
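Note: replacing dict.itervalues() with six.itervalues() is the usual way to keep one code base running on both Python 2 and 3. A minimal sketch of the idiom, with an illustrative dictionary (not taken from the commit):

  import six

  running_test_dict = {1: (10, 'tester-a', 'COMP-1'),
                       2: (20, 'tester-b', 'COMP-2')}

  # six.itervalues() calls dict.itervalues() on Python 2 and dict.values()
  # on Python 3, so the same loop works on both interpreters.
  for _, tester, computer_id in six.itervalues(running_test_dict):
    print(computer_id)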
 # -*- coding: utf-8 -*-
+from __future__ import print_function
 from six.moves import configparser
 import argparse
 from six.moves import dbm_gnu as gdbm
@@ -27,7 +28,7 @@ def main():
   if args.pid is not None:
     pid_filename = args.pid[0]
     if os.path.exists(pid_filename):
-      print >> sys.stderr, "Already running"
+      print("Already running", file=sys.stderr)
       return 127
     with open(pid_filename, 'w') as pid_file:
       pid_file.write(str(os.getpid()))
...
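Note: `print >> sys.stderr, ...` is Python 2-only syntax; with the `from __future__ import print_function` line added above, the function form works identically on both versions. A minimal sketch:

  from __future__ import print_function
  import sys

  # print() is a function even on Python 2 once the future import is active,
  # so redirecting to stderr uses the file= keyword instead of '>>'.
  print("Already running", file=sys.stderr)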
@@ -42,6 +42,14 @@ from six.moves import socketserver
 import io
 import threading
+try:
+  logging_levels = logging._nameToLevel
+  logging_choices = logging_levels.keys()
+except AttributeError:
+  logging_levels = logging._levelNames
+  logging_choices = [i for i in logging_levels
+                     if isinstance(i, str)]
 # Copied from erp5.util:erp5/util/testnode/ProcessManager.py
 def subprocess_capture(p, log, log_prefix, get_output=True):
   def readerthread(input, output, buffer):
@@ -81,7 +89,7 @@ class EqueueServer(socketserver.ThreadingUnixStreamServer):
   def __init__(self, *args, **kw):
     self.options = kw.pop('equeue_options')
-    super(EqueueServer, self).__init__(self,
+    socketserver.ThreadingUnixStreamServer.__init__(self,
                                        RequestHandlerClass=None,
                                        *args, **kw)
     # Equeue Specific elements
@@ -99,7 +107,7 @@ class EqueueServer(socketserver.ThreadingUnixStreamServer):
     self.logger = logging.getLogger("EQueue")
     handler = logging.handlers.WatchedFileHandler(logfile, mode='a')
     # Natively support logrotate
-    level = logging._levelNames.get(loglevel, logging.INFO)
+    level = logging_levels.get(loglevel, logging.INFO)
     self.logger.setLevel(level)
     formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
     handler.setFormatter(formatter)
@@ -131,7 +139,7 @@ class EqueueServer(socketserver.ThreadingUnixStreamServer):
     try:
       sys.stdout.flush()
       p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE,
-                           stderr=subprocess.PIPE)
+                           stderr=subprocess.PIPE, universal_newlines=True)
       subprocess_capture(p, self.logger.info, '', True)
       if p.returncode == 0:
         self.logger.info("%s finished successfully.", cmd_readable)
@@ -172,7 +180,7 @@ class EqueueServer(socketserver.ThreadingUnixStreamServer):
     try:
       request.send(command)
-    except:
+    except Exception:
       self.logger.warning("Couldn't respond to %r", request.fileno())
     self.close_request(request)
     self._runCommandIfNeeded(command, timestamp)
@@ -193,8 +201,7 @@ def main():
                       "calls are stored")
   parser.add_argument('--loglevel', nargs=1,
                       default='INFO',
-                      choices=[i for i in logging._levelNames
-                               if isinstance(i, str)],
+                      choices=logging_choices,
                       required=False)
   parser.add_argument('-l', '--logfile', nargs=1, required=True,
                       help="Path to the log file.")
...
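Note: Python 3 renamed the private logging._levelNames mapping to logging._nameToLevel, which is what the try/except added above papers over. A short sketch of how the resulting mapping is used to resolve a level name (the 'EQueue' logger name is only an example):

  import logging

  try:
    logging_levels = logging._nameToLevel   # Python 3
  except AttributeError:
    logging_levels = logging._levelNames    # Python 2

  # Resolve a level name coming from the command line, defaulting to INFO.
  level = logging_levels.get('DEBUG', logging.INFO)
  logging.getLogger('EQueue').setLevel(level)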
@@ -89,19 +89,19 @@ def setup(arguments):
       time.sleep(3)
       continue
     time.sleep(timeout)
-  if arguments.has_key('delete_target'):
+  if 'delete_target' in arguments:
     delete(arguments)
-  if arguments.has_key('source'):
+  if 'source' in arguments:
     rename(arguments)
-  if arguments.has_key('script'):
+  if 'script' in arguments:
     run_script(arguments)
-  if arguments.has_key('sql_script'):
+  if 'sql_script' in arguments:
     run_sql_script(arguments)
-  if arguments.has_key('chmod_target'):
+  if 'chmod_target' in arguments:
     chmod(arguments)
   return
...
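Note: dict.has_key() no longer exists on Python 3; the `in` operator used above is the portable spelling. A minimal sketch with an illustrative dictionary:

  arguments = {'delete_target': '/tmp/old', 'source': '/tmp/new'}

  # 'key in mapping' works on Python 2 and 3 alike, whereas
  # arguments.has_key('source') raises AttributeError on Python 3.
  if 'source' in arguments:
    print(arguments['source'])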
@@ -33,7 +33,7 @@ def buildStatistic(history_folder):
     last_date = None
     if stats_dict["data"]:
-      if stats_dict["data"][-1].has_key("start-date"):
+      if "start-date" in stats_dict["data"][-1]:
         last_date = stats_dict["data"][-1]["start-date"]
       else:
         last_date = stats_dict["data"][-1]["date"]
...
@@ -293,17 +293,17 @@ def main():
       if process_result and process_result['total_process'] != 0.0:
         appendToJsonFile(process_file, ", ".join(
-          [str(process_result[key]) for key in label_list if process_result.has_key(key)])
+          str(process_result[key]) for key in label_list if key in process_result)
         )
         resource_status_dict.update(process_result)
       if memory_result and memory_result['memory_rss'] != 0.0:
         appendToJsonFile(mem_file, ", ".join(
-          [str(memory_result[key]) for key in label_list if memory_result.has_key(key)])
+          str(memory_result[key]) for key in label_list if key in memory_result)
         )
         resource_status_dict.update(memory_result)
       if io_result and io_result['io_rw_counter'] != 0.0:
         appendToJsonFile(io_file, ", ".join(
-          [str(io_result[key]) for key in label_list if io_result.has_key(key)])
+          str(io_result[key]) for key in label_list if key in io_result)
         )
         resource_status_dict.update(io_result)
...
@@ -14,7 +14,7 @@ def get_curl(buffer, url):
   result = "OK"
   try:
     curl.perform()
-  except:
+  except Exception:
     import traceback
     traceback.print_exc(file=sys.stderr)
     sys.stderr.flush()
...
@@ -16,7 +16,7 @@ from tzlocal import get_localzone
 class RunPromise(GenericPromise):
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # check backup ran OK every 5 minutes
     self.setPeriodicity(minute=5)
...
@@ -8,7 +8,7 @@ import os
 @implementer(interface.IPromise)
 class RunPromise(GenericPromise):
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # set periodicity to run the promise twice per day
     self.custom_frequency = int(self.getConfig('frequency', 720))
     self.setPeriodicity(self.custom_frequency)
...
@@ -12,7 +12,7 @@ r = re.compile(br"^([0-9]+\-[0-9]+\-[0-9]+ [0-9]+\:[0-9]+\:[0-9]+)(\,[0-9]+) - (
 @implementer(interface.IPromise)
 class RunPromise(GenericPromise):
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     self.setPeriodicity(minute=10)
   def sense(self):
...
@@ -6,7 +6,7 @@ from slapos.grid.promise.generic import GenericPromise
 @implementer(interface.IPromise)
 class RunPromise(GenericPromise):
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # SR can set custom periodicity
     self.setPeriodicity(float(self.getConfig('frequency', 2)))
@@ -22,7 +22,8 @@ class RunPromise(GenericPromise):
     url = self.getConfig('url').strip()
     try:
-      result = open(filename).read()
+      with open(filename) as f:
+        result = f.read()
     except Exception as e:
       self.logger.error(
         "ERROR %r during opening and reading file %r" % (e, filename))
...
@@ -16,7 +16,7 @@ from slapos.collect.db import Database
 class RunPromise(GenericPromise):
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # check disk space at least every 3 minutes
     self.setPeriodicity(minute=3)
@@ -131,7 +131,7 @@ class RunPromise(GenericPromise):
         min_free_size = int(min_size_str)*1024*1024
       else:
         with open(disk_threshold_file, 'w') as f:
-          f.write(str(min_free_size/(1024*1024)))
+          f.write(str(min_free_size//(1024*1024)))
     if check_date:
       # testing mode
...
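Note: on Python 3 the / operator always returns a float, so the threshold written to the file above would have become e.g. '100.0'; // keeps the truncating integer division of the old code. A small illustration:

  min_free_size = 200 * 1024 * 1024  # bytes, illustrative value

  print(min_free_size / (1024 * 1024))   # 200.0 on Python 3 (true division)
  print(min_free_size // (1024 * 1024))  # 200 on both Python 2 and 3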
@@ -8,7 +8,7 @@ from slapos.networkbench.ping import ping, ping6
 @implementer(interface.IPromise)
 class RunPromise(GenericPromise):
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # set periodicity to run the promise twice per day
     self.custom_frequency = int(self.getConfig('frequency', 720))
     self.setPeriodicity(self.custom_frequency)
...
@@ -8,7 +8,7 @@ from datetime import datetime
 class RunPromise(GenericPromise):
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     self.setPeriodicity(minute=1)
   def sense(self):
...
-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise
 import socket
 import sys
+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):
-  zope_interface.implements(interface.IPromise)
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # check port is listening at least every 2 minutes
     self.setPeriodicity(minute=2)
@@ -30,9 +28,9 @@ class RunPromise(GenericPromise):
     # self.logger.info("port connection OK")
     try:
       socket.create_connection(addr).close()
-    except (socket.herror, socket.gaierror), e:
+    except (socket.herror, socket.gaierror) as e:
       self.logger.error("ERROR hostname/port ({}) is not correct: {}".format(addr, e))
-    except (socket.error, socket.timeout), e:
+    except (socket.error, socket.timeout) as e:
       self.logger.error("ERROR while connecting to {}: {}".format(addr, e))
     else:
       self.logger.info("port connection OK ({})".format(addr))
...
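Note: calling zope_interface.implements() inside a class body relies on a metaclass trick that does not work on Python 3; the @implementer class decorator is the portable replacement, and super() replaces the explicit GenericPromise.__init__ call. A hedged sketch of the pattern with stand-in classes (IPromise and GenericPromise below are simplified placeholders, not the slapos ones):

  from zope.interface import Interface, implementer

  class IPromise(Interface):
    """Stand-in for slapos.grid.promise.interface.IPromise."""

  class GenericPromise(object):
    def __init__(self, config):
      self.config = config

  @implementer(IPromise)              # replaces zope_interface.implements(...)
  class RunPromise(GenericPromise):
    def __init__(self, config):
      super(RunPromise, self).__init__(config)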
@@ -8,7 +8,7 @@ from slapos.networkbench.ping import ping, ping6
 @implementer(interface.IPromise)
 class RunPromise(GenericPromise):
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # set periodicity to run the promise twice per day
     self.custom_frequency = int(self.getConfig('frequency', 720))
     self.setPeriodicity(self.custom_frequency)
...
@@ -9,22 +9,23 @@ import os
 class RunPromise(GenericPromise):
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # test load every 3 minutes
     self.setPeriodicity(minute=3)
   def checkCPULoad(self, tolerance=2.2):
     # tolerance=1.5 => accept CPU load up to 1.5 =150%
-    uptime_result = subprocess.check_output(['uptime'], universal_newlines=True)
+    uptime_result = subprocess.check_output('uptime', universal_newlines=True)
     line = uptime_result.strip().split(' ')
     load, load5, long_load = line[-3:]
     long_load = float(long_load.replace(',', '.'))
-    core_count = int(subprocess.check_output(['nproc']).strip())
+    core_count = int(subprocess.check_output('nproc').strip())
     max_load = core_count * tolerance
     if long_load > max_load:
       # display top statistics
-      top_result = subprocess.check_output(['top', '-n', '1', '-b'])
+      top_result = subprocess.check_output(('top', '-n', '1', '-b'),
+                                           universal_newlines=True)
       message = "CPU load is high: %s %s %s\n\n" % (load, load5, long_load)
       i = 0
       result_list = top_result.split('\n')
...
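Note: subprocess.check_output() returns bytes on Python 3 by default; passing universal_newlines=True makes it return text on both versions, which is why the uptime/top calls above request it before using str methods such as split(). A minimal sketch (assumes the uptime command is available):

  import subprocess

  # universal_newlines=True decodes the output, so split() behaves the same
  # on Python 2 and Python 3.
  uptime_result = subprocess.check_output('uptime', universal_newlines=True)
  print(uptime_result.strip().split(' ')[-3:])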
@@ -8,7 +8,7 @@ import requests
 @implementer(interface.IPromise)
 class RunPromise(GenericPromise):
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # SR can set custom periodicity
     self.setPeriodicity(float(self.getConfig('frequency', 2)))
...
@@ -10,7 +10,7 @@ from .util import tail_file
 class RunPromise(GenericPromise):
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     self.setPeriodicity(minute=2)
   def sense(self):
...
-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise
@@ -7,12 +7,10 @@ try:
 except ImportError:
   import subprocess
+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):
-  zope_interface.implements(interface.IPromise)
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     self.setPeriodicity(minute=int(self.getConfig('frequency', 5)))
   def sense(self):
@@ -31,16 +29,18 @@ class RunPromise(GenericPromise):
       self.logger.error("Wrapper %r not supported." % (wrapper,))
       return
-    process = subprocess.Popen(
-      args,
-      stdout=subprocess.PIPE,
-      stderr=subprocess.STDOUT,
-    )
-    result = process.communicate()[0].strip()
-    if process.returncode == 0:
-      self.logger.info("OK")
-    else:
-      self.logger.error(message % (result,))
+    try:
+      subprocess.check_output(
+        args,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+      )
+    except subprocess.CalledProcessError as e:
+      result = e.output.strip()
+      self.logger.error(message, result if str is bytes else
+                        result.decode('utf-8', 'replace'))
+    else:
+      self.logger.info("OK")
   def anomaly(self):
     """
...
-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise
 try:
@@ -6,12 +6,10 @@ try:
 except ImportError:
   import subprocess
+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):
-  zope_interface.implements(interface.IPromise)
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # check configuration every 5 minutes (only for anomaly)
     self.setPeriodicity(minute=int(self.getConfig('frequency', 5)))
@@ -23,16 +21,14 @@ class RunPromise(GenericPromise):
     validate_script = self.getConfig('verification-script')
     if not validate_script:
       raise ValueError("'verification-script' was not set in promise parameters.")
-    process = subprocess.Popen(
-      [validate_script],
-      stdout=subprocess.PIPE,
-      stderr=subprocess.STDOUT,
-    )
-    message = process.communicate()[0]
-    if process.returncode == 0:
-      self.logger.info("OK")
+    try:
+      subprocess.check_output(validate_script, stderr=subprocess.STDOUT)
+    except subprocess.CalledProcessError as e:
+      message = e.output
+      self.logger.error(message if str is bytes else
+                        message.decode('utf-8', 'replace'))
     else:
-      self.logger.error("%s" % message)
+      self.logger.info("OK")
   def anomaly(self):
     return self._anomaly(result_count=1, failure_amount=1)
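Note: the Popen/communicate/returncode sequence is replaced by check_output(), which raises CalledProcessError on a non-zero exit and exposes the captured output on the exception; the `str is bytes` test decodes only on Python 3, where that output is bytes. An illustrative sketch using /bin/false as a stand-in for the verification script:

  import subprocess

  try:
    subprocess.check_output('/bin/false', stderr=subprocess.STDOUT)
  except subprocess.CalledProcessError as e:
    message = e.output
    # On Python 3 (str is not bytes) the captured output must be decoded.
    print(message if str is bytes else message.decode('utf-8', 'replace'))
  else:
    print("OK")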
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+from __future__ import print_function
 import argparse
 import csv
 import datetime
 import json
-import httplib
+from six.moves import http_client as httplib
 import os
 import shutil
 import socket
@@ -13,8 +14,8 @@ import subprocess
 import sys
 import time
 import traceback
-import urllib2
-import urlparse
+from six.moves.urllib.request import urlopen
+from six.moves.urllib.parse import urlparse
 import uuid
 def createStatusItem(item_directory, instance_name, callback, date, link, status):
@@ -79,8 +80,7 @@ def main():
   saveStatus('STARTED')
   if args.max_run <= 0:
-    print "--max-run argument takes a strictely positive number as argument"
-    sys.exit(-1)
+    parser.error("--max-run argument takes a strictly positive number as argument")
   while args.max_run > 0:
     try:
@@ -108,7 +108,7 @@ def main():
         content.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
       ))
-      print content
+      print(content)
     # Write feed safely
     error_message = ""
@@ -128,7 +128,7 @@ def main():
         'slapos:%s' % uuid.uuid4(),
       ])
       os.rename(temp_file, args.logfile[0])
-    except Exception, e:
+    except Exception as e:
       error_message = "ERROR ON WRITING FEED - %s" % str(e)
     finally:
       try:
@@ -143,14 +143,14 @@ def main():
   if exit_code != 0:
     sys.exit(exit_code)
-  print 'Fetching %s feed...' % args.feed_url[0]
-  feed = urllib2.urlopen(args.feed_url[0])
+  print('Fetching %s feed...' % args.feed_url[0])
+  feed = urlopen(args.feed_url[0])
   body = feed.read()
   some_notification_failed = False
   for notif_url in args.notification_url:
-    notification_url = urlparse.urlparse(notif_url)
+    notification_url = urlparse(notif_url)
     notification_port = notification_url.port
     if notification_port is None:
...
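Note: urllib2 and urlparse do not exist on Python 3; the six.moves aliases imported above resolve to urllib.request/urllib.parse on Python 3 and to the old modules on Python 2. A minimal sketch (the URL is only an example):

  from six.moves.urllib.request import urlopen
  from six.moves.urllib.parse import urlparse

  notification_url = urlparse('https://example.org:8080/notify')
  print(notification_url.port)             # 8080
  feed = urlopen('https://example.org/')   # same call on Python 2 and 3
  body = feed.read()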
@@ -26,6 +26,7 @@
 #
 ##############################################################################
+from __future__ import print_function
 import argparse
 import json
 import importlib
@@ -120,7 +121,7 @@ def runTestSuite(test_suite_title, test_suite_arguments, logger):
     parsed_arguments = dict(key.split('=') for key in test_suite_arguments)
     test_suite_module = importFrom(test_suite_title)
     success = test_suite_module.runTestSuite(**parsed_arguments)
-  except:
+  except Exception:
     logger.exception('Impossible to run resiliency test:')
     success = False
   return success
@@ -228,7 +229,7 @@ def runResiliencyTest():
   """
   error_message_set, exit_status = ScalabilityLauncher().run()
   for error_message in error_message_set:
-    print >>sys.stderr, 'ERROR: %s' % error_message
+    print('ERROR: %s' % error_message, file=sys.stderr)
   sys.exit(exit_status)
@@ -284,7 +285,5 @@ def runUnitTest():
                      test_count=1,
                      error_count=error_count,
                      duration=test_duration)
-  except:
-    raise
   finally:
     os.remove(fname)
@@ -33,8 +33,9 @@ import random
 import ssl
 import string
 import time
-import urllib
-import urllib2
+from six.moves.urllib.parse import quote
+from six.moves.urllib.request import HTTPBasicAuthHandler, HTTPSHandler, \
+                                     build_opener
 class NotHttpOkException(Exception):
   pass
@@ -50,7 +51,7 @@ class ERP5TestSuite(SlaprunnerTestSuite):
     Set inside of slaprunner the instance parameter to use to deploy erp5 instance.
     """
     p = '<?xml version="1.0" encoding="utf-8"?> <instance> <parameter id="_">{"zodb-zeo": {"backup-periodicity": "*:1/4"}, "mariadb": {"backup-periodicity": "*:1/4"}}</parameter> </instance>'
-    parameter = urllib2.quote(p)
+    parameter = quote(p)
     self._connectToSlaprunner(
       resource='saveParameterXml',
       data='software_type=default&parameter=%s' % parameter)
@@ -109,7 +110,7 @@ class ERP5TestSuite(SlaprunnerTestSuite):
       resource='/saveFileContent',
       data='file=runner_workdir%%2Finstance%%2F%s%%2Fetc%%2Fhaproxy.cfg&content=%s' % (
         haproxy_slappart,
-        urllib.quote(file_content),
+        quote(file_content),
       )
     )
@@ -133,12 +134,12 @@ class ERP5TestSuite(SlaprunnerTestSuite):
   def _connectToERP5(self, url, data=None, password=None):
     if password is None:
       password = self._getERP5Password()
-    auth_handler = urllib2.HTTPBasicAuthHandler()
+    auth_handler = HTTPBasicAuthHandler()
     auth_handler.add_password(realm='Zope', uri=url, user='zope', passwd=password)
     ssl_context = ssl._create_unverified_context()
-    opener_director = urllib2.build_opener(
+    opener_director = build_opener(
       auth_handler,
-      urllib2.HTTPSHandler(context=ssl_context)
+      HTTPSHandler(context=ssl_context)
     )
     self.logger.info('Calling ERP5 url %s' % url)
@@ -213,7 +214,7 @@ class ERP5TestSuite(SlaprunnerTestSuite):
       try:
         if "erp5" == self._getCreatedERP5SiteId():
           break
-      except:
+      except Exception:
         self.logger.info("Fail to connect to erp5.... wait a bit longer")
         pass
...
@@ -167,7 +167,7 @@ class GitlabTestSuite(SlaprunnerTestSuite):
     while loop < 3:
       try:
         self._connectToGitlab(url=self.backend_url)
-      except Exception, e:
+      except Exception as e:
         if loop == 2:
           raise
         self.logger.warning(str(e))
...
@@ -32,7 +32,7 @@ import logging
 import random
 import string
 import time
-import urllib
+from six.moves.urllib.request import urlopen
 logger = logging.getLogger('KVMResiliencyTest')
@@ -45,7 +45,7 @@ def fetchKey(ip):
   new_key = None
   for i in range(0, 10):
     try:
-      new_key = urllib.urlopen('http://%s:10080/get' % ip).read().strip()
+      new_key = urlopen('http://%s:10080/get' % ip).read().strip()
       break
     except IOError:
       logger.error('Server in new KVM does not answer.')
@@ -148,7 +148,7 @@ class KVMTestSuite(ResiliencyTestSuite):
     for i in range(0, 60):
       failure = False
       try:
-        connection = urllib.urlopen('http://%s:10080/set?key=%s' % (self.ip, self.key))
+        connection = urlopen('http://%s:10080/set?key=%s' % (self.ip, self.key))
         if connection.getcode() is 200:
           break
         else:
...
@@ -34,7 +34,7 @@ import os
 import subprocess
 import sys
 import time
-import urllib2
+from six.moves.urllib.request import urlopen
 UNIT_TEST_ERP5TESTNODE = 'UnitTest'
@@ -85,13 +85,13 @@ class ResiliencyTestSuite(object):
     takeover_url = root_partition_parameter_dict['takeover-%s-%s-url' % (namebase, target_clone)]
     takeover_password = root_partition_parameter_dict['takeover-%s-%s-password' % (namebase, target_clone)]
     # Connect to takeover web interface
-    takeover_page_content = urllib2.urlopen(takeover_url).read()
+    takeover_page_content = urlopen(takeover_url).read()
     # Wait for importer script to be not running
     while 'Importer script(s) of backup in progress: True' in takeover_page_content:
       time.sleep(10)
-      takeover_page_content = urllib2.urlopen(takeover_url).read()
+      takeover_page_content = urlopen(takeover_url).read()
     # Do takeover
-    takeover_result = urllib2.urlopen('%s?password=%s' % (takeover_url, takeover_password)).read()
+    takeover_result = urlopen('%s?password=%s' % (takeover_url, takeover_password)).read()
     if 'Error' in takeover_result:
       raise Exception('Error while doing takeover: %s' % takeover_result)
@@ -214,7 +214,8 @@ class ResiliencyTestSuite(object):
       if 'monitor' in promise:
         continue
       try:
-        process = subprocess.check_output(os.path.join(promise_directory, promise))
+        subprocess.check_output(os.path.join(promise_directory, promise),
+                                stderr=subprocess.STDOUT)
       except subprocess.CalledProcessError as e:
         self.logger.error('ERROR : promise "%s" failed with output :\n%s', promise, e.output)
         return False
...
@@ -29,15 +29,16 @@
 from .resiliencytestsuite import ResiliencyTestSuite
 import base64
-import cookielib
+from six.moves import http_cookiejar as cookielib
 import json
 from lxml import etree
 import random
 import ssl
 import string
 import time
-import urllib2
-import urllib
+from six.moves.urllib.request import HTTPCookieProcessor, HTTPSHandler, \
+                                     build_opener
+from six.moves.urllib.error import HTTPError
 class NotHttpOkException(Exception):
   pass
@@ -52,9 +53,9 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
     cookie_jar = cookielib.CookieJar()
     ssl_context = ssl._create_unverified_context()
-    self._opener_director = urllib2.build_opener(
-      urllib2.HTTPCookieProcessor(cookie_jar),
-      urllib2.HTTPSHandler(context=ssl_context)
+    self._opener_director = build_opener(
+      HTTPCookieProcessor(cookie_jar),
+      HTTPSHandler(context=ssl_context)
     )
     ResiliencyTestSuite.__init__(self, *args, **kwargs)
@@ -95,7 +96,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
       if result.getcode() is not 200:
         raise NotHttpOkException(result.getcode())
       return result.read()
-    except urllib2.HTTPError:
+    except HTTPError:
       self.logger.error('Error when contacting slaprunner at URL: {}'.format(url))
       raise
@@ -164,7 +165,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
     """
     try:
       return self._connectToSlaprunner(resource='isSRReady')
-    except (NotHttpOkException, urllib2.HTTPError) as error:
+    except (NotHttpOkException, HTTPError) as error:
       # The nginx frontend might timeout before software release is finished.
       self.logger.warning('Problem occured when contacting the server: %s' % error)
       return -1
@@ -187,7 +188,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
     self.logger.info('Building the Software Release...')
     try:
       self._connectToSlaprunner(resource='runSoftwareProfile')
-    except (NotHttpOkException, urllib2.HTTPError):
+    except (NotHttpOkException, HTTPError):
       # The nginx frontend might timeout before software release is finished.
       pass
@@ -197,7 +198,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
     self.logger.info('Deploying instance...')
     try:
       self._connectToSlaprunner(resource='runInstanceProfile')
-    except (NotHttpOkException, urllib2.HTTPError):
+    except (NotHttpOkException, HTTPError):
       # The nginx frontend might timeout before someftware release is finished.
       pass
     while True:
@@ -219,7 +220,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
       if data['code'] == 0:
         self.logger.warning(data['result'])
-    except (NotHttpOkException, urllib2.HTTPError):
+    except (NotHttpOkException, HTTPError):
       # cloning can be very long.
       # XXX: quite dirty way to check.
       while self._connectToSlaprunner('getProjectStatus', data='project=workspace/slapos').find('On branch master') == -1:
...
-from __future__ import print_function
+from __future__ import division, print_function
 import argparse
 import itertools
@@ -11,6 +11,7 @@ import time
 from datetime import datetime
 from .runner_utils import *
+from six.moves import map
 os.environ['LC_ALL'] = 'C'
 os.umask(0o77)
@@ -102,7 +103,7 @@ def getBackupFilesModifiedDuringExportList(config, export_start_date):
   export_time = time.time() - export_start_date
   # find all files that were modified during export
   modified_files = subprocess.check_output((
-    'find', 'instance', '-cmin', str(export_time / 60.), '-type', 'f', '-path', '*/srv/backup/*'
+    'find', 'instance', '-cmin', str(export_time / 60), '-type', 'f', '-path', '*/srv/backup/*'
   ))
   if not modified_files:
     return ()
...
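Note: with `from __future__ import division` added above, / performs true division on Python 2 as well, so dropping the trailing dot in `60.` does not change the value passed to find -cmin. A short illustration:

  from __future__ import division

  export_time = 90  # seconds, illustrative value
  # True division gives 1.5 on both Python 2 and 3; its str() is what ends
  # up as the -cmin argument.
  print(str(export_time / 60))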
@@ -7,7 +7,7 @@ import md5
 import os
 import re
 import shutil
-import urllib
+from six.moves.urllib.parse import unquote
 import zipfile
 import fnmatch
@@ -22,7 +22,7 @@ class FileBrowser(object):
     self.config = config
   def _realdir(self, dir):
-    realdir = realpath(self.config, urllib.unquote(dir))
+    realdir = realpath(self.config, unquote(dir))
     if not realdir:
       raise NameError('Could not load directory %s: Permission denied' % dir)
     return realdir
@@ -45,7 +45,7 @@ class FileBrowser(object):
     """List elements of directory 'dir' taken"""
     html = 'var gsdirs = [], gsfiles = [];'
-    dir = urllib.unquote(dir)
+    dir = unquote(dir)
     # XXX-Marco 'dir' and 'all' should not shadow builtin names
     realdir = realpath(self.config, dir)
     if not realdir:
@@ -74,7 +74,7 @@ class FileBrowser(object):
     return html
   def fancylistDirs(self, dir, key, listfiles, all=False):
-    dir = urllib.unquote(dir)
+    dir = unquote(dir)
     realdir = realpath(self.config, dir)
     if not realdir:
       raise NameError('Could not load directory %s: Permission denied' % dir)
@@ -106,7 +106,7 @@ class FileBrowser(object):
     realdir = self._realdir(dir)
     folder = os.path.join(realdir, filename)
     if not os.path.exists(folder):
-      os.mkdir(folder, 0744)
+      os.mkdir(folder, 0o744)
       return "{result: '1'}"
     else:
       return "{result: '0'}"
@@ -125,7 +125,7 @@ class FileBrowser(object):
     """Delete a list of files or directories"""
     # XXX-Marco do not shadow 'dir'
     realdir = self._realdir(dir)
-    lfiles = urllib.unquote(files).split(',,,')
+    lfiles = unquote(files).split(',,,')
     try:
       # XXX-Marco do not shadow 'file'
       for item in lfiles:
@@ -147,7 +147,7 @@ class FileBrowser(object):
   def copyItem(self, dir, files, del_source=False):
     """Copy a list of files or directory to dir"""
     realdir = self._realdir(dir)
-    lfiles = urllib.unquote(files).split(',,,')
+    lfiles = unquote(files).split(',,,')
     try:
       # XXX-Marco do not shadow 'file'
       for file in lfiles:
@@ -174,7 +174,7 @@ class FileBrowser(object):
   def rename(self, dir, filename, newfilename):
     """Rename file or directory to dir/filename"""
     realdir = self._realdir(dir)
-    realfile = realpath(self.config, urllib.unquote(filename))
+    realfile = realpath(self.config, unquote(filename))
     if not realfile:
       raise NameError('Could not load directory %s: Permission denied' % filename)
     tofile = os.path.join(realdir, newfilename)
@@ -208,7 +208,7 @@ class FileBrowser(object):
   def downloadFile(self, dir, filename):
     """Download file dir/filename"""
     realdir = self._realdir(dir)
-    file = os.path.join(realdir, urllib.unquote(filename))
+    file = os.path.join(realdir, unquote(filename))
     if not os.path.exists(file):
       raise NameError('NOT ALLOWED OPERATION : File or directory does not exist %s'
                       % os.path.join(dir, filename))
@@ -255,8 +255,8 @@ class FileBrowser(object):
   def readFile(self, dir, filename, truncate=False):
     """Read file dir/filename and return content"""
-    realfile = realpath(self.config, os.path.join(urllib.unquote(dir),
-                                                  urllib.unquote(filename)))
+    realfile = realpath(self.config, os.path.join(unquote(dir),
+                                                  unquote(filename)))
     if not realfile:
       raise NameError('Could not load directory %s: Permission denied' % dir)
     if not isText(realfile):
...
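Note: the old-style octal literal 0744 is a syntax error on Python 3; 0o744 is accepted by Python 2.6+ and 3, and urllib.unquote moved to urllib.parse.unquote (wrapped here by six.moves). A minimal sketch using a temporary directory (the encoded name is only an example):

  import os
  import tempfile
  from six.moves.urllib.parse import unquote

  name = unquote('my%20folder')            # -> 'my folder'
  folder = os.path.join(tempfile.mkdtemp(), name)
  os.mkdir(folder, 0o744)                  # 0o744 parses on Python 2 and 3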
@@ -2,13 +2,12 @@
 # vim: set et sts=2:
 # pylint: disable-msg=W0311,C0301,C0103,C0111,R0904,R0903
-import ConfigParser
+from six.moves import configparser
 import datetime
 import flask
 import logging
 import logging.handlers
 import os
-import urlparse
 from slapos.htpasswd import HtpasswdFile
 from slapos.runner.process import setHandler
 import sys
@@ -36,7 +35,7 @@ class Config:
     self.configuration_file_path = os.path.abspath(os.getenv('RUNNER_CONFIG'))
     # Load configuration file
-    configuration_parser = ConfigParser.SafeConfigParser()
+    configuration_parser = configparser.SafeConfigParser()
     configuration_parser.read(self.configuration_file_path)
     for section in ("slaprunner", "slapos", "slapproxy", "slapformat",
@@ -144,7 +143,7 @@ def serve(config):
       result = cloneRepo(repo_url, repository_path)
       if branch_name:
         switchBranch(repository_path, branch_name)
-    except GitCommandError, e:
+    except GitCommandError as e:
       app.logger.warning('Error while cloning default repository: %s' % str(e))
       traceback.print_exc()
   # Start slapproxy here when runner is starting
@@ -152,10 +151,10 @@ def serve(config):
   startProxy(app.config)
   app.logger.info('Running slapgrid...')
   if app.config['auto_deploy_instance'] in TRUE_VALUES:
-    import thread
+    from six.moves import _thread
     # XXX-Nicolas: Hack to be sure that supervisord has started
     # before any communication with it, so that gunicorn doesn't exit
-    thread.start_new_thread(waitForRun, (app.config,))
+    _thread.start_new_thread(waitForRun, (app.config,))
   config.logger.info('Done.')
   app.wsgi_app = ProxyFix(app.wsgi_app)
@@ -166,7 +165,7 @@ def waitForRun(config):
 def getUpdatedParameter(self, var):
-  configuration_parser = ConfigParser.SafeConfigParser()
+  configuration_parser = configparser.SafeConfigParser()
   configuration_file_path = os.path.abspath(os.getenv('RUNNER_CONFIG'))
   configuration_parser.read(configuration_file_path)
...
@@ -55,7 +55,7 @@ def saveBuildAndRunParams(config, params):
   about how you got the parameters"""
   json_file = os.path.join(config['etc_dir'], 'config.json')
   with open(json_file, "w") as f:
-    f.write(json.dumps(params))
+    json.dump(params, f)
 def html_escape(text):
@@ -70,7 +70,8 @@ def getSession(config):
   """
   user_path = os.path.join(config['etc_dir'], '.htpasswd')
   if os.path.exists(user_path):
-    return open(user_path).read().split(';')
+    with open(user_path) as f:
+      return f.read().split(';')
 def checkUserCredential(config, username, password):
   htpasswdfile = os.path.join(config['etc_dir'], '.htpasswd')
@@ -125,8 +126,8 @@ def getCurrentSoftwareReleaseProfile(config):
   Returns used Software Release profile as a string.
   """
   try:
-    software_folder = open(
-        os.path.join(config['etc_dir'], ".project")).read().rstrip()
+    with open(os.path.join(config['etc_dir'], ".project")) as f:
+      software_folder = f.read().rstrip()
     return realpath(
         config, os.path.join(software_folder, config['software_profile']))
   # XXXX No Comments
@@ -141,9 +142,11 @@ def requestInstance(config, software_type=None):
   software_type_path = os.path.join(config['etc_dir'], ".software_type.xml")
   if software_type:
     # Write it to conf file for later use
-    open(software_type_path, 'w').write(software_type)
+    with open(software_type_path, 'w') as f:
+      f.write(software_type)
   elif os.path.exists(software_type_path):
-    software_type = open(software_type_path).read().rstrip()
+    with open(software_type_path) as f:
+      software_type = f.read().rstrip()
   else:
     software_type = 'default'
@@ -261,7 +264,8 @@ def slapgridResultToFile(config, step, returncode, datetime):
   filename = step + "_info.json"
   file = os.path.join(config['runner_workdir'], filename)
   result = {'last_build':datetime, 'success':returncode}
-  open(file, "w").write(json.dumps(result))
+  with open(file, "w") as f:
+    json.dump(result, f)
 def getSlapgridResult(config, step):
@@ -359,7 +363,8 @@ def config_SR_folder(config):
   for path in os.listdir(config['software_link']):
     cfg_path = os.path.join(config['software_link'], path, config_name)
     if os.path.exists(cfg_path):
-      cfg = open(cfg_path).read().split("#")
+      with open(cfg_path) as f:
+        cfg = f.read().split("#")
       if len(cfg) != 2:
         continue  # there is a broken config file
       software_link_list.append(cfg[1])
@@ -382,7 +387,8 @@ def loadSoftwareRList(config):
   for path in os.listdir(config['software_link']):
     cfg_path = os.path.join(config['software_link'], path, config_name)
     if os.path.exists(cfg_path):
-      cfg = open(cfg_path).read().split("#")
+      with open(cfg_path) as f:
+        cfg = f.read().split("#")
       if len(cfg) != 2:
         continue  # there is a broken config file
       sr_list.append(dict(md5=cfg[1], path=cfg[0], title=path))
@@ -409,7 +415,8 @@ def getProfilePath(projectDir, profile):
   """
   if not os.path.exists(os.path.join(projectDir, ".project")):
     return False
-  projectFolder = open(os.path.join(projectDir, ".project")).read()
+  with open(os.path.join(projectDir, ".project")) as f:
+    projectFolder = f.read()
   return os.path.join(projectFolder, profile)
...@@ -438,7 +445,7 @@ def svcStopAll(config): ...@@ -438,7 +445,7 @@ def svcStopAll(config):
try: try:
return Popen([config['slapos'], 'node', 'supervisorctl', '--cfg', config['configuration_file_path'], return Popen([config['slapos'], 'node', 'supervisorctl', '--cfg', config['configuration_file_path'],
'stop', 'all']).communicate()[0] 'stop', 'all']).communicate()[0]
except: except Exception:
pass pass
def svcStartAll(config): def svcStartAll(config):
...@@ -446,7 +453,7 @@ def svcStartAll(config): ...@@ -446,7 +453,7 @@ def svcStartAll(config):
try: try:
return Popen([config['slapos'], 'node', 'supervisorctl', '--cfg', config['configuration_file_path'], return Popen([config['slapos'], 'node', 'supervisorctl', '--cfg', config['configuration_file_path'],
'start', 'all']).communicate()[0] 'start', 'all']).communicate()[0]
except: except Exception:
pass pass
def removeInstanceRootDirectory(config): def removeInstanceRootDirectory(config):
...@@ -567,7 +574,8 @@ def configNewSR(config, projectpath): ...@@ -567,7 +574,8 @@ def configNewSR(config, projectpath):
sup_process.stopProcess(config, 'slapgrid-sr') sup_process.stopProcess(config, 'slapgrid-sr')
logger.warning("User opened a new SR. Removing all instances...") logger.warning("User opened a new SR. Removing all instances...")
removeCurrentInstance(config) removeCurrentInstance(config)
open(os.path.join(config['etc_dir'], ".project"), 'w').write(projectpath) with open(os.path.join(config['etc_dir'], ".project"), 'w') as f:
f.write(projectpath)
return True return True
else: else:
return False return False
...@@ -593,12 +601,15 @@ def newSoftware(folder, config, session): ...@@ -593,12 +601,15 @@ def newSoftware(folder, config, session):
softwareContent = "" softwareContent = ""
try: try:
softwareContent = urlopen(software).read() softwareContent = urlopen(software).read()
except: except Exception:
#Software.cfg and instance.cfg content will be empty #Software.cfg and instance.cfg content will be empty
pass pass
open(os.path.join(folderPath, config['software_profile']), 'w').write(softwareContent) with open(os.path.join(folderPath, config['software_profile']), 'w') as f:
open(os.path.join(folderPath, config['instance_profile']), 'w').write("") f.write(softwareContent)
open(os.path.join(basedir, ".project"), 'w').write(folder + "/") with open(os.path.join(folderPath, config['instance_profile']), 'w') as f:
pass
with open(os.path.join(basedir, ".project"), 'w') as f:
f.write(folder + "/")
#Clean sapproxy Database #Clean sapproxy Database
stopProxy(config) stopProxy(config)
removeProxyDb(config) removeProxyDb(config)
...@@ -633,7 +644,8 @@ def getProjectTitle(config): ...@@ -633,7 +644,8 @@ def getProjectTitle(config):
if instance_name: if instance_name:
instance_name = '%s - ' % instance_name instance_name = '%s - ' % instance_name
if os.path.exists(conf): if os.path.exists(conf):
project = open(conf, "r").read().split("/") with open(conf, "r") as f:
project = f.read().split("/")
software = project[-2] software = project[-2]
return '%s%s (%s)' % (instance_name, software, '/'.join(project[:-2])) return '%s%s (%s)' % (instance_name, software, '/'.join(project[:-2]))
return "%sNo Profile" % instance_name return "%sNo Profile" % instance_name
...@@ -643,7 +655,8 @@ def getSoftwareReleaseName(config): ...@@ -643,7 +655,8 @@ def getSoftwareReleaseName(config):
"""Get the name of the current Software Release""" """Get the name of the current Software Release"""
sr_profile = os.path.join(config['etc_dir'], ".project") sr_profile = os.path.join(config['etc_dir'], ".project")
if os.path.exists(sr_profile): if os.path.exists(sr_profile):
project = open(sr_profile, "r").read().split("/") with open(sr_profile, "r") as f:
project = f.read().split("/")
software = project[-2] software = project[-2]
return software.replace(' ', '_') return software.replace(' ', '_')
return None return None
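Both helpers read the .project file that is written with a trailing slash (folder + "/"), so split("/") ends with an empty string and project[-2] is the software folder name. A worked example with a hypothetical content:

project = "workspace/slapos/software/erp5/".split("/")
# project == ['workspace', 'slapos', 'software', 'erp5', '']
software = project[-2]             # 'erp5'
prefix = '/'.join(project[:-2])    # 'workspace/slapos/software'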
...@@ -731,7 +744,7 @@ def readFileFrom(f, lastPosition, limit=20000): ...@@ -731,7 +744,7 @@ def readFileFrom(f, lastPosition, limit=20000):
# XXX-Marco do not shadow 'bytes' # XXX-Marco do not shadow 'bytes'
bytes = f.tell() bytes = f.tell()
block = -1 block = -1
data = "" data = b""
length = bytes length = bytes
truncated = False # True if a part of log data has been truncated truncated = False # True if a part of log data has been truncated
if (lastPosition <= 0 and length > limit) or (length - lastPosition > limit): if (lastPosition <= 0 and length > limit) or (length - lastPosition > limit):
...@@ -753,7 +766,6 @@ def readFileFrom(f, lastPosition, limit=20000): ...@@ -753,7 +766,6 @@ def readFileFrom(f, lastPosition, limit=20000):
data = f.read(BUFSIZ - margin) + data data = f.read(BUFSIZ - margin) + data
bytes -= BUFSIZ bytes -= BUFSIZ
block -= 1 block -= 1
f.close()
return { return {
'content': data, 'content': data,
'position': length, 'position': length,
...@@ -761,16 +773,14 @@ def readFileFrom(f, lastPosition, limit=20000): ...@@ -761,16 +773,14 @@ def readFileFrom(f, lastPosition, limit=20000):
} }
text_range = str2bytes(''.join(map(chr, [7, 8, 9, 10, 12, 13, 27]
+ list(range(0x20, 0x100)))))
def isText(file): def isText(file):
"""Return True if the mimetype of file is Text""" """Return True if the mimetype of file is Text"""
if not os.path.exists(file):
return False
text_range = str2bytes(''.join(map(chr, [7, 8, 9, 10, 12, 13, 27]
+ list(range(0x20, 0x100)))))
is_binary_string = lambda bytes: bool(bytes.translate(None, text_range))
try: try:
return not is_binary_string(open(file).read(1024)) with open(file, 'rb') as f:
except: return not f.read(1024).translate(None, text_range)
except Exception:
return False return False
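The rewritten isText reads the first 1024 bytes in binary mode and deletes every byte found in text_range; whatever remains is a byte outside the printable or whitespace set, so the file is treated as binary. A small standalone sketch of the same heuristic:

text_range = bytes(bytearray([7, 8, 9, 10, 12, 13, 27] + list(range(0x20, 0x100))))

def looks_like_text(sample):
    # sample is bytes; translate(None, text_range) deletes the allowed bytes,
    # so an empty result means nothing suspicious was found.
    return not sample.translate(None, text_range)

# looks_like_text(b"hello world\n")   -> True
# looks_like_text(b"\x00\x01binary")  -> False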
...@@ -780,15 +790,15 @@ def md5sum(file): ...@@ -780,15 +790,15 @@ def md5sum(file):
if os.path.isdir(file): if os.path.isdir(file):
return False return False
try: try:
fh = open(file, 'rb')
m = hashlib.md5() m = hashlib.md5()
with open(file, 'rb') as fh:
while True: while True:
data = fh.read(8192) data = fh.read(8192)
if not data: if not data:
break break
m.update(data) m.update(data)
return m.hexdigest() return m.hexdigest()
except: except Exception:
return False return False
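md5sum now opens the file inside a with block and hashes it in 8 KiB chunks, so large build artifacts never need to fit in memory. The same chunked pattern can be written compactly with iter() and a sentinel; this is only an illustration, not the code above:

import hashlib

def md5_of(path, chunk_size=8192):
    # Illustrative variant: iter() keeps calling fh.read(chunk_size)
    # until it returns b'' (end of file).
    m = hashlib.md5()
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            m.update(chunk)
    return m.hexdigest()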
...@@ -949,10 +959,12 @@ def setMiniShellHistory(config, command): ...@@ -949,10 +959,12 @@ def setMiniShellHistory(config, command):
command = command + "\n" command = command + "\n"
history_file = config['minishell_history_file'] history_file = config['minishell_history_file']
if os.path.exists(history_file): if os.path.exists(history_file):
history = open(history_file, 'r').readlines() with open(history_file, 'r') as f:
history = f.readlines()
if len(history) >= history_max_size: if len(history) >= history_max_size:
del history[0] del history[0]
else: else:
history = [] history = []
history.append(command) history.append(command)
open(history_file, 'w+').write(''.join(history)) with open(history_file, 'w') as f:
f.write(''.join(history))
...@@ -15,6 +15,7 @@ from flask import (Flask, request, redirect, url_for, render_template, ...@@ -15,6 +15,7 @@ from flask import (Flask, request, redirect, url_for, render_template,
g, flash, jsonify, session, abort, send_file) g, flash, jsonify, session, abort, send_file)
import slapos import slapos
from slapos.util import bytes2str
from slapos.runner.utils import (checkSoftwareFolder, configNewSR, checkUserCredential, from slapos.runner.utils import (checkSoftwareFolder, configNewSR, checkUserCredential,
createNewUser, getBuildAndRunParams, createNewUser, getBuildAndRunParams,
getProfilePath, getSlapgridResult, getProfilePath, getSlapgridResult,
...@@ -253,13 +254,13 @@ def getFileLog(): ...@@ -253,13 +254,13 @@ def getFileLog():
raise IOError raise IOError
if not isText(file_path): if not isText(file_path):
content = "Can not open binary file, please select a text file!" content = "Can not open binary file, please select a text file!"
with open(file_path) as f:
if 'truncate' in request.form: if 'truncate' in request.form:
content = tail(open(file_path), int(request.form['truncate'])) content = tail(f, int(request.form['truncate']))
else: else:
with open(file_path) as f:
content = f.read() content = f.read()
return jsonify(code=1, result=html_escape(content)) return jsonify(code=1, result=html_escape(content))
except: except Exception:
return jsonify(code=0, result="Warning: Log file doesn't exist yet or empty log!!") return jsonify(code=0, result="Warning: Log file doesn't exist yet or empty log!!")
...@@ -505,8 +506,8 @@ def slapgridResult(): ...@@ -505,8 +506,8 @@ def slapgridResult():
if request.form['log'] in ['software', 'instance']: if request.form['log'] in ['software', 'instance']:
log_file = request.form['log'] + "_log" log_file = request.form['log'] + "_log"
if os.path.exists(app.config[log_file]): if os.path.exists(app.config[log_file]):
log_result = readFileFrom(open(app.config[log_file]), with open(app.config[log_file], 'rb') as f:
int(request.form['position'])) log_result = bytes2str(readFileFrom(f, int(request.form['position'])))
build_result = getSlapgridResult(app.config, 'software') build_result = getSlapgridResult(app.config, 'software')
run_result = getSlapgridResult(app.config, 'instance') run_result = getSlapgridResult(app.config, 'instance')
software_info = {'state':software_state, software_info = {'state':software_state,
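Because the log file is now opened in binary mode, the data assembled by readFileFrom comes back as bytes (its buffer starts as b"" above), and bytes2str from slapos.util is used to turn it back into text before it goes out as JSON. A hedged sketch of the conversion step in isolation, assuming bytes2str simply decodes bytes to str:

from slapos.util import bytes2str

def decode_log_chunk(chunk):
    # chunk is the dict returned by readFileFrom; only 'content' is bytes.
    # Illustrative helper so jsonify can serialise the result.
    chunk['content'] = bytes2str(chunk['content'])
    return chunk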
...@@ -717,7 +718,7 @@ def fileBrowser(): ...@@ -717,7 +718,7 @@ def fileBrowser():
filename) filename)
try: try:
return send_file(result, attachment_filename=filename, as_attachment=True) return send_file(result, attachment_filename=filename, as_attachment=True)
except: except Exception:
abort(404) abort(404)
elif opt == 9: elif opt == 9:
result = file_request.readFile(dir, filename, False) result = file_request.readFile(dir, filename, False)
......
...@@ -4,6 +4,7 @@ import argparse ...@@ -4,6 +4,7 @@ import argparse
import sys import sys
import os import os
import signal import signal
from six.moves import map
def killpidfromfile(): def killpidfromfile():
"""deprecated: use below kill() instead""" """deprecated: use below kill() instead"""
...@@ -50,7 +51,7 @@ def kill(): ...@@ -50,7 +51,7 @@ def kill():
parser.error('Unknown signal name %s' % args.signal) parser.error('Unknown signal name %s' % args.signal)
pid = args.pidfile and [int(open(p).read()) for p in args.pidfile] pid = args.pidfile and [int(open(p).read()) for p in args.pidfile]
exe = args.exe and map(os.path.realpath, args.exe) exe = args.exe and list(map(os.path.realpath, args.exe))
import psutil import psutil
r = 1 r = 1
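The list(...) wrapper matters because map from six.moves (like Python 3's built-in map) returns a lazy iterator, which can only be consumed once; materialising it as a list keeps exe safe to iterate for every candidate process. A two-line illustration:

from six.moves import map

lazy = map(str.upper, ['a', 'b'])
list(lazy)   # ['A', 'B']
list(lazy)   # [] ; the iterator is exhausted after one pass

safe = list(map(str.upper, ['a', 'b']))
list(safe)   # ['A', 'B'], repeatable as often as needed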
......
...@@ -93,7 +93,7 @@ from slapos.grid.promise import GenericPromise ...@@ -93,7 +93,7 @@ from slapos.grid.promise import GenericPromise
class RunPromise(GenericPromise): class RunPromise(GenericPromise):
def __init__(self, config): def __init__(self, config):
GenericPromise.__init__(self, config) super(RunPromise, self).__init__(config)
self.setPeriodicity(minute=%(periodicity)s) self.setPeriodicity(minute=%(periodicity)s)
def sense(self): def sense(self):
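The promise template now calls super(RunPromise, self).__init__(config) instead of naming GenericPromise directly, which resolves the parent through the method resolution order and stays correct if the class hierarchy ever grows. A minimal sketch with stand-in classes (names are illustrative):

class Base(object):
    def __init__(self, config):
        self.config = config

class Promise(Base):
    def __init__(self, config):
        # Equivalent to Base.__init__(self, config), but resolved through
        # the MRO, as in the template above.
        super(Promise, self).__init__(config)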
......
from __future__ import print_function
import ZODB.FileStorage import ZODB.FileStorage
import ZODB.serialize import ZODB.serialize
import argparse import argparse
...@@ -16,24 +17,24 @@ def run(): ...@@ -16,24 +17,24 @@ def run():
point = now - (3600 * 24 * args.days) point = now - (3600 * 24 * args.days)
print 'Now is %s' % time.asctime(time.localtime(now)) print('Now is', time.asctime(time.localtime(now)))
print 'Will pack until %s' % time.asctime(time.localtime(point)) print('Will pack until', time.asctime(time.localtime(point)))
failures = 0 failures = 0
for f in args.files: for f in args.files:
b = time.time() b = time.time()
print 'Trying to pack %r' % f print('Trying to pack %r' % f)
try: try:
pack(point, f) pack(point, f)
except Exception: except Exception:
print 'Failed to pack %r:' % f print('Failed to pack %r:' % f)
traceback.print_exc() traceback.print_exc()
failures += 1 failures += 1
print 'Finished %s in %.3fs' % (f, time.time() - b) print('Finished %s in %.3fs' % (f, time.time() - b))
if failures: if failures:
print 'Failed files: %s' % failures print('Failed files: %s' % failures)
return failures return failures
else: else:
print 'All files successfully packed.' print('All files successfully packed.')
return 0 return 0
def pack(point, f): def pack(point, f):
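With from __future__ import print_function at the top, the same print(...) calls run under both Python 2 and 3. Note that print('Now is', value) passes two arguments, which print joins with a space, so it produces the same output as the old 'Now is %s' formatting. A tiny illustration:

from __future__ import print_function
import time

stamp = time.asctime(time.localtime(time.time()))
print('Now is %s' % stamp)   # one pre-formatted string
print('Now is', stamp)       # two arguments, joined with a space; same output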
......