Commit 1c8269b2 authored by Bryton Lacquement's avatar Bryton Lacquement 🚪 Committed by Julien Muchembled

Add support for Python 3

/reviewed-on nexedi/slapos.toolbox!51
parent eba2c149
......@@ -25,7 +25,7 @@
#
##############################################################################
import ConfigParser
from six.moves import configparser
import argparse
import collections
import json
......@@ -57,7 +57,7 @@ class AutoSTemp(object):
def __init__(self, value):
fd, self.__name = tempfile.mkstemp()
os.write(fd, value)
os.write(fd, value.encode('utf-8'))
os.close(fd)
@property
......@@ -67,7 +67,7 @@ class AutoSTemp(object):
def __del__(self):
self.__unlink(self.__name)
from tester import SoftwareReleaseTester
from .tester import SoftwareReleaseTester
class TestMap(object):
# tell pytest to skip this class (even if name starts with Test)
......@@ -94,7 +94,7 @@ class TestMap(object):
return set(exclude_list + list(self.ran_test_set))
def getGroupList(self):
return self.test_map_dict.keys()
return list(self.test_map_dict)
def dropGroup(self, group):
del self.test_map_dict[group]
......@@ -217,7 +217,7 @@ def main():
logger, log_file = getLogger(log, args.verbose)
configuration = ConfigParser.SafeConfigParser()
configuration = configparser.SafeConfigParser()
configuration.readfp(args.configuration_file)
pidfile = args.pidfile
......
from __future__ import print_function
import datetime
import json
import sys
......@@ -50,16 +52,16 @@ def retryOnNetworkFailure(func):
while True:
try:
return func(*args, **kwargs)
except SAFE_RPC_EXCEPTION_LIST, e:
print 'Network failure: %s , %s' % (sys.exc_info(), e)
except HTTPError, e:
print 'Network failure: %s , %s' % (sys.exc_info(), e)
except ConnectionError, e:
print 'Network failure: %s , %s' % (sys.exc_info(), e)
except slapos.slap.ConnectionError, e:
print 'Network failure: %s , %s' % (sys.exc_info(), e)
print 'Retry method %s in %i seconds' % (func, retry_time)
except SAFE_RPC_EXCEPTION_LIST as e:
print('Network failure: %s , %s' % (sys.exc_info(), e))
except HTTPError as e:
print('Network failure: %s , %s' % (sys.exc_info(), e))
except ConnectionError as e:
print('Network failure: %s , %s' % (sys.exc_info(), e))
except slapos.slap.ConnectionError as e:
print('Network failure: %s , %s' % (sys.exc_info(), e))
print('Retry method %s in %i seconds' % (func, retry_time))
time.sleep(retry_time)
retry_time = min(retry_time*1.5, 640)
......@@ -218,7 +220,7 @@ class SlapOSMasterCommunicator(object):
result = self.hateoas_navigator.GET(url)
result = json.loads(result)
if result['_links'].get('action_object_slap', None) is None:
print result['links']
print(result['links'])
return None
object_link = self.hateoas_navigator.hateoasGetLinkFromLinks(
......
......@@ -27,6 +27,8 @@
#
##############################################################################
from __future__ import print_function
import os, errno
import subprocess
import argparse
......@@ -52,7 +54,7 @@ def build_command(apachedex_executable, output_file,
# Automaticaly replace variable 'date'.
apache_log = logfile.strip() % {'date': today}
if not os.path.exists(apache_log):
print "WARNING: File %s not found..." % apache_log
print("WARNING: File %s not found..." % apache_log)
continue
log_list.append(apache_log)
if not log_list:
......@@ -81,7 +83,7 @@ def main():
base_url = args.base_url.strip()
if not os.path.exists(output_folder) or not os.path.isdir(output_folder):
print "ERROR: Output folder is not a directory. Exiting..."
print("ERROR: Output folder is not a directory. Exiting...")
return 1
today = date.today().strftime("%Y-%m-%d")
......@@ -93,21 +95,23 @@ def main():
args.apache_log_list,
config)
except ValueError as e:
print e
print(e)
return 1
process_handler = subprocess.Popen(argument_list,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
)
stdout, stderr = process_handler.communicate()
if process_handler.returncode != 0:
if stderr:
print stderr
print(stderr)
return 1
with open(output_file, 'r') as f:
print base_url + '/ApacheDex-%s.html' % today
# Check that output_file is a readable file.
with open(output_file, 'r'):
print(base_url + '/ApacheDex-%s.html' % today)
return 0
if __name__ == "__main__":
......
......@@ -27,6 +27,8 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from __future__ import print_function
import os
import shutil
import sys
......@@ -35,6 +37,7 @@ import shlex
from subprocess import Popen, PIPE, STDOUT
import logging
from datetime import datetime
import six
_MARKER = []
WGET = 'wget'
......@@ -428,7 +431,7 @@ class HTTPCacheCheckerTestSuite(object):
if self.report_dict:
report_message_list = ['*Errors*:']
for url, message_list in self.report_dict.iteritems():
for url, message_list in six.iteritems(self.report_dict):
unique_message_list = []
for message in message_list:
if message not in unique_message_list:
......@@ -447,7 +450,7 @@ class HTTPCacheCheckerTestSuite(object):
from optparse import OptionParser
import ConfigParser
from six.moves.configparser import RawConfigParser
def _formatConfiguration(configuration):
""" format the configuration"""
......@@ -463,11 +466,11 @@ def web_checker_utility():
(options, args) = parser.parse_args()
if len(args) != 1 :
print parser.print_help()
print(parser.print_help())
parser.error('incorrect number of arguments')
config_path = args[0]
config = ConfigParser.RawConfigParser()
config = RawConfigParser()
config.read(config_path)
working_directory = config.get('web_checker', 'working_directory')
......@@ -525,7 +528,7 @@ def web_checker_utility():
file_object.write(result)
file_object.close()
else:
print result
print(result)
if __name__ == '__main__':
sys.exit(web_checker_utility())
# -*- coding: utf-8 -*-
import ConfigParser
from six.moves import configparser
import argparse
import gdbm
from six.moves import dbm_gnu as gdbm
import sys
import os
......@@ -41,7 +41,7 @@ def main():
run(args)
def run(args):
slapos_conf = ConfigParser.ConfigParser()
slapos_conf = configparser.ConfigParser()
slapos_conf.read(args.configuration_file)
current_binary = os.path.join(os.getcwd(), sys.argv[0])
......@@ -52,7 +52,7 @@ def run(args):
partition_base_name = slapos_conf.get('slapformat', 'partition_base_name')
try:
bridge_name = slapos_conf.get('slapformat', 'interface_name')
except ConfigParser.NoOptionError:
except configparser.NoOptionError:
bridge_name = slapos_conf.get('slapformat', 'bridge_name')
instance_root = slapos_conf.get('slapos', 'instance_root')
partition_base_path = os.path.join(instance_root, partition_base_name)
......@@ -61,7 +61,7 @@ def run(args):
logging.basicConfig(level=logging.getLevelName(args.log[0]))
database = gdbm.open(args.database, 'c', 0600)
database = gdbm.open(args.database, 'c', 0o600)
try:
process.main(sr_directory, partition_list, database, bridge_name)
finally:
......
......@@ -28,7 +28,7 @@
import argparse
import errno
import gdbm
from six.moves import dbm_gnu as gdbm
import json
from lockfile import LockFile
import logging
......@@ -38,8 +38,8 @@ import signal
import socket
import subprocess
import sys
import SocketServer
import StringIO
from six.moves import socketserver
import io
import threading
# Copied from erp5.util:erp5/util/testnode/ProcessManager.py
......@@ -75,15 +75,15 @@ def subprocess_capture(p, log, log_prefix, get_output=True):
return (p.stdout and ''.join(stdout),
p.stderr and ''.join(stderr))
class EqueueServer(SocketServer.ThreadingUnixStreamServer):
class EqueueServer(socketserver.ThreadingUnixStreamServer):
daemon_threads = True
def __init__(self, *args, **kw):
self.options = kw.pop('equeue_options')
SocketServer.ThreadingUnixStreamServer.__init__(self,
RequestHandlerClass=None,
*args, **kw)
super(EqueueServer, self).__init__(self,
RequestHandlerClass=None,
*args, **kw)
# Equeue Specific elements
self.setLogger(self.options.logfile[0], self.options.loglevel[0])
self.setDB(self.options.database[0])
......@@ -106,7 +106,7 @@ class EqueueServer(SocketServer.ThreadingUnixStreamServer):
self.logger.addHandler(handler)
def setDB(self, database):
self.db = gdbm.open(database, 'cs', 0700)
self.db = gdbm.open(database, 'cs', 0o700)
def _hasTakeoverBeenTriggered(self):
if hasattr(self, 'takeover_triggered_file_path') and \
......@@ -149,7 +149,7 @@ class EqueueServer(SocketServer.ThreadingUnixStreamServer):
# Handle request
self.logger.debug("Connection with file descriptor %d", request.fileno())
request.settimeout(self.options.timeout)
request_string = StringIO.StringIO()
request_string = io.StringIO()
segment = None
try:
while segment != '':
......@@ -181,7 +181,7 @@ class EqueueServer(SocketServer.ThreadingUnixStreamServer):
def remove_existing_file(path):
try:
os.remove(path)
except OSError, e:
except OSError as e:
if e.errno != errno.ENOENT:
raise
......
......@@ -76,7 +76,7 @@ def generateFeed(option):
# Reduces feed if number of items exceeds max_item
if len(item_dict) > option.max_item:
outdated_key_list = sorted_item_dict.keys()[:-option.max_item]
outdated_key_list = list(sorted_item_dict)[:-option.max_item]
for outdated_key in outdated_key_list:
del sorted_item_dict[outdated_key]
deleteFileList(outdated_key_list)
......
......@@ -12,6 +12,7 @@
#
##############################################################################
from __future__ import print_function
import os
import time
......@@ -76,7 +77,7 @@ def run():
result = parser.parse_args()
arguments = dict(result._get_kwargs())
if arguments['token'] == None and arguments['file_token'] == None:
print "lampconfigure: Error: Please specify where condition will be taken, use -d or -f option"
print("lampconfigure: Error: Please specify where condition will be taken, use -d or -f option")
return
setup(arguments)
......@@ -84,7 +85,7 @@ def setup(arguments):
timeout = 5;
while True:
if not checkAction(arguments):
print "Waiting for 3s and retrying"
print("Waiting for 3s and retrying")
time.sleep(3)
continue
time.sleep(timeout)
......@@ -115,9 +116,9 @@ def checkAction(arguments):
user = arguments['mysql_user'],
passwd = arguments['mysql_password'],
db = arguments['token'])
except Exception, ex:
except Exception as e:
#Mysql is not ready yet?...
print ex.message
print(e)
return False
if arguments['table'] == "**":
#only detect if mysql has been started
......@@ -145,7 +146,7 @@ def rename(arguments):
source = os.path.join(arguments['target_directory'], arguments['source'])
destination = os.path.join(arguments['target_directory'], arguments['destination'])
if not os.path.exists(source):
print "Error when moving: '%s': no such file or directory" % source
print("Error when moving: '%s': no such file or directory" % source)
return
os.rename(source, destination)
if arguments['mode'] != None:
......@@ -155,7 +156,7 @@ def delete(arguments):
for path in arguments['delete_target']:
path = os.path.join(arguments['target_directory'], path)
if not os.path.exists(path):
print "Error when deleting: '%s': no such file or directory" % path
print("Error when deleting: '%s': no such file or directory" % path)
continue
if os.path.isdir(path):
shutil.rmtree(path)
......@@ -164,7 +165,7 @@ def delete(arguments):
def run_script(arguments):
script = os.path.join(arguments['target_directory'], arguments['script'])
print 'Running script: %s' % script
print('Running script: %s' % script)
if os.path.exists(script):
import subprocess
#run python script with predefined data
......@@ -176,12 +177,12 @@ def run_script(arguments):
result = subprocess.Popen(data, env={'PYTHONPATH': ':'.join(sys.path)})
result.wait()
else:
print "Error: can not read file '%s'" % script
print("Error: can not read file '%s'" % script)
def run_sql_script(arguments):
script = os.path.join(arguments['target_directory'], arguments['sql_script'])
print 'Running SQL script: %s' % script
print('Running SQL script: %s' % script)
if os.path.exists(script):
conn = MySQLdb.connect(host=arguments['mysql_host'],
port=int(arguments['mysql_port']),
......@@ -196,7 +197,7 @@ def run_sql_script(arguments):
conn.close()
else:
print "Error: can not read file '%s'" % script
print("Error: can not read file '%s'" % script)
......@@ -204,6 +205,6 @@ def chmod(arguments):
for path in arguments['chmod_target']:
path = os.path.join(arguments['target_directory'], path)
if not os.path.exists(path):
print "Error when changing mode: '%s': no such file or directory" % path
print("Error when changing mode: '%s': no such file or directory" % path)
continue
os.chmod(path, int(arguments['mode'], 8))
......@@ -27,6 +27,8 @@
#
##############################################################################
from __future__ import division
import sqlite3
import os
import pwd
......@@ -80,19 +82,17 @@ class ResourceCollect:
table="sqlite_master",
columns='name',
where="type='table' AND name='%s'" % name)
table_exists_result = zip(*check_result_cursor)
if not len(table_exists_result) or table_exists_result[0][0] is None:
return False
return True
r = check_result_cursor.fetchone()
return r and r[0] is not None
def getPartitionCPULoadAverage(self, partition_id, date_scope):
return self.consumption_utils.getPartitionCPULoadAverage(partition_id, date_scope)
def getPartitionUsedMemoryAverage(self, partition_id, date_scope):
return self.consumption_utils.getPartitionUsedMemoryAverage(partition_id, date_scope)/(1024*1024.0)
return self.consumption_utils.getPartitionUsedMemoryAverage(partition_id, date_scope)/(1024*1024)
def getPartitionDiskUsedAverage(self, partition_id, date_scope):
return self.consumption_utils.getPartitionDiskUsedAverage(partition_id, date_scope)/1024.0
return self.consumption_utils.getPartitionDiskUsedAverage(partition_id, date_scope)/1024
def getPartitionConsumption(self, partition_id, where="", date_scope=None, min_time=None, max_time=None):
"""
......@@ -123,10 +123,10 @@ class ResourceCollect:
resource_dict = {
'pid': result[6],
'cpu_percent': round(result[1]/count, 2),
'cpu_time': round((result[2] or 0)/(60.0), 2),
'cpu_time': round((result[2] or 0)/(60), 2),
'cpu_num_threads': round(result[3]/count, 2),
'memory_percent': round(result[4]/count, 2),
'memory_rss': round((result[5] or 0)/(1024*1024.0), 2),
'memory_rss': round((result[5] or 0)/(1024*1024), 2),
'io_rw_counter': round(result[7]/count, 2),
'io_cycles_counter': round(result[8]/count, 2)
}
......@@ -159,38 +159,35 @@ class ResourceCollect:
query_result = self.db.select('user', date_scope, colums,
where="partition='%s' and (time between '%s' and '%s') %s" %
(partition_id, min_time, max_time, where))
result_list = zip(*query_result)
process_dict = memory_dict = io_dict = {}
if len(result_list):
result = result_list
process_dict = {'total_process': result[0][0],
'cpu_percent': round((result[1][0] or 0), 2),
'cpu_time': round((result[2][0] or 0)/(60.0), 2),
'cpu_num_threads': round((result[3][0] or 0), 2),
'date': '%s %s' % (date_scope, min_time)
}
memory_dict = {'memory_percent': round((result[4][0] or 0), 2),
'memory_rss': round((result[5][0] or 0)/(1024*1024.0), 2),
'date': '%s %s' % (date_scope, min_time)
}
io_dict = {'io_rw_counter': round((result[6][0] or 0), 2),
'io_cycles_counter': round((result[7][0] or 0), 2),
'disk_used': 0,
'date': '%s %s' % (date_scope, min_time)
}
if self.has_table('folder'):
disk_result_cursor = self.db.select(
"folder", date_scope,
columns="SUM(disk_used)",
where="partition='%s' and (time between '%s' and '%s') %s" % (
partition_id, min_time, max_time, where
)
result = query_result.fetchone()
process_dict = {'total_process': result[0],
'cpu_percent': round((result[1] or 0), 2),
'cpu_time': round((result[2] or 0)/(60), 2),
'cpu_num_threads': round((result[3] or 0), 2),
'date': '%s %s' % (date_scope, min_time)
}
memory_dict = {'memory_percent': round((result[4] or 0), 2),
'memory_rss': round((result[5] or 0)/(1024*1024), 2),
'date': '%s %s' % (date_scope, min_time)
}
io_dict = {'io_rw_counter': round((result[6] or 0), 2),
'io_cycles_counter': round((result[7] or 0), 2),
'disk_used': 0,
'date': '%s %s' % (date_scope, min_time)
}
if self.has_table('folder'):
disk_result_cursor = self.db.select(
"folder", date_scope,
columns="SUM(disk_used)",
where="partition='%s' and (time between '%s' and '%s') %s" % (
partition_id, min_time, max_time, where
)
disk_used_sum = zip(*disk_result_cursor)
if len(disk_used_sum) and disk_used_sum[0][0] is not None:
io_dict['disk_used'] = round(disk_used_sum[0][0]/1024.0, 2)
)
disk_used_sum, = disk_result_cursor.fetchone()
if disk_used_sum is not None:
io_dict['disk_used'] = round(disk_used_sum/1024, 2)
self.db.close()
return (process_dict, memory_dict, io_dict)
......@@ -252,7 +249,7 @@ def main():
status_file = os.path.join(parser.output_folder, 'monitor_resource.status.json')
if not os.path.exists(parser.collector_db):
print "Collector database not found..."
print("Collector database not found...")
initProcessDataFile(process_file)
initMemoryDataFile(mem_file)
initIODataFile(io_file)
......
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import glob
import json
import ConfigParser
from six.moves import configparser
import time
from datetime import datetime
import base64
import hashlib
import PyRSS2Gen
from slapos.util import bytes2str, str2bytes
def getKey(item):
return item.source.name
......@@ -30,6 +34,8 @@ class MonitorFeed(object):
event_date = item_dict['result']['change-date']
report_date = item_dict['result']['date']
description = item_dict['result'].get('message', '')
guid = base64.b64encode(str2bytes("%s, %s, %s, %s" % (self.hosting_name,
item_dict['title'], has_string, event_date)))
rss_item = PyRSS2Gen.RSSItem(
categories = [item_dict['status']],
source = PyRSS2Gen.Source(item_dict['title'], self.public_url),
......@@ -37,9 +43,7 @@ class MonitorFeed(object):
description = "\n%s" % (description,),
link = self.private_url,
pubDate = event_date,
guid = PyRSS2Gen.Guid(base64.b64encode("%s, %s, %s, %s" % (self.hosting_name,
item_dict['title'], has_string, event_date)),
isPermaLink=False)
guid = PyRSS2Gen.Guid(bytes2str(guid), isPermaLink=False)
)
self.rss_item_list.append(rss_item)
......@@ -69,7 +73,7 @@ def generateStatisticsData(stat_file_path, content):
fstat.write(json.dumps(data_dict))
current_state = ''
if content.has_key('state'):
if 'state' in content:
current_state = '%s, %s, %s, %s' % (
content['date'],
content['state']['success'],
......@@ -131,13 +135,14 @@ def generateMonitoringData(config, public_folder, private_folder, public_url,
promise_status = "OK"
success += 1
tmp_json['result']['change-date'] = tmp_json['result']['date']
if previous_state_dict.has_key(tmp_json['name']):
if tmp_json['name'] in previous_state_dict:
status, change_date, _ = previous_state_dict[tmp_json['name']]
if promise_status == status:
tmp_json['result']['change-date'] = change_date
tmp_json['status'] = promise_status
message_hash = hashlib.md5(tmp_json['result'].get('message', '')).hexdigest()
message_hash = hashlib.md5(
str2bytes(tmp_json['result'].get('message', ''))).hexdigest()
new_state_dict[tmp_json['name']] = [
promise_status,
tmp_json['result']['change-date'],
......@@ -150,9 +155,9 @@ def generateMonitoringData(config, public_folder, private_folder, public_url,
previous_state_dict.get(tmp_json['name']),
public_folder
)
except ValueError, e:
except ValueError as e:
# bad json file
print "ERROR: Bad json file at: %s\n%s" % (file, str(e))
print("ERROR: Bad json file at: %s\n%s" % (file, e))
continue
with open(promises_status_file, "w") as f:
......@@ -187,7 +192,7 @@ def savePromiseHistory(promise_name, state_dict, previous_state_list,
else:
if previous_state_list is not None:
_, change_date, checksum = previous_state_list
current_sum = hashlib.md5(state_dict.get('message', '')).hexdigest()
current_sum = hashlib.md5(str2bytes(state_dict.get('message', ''))).hexdigest()
if state_dict['change-date'] == change_date and \
current_sum == checksum:
# Only save the changes and not the same info
......@@ -202,7 +207,7 @@ def savePromiseHistory(promise_name, state_dict, previous_state_list,
def run(monitor_conf_file):
config = ConfigParser.ConfigParser()
config = configparser.ConfigParser()
config.read(monitor_conf_file)
base_folder = config.get('monitor', 'private-folder')
......
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
import os
import stat
import json
import ConfigParser
from six.moves import configparser
import traceback
import argparse
import urllib2
from six.moves.urllib.request import urlopen
from six.moves.urllib.error import HTTPError
import ssl
import glob
import socket
from datetime import datetime
from xml.sax.saxutils import escape
from slapos.util import bytes2str
OPML_START = """<?xml version="1.0" encoding="UTF-8"?>
<!-- OPML generated by SlapOS -->
<opml version="1.1">
......@@ -47,7 +52,7 @@ def parseArguments():
def mkdirAll(path):
try:
os.makedirs(path)
except OSError, e:
except OSError as e:
if e.errno == os.errno.EEXIST and os.path.isdir(path):
pass
else: raise
......@@ -55,13 +60,13 @@ def mkdirAll(path):
def softConfigGet(config, *args, **kwargs):
try:
return config.get(*args, **kwargs)
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
except (configparser.NoOptionError, configparser.NoSectionError):
return None
def createSymlink(source, destination):
try:
os.symlink(source, destination)
except OSError, e:
except OSError as e:
if e.errno != os.errno.EEXIST:
raise
......@@ -99,10 +104,10 @@ class Monitoring(object):
def loadConfig(self, pathes, config=None):
if config is None:
config = ConfigParser.ConfigParser()
config = configparser.ConfigParser()
try:
config.read(pathes)
except ConfigParser.MissingSectionHeaderError:
except configparser.MissingSectionHeaderError:
traceback.print_exc()
return config
......@@ -131,8 +136,8 @@ class Monitoring(object):
try:
with open(config_list[2]) as cfile:
param_value = cfile.read()
except OSError, e:
print 'Cannot read file %s, Error is: %s' % (config_list[2], str(e))
except OSError as e:
print('Cannot read file %s, Error is: %s' % (config_list[2], e))
pass
else:
param_value = ""
......@@ -147,7 +152,7 @@ class Monitoring(object):
)
if config_list[0] == 'htpasswd':
if len(config_list) != 5 or not os.path.exists(config_list[4]):
print 'htpasswd file is not specified: %s' % str(config_list)
print('htpasswd file is not specified: %s' % config_list)
continue
parameter['description']['user'] = config_list[3]
parameter['description']['htpasswd'] = config_list[4]
......@@ -178,8 +183,8 @@ class Monitoring(object):
}
)
configuration_list.append(parameter)
except OSError, e:
print 'Cannot read file at %s, Error is: %s' % (old_cors_file, str(e))
except OSError as e:
print('Cannot read file at %s, Error is: %s' % (old_cors_file, e))
pass
return configuration_list
......@@ -192,7 +197,7 @@ class Monitoring(object):
try:
mkdirAll(dirname) # could also raise OSError
os.symlink(path, os.path.join(dirname, os.path.basename(path)))
except OSError, e:
except OSError as e:
if e.errno != os.errno.EEXIST:
raise
......@@ -212,20 +217,20 @@ class Monitoring(object):
# XXX - working here with public url
if hasattr(ssl, '_create_unverified_context'):
context = ssl._create_unverified_context()
response = urllib2.urlopen(url, context=context, timeout=timeout)
response = urlopen(url, context=context, timeout=timeout)
else:
response = urllib2.urlopen(url, timeout=timeout)
except urllib2.HTTPError:
print "ERROR: Failed to get Monitor configuration file at %s " % url
response = urlopen(url, timeout=timeout)
except HTTPError:
print("ERROR: Failed to get Monitor configuration file at %s " % url)
except (socket.timeout, ssl.SSLError) as e:
print "ERROR: Timeout with %r while downloading monitor config at %s " % (e, url)
print("ERROR: Timeout with %r while downloading monitor config at %s " % (e, url))
else:
try:
monitor_dict = json.loads(response.read())
monitor_dict = json.loads(bytes2str(response.read()))
monitor_title = monitor_dict.get('title', 'Unknown Instance')
success = True
except ValueError, e:
print "ERROR: Json file at %s is not valid" % url
except ValueError as e:
print("ERROR: Json file at %s is not valid" % url)
self.bootstrap_is_ok = success
return monitor_title
......@@ -267,8 +272,8 @@ class Monitoring(object):
for parameter in parameter_list:
if parameter['key']:
pfile.write('%s = %s\n' % (parameter['key'], parameter['value']))
except OSError, e:
print "Error failed to create file %s" % self.parameter_cfg_file
except OSError as e:
print("Error failed to create file %s" % self.parameter_cfg_file)
pass
......@@ -317,8 +322,8 @@ class Monitoring(object):
try:
if os.path.exists(file):
os.unlink(file)
except OSError, e:
print "failed to remove file %s." % file, str(e)
except OSError as e:
print("failed to remove file %s." % file, e)
# cleanup result of promises that was removed
promise_list = os.listdir(self.legacy_promise_folder)
......@@ -336,8 +341,8 @@ class Monitoring(object):
if os.path.exists(status_path):
try:
os.unlink(status_path)
except OSError, e:
print "Error: Failed to delete %s" % status_path, str(e)
except OSError as e:
print("Error: Failed to delete %s" % status_path, e)
else:
promise_list.pop(position)
......@@ -366,7 +371,7 @@ class Monitoring(object):
if self.bootstrap_is_ok:
with open(self.promise_output_file, 'w') as promise_file:
promise_file.write("")
print "SUCCESS: bootstrap is OK"
print("SUCCESS: bootstrap is OK")
return 0
......
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import re
......@@ -38,11 +40,11 @@ class MonitorConfigWrite(object):
def _fileWrite(self, file_path, content):
try:
with open(file_path, 'w') as wf:
print file_path, content
print(file_path, content)
wf.write(content.strip())
return True
except OSError, e:
print "ERROR while writing changes to %s.\n %s" % (file_path, str(e))
except OSError as e:
print("ERROR while writing changes to %s.\n %s" % (file_path, e))
return False
def _htpasswdWrite(self, htpasswd_bin, parameter_dict, value):
......@@ -55,7 +57,7 @@ class MonitorConfigWrite(object):
)
result = process.communicate()[0]
if process.returncode != 0:
print result
print(result)
return False
with open(parameter_dict['file'], 'w') as pfile:
pfile.write(value)
......@@ -76,31 +78,31 @@ class MonitorConfigWrite(object):
or (cors_domain == "" and os.stat(httpd_cors_file).st_size == 0)):
# Skip if cors file is not empty
return True
except OSError, e:
print "Failed to open file at %s. \n%s" % (old_httpd_cors_file, str(e))
except OSError as e:
print("Failed to open file at %s. \n%s" % (old_httpd_cors_file, e))
try:
with open(self.monitor_https_cors, 'r') as cors_template:
template = jinja2.Template(cors_template.read())
rendered_string = template.render(domain=cors_domain)
with open(httpd_cors_file, 'w') as file:
file.write(rendered_string)
except OSError, e:
print "ERROR while writing CORS changes to %s.\n %s" % (httpd_cors_file, str(e))
except OSError as e:
print("ERROR while writing CORS changes to %s.\n %s" % (httpd_cors_file, e))
return False
# Save current cors domain list
try:
with open(old_httpd_cors_file, 'w') as cors_file:
cors_file.write(cors_domain)
except OSError, e:
print "Failed to open file at %s. \n%s" % (old_httpd_cors_file, str(e))
except OSError as e:
print("Failed to open file at %s. \n%s" % (old_httpd_cors_file, e))
return False
# Restart httpd process
try:
subprocess.call(httpd_gracefull_bin)
except OSError, e:
print "Failed to execute command %s.\n %s" % (httpd_gracefull_bin, str(e))
except OSError as e:
print("Failed to execute command %s.\n %s" % (httpd_gracefull_bin, e))
return False
return True
......@@ -122,7 +124,7 @@ class MonitorConfigWrite(object):
with open(self.config_json_file) as tmpfile:
new_parameter_list = json.loads(tmpfile.read())
except ValueError:
print "Error: Couldn't parse json file %s" % self.config_json_file
print("Error: Couldn't parse json file %s" % self.config_json_file)
with open(parameter_config_file) as tmpfile:
description_dict = json.loads(tmpfile.read())
......@@ -156,8 +158,8 @@ class MonitorConfigWrite(object):
for parameter in new_parameter_list:
if parameter['key']:
pfile.write('%s = %s\n' % (parameter['key'], parameter['value']))
except OSError, e:
print "Error failed to create file %s" % self.output_cfg_file
except OSError as e:
print("Error failed to create file %s" % self.output_cfg_file)
pass
return result_dict
......@@ -190,8 +192,8 @@ def main():
if status and os.path.exists(parameter_tmp_file):
try:
os.unlink(config_file)
except OSError, e:
print "ERROR cannot remove file: %s" % parameter_tmp_file
except OSError as e:
print("ERROR cannot remove file: %s" % parameter_tmp_file)
else:
os.rename(parameter_tmp_file, config_file)
if run_counter == max_runn:
......
......@@ -13,7 +13,7 @@ import glob
import argparse
import traceback
import logging
import ConfigParser
from six.moves import configparser
from slapos.grid.promise import PromiseLauncher, PromiseQueueResult, PromiseError
from slapos.grid.promise.generic import PROMISE_LOG_FOLDER_NAME
from slapos.util import mkdir_p
......@@ -92,7 +92,7 @@ class MonitorPromiseLauncher(object):
def _loadConfigFromFile(self, config_file):
config = ConfigParser.ConfigParser()
config = configparser.ConfigParser()
config.read([config_file])
known_key_list = ['partition-cert', 'partition-key', 'partition-id',
'pid-path', 'computer-id', 'check-anomaly',
......@@ -159,7 +159,7 @@ class MonitorPromiseLauncher(object):
exit_code = 0
try:
promise_launcher.run()
except PromiseError, e:
except PromiseError as e:
# error was already logged
exit_code = 1
os.remove(self.config.pid_path)
......
from __future__ import print_function
import socket
import logging
import time
......@@ -11,10 +13,10 @@ import random
import pycurl
import argparse
import json
from StringIO import StringIO
from ping import ping, ping6
from dnsbench import resolve
from http import get_curl, request
from io import StringIO
from .ping import ping, ping6
from .dnsbench import resolve
from .http import get_curl, request
import textwrap
class HelpFormatter(argparse.ArgumentDefaultsHelpFormatter):
......@@ -62,11 +64,11 @@ def download_external_configuration(url):
try:
return json.loads(buffer.getvalue())
except ValueError:
print "Unable to parse external configuration, error:"
print("Unable to parse external configuration, error:")
import traceback
traceback.print_exc(file=sys.stderr)
sys.stderr.flush()
print "Ignoring external configuration"
print("Ignoring external configuration")
finally:
curl.close()
......
import sys
import pycurl
from StringIO import StringIO
from io import BytesIO
from slapos.util import bytes2str
def get_curl(buffer, url):
curl = pycurl.Curl()
......@@ -22,7 +23,7 @@ def get_curl(buffer, url):
def request(url, expected_dict):
buffer = StringIO()
buffer = BytesIO()
curl, result = get_curl(buffer, url)
body = buffer.getvalue()
......@@ -43,7 +44,7 @@ def request(url, expected_dict):
expected_text = expected_dict.get("expected_text", None)
if expected_text is not None and \
str(expected_text) not in str(body):
str(expected_text) not in bytes2str(body):
result = "UNEXPECTED (%s not in page content)" % (expected_text)
curl.close()
......
......@@ -22,14 +22,15 @@ def ping(host, timeout=10, protocol="4", count=10):
test_title = 'PING6'
proc = subprocess.Popen((ping_bin, '-c', str(count), '-w', str(timeout), host),
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
universal_newlines=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
if 'Network is unreachable' in err:
return (test_title, host, 600, 'failed', 100, "Network is unreachable")
try:
packet_loss_line, summary_line = (out.splitlines() or [''])[-2:]
except:
except Exception:
return (test_title, host, 600, 'failed', -1, "Fail to parser ping output")
m = ping_re.match(summary_line)
match = re.search('(\d*)% packet loss', packet_loss_line)
......
......@@ -27,7 +27,7 @@
import os
import sys
from optparse import OptionParser, Option
import urllib
from six.moves.urllib.request import urlopen
class Parser(OptionParser):
......@@ -55,7 +55,7 @@ class Config:
self.file_path = file_path
def onetimedownload(url, file_path):
  """Download `url` once and write the raw payload to `file_path`.

  urlopen() returns bytes on Python 3, so the destination file is opened
  in binary mode; text mode would raise TypeError when writing bytes.
  """
  url_file = urlopen(url)
  try:
    data = url_file.read()
  finally:
    url_file.close()
  with open(file_path, 'wb') as file_object:
    file_object.write(data)
......@@ -72,7 +72,7 @@ def main():
onetimedownload(config.url, config.file_path)
return_code = 0
except SystemExit, err:
except SystemExit as err:
# Catch exception raise by optparse
return_code = err
......
......@@ -124,7 +124,7 @@ def main():
run(config)
return_code = 0
except SystemExit, err:
except SystemExit as err:
# Catch exception raise by optparse
return_code = err
......
from __future__ import print_function
import requests
import re
import signal
......@@ -56,11 +58,11 @@ def watchServerStatus(pid_dict, server_status, timeout):
if process.cmdline()[0].endswith("/httpd"):
_pid_dict.setdefault(i, time.time() + timeout)
if _pid_dict[i] < time.time():
print "Sending signal -%s to %s" % (signal.SIGKILL, i)
print("Sending signal -%s to %s" % (signal.SIGKILL, i))
try:
process.kill()
except psutil.NoSuchProcess:
print "Process is not there anymore"
print("Process is not there anymore")
continue
return _pid_dict
......
......@@ -47,7 +47,7 @@ def checkApachedexResult(apachedex_path, apachedex_report_status_file, desired_t
with open(apachedex_report_status_file) as f:
try:
json_content = json.load(f)
except ValueError, e:
except ValueError as e:
json_content = ''
if json_content:
message += "\n" + json_content["message"]
......
......@@ -7,6 +7,8 @@ Uses:
- /proc/meminfo
"""
from __future__ import print_function
import sys
import sqlite3
import argparse
......@@ -21,21 +23,21 @@ def getMemoryInfo(database, time, date):
try:
database.connect()
query_result = database.select("computer", date, "memory_size", limit=1)
result = zip(*query_result)
if not result or not result[0][0]:
r = query_result.fetchone()
if not r or not r[0]:
return (None, "couldn't fetch total memory, collectordb is empty?")
memory_info['total'] = int(result[0][0]) # in byte
memory_info['total'] = int(r[0]) # in byte
# fetch free and used memory
where_query = "time between '%s:00' and '%s:30' " % (time, time)
query_result = database.select("system", date, "memory_free, memory_used", where=where_query)
result = zip(*query_result)
if not result or not result[0][0]:
r = query_result.fetchone()
if not r or not r[0]:
return (None, "couldn't fetch free memory")
memory_info['free'] = int(result[0][0]) # in byte
if not result or not result[1][0]:
memory_info['free'] = int(r[0]) # in byte
if not r or not r[1]:
return (None, "couldn't fetch used memory")
memory_info['used'] = int(result[1][0]) # in byte
memory_info['used'] = int(r[1]) # in byte
finally:
database.close()
......@@ -95,9 +97,9 @@ def main():
unit=args.unit,
)
if error:
print error
print(error)
return 0
print message
print(message)
return 0 if result else 1
if __name__ == "__main__":
......
......@@ -4,6 +4,8 @@
Check if a mariadb result matches the desired threshold or raises an error.
"""
from __future__ import print_function
import json
import os
import re
......@@ -58,7 +60,7 @@ def checkMariadbDigestResult(mariadbdex_path, mariadbdex_report_status_file,
with open(mariadbdex_report_status_file) as f:
try:
json_content = json.load(f)
except ValueError, e:
except ValueError as e:
json_content = ''
if json_content:
message += "\n" + json_content["message"]
......@@ -76,5 +78,5 @@ def main():
args.max_queries_threshold, args.slowest_query_threshold
)
print message
print(message)
sys.exit(status)
......@@ -4,6 +4,8 @@
Check user memory usage according to a given threshold.
"""
from __future__ import print_function
import sys
import os
import argparse
......@@ -87,9 +89,9 @@ def main():
unit=args.unit,
)
if error:
print error
print(error)
return 0
print message
print(message)
return 0 if result else 1
if __name__ == "__main__":
......
......@@ -7,14 +7,14 @@ import sys
import tempfile
import os
import argparse
import ConfigParser
from six.moves import configparser
import re
import pycurl
from mimetools import Message
from cStringIO import StringIO
from HTMLParser import HTMLParser
from email.message import Message
from io import BytesIO
from six.moves.html_parser import HTMLParser
begins_by_known_protocol_re = re.compile("^https?://")
get_protocol_re = re.compile("^([a-z]+)://")
......@@ -63,8 +63,8 @@ def checkWebpageHttpCacheHit(url_list, resolve_list=[], cookie_jar_path=None):
parsed_url_dict.add(url)
print("Checking cache hit for " + url)
c = pycurl.Curl()
response_headers = StringIO()
output = StringIO()
response_headers = BytesIO()
output = BytesIO()
c.setopt(c.URL, url)
c.setopt(c.RESOLVE, resolve_list)
c.setopt(c.WRITEFUNCTION, output.write)
......@@ -76,8 +76,9 @@ def checkWebpageHttpCacheHit(url_list, resolve_list=[], cookie_jar_path=None):
response_headers.truncate(0)
output.truncate(0)
c.perform()
if str(c.getinfo(pycurl.HTTP_CODE))[0:1] != "2":
if c.getinfo(pycurl.HTTP_CODE) >= 400:
code = c.getinfo(pycurl.HTTP_CODE)
if not (200 <= code < 300):
if code >= 400:
report_line_list.append("Status code %s received for %s" % (c.getinfo(pycurl.HTTP_CODE), url))
else:
print("Status code %s not handled" % c.getinfo(pycurl.HTTP_CODE))
......@@ -119,7 +120,7 @@ def checkWebpageHttpCacheHit(url_list, resolve_list=[], cookie_jar_path=None):
def getConfig(config_parser, section, option, default=None, raw=False, vars=None):
  """Return `option` from `section`, or `default` when the option is absent.

  `raw` and `vars` are forwarded verbatim to ConfigParser.get().
  NOTE: a missing *section* still raises configparser.NoSectionError;
  only a missing option falls back to `default`.
  """
  try:
    return config_parser.get(section, option, raw=raw, vars=vars)
  except configparser.NoOptionError:
    return default
def main():
......@@ -132,7 +133,7 @@ def main():
args.url_list = getattr(args, "url-list")
if args.config is not None:
parser = ConfigParser.ConfigParser()
parser = configparser.ConfigParser()
parser.read(args.config)
if args.url_list == []:
args.url_list = getConfig(parser, "public", "url-list", "").split()
......
......@@ -28,7 +28,8 @@ def isLocalTcpPortOpened(ip_address, port):
ip_addr_hex = ('%08X' * int_count) % struct.unpack('I' * int_count, socket.inet_pton(family, ip_address))
full_addr_hex = ip_addr_hex + ":%04X" % port
return any(full_addr_hex == line.split()[1] for line in open(tcp_path).readlines())
with open(tcp_path) as f:
return any(full_addr_hex == line.split()[1] for line in f.readlines())
def main():
if isLocalTcpPortOpened(sys.argv[1], int(sys.argv[2])):
......
......@@ -8,6 +8,8 @@ a file modification date is greater than the start date of the
process.
"""
from __future__ import print_function
import sys
import os
import errno
......@@ -29,10 +31,10 @@ def moduleIsModifiedSince(top, since, followlinks=False):
if ext in ignored_extension_set:
continue
if since < os.stat(os.path.join(root, name)).st_mtime:
print "%s was modified since the process started." % \
os.path.join(root, name)
print "Process Time %s < Last modidified file %s" % (time.ctime(since),
time.ctime(os.stat(os.path.join(root, name)).st_mtime))
print("%s was modified since the process started." %
os.path.join(root, name))
print("Process Time %s < Last modidified file %s" % (time.ctime(since),
time.ctime(os.stat(os.path.join(root, name)).st_mtime)))
return True
return False
......@@ -41,7 +43,7 @@ def isProcessOlderThanDependencySet(pid, python_path_list, kill=False):
start_time = process.create_time()
if any(moduleIsModifiedSince(product_path, start_time) for product_path in python_path_list):
if kill:
print "Terminating process %s with pid %s" % (process.name(), pid)
print("Terminating process %s with pid %s" % (process.name(), pid))
process.terminate()
return True
return False
......
from zope import interface as zope_interface
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
from slapos.grid.promise.generic import TestResult
......@@ -12,10 +12,9 @@ from croniter import croniter
from dateutil.parser import parse
from tzlocal import get_localzone
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
# check backup ran OK every 5 minutes
......@@ -52,20 +51,21 @@ class RunPromise(GenericPromise):
# First, parse the log file
backup_started = False
backup_ended = False
for line in open(status, 'r'):
m = re.match(r"(.*), (.*), (.*), backup (.*)$", line)
if m:
if m.group(4) == "running":
backup_started = True
backup_start = parse(m.group(1))
elif m.group(4) == "failed":
backup_ended = True
backup_failed = True
backup_end = parse(m.group(1))
elif m.group(4) == "success":
backup_ended = True
backup_failed = False
backup_end = parse(m.group(1))
with open(status, 'r') as f:
for line in f:
m = re.match(r"(.*), (.*), (.*), backup (.*)$", line)
if m:
if m.group(4) == "running":
backup_started = True
backup_start = parse(m.group(1))
elif m.group(4) == "failed":
backup_ended = True
backup_failed = True
backup_end = parse(m.group(1))
elif m.group(4) == "success":
backup_ended = True
backup_failed = False
backup_end = parse(m.group(1))
# Then check result
if backup_ended and backup_failed:
......
from zope import interface as zope_interface
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise, TestResult
import re
import time
import os
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
# set periodicity to run the promise twice per day
......@@ -26,7 +24,7 @@ class RunPromise(GenericPromise):
if not log_file:
raise ValueError("log file was not set in promise parameters.")
regex = re.compile("^(\[[^\]]+\]) (\[[^\]]+\]) (.*)$")
regex = re.compile(br"^(\[[^\]]+\]) (\[[^\]]+\]) (.*)$")
error_amount = 0
no_route_error = 0
network_is_unreachable = 0
......@@ -38,7 +36,7 @@ class RunPromise(GenericPromise):
self.logger.info("OK")
return
with open(log_file) as f:
with open(log_file, "rb") as f:
f.seek(0, 2)
block_end_byte = f.tell()
f.seek(-min(block_end_byte, 4096), 1)
......@@ -50,10 +48,10 @@ class RunPromise(GenericPromise):
dt, level, msg = m.groups()
try:
try:
t = time.strptime(dt[1:-1], "%a %b %d %H:%M:%S %Y")
t = time.strptime(dt[1:-1].decode('utf-8'), "%a %b %d %H:%M:%S %Y")
except ValueError:
# Failed to parse with the first format; try a different timestamp format.
t = time.strptime(dt[1:-1], "%a %b %d %H:%M:%S.%f %Y")
t = time.strptime(dt[1:-1].decode('utf-8'), "%a %b %d %H:%M:%S.%f %Y")
except ValueError:
# Probably it fail to parse
if parsing_failure < 3:
......@@ -65,14 +63,14 @@ class RunPromise(GenericPromise):
if maximum_delay and (time.time()-time.mktime(t)) > maximum_delay:
# no result in the latest hour
break
if level != "[error]":
if level != b"[error]":
continue
# Classify the types of errors
if "(113)No route to host" in msg:
if b"(113)No route to host" in msg:
no_route_error += 1
elif "(101)Network is unreachable" in msg:
elif b"(101)Network is unreachable" in msg:
network_is_unreachable += 1
elif "(110)Connection timed out" in msg:
elif b"(110)Connection timed out" in msg:
timeout += 1
error_amount += 1
if error_amount:
......
from zope import interface as zope_interface
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
import time
......@@ -7,12 +7,10 @@ import os
import sys
import re
r = re.compile("^([0-9]+\-[0-9]+\-[0-9]+ [0-9]+\:[0-9]+\:[0-9]+)(\,[0-9]+) - ([A-z]+) (.*)$")
r = re.compile(br"^([0-9]+\-[0-9]+\-[0-9]+ [0-9]+\:[0-9]+\:[0-9]+)(\,[0-9]+) - ([A-z]+) (.*)$")
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
self.setPeriodicity(minute=10)
......@@ -27,7 +25,7 @@ class RunPromise(GenericPromise):
self.logger.info("log file does not exist: log check skipped")
return 0
with open(log_file) as f:
with open(log_file, "rb") as f:
f.seek(0, 2)
block_end_byte = f.tell()
f.seek(-min(block_end_byte, 4096*10), 1)
......@@ -38,7 +36,7 @@ class RunPromise(GenericPromise):
continue
dt, _, level, msg = m.groups()
try:
t = time.strptime(dt, "%Y-%m-%d %H:%M:%S")
t = time.strptime(dt.decode('utf-8'), "%Y-%m-%d %H:%M:%S")
except ValueError:
continue
if maximum_delay and (time.time()-time.mktime(t)) > maximum_delay:
......
from zope import interface as zope_interface
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
# SR can set custom periodicity
......
from zope import interface as zope_interface
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
......@@ -12,10 +12,9 @@ import psutil
from slapos.collect.db import Database
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
# check disk space at least every 3 minutes
......@@ -29,12 +28,12 @@ class RunPromise(GenericPromise):
database.connect()
where_query = "time between '%s:00' and '%s:30' and partition='%s'" % (time, time, disk_partition)
query_result = database.select("disk", date, "free", where=where_query)
result = zip(*query_result)
if not result or not result[0][0]:
result = query_result.fetchone()
if not result or not result[0]:
self.logger.info("No result from collector database: disk check skipped")
return 0
disk_free = result[0][0]
except sqlite3.OperationalError, e:
disk_free = result[0]
except sqlite3.OperationalError as e:
# if database is still locked after timeout expiration (another process is using it)
# we print warning message and try the promise at next run until max warn count
locked_message = "database is locked"
......
from zope import interface as zope_interface
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise, TestResult
import re
import time
from slapos.networkbench.ping import ping, ping6
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
# set periodicity to run the promise twice per day
......
from zope import interface as zope_interface
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
import os
from datetime import datetime
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
self.setPeriodicity(minute=1)
......
from zope import interface as zope_interface
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise, TestResult
import re
import time
from slapos.networkbench.ping import ping, ping6
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
# set periodicity to run the promise twice per day
......
from zope import interface as zope_interface
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
import subprocess
import os
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
# test load every 3 minutes
......@@ -17,7 +16,7 @@ class RunPromise(GenericPromise):
def checkCPULoad(self, tolerance=2.2):
# tolerance=1.5 => accept CPU load up to 1.5 =150%
uptime_result = subprocess.check_output(['uptime'])
uptime_result = subprocess.check_output(['uptime'], universal_newlines=True)
line = uptime_result.strip().split(' ')
load, load5, long_load = line[-3:]
long_load = float(long_load.replace(',', '.'))
......@@ -44,7 +43,7 @@ class RunPromise(GenericPromise):
if load_threshold is not None:
try:
threshold = float(load_threshold)
except ValueError, e:
except ValueError as e:
self.logger.error("CPU load threshold %r is not valid: %s" % (load_threshold, e))
return
......
from zope import interface as zope_interface
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
import requests
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
# SR can set custom periodicity
......@@ -47,7 +45,7 @@ class RunPromise(GenericPromise):
result = requests.get(
url, verify=verify, allow_redirects=True, timeout=timeout, cert=cert)
except requests.exceptions.SSLError as e:
if 'certificate verify failed' in str(e.message):
if 'certificate verify failed' in str(e):
self.logger.error(
"ERROR SSL verify failed while accessing %r" % (url,))
else:
......@@ -58,7 +56,7 @@ class RunPromise(GenericPromise):
self.logger.error(
"ERROR connection not possible while accessing %r" % (url, ))
return
except Exception, e:
except Exception as e:
self.logger.error("ERROR: %s" % (e,))
return
......
from zope import interface as zope_interface
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
import os
......@@ -6,10 +6,9 @@ import time
import psutil
from .util import tail_file
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
self.setPeriodicity(minute=2)
......@@ -23,7 +22,7 @@ class RunPromise(GenericPromise):
with open(process_pid_file) as f:
try:
pid = int(f.read())
except ValueError, e:
except ValueError as e:
raise ValueError("%r is empty or doesn't contain a valid pid number: %s" % (
process_pid_file, str(e)))
......
import argparse
import csv
import feedparser
import httplib # To avoid magic numbers
from six.moves import http_client as httplib # To avoid magic numbers
import io
import json
import logging
......
......@@ -25,6 +25,8 @@
#
##############################################################################
from __future__ import print_function
import argparse
import json
import os
......@@ -71,7 +73,7 @@ def getInitialQemuResourceDict(pid_file):
with open(pid_file) as f:
try:
pid = int(f.read())
except ValueError, e:
except ValueError as e:
raise ValueError("%r is empty or doesn't contain a valid pid number: %s" % (
pid_file, str(e)))
......@@ -81,7 +83,7 @@ def getInitialQemuResourceDict(pid_file):
process = psutil.Process(pid)
break
except psutil.NoSuchProcess:
print "Qemu process is not started yet..."
print("Qemu process is not started yet...")
wait_count -= 1
time.sleep(0.5)
else:
......@@ -130,7 +132,7 @@ class QemuQMPWrapper(object):
if not os.path.exists(unix_socket_location):
raise Exception('unix socket %s does not exist.' % unix_socket_location)
print 'Connecting to qemu...'
print('Connecting to qemu...')
so = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
connected = False
while not connected:
......@@ -138,7 +140,7 @@ class QemuQMPWrapper(object):
so.connect(unix_socket_location)
except socket.error:
time.sleep(1)
print 'Could not connect, retrying...'
print('Could not connect, retrying...')
else:
connected = True
so.recv(1024)
......@@ -159,7 +161,7 @@ class QemuQMPWrapper(object):
raise QmpCommandError(response["error"]["desc"])
if 'event' in response:
self._event_list.append(response)
print response
print(response)
if not only_event:
continue
......@@ -171,7 +173,7 @@ class QemuQMPWrapper(object):
for i in range(0, retry):
if response is not None:
break
print "Retrying send command after %s second(s)..." % sleep
print("Retrying send command after %s second(s)..." % sleep)
time.sleep(sleep)
self.socket.sendall(json.dumps(message))
response = self._readResponse()
......@@ -191,14 +193,14 @@ class QemuQMPWrapper(object):
if actual_status == wanted_status:
return
else:
print 'VM in %s status, wanting it to be %s, retrying...' % (
actual_status, wanted_status)
print('VM in %s status, wanting it to be %s, retrying...' % (
actual_status, wanted_status))
time.sleep(1)
except IOError:
print 'VM not ready, retrying...'
print('VM not ready, retrying...')
def capabilities(self):
  # Negotiate QMP capabilities. The QMP protocol requires this to be the
  # first command sent on a fresh connection before any other command.
  print('Asking for capabilities...')
  self._send({'execute': 'qmp_capabilities'})
def getEventList(self, timeout=0, cleanup=False):
......@@ -223,7 +225,7 @@ class QemuQMPWrapper(object):
self.socket.setblocking(0)
try:
self._readResponse(only_event=True)
except socket.error, err:
except socket.error as err:
if err[0] == errno.EAGAIN:
# No data available
pass
......@@ -240,7 +242,7 @@ class QemuQMPWrapper(object):
def setVNCPassword(self, password):
# Set VNC password
print 'Setting VNC password...'
print('Setting VNC password...')
result = self._send({
"execute": "change",
"arguments": {
......@@ -251,19 +253,19 @@ class QemuQMPWrapper(object):
})
if result and result.get('return', None) != {}:
raise ValueError(result)
print 'Done.'
print('Done.')
def powerdown(self):
  # Request a clean guest shutdown (equivalent to pressing the power button).
  print('Stopping the VM...')
  self._send({'execute': 'system_powerdown'})
def suspend(self):
  # Pause guest execution, then block until qemu reports the 'paused' state.
  print('Suspending VM...')
  self._send({'execute': 'stop'})
  self._waitForVMStatus('paused')
def resume(self):
  # Resume a paused guest, then block until qemu reports the 'running' state.
  print('Resuming VM...')
  self._send({'execute': 'cont'})
  self._waitForVMStatus('running')
......@@ -285,7 +287,7 @@ class QemuQMPWrapper(object):
return
def driveBackup(self, backup_target, source_device='virtio0', sync_type='full'):
print 'Asking Qemu to perform backup to %s' % backup_target
print('Asking Qemu to perform backup to %s' % backup_target)
# XXX: check for error
self._send({
'execute': 'drive-backup',
......@@ -296,17 +298,17 @@ class QemuQMPWrapper(object):
}
})
while self._getRunningJobList(backup_target):
print 'Job is not finished yet.'
print('Job is not finished yet.')
time.sleep(20)
def createSnapshot(self, snapshot_file, device='virtio0'):
  # Take an external snapshot of `device` into `snapshot_file` and print
  # qemu's reply for logging purposes.
  print(self._send({
    'execute': 'blockdev-snapshot-sync',
    'arguments': {
      'device': device,
      'snapshot-file': snapshot_file,
    }
  }))
def createInternalSnapshot(self, name=None, device='virtio0'):
if name is None:
......@@ -372,9 +374,9 @@ class QemuQMPWrapper(object):
try:
if resend:
result = self._send(command_dict)
except QmpCommandError, e:
print "ERROR: ", str(e)
print "%s\nRetry remove %r in few seconds..." % (result, dev_id)
except QmpCommandError as e:
print("ERROR: ", e)
print("%s\nRetry remove %r in few seconds..." % (result, dev_id))
resend = True
else:
for event in self.getEventList(timeout=2, cleanup=True):
......@@ -388,13 +390,13 @@ class QemuQMPWrapper(object):
if stop_retry:
break
elif result is None and max_retry > 0:
print "Retry remove %r in few seconds..." % dev_id
print("Retry remove %r in few seconds..." % dev_id)
time.sleep(2)
if result is not None:
if result.get('return', None) == {} or ('error' in result and \
result['error'].get('class', '') == 'DeviceNotFound'):
print 'Device %s was removed.' % dev_id
print('Device %s was removed.' % dev_id)
return
# device was not remove after retries
......@@ -417,7 +419,7 @@ class QemuQMPWrapper(object):
if not system_exited:
# hard reset the VM
print "Trying hard shutdown of the VM..."
print("Trying hard shutdown of the VM...")
self._send({"execute": "quit"})
raise QmpDeviceRemoveError("Stopped Qemu in order to remove the device %r" % dev_id)
......@@ -459,14 +461,14 @@ class QemuQMPWrapper(object):
if cpu_amount == hotplug_amount:
# no changes needed
print "Hotplug CPU is up to date."
print("Hotplug CPU is up to date.")
return
if cpu_amount > hotplug_amount:
# we will remove CPU
cpu_diff = -1 * cpu_diff
if cpu_diff >= 1:
print "Request remove %s CPUs..." % cpu_diff
print("Request remove %s CPUs..." % cpu_diff)
used_socket_id_list.reverse()
for i in range(0, cpu_diff):
self._removeDevice(used_socket_id_list[i], {
......@@ -478,7 +480,7 @@ class QemuQMPWrapper(object):
# no hotplugable cpu socket found for Add
raise ValueError("Cannot Configure %s CPUs, the maximum amount of " \
"hotplugable CPU is %s!" % (hotplug_amount, max_hotplug_cpu))
print "Adding %s CPUs..." % cpu_diff
print("Adding %s CPUs..." % cpu_diff)
for i in range(0, cpu_diff):
self._send({
'execute': 'device_add',
......@@ -491,10 +493,10 @@ class QemuQMPWrapper(object):
if hotplug_amount != final_cpu_count:
raise ValueError("Consistency error: Expected %s hotplugged CPU(s) but" \
" current CPU amount is %s" % (hotplug_amount, final_cpu_count))
print "Done."
print("Done.")
def _removeMemory(self, id_dict, auto_reboot=False):
print "Trying to remove devices %s, %s..." % (id_dict['id'], id_dict['memdev'])
print("Trying to remove devices %s, %s..." % (id_dict['id'], id_dict['memdev']))
self._removeDevice(id_dict['id'] ,{
'execute': 'device_del',
'arguments': {'id': id_dict['id']}
......@@ -544,7 +546,7 @@ class QemuQMPWrapper(object):
# cleanup memdev that was not removed because of failure
for memdev in cleanup_memdev_id_dict.keys():
print "Cleaning up memdev %s..." % memdev
print("Cleaning up memdev %s..." % memdev)
self._removeDevice(memdev, {
'execute': 'object-del',
'arguments': {
......@@ -559,9 +561,9 @@ class QemuQMPWrapper(object):
if (mem_size / slot_size) > slot_amount:
raise ValueError("No enough slots available to add %sMB of RAM" % mem_size)
current_size = current_size/(1024 * 1024)
current_size //= (1024 * 1024)
if current_size == mem_size:
print "Hotplug Memory size is up to date."
print("Hotplug Memory size is up to date.")
return
if mem_size < 0:
......@@ -569,7 +571,7 @@ class QemuQMPWrapper(object):
elif current_size > mem_size:
# Request to remove memory
to_remove_size = current_size - mem_size
print "Removing %s MB of memory..." % to_remove_size
print("Removing %s MB of memory..." % to_remove_size)
for i in range(num_slot_used, 0, -1):
# remove all slots that won't be used
......@@ -587,9 +589,9 @@ class QemuQMPWrapper(object):
)
elif current_size < mem_size:
# ask for increase memory
slot_add = (mem_size - current_size) / slot_size
slot_add = (mem_size - current_size) // slot_size
print "Adding %s memory slot(s) of %s MB..." % (slot_add, slot_size)
print("Adding %s memory slot(s) of %s MB..." % (slot_add, slot_size))
for i in range(0, slot_add):
index = num_slot_used + i + 1
self._send({
......@@ -618,11 +620,11 @@ class QemuQMPWrapper(object):
if mem_size != final_mem_size:
raise ValueError("Consistency error: Expected %s MB of hotplugged RAM " \
"but current RAM size is %s MB" % (mem_size, final_mem_size))
print "Done."
print("Done.")
def updateDevice(self, option_dict):
argument_dict = {}
if option_dict.has_key('device'):
if 'device' in option_dict:
if option_dict['device'] == 'cpu':
return self._updateCPU(
amount=int(option_dict['amount']),
......
......@@ -68,7 +68,8 @@ def synchroniseRunnerConfigurationDirectory(config, backup_path):
os.makedirs(backup_path)
file_list = ['config.json']
for hidden_file in os.listdir('.'):
# `sorted` is used for Python 2-3 compatibility
for hidden_file in sorted(os.listdir('.')):
if hidden_file[0] == '.':
file_list.append(hidden_file)
rsync(config.rsync_binary, file_list, backup_path, dry=config.dry)
......@@ -80,7 +81,8 @@ def synchroniseRunnerWorkingDirectory(config, backup_path):
if os.path.isdir('instance'):
file_list.append('instance')
exclude_list = getExcludePathList(os.getcwd())
# `sorted` is used for Python 2-3 compatibility
exclude_list = sorted(getExcludePathList(os.getcwd()))
# XXX: proxy.db should be properly dumped to leverage its
# atomic properties
......
......@@ -7,6 +7,9 @@ import sys
from contextlib import contextmanager
from hashlib import sha256
from zc.buildout.configparser import parse
from slapos.util import bytes2str, str2bytes
import six
@contextmanager
......@@ -63,7 +66,7 @@ def getExcludePathList(path):
if e.errno != errno.ENOENT:
raise
else:
for section in installed.itervalues():
for section in six.itervalues(installed):
append_relative(section.get(
'__buildout_installed__', '').splitlines())
......@@ -129,7 +132,7 @@ def writeSignatureFile(slappart_signature_method_dict, runner_working_path, sign
if signature_process:
(output, error_output) = signature_process.communicate(
'\0'.join(filepath_list)
str2bytes('\0'.join(filepath_list))
)
if signature_process.returncode != 0:
......@@ -143,7 +146,7 @@ def writeSignatureFile(slappart_signature_method_dict, runner_working_path, sign
# We have to rstrip as most programs return an empty line
# at the end of their output
signature_list.extend(output.strip('\n').split('\n'))
signature_list.extend(bytes2str(output).strip('\n').split('\n'))
else:
signature_list.extend(
getSha256Sum(filepath_list)
......
......@@ -10,20 +10,23 @@
# or it will NOT work
#############################################
from __future__ import print_function
import argparse
import base64
import ConfigParser
from six.moves.configparser import SafeConfigParser
import datetime
import hashlib
import json
import os
import shutil
import sup_process
from StringIO import StringIO
from . import sup_process
from io import StringIO
import ssl
import time
import unittest
import urllib2
from six.moves.urllib.request import Request, urlopen
import six
from slapos.runner.utils import (getProfilePath,
getSession, isInstanceRunning,
......@@ -125,7 +128,7 @@ class SlaprunnerTestCase(unittest.TestCase):
partition_id=cls.partition_id
)
cls.parameter_dict = cls.partition.getConnectionParameterDict()
for attribute, value in cls.parameter_dict.iteritems():
for attribute, value in six.iteritems(cls.parameter_dict):
setattr(cls, attribute.replace('-', '_'), value)
#create slaprunner configuration
......@@ -188,7 +191,7 @@ class SlaprunnerTestCase(unittest.TestCase):
shutil.rmtree(self.app.config['software_link'])
def updateConfigParameter(self, parameter, value):
config_parser = ConfigParser.SafeConfigParser()
config_parser = SafeConfigParser()
config_parser.read(os.getenv('RUNNER_CONFIG'))
for section in config_parser.sections():
if config_parser.has_option(section, parameter):
......@@ -256,11 +259,11 @@ setuptools = 33.1.1
open(template, "w").write(content)
def assertCanLoginWith(self, username, password):
request = urllib2.Request(self.backend_url)
request = Request(self.backend_url)
base64string = base64.encodestring('%s:%s' % (username, password))[:-1]
request.add_header("Authorization", "Basic %s" % base64string)
ssl_context = ssl._create_unverified_context()
result = urllib2.urlopen(request, context=ssl_context)
result = urlopen(request, context=ssl_context)
self.assertEqual(result.getcode(), 200)
def test_updateAccount(self):
......@@ -606,7 +609,7 @@ setuptools = 33.1.1
class PrintStringIO(StringIO):
  """StringIO that additionally echoes every write() to stdout."""

  def write(self, data):
    StringIO.write(self, data)
    print(data)
def main():
"""
......
......@@ -3,7 +3,7 @@
import os
import signal
import time
import xmlrpclib
import six.moves.xmlrpc_client as xmlrpclib
# This mini-library is used to communicate with supervisord process
# It aims to replace the file "process.py"
......
......@@ -2,20 +2,20 @@
# vim: set et sts=2:
# pylint: disable-msg=W0311,C0301,C0103,C0111,W0141,W0142
import ConfigParser
from six.moves import configparser
import datetime
import json
import logging
import md5
import hashlib
import os
import sup_process
from . import sup_process
import re
import shutil
import stat
import thread
from six.moves import _thread, range
import time
import urllib
import xmlrpclib
from six.moves.urllib.request import urlopen
import six.moves.xmlrpc_client as xmlrpclib
from xml.dom import minidom
import xml_marshaller
......@@ -43,7 +43,8 @@ html_escape_table = {
def getBuildAndRunParams(config):
  """Return the build-and-run parameters stored in <etc_dir>/config.json.

  config -- runner configuration mapping; only config['etc_dir'] is read.
  Returns the deserialized JSON object (raises if the file is missing or
  not valid JSON).
  """
  json_file = os.path.join(config['etc_dir'], 'config.json')
  # Context manager guarantees the file handle is closed promptly.
  with open(json_file) as f:
    json_params = json.load(f)
  return json_params
......@@ -52,7 +53,8 @@ def saveBuildAndRunParams(config, params):
Works like that because this function do not care
about how you got the parameters"""
json_file = os.path.join(config['etc_dir'], 'config.json')
open(json_file, "w").write(json.dumps(params))
with open(json_file, "w") as f:
f.write(json.dumps(params))
def html_escape(text):
......@@ -92,11 +94,11 @@ def updateUserCredential(config, username, password):
def getRcode(config):
  """Return the recovery code stored in the knowledge0 configuration file.

  config -- mapping holding the path under key 'knowledge0_cfg'.
  Returns the 'recovery-code' option of the '[public]' section, or None
  when the file is unreadable or the section is missing (configparser.read
  silently ignores missing files, so the NoSectionError path covers that
  case too).
  """
  parser = configparser.ConfigParser()
  try:
    parser.read(config['knowledge0_cfg'])
    return parser.get('public', 'recovery-code')
  except (configparser.NoSectionError, IOError):
    return None
def getUsernameList(config):
......@@ -188,12 +190,12 @@ def updateProxy(config):
'software_root': config['software_root']
}
for i in xrange(0, int(config['partition_amount'])):
for i in range(int(config['partition_amount'])):
partition_reference = '%s%s' % (prefix, i)
partition_path = os.path.join(config['instance_root'], partition_reference)
if not os.path.exists(partition_path):
os.mkdir(partition_path)
os.chmod(partition_path, 0750)
os.chmod(partition_path, 0o750)
slap_config['partition_list'].append({
'address_list': [
{
......@@ -423,7 +425,7 @@ def getSlapStatus(config):
except Exception:
pass
if partition_list:
for i in xrange(0, int(config['partition_amount'])):
for i in range(int(config['partition_amount'])):
slappart_id = '%s%s' % ("slappart", i)
if not [x[0] for x in partition_list if slappart_id == x[0]]:
partition_list.append((slappart_id, []))
......@@ -460,7 +462,7 @@ def removeInstanceRootDirectory(config):
fullPath = os.path.join(root, fname)
if not os.access(fullPath, os.W_OK):
# Some directories may be read-only, preventing to remove files in it
os.chmod(fullPath, 0744)
os.chmod(fullPath, 0o744)
shutil.rmtree(instance_directory)
def removeCurrentInstance(config):
......@@ -589,7 +591,7 @@ def newSoftware(folder, config, session):
software = "https://lab.nexedi.com/nexedi/slapos/raw/master/software/lamp-template/software.cfg"
softwareContent = ""
try:
softwareContent = urllib.urlopen(software).read()
softwareContent = urlopen(software).read()
except:
#Software.cfg and instance.cfg content will be empty
pass
......@@ -777,7 +779,7 @@ def md5sum(file):
return False
try:
fh = open(file, 'rb')
m = md5.md5()
m = hashlib.md5()
while True:
data = fh.read(8192)
if not data:
......@@ -830,7 +832,7 @@ def readParameters(path):
sub_obj[str(subnode.getAttribute('id'))] = subnode.childNodes[0].data # .decode('utf-8').decode('utf-8')
obj[str(elt.tagName)] = sub_obj
return obj
except Exception, e:
except Exception as e:
return str(e)
else:
return "No such file or directory: %s" % path
......@@ -901,7 +903,8 @@ def runSlapgridUntilSuccess(config, step):
else:
return -1
counter_file = os.path.join(config['runner_workdir'], '.turn-left')
open(counter_file, 'w+').write(str(max_tries))
with open(counter_file, 'w+') as f:
f.write(str(max_tries))
counter = max_tries
slapgrid = True
# XXX-Nico runSoftwareWithLock can return 0 or False (0==False)
......@@ -911,9 +914,11 @@ def runSlapgridUntilSuccess(config, step):
# slapgrid == 0 because EXIT_SUCCESS == 0
if slapgrid == 0:
break
times_left = int(open(counter_file).read()) - 1
with open(counter_file) as f:
times_left = int(f.read()) - 1
if times_left > 0 :
open(counter_file, 'w+').write(str(times_left))
with open(counter_file, 'w+') as f:
f.write(str(times_left))
counter = times_left
else :
counter = 0
......@@ -934,7 +939,7 @@ def setupDefaultSR(config):
if not os.path.exists(project) and config['default_sr'] != '':
configNewSR(config, config['default_sr'])
if config['auto_deploy']:
thread.start_new_thread(buildAndRun, (config,))
_thread.start_new_thread(buildAndRun, (config,))
def setMiniShellHistory(config, command):
......
......@@ -7,9 +7,9 @@ import json
import os
import shutil
import subprocess
import sup_process
import thread
import urllib
from . import sup_process
from six.moves import _thread
from six.moves.urllib.parse import unquote
from flask import (Flask, request, redirect, url_for, render_template,
g, flash, jsonify, session, abort, send_file)
......@@ -160,7 +160,7 @@ def removeSoftware():
def runSoftwareProfile():
  """Start a background slapgrid 'software' run and answer immediately.

  The run happens in a daemon thread so the HTTP request is not blocked;
  the JSON result only acknowledges that the run was started.
  """
  _thread.start_new_thread(runSlapgridUntilSuccess, (app.config, "software"))
  return jsonify(result=True)
......@@ -233,7 +233,7 @@ def removeInstance():
def runInstanceProfile():
  """Start a background slapgrid 'instance' run and answer immediately.

  Creates the instance root directory on first use, then launches the run
  in a background thread so the HTTP request is not blocked.
  """
  if not os.path.exists(app.config['instance_root']):
    os.mkdir(app.config['instance_root'])
  _thread.start_new_thread(runSlapgridUntilSuccess, (app.config, "instance"))
  return jsonify(result=True)
......@@ -293,7 +293,7 @@ def cloneRepository():
try:
cloneRepo(request.form['repo'], path, request.form['user'], request.form['email'])
return jsonify(code=1, result="")
except GitCommandError, e:
except GitCommandError as e:
return jsonify(code=0, result=safeResult(str(e)))
......@@ -324,7 +324,7 @@ def getProjectStatus():
try:
result, branch, isdirty = gitStatus(path)
return jsonify(code=1, result=result, branch=branch, dirty=isdirty)
except GitCommandError, e:
except GitCommandError as e:
return jsonify(code=0, result=safeResult(str(e)))
else:
return jsonify(code=0, result="Can not read folder: Permission Denied")
......@@ -414,7 +414,7 @@ def changeBranch():
else:
json = "This is already your active branch for this project"
return jsonify(code=1, result=json)
except GitCommandError, e:
except GitCommandError as e:
return jsonify(code=0, result=safeResult(str(e)))
else:
return jsonify(code=0, result="Can not read folder: Permission Denied")
......@@ -432,7 +432,7 @@ def newBranch():
return jsonify(code=1, result="")
else:
return jsonify(code=0, result="Failed to checkout to branch %s.")
except GitCommandError, e:
except GitCommandError as e:
return jsonify(code=0, result=safeResult(str(e)))
else:
return jsonify(code=0, result="Can not read folder: Permission Denied")
......@@ -638,7 +638,7 @@ def updateAccount():
try:
updateGitConfig(app.config['default_repository_path'], name, email)
except GitCommandError, e:
except GitCommandError as e:
return jsonify(code=0, result=str(e))
git_user_file = os.path.join(app.config['etc_dir'], '.git_user')
with codecs.open(git_user_file, 'w', encoding='utf-8') as gfile:
......@@ -684,10 +684,8 @@ def fileBrowser():
dir = request.form['dir'].encode('utf-8')
newfilename = request.form.get('newfilename', '').encode('utf-8')
files = request.form.get('files', '').encode('utf-8')
if not request.form.has_key('opt') or not request.form['opt']:
opt = 1
else:
opt = int(request.form['opt'])
opt = request.form.get('opt')
opt = int(opt) if opt else 1
else:
opt = int(request.args.get('opt'))
......@@ -751,9 +749,9 @@ def fileBrowser():
def editFile():
  """Render the file editor page for a workspace file.

  'profile' and 'filename' query parameters are percent-encoded paths;
  they are decoded with urllib's unquote before being passed to the
  template.
  """
  return render_template('editFile.html', workDir='workspace',
            profile=unquote(request.args.get('profile', '')),
            projectList=listFolder(app.config, 'workspace'),
            filename=unquote(request.args.get('filename', '')))
def shell():
  """Render the web shell page."""
  template_name = 'shell.html'
  return render_template(template_name)
......
......@@ -24,6 +24,8 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from __future__ import print_function
import os
import argparse
import subprocess
......@@ -96,7 +98,8 @@ def shred(options):
arg_list.extend(getFileList(options.file_list, options.check_exist))
pshred = subprocess.Popen(arg_list, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stderr=subprocess.STDOUT,
universal_newlines=True)
result, stderr = pshred.communicate()
if pshred.returncode is None:
pshred.kill()
......@@ -108,4 +111,4 @@ def shred(options):
def main():
  """Entry point: parse command-line options, run shred, print its output."""
  arg_parser = getAgumentParser()
  output = shred(arg_parser.parse_args())
  print(output)
from __future__ import print_function
import argparse
import sys
import os
......@@ -12,7 +14,7 @@ def killpidfromfile():
if sig is None:
raise ValueError('Unknown signal name %s' % sys.argv[2])
pid = int(open(file).read())
print 'Killing pid %s with signal %s' % (pid, sys.argv[2])
print('Killing pid %s with signal %s' % (pid, sys.argv[2]))
os.kill(pid, sig)
def sublist(a, b):
......@@ -63,7 +65,7 @@ def kill():
cmdline = p.cmdline()
if cmdline == args.arg if args.full else sublist(cmdline, args.arg):
p.send_signal(s)
print 'killed pid %s with signal %s' % (p.pid, args.signal)
print('killed pid %s with signal %s' % (p.pid, args.signal))
r = 0
except psutil.Error:
pass
......
......@@ -78,7 +78,7 @@ echo "htpasswd $@" > %s/monitor-htpasswd
self.writeContent(self.monitor_https_cors, '{% set allow_domain = "|".join(domain.replace(".", "\.").split()) -%}\n'
'SetEnvIf Origin "^http(s)?://(.+\.)?({{ allow_domain }})$" ORIGIN_DOMAIN=$0\n'
'Header always set Access-Control-Allow-Origin "%{ORIGIN_DOMAIN}e" env=ORIGIN_DOMAIN')
os.chmod(self.httpd_passwd_bin, 0755)
os.chmod(self.httpd_passwd_bin, 0o755)
def tearDown(self):
if os.path.exists(self.base_dir):
......@@ -101,31 +101,37 @@ echo "htpasswd $@" > %s/monitor-htpasswd
def check_config(self):
  """Cross-check config.parameters.json against config.json.

  For every entry of config.json that carries a key, assert the key exists
  in config.parameters.json, then verify the key-specific artifacts:
  - 'from-file': the referenced file exists and holds the expected value;
  - 'httpd-password': the parameter file and htpasswd file exist and the
    htpasswd file holds the generated command line;
  - 'cors-domain': the generated CORS configuration file matches.
  """
  config_parameter = os.path.join(self.config_dir, 'config.parameters.json')
  with open(config_parameter) as f:
    config_parameter_json = json.load(f)
  with open(self.config_path) as f:
    config_json = json.load(f)
  for config in config_json:
    if config["key"]:
      self.assertIn(config["key"], config_parameter_json)
      parameter = config_parameter_json[config["key"]]
    else:
      # Entries without a key have nothing to cross-check.
      continue
    if config["key"] == 'from-file':
      self.assertTrue(os.path.exists(parameter['file']))
      with open(parameter['file']) as f:
        self.assertEqual(config["value"], f.read())
    elif config["key"] == 'httpd-password':
      http_passwd = "%s/monitor-htpasswd" % self.base_dir
      #XXX where \n bellow come from ?
      command = 'htpasswd -cb %s admin %s%s' % (http_passwd, config["value"], '\n')
      self.assertTrue(os.path.exists(parameter['file']))
      self.assertTrue(os.path.exists(http_passwd))
      with open(parameter['file']) as f:
        self.assertEqual(config["value"], f.read())
      with open(http_passwd) as f:
        self.assertEqual(f.read(), command)
    elif config["key"] == 'cors-domain':
      cors_file = "%s/test-httpd-cors.cfg" % self.base_dir
      self.assertTrue(os.path.exists(cors_file))
      cors_string = self.generate_cors_string(config["value"].split())
      with open(cors_file) as f:
        self.assertEqual(cors_string, f.read())
def check_cfg_config(self, config_list):
cfg_output = os.path.join(self.config_dir, 'config.cfg')
......
......@@ -73,7 +73,6 @@ monitor-url-list = %(url_list)s
collector-db =
base-url = %(base_url)s
title = %(title)s
service-pid-folder = %(base_dir)s/run
promise-output-file = %(base_dir)s/monitor-bootstrap-status
promise-runner = %(promise_run_script)s
randomsleep = /bin/echo sleep
......@@ -102,11 +101,11 @@ partition-folder = %(base_dir)s
for index in range(1, amount+1):
promise_file = os.path.join(promise_dir, 'monitor_promise-%s' % index)
self.writeContent(promise_file, promse_content)
os.chmod(promise_file, 0755)
os.chmod(promise_file, 0o755)
for index in range(1, amount+1):
promise_file = os.path.join(plugin_dir, 'monitor_promise-%s.py' % index)
self.writeContent(promise_file, promse_content)
os.chmod(promise_file, 0644)
os.chmod(promise_file, 0o644)
def checkOPML(self, url_list):
opml_title = "<title>%(root_title)s</title>" % self.monitor_config_dict
......@@ -250,7 +249,8 @@ partition-folder = %(base_dir)s
instance_config = os.path.join(instance.config_folder, '.jio_documents', 'config.json')
self.assertTrue(os.path.exists(instance_config))
config_content = json.loads(open(instance_config).read())
with open(instance_config) as f:
config_content = json.load(f)
self.assertEqual(len(config_content), 4)
key_list = ['', 'sample', 'monitor-password', 'cors-domain']
for parameter in config_content:
......
......@@ -44,7 +44,7 @@ class MonitorGlobalTest(unittest.TestCase):
pkg_resources.resource_string(
'slapos.monitor',
'doc/monitor_instance.schema.json')
self.monitor_instance_schema = json.loads(monitor_schema_string)
self.monitor_instance_schema = json.loads(monitor_schema_string.decode('utf-8'))
self.monitor_config_dict = dict(
......@@ -91,7 +91,6 @@ monitor-url-list = %(url_list)s
collector-db =
base-url = %(base_url)s
title = %(title)s
service-pid-folder = %(base_dir)s/run
promise-output-file = %(base_dir)s/monitor-bootstrap-status
promise-runner = %(promise_run_script)s
randomsleep = /bin/echo sleep
......@@ -132,7 +131,7 @@ exit %(code)s
""" % result_dict
promise_path = os.path.join(self.etc_dir, 'promise', name)
self.writeContent(promise_path, content)
os.chmod(promise_path, 0755)
os.chmod(promise_path, 0o755)
return promise_path
def getPromiseParser(self):
......@@ -230,7 +229,7 @@ exit %(code)s
}"""
with open(os.path.join(self.private_dir, 'monitor.global.json')) as r:
result = json.loads(r.read().decode("utf-8"))
result = json.load(r)
result.pop("date")
self.assertEqual(result,
json.loads(expected_result))
......@@ -248,7 +247,7 @@ exit %(code)s
expected_result_dict["state"] = {'error': 0, 'success': 4}
instance_result_dict = None
with open(os.path.join(self.private_dir, 'monitor.global.json')) as r:
instance_result_dict = json.loads(r.read().decode("utf-8"))
instance_result_dict = json.load(r)
result = instance_result_dict.copy()
result.pop("date")
self.assertEqual(result,
......
......@@ -69,7 +69,7 @@ exit 0
"""
promise_path = os.path.join(self.old_promise_dir, name)
self.writeContent(promise_path, content)
os.chmod(promise_path, 0755)
os.chmod(promise_path, 0o755)
return promise_path
def writePromiseNOK(self, name):
......@@ -80,19 +80,18 @@ exit 2
"""
promise_path = os.path.join(self.old_promise_dir, name)
self.writeContent(promise_path, content)
os.chmod(promise_path, 0755)
os.chmod(promise_path, 0o755)
return promise_path
def generatePromiseScript(self, name, success=True, failure_count=1, content="",
periodicity=0.03):
promise_content = """from zope import interface as zope_interface
promise_content = """from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise import GenericPromise
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
self.setPeriodicity(minute=%(periodicity)s)
......@@ -151,21 +150,23 @@ class RunPromise(GenericPromise):
result_file = os.path.join(self.output_dir, 'my_promise.status.json')
os.system('cat %s' % result_file)
self.assertTrue(os.path.exists(result_file))
my_result = json.loads(open(result_file).read().decode("utf-8"))
with open(result_file) as f:
my_result = json.load(f)
my_result['result'].pop('date')
expected_result = {
u'title': u'my_promise', u'name': u'my_promise.py',
u'result': {
u'failed': False, u'message': u'success', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s/my_promise.py' % self.promise_dir,
}
self.assertTrue(my_result.pop('execution-time'))
self.assertEqual(expected_result, my_result)
result_file = os.path.join(self.output_dir, 'my_second_promise.status.json')
self.assertTrue(os.path.exists(result_file))
second_result = json.loads(open(result_file).read().decode("utf-8"))
with open(result_file) as f:
second_result = json.load(f)
second_result['result'].pop('date')
expected_result = {
......@@ -173,9 +174,9 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': False, u'message': u'success', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s/my_second_promise.py' % self.promise_dir,
}
self.assertTrue(second_result.pop('execution-time'))
self.assertEqual(expected_result, second_result)
def test_promise_generic_failed(self):
......@@ -186,7 +187,8 @@ class RunPromise(GenericPromise):
result_file = os.path.join(self.output_dir, 'my_promise.status.json')
self.assertTrue(os.path.exists(result_file))
my_result = json.loads(open(result_file).read().decode("utf-8"))
with open(result_file) as f:
my_result = json.load(f)
my_result['result'].pop('date')
expected_result = {
......@@ -194,9 +196,9 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': True, u'message': u'failed', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s/my_promise.py' % self.promise_dir,
}
self.assertTrue(my_result.pop('execution-time'))
self.assertEqual(expected_result, my_result)
def test_promise_generic_status_change(self):
......@@ -207,7 +209,8 @@ class RunPromise(GenericPromise):
result_file = os.path.join(self.output_dir, 'my_promise.status.json')
self.assertTrue(os.path.exists(result_file))
my_result = json.loads(open(result_file).read().decode("utf-8"))
with open(result_file) as f:
my_result = json.load(f)
my_result['result'].pop('date')
expected_result = {
......@@ -215,9 +218,9 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': True, u'message': u'failed', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s/my_promise.py' % self.promise_dir,
}
self.assertTrue(my_result.pop('execution-time'))
self.assertEqual(expected_result, my_result)
os.system('rm %s/*.pyc' % self.promise_dir)
......@@ -226,7 +229,8 @@ class RunPromise(GenericPromise):
promise_runner2 = MonitorPromiseLauncher(parser)
promise_runner2.start()
my_result = json.loads(open(result_file).read().decode("utf-8"))
with open(result_file) as f:
my_result = json.load(f)
my_result['result'].pop('date')
expected_result = {
......@@ -234,9 +238,9 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': False, u'message': u'success', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s/my_promise.py' % self.promise_dir,
}
self.assertTrue(my_result.pop('execution-time'))
self.assertEqual(expected_result, my_result)
def test_promise_generic_periodicity(self):
......@@ -287,7 +291,8 @@ class RunPromise(GenericPromise):
result_file = os.path.join(self.output_dir, 'promise_1.status.json')
self.assertTrue(os.path.exists(result_file))
result1 = json.loads(open(result_file).read().decode("utf-8"))
with open(result_file) as f:
result1 = json.load(f)
start_date = datetime.strptime(result1['result'].pop('date'), '%Y-%m-%dT%H:%M:%S+0000')
expected_result = {
......@@ -295,7 +300,6 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': False, u'message': u'success', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s' % promise,
}
......@@ -303,8 +307,10 @@ class RunPromise(GenericPromise):
parser = self.getPromiseParser(force=True)
promise_runner = MonitorPromiseLauncher(parser)
promise_runner.start()
result2 = json.loads(open(result_file).read().decode("utf-8"))
with open(result_file) as f:
result2 = json.load(f)
start_date2 = datetime.strptime(result2['result'].pop('date'), '%Y-%m-%dT%H:%M:%S+0000')
self.assertTrue(result2.pop('execution-time'))
self.assertEqual(expected_result, result2)
def test_promise_two_folder(self):
......@@ -319,7 +325,8 @@ class RunPromise(GenericPromise):
result2_file = os.path.join(self.output_dir, 'promise_2.status.json')
self.assertTrue(os.path.exists(result_file))
self.assertTrue(os.path.exists(result2_file))
result1 = json.loads(open(result_file).read().decode("utf-8"))
with open(result_file) as f:
result1 = json.load(f)
start_date = datetime.strptime(result1['result'].pop('date'), '%Y-%m-%dT%H:%M:%S+0000')
expected_result = {
......@@ -327,12 +334,13 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': False, u'message': u'success', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s' % promise,
}
self.assertTrue(result1.pop('execution-time'))
self.assertEqual(expected_result, result1)
result2 = json.loads(open(result2_file).read())
with open(result2_file) as f:
result2 = json.load(f)
start_date2 = datetime.strptime(result2['result'].pop('date'), '%Y-%m-%dT%H:%M:%S+0000')
expected_result = {
......@@ -340,9 +348,9 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': False, u'message': u'success', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s' % promise2,
}
self.assertTrue(result2.pop('execution-time'))
self.assertEqual(expected_result, result2)
def test_promise_NOK(self):
......@@ -353,23 +361,26 @@ class RunPromise(GenericPromise):
result_file = os.path.join(self.output_dir, 'promise_1.status.json')
self.assertTrue(os.path.exists(result_file))
result1 = json.loads(open(result_file).read().decode("utf-8"))
with open(result_file) as f:
result1 = json.load(f)
result1['result'].pop('date')
expected_result = {
u'title': u'promise_1', u'name': u'promise_1',
u'result': {
u'failed': True, u'message': u'failed', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s' % promise,
}
self.assertTrue(result1.pop('execution-time'))
self.assertEqual(expected_result, result1)
# second run
promise_runner = MonitorPromiseLauncher(parser)
promise_runner.start()
result2 = json.loads(open(result_file).read().decode("utf-8"))
with open(result_file) as f:
result2 = json.load(f)
result2['result'].pop('date')
self.assertTrue(result2.pop('execution-time'))
self.assertEqual(expected_result, result2)
def test_promise_mixed(self):
......@@ -380,16 +391,17 @@ class RunPromise(GenericPromise):
result_file = os.path.join(self.output_dir, 'promise_1.status.json')
self.assertTrue(os.path.exists(result_file))
result1 = json.loads(open(result_file).read().decode("utf-8"))
with open(result_file) as f:
result1 = json.load(f)
result1['result'].pop('date')
expected_result = {
u'title': u'promise_1', u'name': u'promise_1',
u'result': {
u'failed': False, u'message': u'success', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s' % promise,
}
self.assertTrue(result1.pop('execution-time'))
self.assertEqual(expected_result, result1)
# second run with failure
......@@ -401,7 +413,9 @@ class RunPromise(GenericPromise):
promise_runner = MonitorPromiseLauncher(parser)
promise_runner.start()
result2 = json.loads(open(result_file).read().decode("utf-8"))
with open(result_file) as f:
result2 = json.load(f)
result2['result'].pop('date')
self.assertTrue(result2.pop('execution-time'))
self.assertEqual(expected_result, result2)
......@@ -32,6 +32,7 @@ import tempfile
import os
import unittest
import shutil
import six
class TestCheckFileState(TestPromisePluginMixin):
......@@ -69,8 +70,10 @@ extra_config_dict = {
self.assertEqual(result['result']['failed'], True)
self.assertEqual(
result['result']['message'],
"ERROR IOError(21, 'Is a directory') "
"during opening and reading file %r" % (filename,)
"ERROR %s(21, 'Is a directory') "
"during opening and reading file %r" % (
"IsADirectoryError" if six.PY3 else "IOError",
filename)
)
def test_check_file_not_exists(self):
......@@ -88,8 +91,10 @@ extra_config_dict = {
self.assertEqual(result['result']['failed'], True)
self.assertEqual(
result['result']['message'],
"ERROR IOError(2, 'No such file or directory') "
"during opening and reading file %r" % (filename,)
"ERROR %s(2, 'No such file or directory') "
"during opening and reading file %r" % (
"FileNotFoundError" if six.PY3 else "IOError",
filename)
)
def test_check_file_empty(self):
......
......@@ -27,6 +27,7 @@
from slapos.grid.promise import PromiseError
from slapos.test.promise.plugin import TestPromisePluginMixin
from slapos.util import str2bytes
from cryptography import x509
from cryptography.hazmat.backends import default_backend
......@@ -34,12 +35,13 @@ from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID
import BaseHTTPServer
from six.moves import BaseHTTPServer
import datetime
import ipaddress
import json
import multiprocessing
import os
import six
import ssl
import tempfile
import time
......@@ -66,10 +68,10 @@ def createCSR(common_name, ip=None):
subject_alternative_name_list = []
if ip is not None:
subject_alternative_name_list.append(
x509.IPAddress(ipaddress.ip_address(unicode(ip)))
x509.IPAddress(ipaddress.ip_address(ip))
)
csr = x509.CertificateSigningRequestBuilder().subject_name(x509.Name([
x509.NameAttribute(NameOID.COMMON_NAME, unicode(common_name)),
x509.NameAttribute(NameOID.COMMON_NAME, common_name),
]))
if len(subject_alternative_name_list):
......@@ -89,10 +91,10 @@ class CertificateAuthority(object):
public_key = self.key.public_key()
builder = x509.CertificateBuilder()
builder = builder.subject_name(x509.Name([
x509.NameAttribute(NameOID.COMMON_NAME, unicode(common_name)),
x509.NameAttribute(NameOID.COMMON_NAME, common_name),
]))
builder = builder.issuer_name(x509.Name([
x509.NameAttribute(NameOID.COMMON_NAME, unicode(common_name)),
x509.NameAttribute(NameOID.COMMON_NAME, common_name),
]))
builder = builder.not_valid_before(
datetime.datetime.utcnow() - datetime.timedelta(days=2))
......@@ -147,16 +149,19 @@ class TestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
response = {
'Path': self.path,
}
self.wfile.write(json.dumps(response, indent=2))
self.wfile.write(str2bytes(json.dumps(response, indent=2)))
class CheckUrlAvailableMixin(TestPromisePluginMixin):
@classmethod
def setUpClass(cls):
cls.another_server_ca = CertificateAuthority("Another Server Root CA")
cls.test_server_ca = CertificateAuthority("Test Server Root CA")
cls.another_server_ca = CertificateAuthority(u"Another Server Root CA")
cls.test_server_ca = CertificateAuthority(u"Test Server Root CA")
ip = SLAPOS_TEST_IPV4.decode('utf-8') \
if isinstance(SLAPOS_TEST_IPV4, bytes) \
else SLAPOS_TEST_IPV4
key, key_pem, csr, csr_pem = createCSR(
"testserver.example.com", SLAPOS_TEST_IPV4)
u"testserver.example.com", ip)
_, cls.test_server_certificate_pem = cls.test_server_ca.signCSR(csr)
cls.test_server_certificate_file = tempfile.NamedTemporaryFile(
......@@ -175,17 +180,17 @@ class CheckUrlAvailableMixin(TestPromisePluginMixin):
cls.test_server_ca.certificate_pem)
cls.test_server_ca_certificate_file.close()
server = BaseHTTPServer.HTTPServer(
(SLAPOS_TEST_IPV4, SLAPOS_TEST_IPV4_PORT),
TestHandler)
server.socket = ssl.wrap_socket(
server.socket,
certfile=cls.test_server_certificate_file.name,
server_side=True)
cls.server_process = multiprocessing.Process(
target=server.serve_forever)
def server():
server = BaseHTTPServer.HTTPServer(
(SLAPOS_TEST_IPV4, SLAPOS_TEST_IPV4_PORT),
TestHandler)
server.socket = ssl.wrap_socket(
server.socket,
certfile=cls.test_server_certificate_file.name,
server_side=True)
server.serve_forever()
cls.server_process = multiprocessing.Process(target=server)
cls.server_process.start()
@classmethod
......@@ -269,7 +274,8 @@ class TestCheckUrlAvailable(CheckUrlAvailableMixin):
self.assertEqual(result['result']['failed'], True)
self.assertEqual(
result['result']['message'],
"ERROR: Invalid URL u'https://': No host supplied"
"ERROR: Invalid URL %s'https://': No host supplied" %
('' if six.PY3 else 'u')
)
def test_check_url_malformed(self):
......
......@@ -87,7 +87,7 @@ class TestApacheMPMWatchdog(unittest.TestCase):
self.assertEqual(None,
getServerStatus("http://localhost/",
"user", "password"))
self.assertNotEquals(None,
self.assertNotEqual(None,
getServerStatus("https://www.erp5.com/", None, None))
......
......@@ -29,58 +29,59 @@ import unittest
import os.path
from slapos.agent.agent import AutoSTemp, TestMap
from collections import OrderedDict

# Expected test map, one entry per software release under test.
# An OrderedDict keeps iteration order deterministic on all Python
# versions (plain dict ordering is only guaranteed from 3.7), entries
# grouped by target computer: COMP-1 first, then COMP-2 (with nayuos as
# a trailing COMP-1 entry matching the fixture's declared order).
TESTMAP_DICT = OrderedDict([
  ("test-apache-frontend-software-release", {
    "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/apache-frontend/software.cfg",
    "supply_computer": "COMP-1",
    "group": "COMP-1",
    "title": "test-apache-frontend-software-release"}),
  ("test-slapos-master-software-release", {
    "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/slapos-master/software.cfg",
    "supply_computer": "COMP-1",
    "group": "COMP-1",
    "title": "test-slapos-master-software-release"}),
  ("test-erp5testnode-software-release", {
    "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/erp5testnode/software.cfg",
    "supply_computer": "COMP-1",
    "group": "COMP-1",
    "title": "test-erp5testnode-software-release"}),
  ("test-webrunner-software-release", {
    "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/slaprunner/software.cfg",
    "supply_computer": "COMP-1",
    "group": "COMP-1",
    "title": "test-webrunner-software-release"}),
  ("test-agent-software-release", {
    "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/agent/software.cfg",
    "supply_computer": "COMP-2",
    "group": "COMP-2",
    "title": "test-agent-software-release"}),
  ("test-powerdns-software-release", {
    "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/powerdns/software.cfg",
    "supply_computer": "COMP-2",
    "group": "COMP-2",
    "title": "test-powerdns-software-release"}),
  ("test-nayuos-software-release", {
    "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/nayuos/software.cfg",
    "supply_computer": "COMP-1",
    "group": "COMP-1",
    "title": "test-nayuos-software-release"}),
  ("test-wendelin-software-release", {
    "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/wendelin/software.cfg",
    "supply_computer": "COMP-2",
    "group": "COMP-2",
    "title": "test-wendelin-software-release"}),
  ("test-monitor-software-release", {
    "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/monitor/software.cfg",
    "supply_computer": "COMP-2",
    "group": "COMP-2",
    "title": "test-monitor-software-release"}),
  ("test-re6stnetmaster-software-release", {
    "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/re6stnet/software.cfg",
    "supply_computer": "COMP-2",
    "group": "COMP-2",
    "title": "test-re6stnetmaster-software-release"}),
])
......@@ -158,7 +159,8 @@ class TestAutoSTemp(unittest.TestCase):
removes it when deleted.
"""
f = AutoSTemp("foo")
self.assertEqual(open(f.name, "r").read(), "foo")
with open(f.name, "r") as f_:
self.assertEqual(f_.read(), "foo")
fname = f.name
self.assertTrue(os.path.isfile(fname))
del f
......
......@@ -87,10 +87,10 @@ class TestCheckFeedAsPromise(unittest.TestCase):
feed = self.generateKOFeed()
option.ko_pattern_list = ['Error']
self.assertNotEquals(checkFeedAsPromise(feed, option), "")
self.assertNotEqual(checkFeedAsPromise(feed, option), "")
option.title, option.description = False, True
option.ko_pattern_list = ['FAILURE', 'Error']
self.assertNotEquals(checkFeedAsPromise(feed, option), "")
self.assertNotEqual(checkFeedAsPromise(feed, option), "")
def test_ifNoOKPatternFoundErrorIsRaised(self):
......@@ -100,7 +100,7 @@ class TestCheckFeedAsPromise(unittest.TestCase):
# If no time buffer, then not OK is always wrong
option.ok_pattern_list = ['OK']
self.assertNotEquals(len(checkFeedAsPromise(feed, option)), 0)
self.assertNotEqual(len(checkFeedAsPromise(feed, option)), 0)
# if time buffer, then not OK is wrong only after buffer expires
extra_item = {
......@@ -115,7 +115,7 @@ class TestCheckFeedAsPromise(unittest.TestCase):
# shorter buffer, we want to raise an error
option.time_buffer = 1800
self.assertNotEquals(len(checkFeedAsPromise(feed, option)), 0)
self.assertNotEqual(len(checkFeedAsPromise(feed, option)), 0)
def test_noItemInTheFeedIsNotAnError(self):
......
......@@ -7,6 +7,7 @@ import shutil
import tempfile
import time
import unittest
import six
from slapos.generatefeed import generateFeed
......@@ -120,8 +121,8 @@ class TestGenerateFeed(unittest.TestCase):
for i in range(5-3, 5): # older items (from 1 to 2) have been deleted
expected_remaining_item_list.append('%s.item' % i)
self.assertItemsEqual(remaining_status_item_list,
expected_remaining_item_list)
six.assertCountEqual(self, remaining_status_item_list,
expected_remaining_item_list)
if __name__ == '__main__':
unittest.main()
......@@ -140,7 +140,7 @@ class TestQemuQMPWrapper(unittest.TestCase):
elif message['execute'] == 'query-memory-devices':
memory_list = []
added_mem = self.readChange('dimm') + self.hotplugged_memory_amount
slot_amount = added_mem / self.memory_slot_size
slot_amount = added_mem // self.memory_slot_size
for i in range(slot_amount, 0, -1):
memory_list.append({
u'data': {
......@@ -159,7 +159,7 @@ class TestQemuQMPWrapper(unittest.TestCase):
elif message['execute'] == 'query-memdev':
memory_list = []
added_mem = self.readChange('dimm') + self.hotplugged_memory_amount
slot_amount = added_mem / self.memory_slot_size
slot_amount = added_mem // self.memory_slot_size
for i in range(slot_amount, 0, -1):
memory_list.append({
u'dump': True,
......
......@@ -3,7 +3,6 @@ import os
import string
import random
import supervisor
import thread
import unittest
......@@ -29,14 +28,6 @@ class TestRunnerBackEnd(unittest.TestCase):
if os.path.exists(garbage_file):
os.remove(garbage_file)
def _startSupervisord(self):
cwd = os.getcwd()
supervisord_config_file = os.path.join(cwd, 'supervisord.conf')
open(supervisord_config_file, 'w').write("""
""")
supervisord = supervisor.supervisord.Supervisord('-c', supervisord_config_file)
thread.start_new_thread()
def test_UserCanLoginAndUpdateCredentials(self):
"""
* Create a user with createNewUser
......
......@@ -5,7 +5,6 @@ import time
import unittest
from slapos.resilient import runner_exporter
from StringIO import StringIO
tested_instance_cfg = """[buildout]
installed_develop_eggs =
......@@ -75,7 +74,7 @@ class TestRunnerExporter(unittest.TestCase):
def _createExecutableFile(self, path, content=''):
self._createFile(path, content)
os.chmod(path, 0700)
os.chmod(path, 0o700)
def _setUpFakeInstanceFolder(self):
self._createFile('proxy.db')
......@@ -232,9 +231,9 @@ class TestRunnerExporter(unittest.TestCase):
self.assertEqual(
runner_exporter.getBackupFilesModifiedDuringExportList(config, time.time() - 5),
['instance/slappart0/srv/backup/data.dat',
'instance/slappart0/srv/backup/important_logs/this_is_a.log',
'instance/slappart1/srv/backup/data.dat']
[b'instance/slappart0/srv/backup/data.dat',
b'instance/slappart0/srv/backup/important_logs/this_is_a.log',
b'instance/slappart1/srv/backup/data.dat']
)
time.sleep(2)
self.assertFalse(
......@@ -243,5 +242,5 @@ class TestRunnerExporter(unittest.TestCase):
self._createFile('instance/slappart1/srv/backup/bakckup.data', 'my backup')
self.assertEqual(
runner_exporter.getBackupFilesModifiedDuringExportList(config, time.time() - 1),
['instance/slappart1/srv/backup/bakckup.data']
[b'instance/slappart1/srv/backup/bakckup.data']
)
......@@ -58,32 +58,32 @@ class TestSecureDelete(unittest.TestCase):
passes = 2 + 1 # Option -z is used, plus one more pass
result = shred(options)
self.assertFalse(os.path.exists(self.remove_file))
self.assertTrue("pass %s/%s" % (passes, passes) in result)
self.assertTrue("%s: removed" % os.path.basename(self.remove_file) in result)
self.assertIn("pass %d/%d" % (passes, passes), result)
self.assertIn("%s: removed" % os.path.basename(self.remove_file), result)
def test_secure_remove_file_keep_file(self):
options = getAgumentParser().parse_args(['-n', '2', '-z', '--file', self.remove_file])
passes = 2 + 1 # Option -z is used, plus one more pass
result = shred(options)
self.assertTrue(os.path.exists(self.remove_file))
self.assertTrue("pass %s/%s" % (passes, passes) in result)
self.assertFalse("%s: removed" % os.path.basename(self.remove_file) in result)
self.assertIn("pass %d/%d" % (passes, passes), result)
self.assertNotIn("%s: removed" % os.path.basename(self.remove_file), result)
def test_secure_remove_file_non_zero(self):
options = getAgumentParser().parse_args(['-n', '2', '-u', '--file', self.remove_file])
passes = 2
result = shred(options)
self.assertFalse(os.path.exists(self.remove_file))
self.assertTrue("pass %s/%s" % (passes, passes) in result)
self.assertTrue("%s: removed" % os.path.basename(self.remove_file) in result)
self.assertIn("pass %d/%d" % (passes, passes), result)
self.assertIn("%s: removed" % os.path.basename(self.remove_file), result)
def test_secure_remove_file_check_exist(self):
options = getAgumentParser().parse_args(['-n', '2', '-u', '-s', '--file', 'random.txt', self.remove_file])
passes = 2
result = shred(options)
self.assertFalse(os.path.exists(self.remove_file))
self.assertTrue("pass %s/%s" % (passes, passes) in result)
self.assertTrue("%s: removed" % os.path.basename(self.remove_file) in result)
self.assertIn("pass %d/%d" % (passes, passes), result)
self.assertIn("%s: removed" % os.path.basename(self.remove_file), result)
def test_secure_remove_file_check_exist_false(self):
options = getAgumentParser().parse_args(['-n', '2', '-u', '--file', 'random.txt'])
......@@ -99,19 +99,19 @@ class TestSecureDelete(unittest.TestCase):
# shred removed link and target file
self.assertFalse(os.path.exists(self.remove_file))
self.assertFalse(os.path.exists(self.link_name))
self.assertTrue("pass %s/%s" % (passes, passes) in result)
self.assertTrue("%s: removed" % os.path.basename(self.remove_file) in result)
self.assertIn("pass %d/%d" % (passes, passes), result)
self.assertIn("%s: removed" % os.path.basename(self.remove_file), result)
def test_secure_remove_file_multiple_files(self):
options = getAgumentParser().parse_args(['-n', '2', '-u', '-z', '--file', self.remove_file, self.remove_file2])
passes = 2 + 1 # Option -z is used, plus one more pass
result = shred(options)
self.assertFalse(os.path.exists(self.remove_file))
self.assertTrue("pass %s/%s" % (passes, passes) in result)
self.assertTrue("%s: removed" % os.path.basename(self.remove_file) in result)
self.assertIn("pass %d/%d" % (passes, passes), result)
self.assertIn("%s: removed" % os.path.basename(self.remove_file), result)
self.assertFalse(os.path.exists(self.remove_file2))
self.assertTrue("%s: removed" % os.path.basename(self.remove_file2) in result)
self.assertIn("%s: removed" % os.path.basename(self.remove_file2), result)
if __name__ == '__main__':
unittest.main()
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment