Commit 5ceae36b authored by Bryton Lacquement's avatar Bryton Lacquement 🚪

wip

parent f873f59d
......@@ -25,7 +25,7 @@
#
##############################################################################
import ConfigParser
from six.moves import configparser
import argparse
import collections
import json
......@@ -214,7 +214,7 @@ def main():
logger, log_file = getLogger(log, args.verbose)
configuration = ConfigParser.SafeConfigParser()
configuration = configparser.SafeConfigParser()
configuration.readfp(args.configuration_file)
pidfile = args.pidfile
......
from __future__ import print_function
import datetime
import json
import sys
......@@ -50,16 +52,16 @@ def retryOnNetworkFailure(func):
while True:
try:
return func(*args, **kwargs)
except SAFE_RPC_EXCEPTION_LIST, e:
print 'Network failure: %s , %s' % (sys.exc_info(), e)
except HTTPError, e:
print 'Network failure: %s , %s' % (sys.exc_info(), e)
except ConnectionError, e:
print 'Network failure: %s , %s' % (sys.exc_info(), e)
except slapos.slap.ConnectionError, e:
print 'Network failure: %s , %s' % (sys.exc_info(), e)
print 'Retry method %s in %i seconds' % (func, retry_time)
except SAFE_RPC_EXCEPTION_LIST as e:
print('Network failure: %s , %s' % (sys.exc_info(), e))
except HTTPError as e:
print('Network failure: %s , %s' % (sys.exc_info(), e))
except ConnectionError as e:
print('Network failure: %s , %s' % (sys.exc_info(), e))
except slapos.slap.ConnectionError as e:
print('Network failure: %s , %s' % (sys.exc_info(), e))
print('Retry method %s in %i seconds' % (func, retry_time))
time.sleep(retry_time)
retry_time = min(retry_time*1.5, 640)
......@@ -218,7 +220,7 @@ class SlapOSMasterCommunicator(object):
result = self.hateoas_navigator.GET(url)
result = json.loads(result)
if result['_links'].get('action_object_slap', None) is None:
print result['links']
print(result['links'])
return None
object_link = self.hateoas_navigator.hateoasGetLinkFromLinks(
......
......@@ -27,6 +27,8 @@
#
##############################################################################
from __future__ import print_function
import os, errno
import subprocess
import argparse
......@@ -52,7 +54,7 @@ def build_command(apachedex_executable, output_file,
# Automaticaly replace variable 'date'.
apache_log = logfile.strip() % {'date': today}
if not os.path.exists(apache_log):
print "WARNING: File %s not found..." % apache_log
print("WARNING: File %s not found..." % apache_log)
continue
log_list.append(apache_log)
if not log_list:
......@@ -81,7 +83,7 @@ def main():
base_url = args.base_url.strip()
if not os.path.exists(output_folder) or not os.path.isdir(output_folder):
print "ERROR: Output folder is not a directory. Exiting..."
print("ERROR: Output folder is not a directory. Exiting...")
return 1
today = date.today().strftime("%Y-%m-%d")
......@@ -93,7 +95,7 @@ def main():
args.apache_log_list,
config)
except ValueError as e:
print e
print(e)
return 1
process_handler = subprocess.Popen(argument_list,
stdout=subprocess.PIPE,
......@@ -103,11 +105,11 @@ def main():
stdout, stderr = process_handler.communicate()
if process_handler.returncode != 0:
if stderr:
print stderr
print(stderr)
return 1
with open(output_file, 'r') as f:
print base_url + '/ApacheDex-%s.html' % today
print(base_url + '/ApacheDex-%s.html' % today)
return 0
if __name__ == "__main__":
......
# -*- coding: utf-8 -*-
import ConfigParser
from six.moves import configparser
import argparse
import gdbm
from six.moves import dbm_gnu as gdbm
import sys
import os
......@@ -41,7 +41,7 @@ def main():
run(args)
def run(args):
slapos_conf = ConfigParser.ConfigParser()
slapos_conf = configparser.ConfigParser()
slapos_conf.read(args.configuration_file)
current_binary = os.path.join(os.getcwd(), sys.argv[0])
......@@ -52,7 +52,7 @@ def run(args):
partition_base_name = slapos_conf.get('slapformat', 'partition_base_name')
try:
bridge_name = slapos_conf.get('slapformat', 'interface_name')
except ConfigParser.NoOptionError:
except configparser.NoOptionError:
bridge_name = slapos_conf.get('slapformat', 'bridge_name')
instance_root = slapos_conf.get('slapos', 'instance_root')
partition_base_path = os.path.join(instance_root, partition_base_name)
......@@ -61,7 +61,7 @@ def run(args):
logging.basicConfig(level=logging.getLevelName(args.log[0]))
database = gdbm.open(args.database, 'c', 0600)
database = gdbm.open(args.database, 'c', 0o600)
try:
process.main(sr_directory, partition_list, database, bridge_name)
finally:
......
......@@ -28,7 +28,7 @@
import argparse
import errno
import gdbm
from six.moves import dbm_gnu as gdbm
import json
from lockfile import LockFile
import logging
......@@ -38,8 +38,8 @@ import signal
import socket
import subprocess
import sys
import SocketServer
import StringIO
from six.moves import socketserver
import six
import threading
# Copied from erp5.util:erp5/util/testnode/ProcessManager.py
......@@ -75,13 +75,13 @@ def subprocess_capture(p, log, log_prefix, get_output=True):
return (p.stdout and ''.join(stdout),
p.stderr and ''.join(stderr))
class EqueueServer(SocketServer.ThreadingUnixStreamServer):
class EqueueServer(socketserver.ThreadingUnixStreamServer):
daemon_threads = True
def __init__(self, *args, **kw):
self.options = kw.pop('equeue_options')
SocketServer.ThreadingUnixStreamServer.__init__(self,
socketserver.ThreadingUnixStreamServer.__init__(self,
RequestHandlerClass=None,
*args, **kw)
# Equeue Specific elements
......@@ -106,7 +106,7 @@ class EqueueServer(SocketServer.ThreadingUnixStreamServer):
self.logger.addHandler(handler)
def setDB(self, database):
self.db = gdbm.open(database, 'cs', 0700)
self.db = gdbm.open(database, 'cs', 0o700)
def _hasTakeoverBeenTriggered(self):
if hasattr(self, 'takeover_triggered_file_path') and \
......@@ -149,7 +149,7 @@ class EqueueServer(SocketServer.ThreadingUnixStreamServer):
# Handle request
self.logger.debug("Connection with file descriptor %d", request.fileno())
request.settimeout(self.options.timeout)
request_string = StringIO.StringIO()
request_string = six.StringIO()
segment = None
try:
while segment != '':
......@@ -181,7 +181,7 @@ class EqueueServer(SocketServer.ThreadingUnixStreamServer):
def remove_existing_file(path):
try:
os.remove(path)
except OSError, e:
except OSError as e:
if e.errno != errno.ENOENT:
raise
......
......@@ -76,7 +76,7 @@ def generateFeed(option):
# Reduces feed if number of items exceeds max_item
if len(item_dict) > option.max_item:
outdated_key_list = sorted_item_dict.keys()[:-option.max_item]
outdated_key_list = list(sorted_item_dict)[:-option.max_item]
for outdated_key in outdated_key_list:
del sorted_item_dict[outdated_key]
deleteFileList(outdated_key_list)
......
......@@ -12,6 +12,7 @@
#
##############################################################################
from __future__ import print_function
import os
import time
......@@ -76,7 +77,7 @@ def run():
result = parser.parse_args()
arguments = dict(result._get_kwargs())
if arguments['token'] == None and arguments['file_token'] == None:
print "lampconfigure: Error: Please specify where condition will be taken, use -d or -f option"
print("lampconfigure: Error: Please specify where condition will be taken, use -d or -f option")
return
setup(arguments)
......@@ -84,7 +85,7 @@ def setup(arguments):
timeout = 5;
while True:
if not checkAction(arguments):
print "Waiting for 3s and retrying"
print("Waiting for 3s and retrying")
time.sleep(3)
continue
time.sleep(timeout)
......@@ -115,9 +116,9 @@ def checkAction(arguments):
user = arguments['mysql_user'],
passwd = arguments['mysql_password'],
db = arguments['token'])
except Exception, ex:
except Exception as e:
#Mysql is not ready yet?...
print ex.message
print(e)
return False
if arguments['table'] == "**":
#only detect if mysql has been started
......@@ -145,7 +146,7 @@ def rename(arguments):
source = os.path.join(arguments['target_directory'], arguments['source'])
destination = os.path.join(arguments['target_directory'], arguments['destination'])
if not os.path.exists(source):
print "Error when moving: '%s': no such file or directory" % source
print("Error when moving: '%s': no such file or directory" % source)
return
os.rename(source, destination)
if arguments['mode'] != None:
......@@ -155,7 +156,7 @@ def delete(arguments):
for path in arguments['delete_target']:
path = os.path.join(arguments['target_directory'], path)
if not os.path.exists(path):
print "Error when deleting: '%s': no such file or directory" % path
print("Error when deleting: '%s': no such file or directory" % path)
continue
if os.path.isdir(path):
shutil.rmtree(path)
......@@ -164,7 +165,7 @@ def delete(arguments):
def run_script(arguments):
script = os.path.join(arguments['target_directory'], arguments['script'])
print 'Running script: %s' % script
print('Running script: %s' % script)
if os.path.exists(script):
import subprocess
#run python script with predefined data
......@@ -176,12 +177,12 @@ def run_script(arguments):
result = subprocess.Popen(data, env={'PYTHONPATH': ':'.join(sys.path)})
result.wait()
else:
print "Error: can not read file '%s'" % script
print("Error: can not read file '%s'" % script)
def run_sql_script(arguments):
script = os.path.join(arguments['target_directory'], arguments['sql_script'])
print 'Running SQL script: %s' % script
print('Running SQL script: %s' % script)
if os.path.exists(script):
conn = MySQLdb.connect(host=arguments['mysql_host'],
port=int(arguments['mysql_port']),
......@@ -196,7 +197,7 @@ def run_sql_script(arguments):
conn.close()
else:
print "Error: can not read file '%s'" % script
print("Error: can not read file '%s'" % script)
......@@ -204,6 +205,6 @@ def chmod(arguments):
for path in arguments['chmod_target']:
path = os.path.join(arguments['target_directory'], path)
if not os.path.exists(path):
print "Error when changing mode: '%s': no such file or directory" % path
print("Error when changing mode: '%s': no such file or directory" % path)
continue
os.chmod(path, int(arguments['mode'], 8))
......@@ -27,6 +27,8 @@
#
##############################################################################
from six.moves import zip
import sqlite3
import os
import pwd
......@@ -80,7 +82,7 @@ class ResourceCollect:
table="sqlite_master",
columns='name',
where="type='table' AND name='%s'" % name)
table_exists_result = zip(*check_result_cursor)
table_exists_result = list(zip(*check_result_cursor))
if not len(table_exists_result) or table_exists_result[0][0] is None:
return False
return True
......@@ -159,7 +161,7 @@ class ResourceCollect:
query_result = self.db.select('user', date_scope, colums,
where="partition='%s' and (time between '%s' and '%s') %s" %
(partition_id, min_time, max_time, where))
result_list = zip(*query_result)
result_list = list(zip(*query_result))
process_dict = memory_dict = io_dict = {}
if len(result_list):
......@@ -188,7 +190,7 @@ class ResourceCollect:
)
)
disk_used_sum = zip(*disk_result_cursor)
disk_used_sum = list(zip(*disk_result_cursor))
if len(disk_used_sum) and disk_used_sum[0][0] is not None:
io_dict['disk_used'] = round(disk_used_sum[0][0]/1024.0, 2)
self.db.close()
......@@ -252,7 +254,7 @@ def main():
status_file = os.path.join(parser.output_folder, 'monitor_resource.status.json')
if not os.path.exists(parser.collector_db):
print "Collector database not found..."
print("Collector database not found...")
initProcessDataFile(process_file)
initMemoryDataFile(mem_file)
initIODataFile(io_file)
......
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import glob
import json
import ConfigParser
from six.moves import configparser
import time
from datetime import datetime
import base64
......@@ -150,9 +152,9 @@ def generateMonitoringData(config, public_folder, private_folder, public_url,
previous_state_dict.get(tmp_json['name']),
public_folder
)
except ValueError, e:
except ValueError as e:
# bad json file
print "ERROR: Bad json file at: %s\n%s" % (file, str(e))
print("ERROR: Bad json file at: %s\n%s" % (file, str(e)))
continue
with open(promises_status_file, "w") as f:
......@@ -202,7 +204,7 @@ def savePromiseHistory(promise_name, state_dict, previous_state_list,
def run(monitor_conf_file):
config = ConfigParser.ConfigParser()
config = configparser.ConfigParser()
config.read(monitor_conf_file)
base_folder = config.get('monitor', 'private-folder')
......
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
import os
import stat
import json
import ConfigParser
from six.moves import configparser
import traceback
import argparse
import urllib2
from six.moves import urllib
import ssl
import glob
import socket
......@@ -46,7 +48,7 @@ def parseArguments():
def mkdirAll(path):
try:
os.makedirs(path)
except OSError, e:
except OSError as e:
if e.errno == os.errno.EEXIST and os.path.isdir(path):
pass
else: raise
......@@ -54,13 +56,13 @@ def mkdirAll(path):
def softConfigGet(config, *args, **kwargs):
try:
return config.get(*args, **kwargs)
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
except (configparser.NoOptionError, configparser.NoSectionError):
return None
def createSymlink(source, destination):
try:
os.symlink(source, destination)
except OSError, e:
except OSError as e:
if e.errno != os.errno.EEXIST:
raise
......@@ -98,10 +100,10 @@ class Monitoring(object):
def loadConfig(self, pathes, config=None):
if config is None:
config = ConfigParser.ConfigParser()
config = configparser.ConfigParser()
try:
config.read(pathes)
except ConfigParser.MissingSectionHeaderError:
except configparser.MissingSectionHeaderError:
traceback.print_exc()
return config
......@@ -130,8 +132,8 @@ class Monitoring(object):
try:
with open(config_list[2]) as cfile:
param_value = cfile.read()
except OSError, e:
print 'Cannot read file %s, Error is: %s' % (config_list[2], str(e))
except OSError as e:
print('Cannot read file %s, Error is: %s' % (config_list[2], str(e)))
pass
else:
param_value = ""
......@@ -146,7 +148,7 @@ class Monitoring(object):
)
if config_list[0] == 'htpasswd':
if len(config_list) != 5 or not os.path.exists(config_list[4]):
print 'htpasswd file is not specified: %s' % str(config_list)
print('htpasswd file is not specified: %s' % str(config_list))
continue
parameter['description']['user'] = config_list[3]
parameter['description']['htpasswd'] = config_list[4]
......@@ -177,8 +179,8 @@ class Monitoring(object):
}
)
configuration_list.append(parameter)
except OSError, e:
print 'Cannot read file at %s, Error is: %s' % (old_cors_file, str(e))
except OSError as e:
print('Cannot read file at %s, Error is: %s' % (old_cors_file, str(e)))
pass
return configuration_list
......@@ -191,7 +193,7 @@ class Monitoring(object):
try:
mkdirAll(dirname) # could also raise OSError
os.symlink(path, os.path.join(dirname, os.path.basename(path)))
except OSError, e:
except OSError as e:
if e.errno != os.errno.EEXIST:
raise
......@@ -211,20 +213,20 @@ class Monitoring(object):
# XXX - working here with public url
if hasattr(ssl, '_create_unverified_context'):
context = ssl._create_unverified_context()
response = urllib2.urlopen(url, context=context, timeout=timeout)
response = urllib.urlopen(url, context=context, timeout=timeout)
else:
response = urllib2.urlopen(url, timeout=timeout)
except urllib2.HTTPError:
print "ERROR: Failed to get Monitor configuration file at %s " % url
except socket.timeout, e:
print "ERROR: Timeout while downloading monitor config at %s " % url
response = urllib.urlopen(url, timeout=timeout)
except urllib.HTTPError:
print("ERROR: Failed to get Monitor configuration file at %s " % url)
except socket.timeout as e:
print("ERROR: Timeout while downloading monitor config at %s " % url)
else:
try:
monitor_dict = json.loads(response.read())
monitor_title = monitor_dict.get('title', 'Unknown Instance')
success = True
except ValueError, e:
print "ERROR: Json file at %s is not valid" % url
except ValueError as e:
print("ERROR: Json file at %s is not valid" % url)
self.bootstrap_is_ok = success
return monitor_title
......@@ -266,8 +268,8 @@ class Monitoring(object):
for parameter in parameter_list:
if parameter['key']:
pfile.write('%s = %s\n' % (parameter['key'], parameter['value']))
except OSError, e:
print "Error failed to create file %s" % self.parameter_cfg_file
except OSError as e:
print("Error failed to create file %s" % self.parameter_cfg_file)
pass
......@@ -316,8 +318,8 @@ class Monitoring(object):
try:
if os.path.exists(file):
os.unlink(file)
except OSError, e:
print "failed to remove file %s." % file, str(e)
except OSError as e:
print("failed to remove file %s." % file, str(e))
# cleanup result of promises that was removed
promise_list = os.listdir(self.legacy_promise_folder)
......@@ -335,8 +337,8 @@ class Monitoring(object):
if os.path.exists(status_path):
try:
os.unlink(status_path)
except OSError, e:
print "Error: Failed to delete %s" % status_path, str(e)
except OSError as e:
print("Error: Failed to delete %s" % status_path, str(e))
else:
promise_list.pop(position)
......@@ -365,7 +367,7 @@ class Monitoring(object):
if self.bootstrap_is_ok:
with open(self.promise_output_file, 'w') as promise_file:
promise_file.write("")
print "SUCCESS: bootstrap is OK"
print("SUCCESS: bootstrap is OK")
return 0
......
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import re
......@@ -38,11 +40,11 @@ class MonitorConfigWrite(object):
def _fileWrite(self, file_path, content):
try:
with open(file_path, 'w') as wf:
print file_path, content
print(file_path, content)
wf.write(content.strip())
return True
except OSError, e:
print "ERROR while writing changes to %s.\n %s" % (file_path, str(e))
except OSError as e:
print("ERROR while writing changes to %s.\n %s" % (file_path, str(e)))
return False
def _htpasswdWrite(self, htpasswd_bin, parameter_dict, value):
......@@ -55,7 +57,7 @@ class MonitorConfigWrite(object):
)
result = process.communicate()[0]
if process.returncode != 0:
print result
print(result)
return False
with open(parameter_dict['file'], 'w') as pfile:
pfile.write(value)
......@@ -76,31 +78,31 @@ class MonitorConfigWrite(object):
or (cors_domain == "" and os.stat(httpd_cors_file).st_size == 0)):
# Skip if cors file is not empty
return True
except OSError, e:
print "Failed to open file at %s. \n%s" % (old_httpd_cors_file, str(e))
except OSError as e:
print("Failed to open file at %s. \n%s" % (old_httpd_cors_file, str(e)))
try:
with open(self.monitor_https_cors, 'r') as cors_template:
template = jinja2.Template(cors_template.read())
rendered_string = template.render(domain=cors_domain)
with open(httpd_cors_file, 'w') as file:
file.write(rendered_string)
except OSError, e:
print "ERROR while writing CORS changes to %s.\n %s" % (httpd_cors_file, str(e))
except OSError as e:
print("ERROR while writing CORS changes to %s.\n %s" % (httpd_cors_file, str(e)))
return False
# Save current cors domain list
try:
with open(old_httpd_cors_file, 'w') as cors_file:
cors_file.write(cors_domain)
except OSError, e:
print "Failed to open file at %s. \n%s" % (old_httpd_cors_file, str(e))
except OSError as e:
print("Failed to open file at %s. \n%s" % (old_httpd_cors_file, str(e)))
return False
# Restart httpd process
try:
subprocess.call(httpd_gracefull_bin)
except OSError, e:
print "Failed to execute command %s.\n %s" % (httpd_gracefull_bin, str(e))
except OSError as e:
print("Failed to execute command %s.\n %s" % (httpd_gracefull_bin, str(e)))
return False
return True
......@@ -122,7 +124,7 @@ class MonitorConfigWrite(object):
with open(self.config_json_file) as tmpfile:
new_parameter_list = json.loads(tmpfile.read())
except ValueError:
print "Error: Couldn't parse json file %s" % self.config_json_file
print("Error: Couldn't parse json file %s" % self.config_json_file)
with open(parameter_config_file) as tmpfile:
description_dict = json.loads(tmpfile.read())
......@@ -156,8 +158,8 @@ class MonitorConfigWrite(object):
for parameter in new_parameter_list:
if parameter['key']:
pfile.write('%s = %s\n' % (parameter['key'], parameter['value']))
except OSError, e:
print "Error failed to create file %s" % self.output_cfg_file
except OSError as e:
print("Error failed to create file %s" % self.output_cfg_file)
pass
return result_dict
......@@ -190,8 +192,8 @@ def main():
if status and os.path.exists(parameter_tmp_file):
try:
os.unlink(config_file)
except OSError, e:
print "ERROR cannot remove file: %s" % parameter_tmp_file
except OSError as e:
print("ERROR cannot remove file: %s" % parameter_tmp_file)
else:
os.rename(parameter_tmp_file, config_file)
if run_counter == max_runn:
......
......@@ -13,7 +13,7 @@ import glob
import argparse
import traceback
import logging
import ConfigParser
from six.moves import configparser
from slapos.grid.promise import PromiseLauncher, PromiseQueueResult, PromiseError
from slapos.grid.promise.generic import PROMISE_LOG_FOLDER_NAME
from slapos.util import mkdir_p
......@@ -92,7 +92,7 @@ class MonitorPromiseLauncher(object):
def _loadConfigFromFile(self, config_file):
config = ConfigParser.ConfigParser()
config = configparser.ConfigParser()
config.read([config_file])
known_key_list = ['partition-cert', 'partition-key', 'partition-id',
'pid-path', 'computer-id', 'check-anomaly',
......@@ -158,7 +158,7 @@ class MonitorPromiseLauncher(object):
self.logger.info("Checking promises...")
try:
promise_launcher.run()
except PromiseError, e:
except PromiseError as e:
#self.logger.exception(e)
# error was already logged
pass
......
from __future__ import print_function
import socket
import logging
import time
......@@ -11,10 +13,10 @@ import random
import pycurl
import argparse
import json
from StringIO import StringIO
from ping import ping, ping6
from dnsbench import resolve
from http import get_curl, request
from six import StringIO
from .ping import ping, ping6
from .dnsbench import resolve
from .http import get_curl, request
import textwrap
class HelpFormatter(argparse.ArgumentDefaultsHelpFormatter):
......@@ -62,11 +64,11 @@ def download_external_configuration(url):
try:
return json.loads(buffer.getvalue())
except ValueError:
print "Unable to parse external configuration, error:"
print("Unable to parse external configuration, error:")
import traceback
traceback.print_exc(file=sys.stderr)
sys.stderr.flush()
print "Ignoring external configuration"
print("Ignoring external configuration")
finally:
curl.close()
......
import sys
import pycurl
from StringIO import StringIO
from six import StringIO
def get_curl(buffer, url):
curl = pycurl.Curl()
......
......@@ -22,6 +22,7 @@ def ping(host, timeout=10, protocol="4", count=10):
test_title = 'PING6'
proc = subprocess.Popen((ping_bin, '-c', str(count), '-w', str(timeout), host),
universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
......@@ -29,7 +30,7 @@ def ping(host, timeout=10, protocol="4", count=10):
return (test_title, host, 600, 'failed', 100, "Network is unreachable")
try:
packet_loss_line, summary_line = (out.splitlines() or [''])[-2:]
except:
except Exception:
return (test_title, host, 600, 'failed', -1, "Fail to parser ping output")
m = ping_re.match(summary_line)
match = re.search('(\d*)% packet loss', packet_loss_line)
......
......@@ -124,7 +124,7 @@ def main():
run(config)
return_code = 0
except SystemExit, err:
except SystemExit as err:
# Catch exception raise by optparse
return_code = err
......
from __future__ import print_function
import requests
import re
import signal
......@@ -56,11 +58,11 @@ def watchServerStatus(pid_dict, server_status, timeout):
if process.cmdline()[0].endswith("/httpd"):
_pid_dict.setdefault(i, time.time() + timeout)
if _pid_dict[i] < time.time():
print "Sending signal -%s to %s" % (signal.SIGKILL, i)
print("Sending signal -%s to %s" % (signal.SIGKILL, i))
try:
process.kill()
except psutil.NoSuchProcess:
print "Process is not there anymore"
print("Process is not there anymore")
continue
return _pid_dict
......
......@@ -47,7 +47,7 @@ def checkApachedexResult(apachedex_path, apachedex_report_status_file, desired_t
with open(apachedex_report_status_file) as f:
try:
json_content = json.load(f)
except ValueError, e:
except ValueError as e:
json_content = ''
if json_content:
message += "\n" + json_content["message"]
......
......@@ -7,6 +7,8 @@ Uses:
- /proc/meminfo
"""
from __future__ import print_function
import sys
import sqlite3
import argparse
......@@ -14,6 +16,8 @@ import datetime
from slapos.collect.db import Database
from six.moves import zip
def getMemoryInfo(database, time, date):
memory_info = {}
......@@ -21,7 +25,7 @@ def getMemoryInfo(database, time, date):
try:
database.connect()
query_result = database.select("computer", date, "memory_size", limit=1)
result = zip(*query_result)
result = list(zip(*query_result))
if not result or not result[0][0]:
return (None, "couldn't fetch total memory, collectordb is empty?")
memory_info['total'] = int(result[0][0]) # in byte
......@@ -29,7 +33,7 @@ def getMemoryInfo(database, time, date):
# fetch free and used memory
where_query = "time between '%s:00' and '%s:30' " % (time, time)
query_result = database.select("system", date, "memory_free, memory_used", where=where_query)
result = zip(*query_result)
result = list(zip(*query_result))
if not result or not result[0][0]:
return (None, "couldn't fetch free memory")
memory_info['free'] = int(result[0][0]) # in byte
......@@ -95,9 +99,9 @@ def main():
unit=args.unit,
)
if error:
print error
print(error)
return 0
print message
print(message)
return 0 if result else 1
if __name__ == "__main__":
......
from __future__ import print_function
import re
import time
import sys
......@@ -21,7 +23,7 @@ def test(log_file, maximum_delay):
f.seek(0, 2)
block_end_byte = f.tell()
f.seek(-min(block_end_byte, 4096), 1)
f.seek(block_end_byte - min(block_end_byte, 4096), 0)
data = f.read()
for line in reversed(data.splitlines()):
......@@ -76,7 +78,7 @@ def main():
result = test(args.log_file, args.maximum_delay)
print result
print(result)
if result != "OK":
sys.exit(1)
from __future__ import print_function
import argparse
import re
import time
......@@ -7,10 +9,10 @@ from slapos.networkbench.ping import ping, ping6
def test(ipv6, ipv4, count):
result_ipv4 = ping(ipv4, count=count)
print "%s host=%s code=%s, result=%s, packet_lost_ratio=%s msg=%s" % result_ipv4
print("%s host=%s code=%s, result=%s, packet_lost_ratio=%s msg=%s" % result_ipv4)
result_ipv6 = ping6(ipv6, count=count)
print "%s host=%s code=%s, result=%s, packet_lost_ratio=%s msg=%s" % result_ipv6
print("%s host=%s code=%s, result=%s, packet_lost_ratio=%s msg=%s" % result_ipv6)
if result_ipv4[3] == "failed" and result_ipv6[3] != "failed":
# IPv4 is unreacheable
......@@ -34,7 +36,7 @@ def test(ipv6, ipv4, count):
# Increase latency with the value.
latency4 += acceptable_delay + latency4*acceptable_lost
if latency4 < latency6:
print "Fail %s (latency4) > %s (latence6)" % (latency4, latency6)
print("Fail %s (latency4) > %s (latence6)" % (latency4, latency6))
return "FAIL"
# Compare if both has Same working rate
......@@ -51,7 +53,7 @@ def main():
result = test(args.ipv6, args.ipv4, args.count)
print result
print(result)
if result != "OK":
# re6st is not on an optimal state.
sys.exit(1)
......
......@@ -4,6 +4,8 @@
Check if a mariadb result matches the desired threshold or raises an error.
"""
from __future__ import print_function
import json
import os
import re
......@@ -58,7 +60,7 @@ def checkMariadbDigestResult(mariadbdex_path, mariadbdex_report_status_file,
with open(mariadbdex_report_status_file) as f:
try:
json_content = json.load(f)
except ValueError, e:
except ValueError as e:
json_content = ''
if json_content:
message += "\n" + json_content["message"]
......@@ -76,5 +78,5 @@ def main():
args.max_queries_threshold, args.slowest_query_threshold
)
print message
print(message)
sys.exit(status)
......@@ -4,6 +4,8 @@
Check user memory usage according to a given threshold.
"""
from __future__ import print_function
import sys
import os
import argparse
......@@ -87,9 +89,9 @@ def main():
unit=args.unit,
)
if error:
print error
print(error)
return 0
print message
print(message)
return 0 if result else 1
if __name__ == "__main__":
......
......@@ -7,14 +7,16 @@ import sys
import tempfile
import os
import argparse
import ConfigParser
from six.moves import configparser
import re
import pycurl
from mimetools import Message
from cStringIO import StringIO
from HTMLParser import HTMLParser
from email.message import Message
from six.moves import (
cStringIO as StringIO,
html_parser as HTMLParser,
)
begins_by_known_protocol_re = re.compile("^https?://")
get_protocol_re = re.compile("^([a-z]+)://")
......@@ -119,7 +121,7 @@ def checkWebpageHttpCacheHit(url_list, resolve_list=[], cookie_jar_path=None):
def getConfig(config_parser, section, option, default=None, raw=False, vars=None):
try:
return config_parser.get(section, option, raw=raw, vars=vars)
except ConfigParser.NoOptionError:
except configparser.NoOptionError:
return default
def main():
......@@ -132,7 +134,7 @@ def main():
args.url_list = getattr(args, "url-list")
if args.config is not None:
parser = ConfigParser.ConfigParser()
parser = configparser.ConfigParser()
parser.read(args.config)
if args.url_list == []:
args.url_list = getConfig(parser, "public", "url-list", "").split()
......
from __future__ import print_function
import argparse
import re
import time
......@@ -22,10 +24,10 @@ def main():
result = test(args.address, args.ipv4, args.count)
print "%s host=%s code=%s, result=%s, packet_lost_ratio=%s msg=%s" % result
print("%s host=%s code=%s, result=%s, packet_lost_ratio=%s msg=%s" % result)
if result[4] != "0":
# Packet lost occurred
print "FAIL"
print("FAIL")
sys.exit(1)
print "OK"
print("OK")
......@@ -8,6 +8,8 @@ a file modification date is greater than the start date of the
process.
"""
from __future__ import print_function
import sys
import os
import errno
......@@ -29,10 +31,10 @@ def moduleIsModifiedSince(top, since, followlinks=False):
if ext in ignored_extension_set:
continue
if since < os.stat(os.path.join(root, name)).st_mtime:
print "%s was modified since the process started." % \
os.path.join(root, name)
print "Process Time %s < Last modidified file %s" % (time.ctime(since),
time.ctime(os.stat(os.path.join(root, name)).st_mtime))
print("%s was modified since the process started." % \
os.path.join(root, name))
print("Process Time %s < Last modidified file %s" % (time.ctime(since),
time.ctime(os.stat(os.path.join(root, name)).st_mtime)))
return True
return False
......@@ -41,7 +43,7 @@ def isProcessOlderThanDependencySet(pid, python_path_list, kill=False):
start_time = process.create_time()
if any(moduleIsModifiedSince(product_path, start_time) for product_path in python_path_list):
if kill:
print "Terminating process %s with pid %s" % (process.name(), pid)
print("Terminating process %s with pid %s" % (process.name(), pid))
process.terminate()
return True
return False
......
import argparse
import csv
import feedparser
import httplib # To avoid magic numbers
from six.moves import http_client as httplib # To avoid magic numbers
import io
import json
import logging
......
This diff is collapsed.
......@@ -3,7 +3,7 @@
import os
import signal
import time
import xmlrpclib
import six.moves.xmlrpc_client as xmlrpclib
# This mini-library is used to communicate with supervisord process
# It aims to replace the file "process.py"
......
......@@ -2,20 +2,20 @@
# vim: set et sts=2:
# pylint: disable-msg=W0311,C0301,C0103,C0111,W0141,W0142
import ConfigParser
from six.moves import configparser
import datetime
import json
import logging
import md5
import hashlib
import os
import sup_process
from . import sup_process
import re
import shutil
import stat
import thread
from six.moves import _thread
import time
import urllib
import xmlrpclib
import six.moves.xmlrpc_client as xmlrpclib
from xml.dom import minidom
import xml_marshaller
......@@ -92,11 +92,11 @@ def updateUserCredential(config, username, password):
def getRcode(config):
  """Return the recovery code stored in the knowledge0 configuration file.

  Reads the ini file located at ``config['knowledge0_cfg']`` and returns the
  value of the ``recovery-code`` option from its ``[public]`` section, or
  ``None`` when the file is unreadable or the section/option is absent.
  """
  parser = configparser.ConfigParser()
  try:
    parser.read(config['knowledge0_cfg'])
    return parser.get('public', 'recovery-code')
  except (configparser.NoSectionError, configparser.NoOptionError, IOError):
    # Fix: also catch NoOptionError — a [public] section without the
    # 'recovery-code' option previously raised instead of returning None.
    # A missing/unreadable file simply means no recovery code exists yet.
    return None
def getUsernameList(config):
......@@ -193,7 +193,7 @@ def updateProxy(config):
partition_path = os.path.join(config['instance_root'], partition_reference)
if not os.path.exists(partition_path):
os.mkdir(partition_path)
os.chmod(partition_path, 0750)
os.chmod(partition_path, 0o750)
slap_config['partition_list'].append({
'address_list': [
{
......@@ -461,7 +461,7 @@ def removeInstanceRootDirectory(config):
fullPath = os.path.join(root, fname)
if not os.access(fullPath, os.W_OK):
# Some directories may be read-only, preventing to remove files in it
os.chmod(fullPath, 0744)
os.chmod(fullPath, 0o744)
shutil.rmtree(instance_directory)
def removeCurrentInstance(config):
......@@ -775,7 +775,7 @@ def md5sum(file):
return False
try:
fh = open(file, 'rb')
m = md5.md5()
m = hashlib.md5()
while True:
data = fh.read(8192)
if not data:
......@@ -828,7 +828,7 @@ def readParameters(path):
sub_obj[str(subnode.getAttribute('id'))] = subnode.childNodes[0].data # .decode('utf-8').decode('utf-8')
obj[str(elt.tagName)] = sub_obj
return obj
except Exception, e:
except Exception as e:
return str(e)
else:
return "No such file or directory: %s" % path
......@@ -932,7 +932,7 @@ def setupDefaultSR(config):
if not os.path.exists(project) and config['default_sr'] != '':
configNewSR(config, config['default_sr'])
if config['auto_deploy']:
thread.start_new_thread(buildAndRun, (config,))
_thread.start_new_thread(buildAndRun, (config,))
def setMiniShellHistory(config, command):
......
......@@ -24,6 +24,8 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from __future__ import print_function
import os
import argparse
import subprocess
......@@ -108,4 +110,4 @@ def shred(options):
def main():
  """Command-line entry point: parse the shred options, run the secure
  deletion, and echo the tool's combined output to stdout."""
  options = getAgumentParser().parse_args()
  print(shred(options))
from __future__ import print_function
import argparse
import sys
import os
......@@ -12,7 +14,7 @@ def killpidfromfile():
if sig is None:
raise ValueError('Unknown signal name %s' % sys.argv[2])
pid = int(open(file).read())
print 'Killing pid %s with signal %s' % (pid, sys.argv[2])
print('Killing pid %s with signal %s' % (pid, sys.argv[2]))
os.kill(pid, sig)
def sublist(a, b):
......@@ -63,7 +65,7 @@ def kill():
cmdline = p.cmdline()
if cmdline == args.arg if args.full else sublist(cmdline, args.arg):
p.send_signal(s)
print 'killed pid %s with signal %s' % (p.pid, args.signal)
print('killed pid %s with signal %s' % (p.pid, args.signal))
r = 0
except psutil.Error:
pass
......
......@@ -78,7 +78,7 @@ echo "htpasswd $@" > %s/monitor-htpasswd
self.writeContent(self.monitor_https_cors, '{% set allow_domain = "|".join(domain.replace(".", "\.").split()) -%}\n'
'SetEnvIf Origin "^http(s)?://(.+\.)?({{ allow_domain }})$" ORIGIN_DOMAIN=$0\n'
'Header always set Access-Control-Allow-Origin "%{ORIGIN_DOMAIN}e" env=ORIGIN_DOMAIN')
os.chmod(self.httpd_passwd_bin, 0755)
os.chmod(self.httpd_passwd_bin, 0o755)
def tearDown(self):
if os.path.exists(self.base_dir):
......@@ -106,7 +106,7 @@ echo "htpasswd $@" > %s/monitor-htpasswd
for config in config_json:
if config["key"]:
self.assertTrue(config_parameter_json.has_key(config["key"]))
self.assertTrue(config["key"] in config_parameter_json)
parameter = config_parameter_json[config["key"]]
else:
continue
......
......@@ -102,11 +102,11 @@ partition-folder = %(base_dir)s
for index in range(1, amount+1):
promise_file = os.path.join(promise_dir, 'monitor_promise-%s' % index)
self.writeContent(promise_file, promse_content)
os.chmod(promise_file, 0755)
os.chmod(promise_file, 0o755)
for index in range(1, amount+1):
promise_file = os.path.join(plugin_dir, 'monitor_promise-%s.py' % index)
self.writeContent(promise_file, promse_content)
os.chmod(promise_file, 0644)
os.chmod(promise_file, 0o644)
def checkOPML(self, url_list):
opml_title = "<title>%(root_title)s</title>" % self.monitor_config_dict
......
......@@ -44,7 +44,7 @@ class MonitorGlobalTest(unittest.TestCase):
pkg_resources.resource_string(
'slapos.monitor',
'doc/monitor_instance.schema.json')
self.monitor_instance_schema = json.loads(monitor_schema_string)
self.monitor_instance_schema = json.loads(monitor_schema_string.decode('utf-8'))
self.monitor_config_dict = dict(
......@@ -132,7 +132,7 @@ exit %(code)s
""" % result_dict
promise_path = os.path.join(self.etc_dir, 'promise', name)
self.writeContent(promise_path, content)
os.chmod(promise_path, 0755)
os.chmod(promise_path, 0o755)
return promise_path
def getPromiseParser(self):
......
......@@ -69,7 +69,7 @@ exit 0
"""
promise_path = os.path.join(self.old_promise_dir, name)
self.writeContent(promise_path, content)
os.chmod(promise_path, 0755)
os.chmod(promise_path, 0o755)
return promise_path
def writePromiseNOK(self, name):
......@@ -80,7 +80,7 @@ exit 2
"""
promise_path = os.path.join(self.old_promise_dir, name)
self.writeContent(promise_path, content)
os.chmod(promise_path, 0755)
os.chmod(promise_path, 0o755)
return promise_path
def generatePromiseScript(self, name, success=True, failure_count=1, content="",
......@@ -151,7 +151,7 @@ class RunPromise(GenericPromise):
result_file = os.path.join(self.output_dir, 'my_promise.status.json')
os.system('cat %s' % result_file)
self.assertTrue(os.path.exists(result_file))
my_result = json.loads(open(result_file).read().decode("utf-8"))
my_result = json.load(open(result_file))
my_result['result'].pop('date')
expected_result = {
u'title': u'my_promise', u'name': u'my_promise.py',
......@@ -165,7 +165,7 @@ class RunPromise(GenericPromise):
result_file = os.path.join(self.output_dir, 'my_second_promise.status.json')
self.assertTrue(os.path.exists(result_file))
second_result = json.loads(open(result_file).read().decode("utf-8"))
second_result = json.load(open(result_file))
second_result['result'].pop('date')
expected_result = {
......@@ -186,7 +186,7 @@ class RunPromise(GenericPromise):
result_file = os.path.join(self.output_dir, 'my_promise.status.json')
self.assertTrue(os.path.exists(result_file))
my_result = json.loads(open(result_file).read().decode("utf-8"))
my_result = json.load(open(result_file))
my_result['result'].pop('date')
expected_result = {
......@@ -207,7 +207,7 @@ class RunPromise(GenericPromise):
result_file = os.path.join(self.output_dir, 'my_promise.status.json')
self.assertTrue(os.path.exists(result_file))
my_result = json.loads(open(result_file).read().decode("utf-8"))
my_result = json.load(open(result_file))
my_result['result'].pop('date')
expected_result = {
......@@ -226,7 +226,7 @@ class RunPromise(GenericPromise):
promise_runner2 = MonitorPromiseLauncher(parser)
promise_runner2.start()
my_result = json.loads(open(result_file).read().decode("utf-8"))
my_result = json.load(open(result_file))
my_result['result'].pop('date')
expected_result = {
......@@ -287,7 +287,7 @@ class RunPromise(GenericPromise):
result_file = os.path.join(self.output_dir, 'promise_1.status.json')
self.assertTrue(os.path.exists(result_file))
result1 = json.loads(open(result_file).read().decode("utf-8"))
result1 = json.load(open(result_file))
start_date = datetime.strptime(result1['result'].pop('date'), '%Y-%m-%dT%H:%M:%S')
expected_result = {
......@@ -303,7 +303,7 @@ class RunPromise(GenericPromise):
parser = self.getPromiseParser(force=True)
promise_runner = MonitorPromiseLauncher(parser)
promise_runner.start()
result2 = json.loads(open(result_file).read().decode("utf-8"))
result2 = json.load(open(result_file))
start_date2 = datetime.strptime(result2['result'].pop('date'), '%Y-%m-%dT%H:%M:%S')
self.assertEquals(expected_result, result2)
......@@ -319,7 +319,7 @@ class RunPromise(GenericPromise):
result2_file = os.path.join(self.output_dir, 'promise_2.status.json')
self.assertTrue(os.path.exists(result_file))
self.assertTrue(os.path.exists(result2_file))
result1 = json.loads(open(result_file).read().decode("utf-8"))
result1 = json.load(open(result_file))
start_date = datetime.strptime(result1['result'].pop('date'), '%Y-%m-%dT%H:%M:%S')
expected_result = {
......@@ -332,7 +332,7 @@ class RunPromise(GenericPromise):
}
self.assertEquals(expected_result, result1)
result2 = json.loads(open(result2_file).read())
result2 = json.load(open(result_file))
start_date2 = datetime.strptime(result2['result'].pop('date'), '%Y-%m-%dT%H:%M:%S')
expected_result = {
......@@ -353,7 +353,7 @@ class RunPromise(GenericPromise):
result_file = os.path.join(self.output_dir, 'promise_1.status.json')
self.assertTrue(os.path.exists(result_file))
result1 = json.loads(open(result_file).read().decode("utf-8"))
result1 = json.load(open(result_file))
result1['result'].pop('date')
expected_result = {
u'title': u'promise_1', u'name': u'promise_1',
......@@ -368,7 +368,7 @@ class RunPromise(GenericPromise):
# second run
promise_runner = MonitorPromiseLauncher(parser)
promise_runner.start()
result2 = json.loads(open(result_file).read().decode("utf-8"))
result2 = json.load(open(result_file))
result2['result'].pop('date')
self.assertEquals(expected_result, result2)
......@@ -380,7 +380,7 @@ class RunPromise(GenericPromise):
result_file = os.path.join(self.output_dir, 'promise_1.status.json')
self.assertTrue(os.path.exists(result_file))
result1 = json.loads(open(result_file).read().decode("utf-8"))
result1 = json.load(open(result_file))
result1['result'].pop('date')
expected_result = {
u'title': u'promise_1', u'name': u'promise_1',
......@@ -401,7 +401,7 @@ class RunPromise(GenericPromise):
promise_runner = MonitorPromiseLauncher(parser)
promise_runner.start()
result2 = json.loads(open(result_file).read().decode("utf-8"))
result2 = json.load(open(result_file))
result2['result'].pop('date')
self.assertEquals(expected_result, result2)
......@@ -140,7 +140,7 @@ class TestQemuQMPWrapper(unittest.TestCase):
elif message['execute'] == 'query-memory-devices':
memory_list = []
added_mem = self.readChange('dimm') + self.hotplugged_memory_amount
slot_amount = added_mem / self.memory_slot_size
slot_amount = added_mem // self.memory_slot_size
for i in range(slot_amount, 0, -1):
memory_list.append({
u'data': {
......@@ -159,7 +159,7 @@ class TestQemuQMPWrapper(unittest.TestCase):
elif message['execute'] == 'query-memdev':
memory_list = []
added_mem = self.readChange('dimm') + self.hotplugged_memory_amount
slot_amount = added_mem / self.memory_slot_size
slot_amount = added_mem // self.memory_slot_size
for i in range(slot_amount, 0, -1):
memory_list.append({
u'dump': True,
......
......@@ -3,7 +3,7 @@ import os
import string
import random
import supervisor
import thread
from six.moves import _thread
import unittest
......@@ -35,7 +35,7 @@ class TestRunnerBackEnd(unittest.TestCase):
open(supervisord_config_file, 'w').write("""
""")
supervisord = supervisor.supervisord.Supervisord('-c', supervisord_config_file)
thread.start_new_thread()
_thread.start_new_thread()
def test_UserCanLoginAndUpdateCredentials(self):
"""
......
......@@ -58,32 +58,32 @@ class TestSecureDelete(unittest.TestCase):
passes = 2 + 1 # Option -z is used, plus one more pass
result = shred(options)
self.assertFalse(os.path.exists(self.remove_file))
self.assertTrue("pass %s/%s" % (passes, passes) in result)
self.assertTrue("%s: removed" % os.path.basename(self.remove_file) in result)
self.assertTrue(b"pass %d/%d" % (passes, passes) in result)
self.assertTrue(b"%s: removed" % os.path.basename(self.remove_file).encode('utf-8') in result)
def test_secure_remove_file_keep_file(self):
  """Without -u, shred overwrites the target but leaves it on disk."""
  args = ['-n', '2', '-z', '--file', self.remove_file]
  expected_passes = 2 + 1  # Option -z is used, plus one more pass
  output = shred(getAgumentParser().parse_args(args))
  basename = os.path.basename(self.remove_file).encode('utf-8')
  self.assertTrue(os.path.exists(self.remove_file))
  self.assertIn(b"pass %d/%d" % (expected_passes, expected_passes), output)
  self.assertNotIn(b"%s: removed" % basename, output)
def test_secure_remove_file_non_zero(self):
  """With -u but without -z, shred performs only the random passes and
  deletes the target file."""
  args = ['-n', '2', '-u', '--file', self.remove_file]
  expected_passes = 2  # no -z, so no extra zeroing pass
  output = shred(getAgumentParser().parse_args(args))
  basename = os.path.basename(self.remove_file).encode('utf-8')
  self.assertFalse(os.path.exists(self.remove_file))
  self.assertIn(b"pass %d/%d" % (expected_passes, expected_passes), output)
  self.assertIn(b"%s: removed" % basename, output)
def test_secure_remove_file_check_exist(self):
  """With -s, a non-existent entry in the file list is skipped while the
  existing target is still shredded and removed."""
  args = ['-n', '2', '-u', '-s', '--file', 'random.txt', self.remove_file]
  expected_passes = 2  # no -z, so no extra zeroing pass
  output = shred(getAgumentParser().parse_args(args))
  basename = os.path.basename(self.remove_file).encode('utf-8')
  self.assertFalse(os.path.exists(self.remove_file))
  self.assertIn(b"pass %d/%d" % (expected_passes, expected_passes), output)
  self.assertIn(b"%s: removed" % basename, output)
def test_secure_remove_file_check_exist_false(self):
options = getAgumentParser().parse_args(['-n', '2', '-u', '--file', 'random.txt'])
......@@ -99,19 +99,19 @@ class TestSecureDelete(unittest.TestCase):
# shred removed link and target file
self.assertFalse(os.path.exists(self.remove_file))
self.assertFalse(os.path.exists(self.link_name))
self.assertTrue("pass %s/%s" % (passes, passes) in result)
self.assertTrue("%s: removed" % os.path.basename(self.remove_file) in result)
self.assertTrue(b"pass %d/%d" % (passes, passes) in result)
self.assertTrue(b"%s: removed" % os.path.basename(self.remove_file).encode('utf-8') in result)
def test_secure_remove_file_multiple_files(self):
  """Several --file arguments are all shredded and removed in one run."""
  args = ['-n', '2', '-u', '-z', '--file', self.remove_file, self.remove_file2]
  expected_passes = 2 + 1  # Option -z is used, plus one more pass
  output = shred(getAgumentParser().parse_args(args))
  first = os.path.basename(self.remove_file).encode('utf-8')
  second = os.path.basename(self.remove_file2).encode('utf-8')
  self.assertFalse(os.path.exists(self.remove_file))
  self.assertIn(b"pass %d/%d" % (expected_passes, expected_passes), output)
  self.assertIn(b"%s: removed" % first, output)
  self.assertFalse(os.path.exists(self.remove_file2))
  self.assertIn(b"%s: removed" % second, output)
if __name__ == '__main__':
unittest.main()
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment