Commit 37db086e authored by Bryton Lacquement's avatar Bryton Lacquement 🚪

fixup! Add support for Python 3

parent eaad8c02
......@@ -37,10 +37,8 @@ setup(name=name,
'Flask', # needed by servers
'atomize', # needed by pubsub
'feedparser', # needed by pubsub
'apache_libcloud>=0.4.0', # needed by cloudmgr
'lockfile', # used by equeue
'lxml', # needed for xml parsing
'paramiko', # needed by cloudmgr
'psutil', # needed for playing with processes in portable way
'setuptools', # namespaces
'slapos.core', # as it provides library for slap
......@@ -79,13 +77,6 @@ setup(name=name,
'check-feed-as-promise = slapos.checkfeedaspromise:main',
'check-apachedex-result = slapos.promise.check_apachedex_result:main',
'check-slow-queries-digest-result = slapos.promise.check_slow_queries_digest_result:main',
'clouddestroy = slapos.cloudmgr.destroy:main',
'cloudgetprivatekey = slapos.cloudmgr.getprivatekey:main',
'cloudgetpubliciplist = slapos.cloudmgr.getpubliciplist:main',
'cloudlist = slapos.cloudmgr.list:main',
'cloudmgr = slapos.cloudmgr.cloudmgr:main',
'cloudstart = slapos.cloudmgr.start:main',
'cloudstop = slapos.cloudmgr.stop:main',
'equeue = slapos.equeue:main',
'generatefeed = slapos.generatefeed:main',
'htpasswd = slapos.htpasswd:main',
......
......@@ -345,8 +345,8 @@ def main():
# Select an unused computer to run the test.
group = test_mapping.getNextGroup(
ignore_list = [group for _, _, group in \
running_test_dict.itervalues()])
ignore_list = [group for _, _, group in
six.itervalues(running_test_dict)])
# Select a test
test_line = test_result.start(
......@@ -455,7 +455,7 @@ def main():
logger.info('Sleeping %is...', to_sleep)
time.sleep(to_sleep)
if not test_result.isAlive():
for _, tester, computer_id in running_test_dict.itervalues():
for _, tester, computer_id in six.itervalues(running_test_dict):
tester.teardown()
time.sleep(300)
......
# See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from update import update
from destroy import destroy
from start import start
from stop import stop
from list import uuidlist
from getpubliciplist import getpubliciplist
class NodeInterface:
    """Thin object wrapper around the module-level cloud node operations.

    Holds the credentials and node identifiers once, so callers do not
    have to thread key/secret/service through every call.
    """

    def __init__(self, key, secret, service, location, node_uuid=None,
                 ssh_key=None):
        # Credentials and service selection for the libcloud driver.
        self.key = key
        self.secret = secret
        self.service = service
        self.location = location
        # Identity of the managed node; may be filled in later by update().
        self.node_uuid = node_uuid
        self.ssh_key = ssh_key

    def update(self, image, size, security_group):
        """Create or refresh the node; remember its uuid and ssh key."""
        outcome = update(self.key, self.secret, self.service, self.location,
                         self.node_uuid, self.ssh_key, image, size,
                         security_group)
        self.node_uuid = outcome['node_uuid']
        self.ssh_key = outcome['ssh_key']
        return outcome

    def stop(self):
        """Halt the node over SSH."""
        stop(self.key, self.secret, self.service, self.node_uuid, self.ssh_key)

    def start(self):
        """Start the node if it is not already running."""
        start(self.key, self.secret, self.service, self.node_uuid)

    def destroy(self):
        """Destroy the node at the provider."""
        destroy(self.key, self.secret, self.service, self.node_uuid)

    def getPublicIpList(self):
        """Return the node's public IP address(es)."""
        return getpubliciplist(self.key, self.secret, self.service,
                               self.node_uuid)

    def getNodeUuidList(self):
        """Return the uuids of every node known to the service."""
        return uuidlist(self.key, self.secret, self.service)
import sys
import time
from cloudinterface import NodeInterface
class CloudManager:
    """Drive a set of cloud nodes to their requested state.

    The configuration file contains a Python literal (it is evaluated!)
    describing 'key', 'secret' and a 'node_list' of node dicts, each with
    'service', 'location', 'node_uuid', 'ssh_key' and 'requested_state'.

    py3 fixes: print statements -> print() function, file() -> open(),
    bare except -> except Exception, type() comparisons -> isinstance().
    """

    def __init__(self, configuration_file):
        self.configuration_file = configuration_file
        self.initialiseConfiguration()

    def initialiseConfiguration(self):
        """Load and sanitise the configuration dictionary.

        Any unreadable or malformed configuration degrades to safe
        defaults ('' / []) instead of raising.
        """
        self.configuration_dict = {}
        try:
            # py3 fix: the py2-only file() builtin replaced with open().
            with open(self.configuration_file) as f:
                external_eval = f.read()
        except IOError:
            pass
        else:
            # SECURITY: eval() of the configuration file executes arbitrary
            # code; kept for backward compatibility with existing configs.
            try:
                self.configuration_dict = eval(external_eval)
            except Exception:
                pass
        if not isinstance(self.configuration_dict, dict):
            self.configuration_dict = {}
        # Credentials must be plain strings; anything else is discarded.
        for k in 'key', 'secret':
            if not isinstance(self.configuration_dict.get(k), str):
                self.configuration_dict[k] = ''
        if not isinstance(self.configuration_dict.get('node_list'), list):
            self.configuration_dict['node_list'] = []

    def run(self):
        """Walk the node list once, applying each requested state."""
        print('Run begin...')
        started = stopped = destroyed = unsupported = 0
        self.key = self.configuration_dict['key']
        self.secret = self.configuration_dict['secret']
        for node in self.configuration_dict['node_list']:
            node_object = NodeInterface(self.key, self.secret,
                                        node['service'], node['location'],
                                        node['node_uuid'], node['ssh_key'])
            if node['requested_state'] == 'started':
                print('Starting %r' % node['node_uuid'])
                if node_object.start():
                    started += 1
                else:
                    unsupported += 1
            elif node['requested_state'] == 'stopped':
                print('Stopping %r' % node['node_uuid'])
                if node_object.stop():
                    stopped += 1
                else:
                    unsupported += 1
            elif node['requested_state'] == 'destroyed':
                print('Destroying %r' % node['node_uuid'])
                if node_object.destroy():
                    destroyed += 1
                else:
                    unsupported += 1
            else:
                print('Unsupported state %r for node %r' % (
                    node['requested_state'], node['node_uuid']))
                unsupported += 1
        print('Run finished', dict(started=started, stopped=stopped,
                                   destroyed=destroyed,
                                   unsupported=unsupported))
def main():
    # Entry point: poll forever, re-reading the configuration file and
    # applying the requested node states on every pass.
    # sys.argv[1] is the path to the configuration file.
    while True:
        CloudManager(sys.argv[1]).run()
        time.sleep(5)
from slapos.cloudmgr.lib import getDriverInstance, getNode
from libcloud.types import NodeState
import sys
def destroy(key, secret, service, node_uuid):
    """Destroy the node identified by node_uuid.

    Returns the driver's destroy result when the node exists and is in an
    active (running/rebooting/pending) state, False otherwise.
    """
    driver = getDriverInstance(service, key, secret)
    node = getNode(driver, node_uuid)
    active_states = (NodeState.RUNNING, NodeState.REBOOTING,
                     NodeState.PENDING)
    if node is not None and node.state in active_states:
        return node.destroy()
    return False
def main():
    """Command-line entry point; argv: key, secret, service, node_uuid.

    py3 fix: the py2-only print statement replaced with print().
    """
    print(destroy(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4]))
import sys
def getprivatekey(key, secret, service, node_uuid):
    """Fetch the private ssh key of a node.

    Not implemented yet: always raises NotImplementedError.  The dead code
    that followed the raise (it referenced an undefined ``ssh_key``
    variable) has been removed.
    """
    raise NotImplementedError('Cedric implement')
def main():
    # Command-line entry point; argv: key, secret, service, node_uuid.
    # Note: the result is computed but never printed (getprivatekey is not
    # implemented anyway and always raises NotImplementedError).
    getprivatekey(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4])
from slapos.cloudmgr.lib import getDriverInstance, getNode
from libcloud.types import NodeState
import time
import sys
def getpubliciplist(key, secret, service, node_uuid):
    """Return the node's public IP list, polling until one appears.

    Polls the provider up to 100 times, 5 seconds apart, and gives up
    early when the node leaves the RUNNING/REBOOTING/PENDING states.

    Returns a dict with a single 'public_ip_list' key (empty list when no
    IP could be obtained).
    """
    active_states = (NodeState.RUNNING, NodeState.REBOOTING,
                     NodeState.PENDING)
    found_ip_list = []
    for _ in range(100):
        # A fresh driver each turn so the node state is re-fetched from
        # the provider on every poll.
        node = getNode(getDriverInstance(service, key, secret), node_uuid)
        if node.state not in active_states:
            break
        if node.public_ip[0]:
            found_ip_list = node.public_ip
            break
        time.sleep(5)
    return dict(
        public_ip_list=found_ip_list
    )
def main():
    """Command-line entry point; argv: key, secret, service, node_uuid.

    py3 fix: the py2-only print statement replaced with print().
    """
    print(getpubliciplist(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4]))
from libcloud.types import Provider
from libcloud.providers import get_driver
import paramiko
import StringIO
# Mapping from a provider name (as used in configuration files and the
# command line) to the corresponding libcloud driver class.  Resolved once
# at import time via libcloud's get_driver().
driver_list = {
    'DUMMY': get_driver(Provider.DUMMY),
    'EC2_US_EAST': get_driver(Provider.EC2_US_EAST),
    'EC2_US_WEST': get_driver(Provider.EC2_US_WEST),
    'EC2_EU_WEST': get_driver(Provider.EC2_EU_WEST),
    'RACKSPACE': get_driver(Provider.RACKSPACE),
    'SLICEHOST': get_driver(Provider.SLICEHOST),
    'GOGRID': get_driver(Provider.GOGRID),
    'VPSNET': get_driver(Provider.VPSNET),
    'LINODE': get_driver(Provider.LINODE),
    'VCLOUD': get_driver(Provider.VCLOUD),
    'RIMUHOSTING': get_driver(Provider.RIMUHOSTING),
    'ECP': get_driver(Provider.ECP),
    'IBM': get_driver(Provider.IBM),
    'OPENNEBULA': get_driver(Provider.OPENNEBULA),
    'DREAMHOST': get_driver(Provider.DREAMHOST),
}
def getDriverInstance(driverName, key, secret=None, secure=True, host=None,
                      port=None):
    """Instantiate the libcloud driver registered under driverName.

    Raises ValueError with a clear message for an unknown provider name.
    (Previously an unknown name crashed with an opaque
    "'NoneType' object is not callable" TypeError.)
    """
    driver_class = driver_list.get(driverName)
    if driver_class is None:
        raise ValueError('Unknown cloud provider %r' % (driverName,))
    return driver_class(key, secret, secure, host, port)
def getSSHConnection(driver, hostname, private_key):
    """Open a root SSH connection to hostname using the given private key.

    Returns a connected paramiko.SSHClient.  ``driver`` is unused but kept
    for interface compatibility with callers.
    """
    # py3 fix: local io.StringIO import replaces the py2-only top-level
    # "import StringIO" module.
    from io import StringIO
    client = paramiko.SSHClient()
    client.load_system_host_keys()
    # Unknown hosts only trigger a warning: automation-friendly, but
    # weaker than strict host-key checking.
    client.set_missing_host_key_policy(paramiko.WarningPolicy())
    # TODO: if the key is not RSA, fall back to paramiko.DSSKey.
    pkey = paramiko.RSAKey.from_private_key(StringIO(private_key))
    client.connect(hostname=hostname, username='root', pkey=pkey,
                   look_for_keys=False)
    return client
def getNode(driver, node_uuid):
    """Find the single node whose uuid contains node_uuid.

    Returns None when nothing matches; raises IndexError when the uuid
    fragment is ambiguous (matches several nodes).
    """
    matching = [candidate for candidate in driver.list_nodes()
                if node_uuid in candidate.uuid]
    if not matching:
        return None
    if len(matching) > 1:
        raise IndexError('Several nodes with the uuid %r exist.' % node_uuid)
    return matching[0]
# Original work by Cedric de Saint Martin, adapted by Lukasz Nowak
import sys
from slapos.cloudmgr.lib import getDriverInstance
def nodelist(key, secret, service):
    """Return every node known to the given cloud service."""
    return getDriverInstance(service, key, secret).list_nodes()
def uuidlist(key, secret, service):
    """Return the uuid of every node known to the given cloud service."""
    return [node.uuid for node in nodelist(key, secret, service)]
def main():
    """Command-line entry point; argv: key, secret, service.

    py3 fix: the py2-only print statements replaced with print().
    """
    node_list = nodelist(*sys.argv[1:])
    print('Available nodes (%s):' % len(node_list))
    for node in node_list:
        print(node)
from slapos.cloudmgr.lib import getDriverInstance, getNode
import sys
def start(key, secret, service, node_uuid):
    """Start the node, rebooting it when needed.

    Returns True when the node is already in state 0/1, the driver's
    reboot result for state 3, and False when the node does not exist or
    is in any other state.

    Fix: an unreachable trailing "return True" after the if/else chain
    was removed (every branch already returns).
    """
    driver = getDriverInstance(service, key, secret)
    node = getNode(driver, node_uuid)
    if node is None:
        return False
    # NOTE(review): numeric states look like old libcloud NodeState
    # values (presumably 0=RUNNING, 1=REBOOTING; 3 may be PENDING or
    # TERMINATED depending on the libcloud version) — TODO confirm.
    if node.state in [0, 1]:
        return True
    elif node.state == 3:
        return node.reboot()
    else:
        return False
def main():
    """Command-line entry point; argv: key, secret, service, node_uuid.

    py3 fix: the py2-only print statement replaced with print().
    """
    print(start(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4]))
from getprivatekey import getprivatekey
from libcloud.types import NodeState
from slapos.cloudmgr.lib import getDriverInstance, getSSHConnection, getNode
from paramiko import SSHException
import sys
def stop(key, secret, service, node_uuid, ssh_key=None):
    """Stop (halt) a node over SSH.

    Returns False when the node is not in an active state, True on a
    successful halt.  Raises on SSH errors or when the remote 'halt'
    command writes to stderr.

    py3 fix: "except SSHException, e" is py2-only syntax; also use a bare
    raise so the original traceback is preserved ("raise e" loses it).
    """
    driver = getDriverInstance(service, key, secret)
    node = getNode(driver, node_uuid)
    # Only active nodes can be halted.
    if node.state not in [NodeState.RUNNING, NodeState.REBOOTING,
                          NodeState.PENDING]:
        return False
    if ssh_key is None:
        ssh_key = getprivatekey(key, secret, service, node_uuid)
    public_ip = node.public_ip[0]
    if not public_ip:
        raise Exception('Node is started but has no IP.')
    try:
        ssh = getSSHConnection(driver, public_ip, ssh_key)
        print('Stopping instance...')
        stdin, stdout, stderr = ssh.exec_command('halt')
    except SSHException:
        print('unable to stop')
        raise
    error_log = stderr.read()
    if error_log:
        raise Exception('''Unable to stop : error log is :
%r
output is : %r''' % (error_log, stdout.read()))
    return True
def main():
    """Command-line entry point; argv: key, secret, service, node_uuid.

    py3 fix: the py2-only print statement replaced with print().
    """
    print(stop(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4]))
from slapos.cloudmgr.lib import getDriverInstance
import sys
import uuid
from libcloud.types import NodeState
def update(key, secret, service, location, node_uuid=None, ssh_key=None,
           image=None, size=None, security_group=None):
    """Update an existing node, or (re)create it when missing or dead.

    When node_uuid refers to a live node, returns its uuid and ssh key
    unchanged; otherwise installs a fresh node (image, size and location
    are then required).
    """
    if node_uuid is not None:
        driver = getDriverInstance(service, key, secret)
        matches = [n for n in driver.list_nodes() if n.uuid == node_uuid]
        if not matches:
            # Unknown uuid: force re-creation below.
            node_uuid = None
        elif len(matches) == 1:
            live_states = (NodeState.RUNNING, NodeState.REBOOTING,
                           NodeState.PENDING)
            if matches[0].state not in live_states:
                # The node was destroyed/terminated: re-create it.
                node_uuid = None
    if node_uuid:
        return dict(
            ssh_key=ssh_key,
            node_uuid=node_uuid
        )
    if not image or not size or not location:
        raise Exception("Node can not be created because of lacking informations")
        # XXX-Cedric : what exception?
    return install(key, secret, service, image, size, location, security_group)
def install(key, secret, service, image_id, size_id, location_id,
            security_group):
    """Create a new node and return {'node_uuid': ..., 'ssh_key': ...}.

    image_id/size_id/location_id are matched as substrings against the
    ids reported by the driver.  ssh_key is only produced on EC2 (where a
    fresh keypair is created); it is None for other services.

    Fixes: two py2-only "except Exception, e" clauses (SyntaxError on
    py3); ssh_key was unbound for non-EC2 services, so the final return
    crashed with NameError; the duplicate-permission check was inverted,
    silently swallowing real errors.
    """
    driver = getDriverInstance(service, key, secret)
    # Fix: initialise so the return below is safe for non-EC2 services.
    ssh_key = None
    # Arguments used to create the node.
    argument_list = dict()
    argument_list['image'] = [image for image in driver.list_images() if
                              image_id in image.id][0]
    argument_list['size'] = [size for size in driver.list_sizes() if
                             size_id in size.id][0]
    argument_list['location'] = [location for location in
                                 driver.list_locations() if
                                 location_id in location.id][0]
    # EC2 needs an explicit keypair: create a (hopefully) unique one.
    if 'EC2' in service:
        try:
            unique_keyname = str(uuid.uuid1())
            keypair = driver.ex_create_keypair(unique_keyname)
            ssh_key = keypair['keyMaterial']
            argument_list['ex_keyname'] = unique_keyname
        except Exception:
            # XXX-Cedric: on InvalidKeyPair.Duplicate our "unique" key was
            # not so unique; no recovery is implemented, so re-raise in
            # every case (bare raise keeps the original traceback).
            raise
    # ssh-key deployment and postflight script support is not implemented.
    # If ec2, creates group, adds rules to it.
    if 'EC2' in service:
        try:
            driver.ex_create_security_group(security_group, security_group)
        except Exception as e:
            # Fix: previously the condition was inverted and every error
            # was swallowed.  An already-existing group is okay; anything
            # else is a real failure and is re-raised.
            if e.args[0].find("InvalidPermission.Duplicate") == -1:
                raise
        driver.ex_authorize_security_group_permissive(security_group)
        argument_list['ex_securitygroup'] = security_group
    # Installs node
    node = driver.create_node(**argument_list)
    return {'node_uuid': node.uuid, 'ssh_key': ssh_key}
def main():
    """Command-line entry point; argv: key, secret, service[, node_uuid].

    Fix: the optional 4th CLI argument is the node uuid, but it was being
    passed positionally into update()'s ``location`` parameter (so the
    update-existing-node path could never trigger).  It is now passed as
    the ``node_uuid`` keyword.  No location/image/size can be supplied on
    this CLI, so only the update-existing-node path is usable —
    TODO confirm the intended CLI semantics.
    """
    try:
        node_uuid = sys.argv[4]
    except IndexError:
        node_uuid = None
    update(sys.argv[1], sys.argv[2], sys.argv[3], None, node_uuid=node_uuid)
# -*- coding: utf-8 -*-
from __future__ import print_function
from six.moves import configparser
import argparse
from six.moves import dbm_gnu as gdbm
......@@ -27,7 +28,7 @@ def main():
if args.pid is not None:
pid_filename = args.pid[0]
if os.path.exists(pid_filename):
print >> sys.stderr, "Already running"
print("Already running", file=sys.stderr)
return 127
with open(pid_filename, 'w') as pid_file:
pid_file.write(str(os.getpid()))
......
......@@ -42,6 +42,14 @@ from six.moves import socketserver
import io
import threading
try:
logging_levels = logging._nameToLevel
logging_choices = logging_levels.keys()
except AttributeError:
logging_levels = logging._levelNames
logging_choices = [i for i in logging_levels
if isinstance(i, str)]
# Copied from erp5.util:erp5/util/testnode/ProcessManager.py
def subprocess_capture(p, log, log_prefix, get_output=True):
def readerthread(input, output, buffer):
......@@ -81,9 +89,9 @@ class EqueueServer(socketserver.ThreadingUnixStreamServer):
def __init__(self, *args, **kw):
self.options = kw.pop('equeue_options')
super(EqueueServer, self).__init__(self,
RequestHandlerClass=None,
*args, **kw)
socketserver.ThreadingUnixStreamServer.__init__(self,
RequestHandlerClass=None,
*args, **kw)
# Equeue Specific elements
self.setLogger(self.options.logfile[0], self.options.loglevel[0])
self.setDB(self.options.database[0])
......@@ -99,7 +107,7 @@ class EqueueServer(socketserver.ThreadingUnixStreamServer):
self.logger = logging.getLogger("EQueue")
handler = logging.handlers.WatchedFileHandler(logfile, mode='a')
# Natively support logrotate
level = logging._levelNames.get(loglevel, logging.INFO)
level = logging_levels.get(loglevel, logging.INFO)
self.logger.setLevel(level)
formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
handler.setFormatter(formatter)
......@@ -131,7 +139,7 @@ class EqueueServer(socketserver.ThreadingUnixStreamServer):
try:
sys.stdout.flush()
p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stderr=subprocess.PIPE, universal_newlines=True)
subprocess_capture(p, self.logger.info, '', True)
if p.returncode == 0:
self.logger.info("%s finished successfully.", cmd_readable)
......@@ -172,7 +180,7 @@ class EqueueServer(socketserver.ThreadingUnixStreamServer):
try:
request.send(command)
except:
except Exception:
self.logger.warning("Couldn't respond to %r", request.fileno())
self.close_request(request)
self._runCommandIfNeeded(command, timestamp)
......@@ -193,8 +201,7 @@ def main():
"calls are stored")
parser.add_argument('--loglevel', nargs=1,
default='INFO',
choices=[i for i in logging._levelNames
if isinstance(i, str)],
choices=logging_choices,
required=False)
parser.add_argument('-l', '--logfile', nargs=1, required=True,
help="Path to the log file.")
......
......@@ -89,19 +89,19 @@ def setup(arguments):
time.sleep(3)
continue
time.sleep(timeout)
if arguments.has_key('delete_target'):
if 'delete_target' in arguments:
delete(arguments)
if arguments.has_key('source'):
if 'source' in arguments:
rename(arguments)
if arguments.has_key('script'):
if 'script' in arguments:
run_script(arguments)
if arguments.has_key('sql_script'):
if 'sql_script' in arguments:
run_sql_script(arguments)
if arguments.has_key('chmod_target'):
if 'chmod_target' in arguments:
chmod(arguments)
return
......
......@@ -33,7 +33,7 @@ def buildStatistic(history_folder):
last_date = None
if stats_dict["data"]:
if stats_dict["data"][-1].has_key("start-date"):
if "start-date" in stats_dict["data"][-1]:
last_date = stats_dict["data"][-1]["start-date"]
else:
last_date = stats_dict["data"][-1]["date"]
......
......@@ -293,17 +293,17 @@ def main():
if process_result and process_result['total_process'] != 0.0:
appendToJsonFile(process_file, ", ".join(
[str(process_result[key]) for key in label_list if process_result.has_key(key)])
[str(process_result[key]) for key in label_list if key in process_result])
)
resource_status_dict.update(process_result)
if memory_result and memory_result['memory_rss'] != 0.0:
appendToJsonFile(mem_file, ", ".join(
[str(memory_result[key]) for key in label_list if memory_result.has_key(key)])
[str(memory_result[key]) for key in label_list if key in memory_result])
)
resource_status_dict.update(memory_result)
if io_result and io_result['io_rw_counter'] != 0.0:
appendToJsonFile(io_file, ", ".join(
[str(io_result[key]) for key in label_list if io_result.has_key(key)])
[str(io_result[key]) for key in label_list if key in io_result])
)
resource_status_dict.update(io_result)
......
......@@ -14,7 +14,7 @@ def get_curl(buffer, url):
result = "OK"
try:
curl.perform()
except:
except Exception:
import traceback
traceback.print_exc(file=sys.stderr)
sys.stderr.flush()
......
......@@ -16,7 +16,7 @@ from tzlocal import get_localzone
class RunPromise(GenericPromise):
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
# check backup ran OK every 5 minutes
self.setPeriodicity(minute=5)
......
......@@ -8,7 +8,7 @@ import os
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
# set periodicity to run the promise twice per day
self.custom_frequency = int(self.getConfig('frequency', 720))
self.setPeriodicity(self.custom_frequency)
......
......@@ -12,7 +12,7 @@ r = re.compile(br"^([0-9]+\-[0-9]+\-[0-9]+ [0-9]+\:[0-9]+\:[0-9]+)(\,[0-9]+) - (
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
self.setPeriodicity(minute=10)
def sense(self):
......
......@@ -6,7 +6,7 @@ from slapos.grid.promise.generic import GenericPromise
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
# SR can set custom periodicity
self.setPeriodicity(float(self.getConfig('frequency', 2)))
......@@ -22,7 +22,8 @@ class RunPromise(GenericPromise):
url = self.getConfig('url').strip()
try:
result = open(filename).read()
with open(filename) as f:
result = f.read()
except Exception as e:
self.logger.error(
"ERROR %r during opening and reading file %r" % (e, filename))
......
......@@ -16,7 +16,7 @@ from slapos.collect.db import Database
class RunPromise(GenericPromise):
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
# check disk space at least every 3 minutes
self.setPeriodicity(minute=3)
......@@ -131,7 +131,7 @@ class RunPromise(GenericPromise):
min_free_size = int(min_size_str)*1024*1024
else:
with open(disk_threshold_file, 'w') as f:
f.write(str(min_free_size/(1024*1024)))
f.write(str(min_free_size//(1024*1024)))
if check_date:
# testing mode
......
......@@ -8,7 +8,7 @@ from slapos.networkbench.ping import ping, ping6
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
# set periodicity to run the promise twice per day
self.custom_frequency = int(self.getConfig('frequency', 720))
self.setPeriodicity(self.custom_frequency)
......
......@@ -8,7 +8,7 @@ from datetime import datetime
class RunPromise(GenericPromise):
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
self.setPeriodicity(minute=1)
def sense(self):
......
from zope import interface as zope_interface
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
import socket
import sys
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
# check port is listening at least every 2 minutes
self.setPeriodicity(minute=2)
......@@ -30,9 +28,9 @@ class RunPromise(GenericPromise):
# self.logger.info("port connection OK")
try:
socket.create_connection(addr).close()
except (socket.herror, socket.gaierror), e:
except (socket.herror, socket.gaierror) as e:
self.logger.error("ERROR hostname/port ({}) is not correct: {}".format(addr, e))
except (socket.error, socket.timeout), e:
except (socket.error, socket.timeout) as e:
self.logger.error("ERROR while connecting to {}: {}".format(addr, e))
else:
self.logger.info("port connection OK ({})".format(addr))
......
......@@ -8,7 +8,7 @@ from slapos.networkbench.ping import ping, ping6
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
# set periodicity to run the promise twice per day
self.custom_frequency = int(self.getConfig('frequency', 720))
self.setPeriodicity(self.custom_frequency)
......
......@@ -9,22 +9,23 @@ import os
class RunPromise(GenericPromise):
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
# test load every 3 minutes
self.setPeriodicity(minute=3)
def checkCPULoad(self, tolerance=2.2):
# tolerance=1.5 => accept CPU load up to 1.5 =150%
uptime_result = subprocess.check_output(['uptime'], universal_newlines=True)
uptime_result = subprocess.check_output('uptime', universal_newlines=True)
line = uptime_result.strip().split(' ')
load, load5, long_load = line[-3:]
long_load = float(long_load.replace(',', '.'))
core_count = int(subprocess.check_output(['nproc']).strip())
core_count = int(subprocess.check_output('nproc').strip())
max_load = core_count * tolerance
if long_load > max_load:
# display top statistics
top_result = subprocess.check_output(['top', '-n', '1', '-b'])
top_result = subprocess.check_output(('top', '-n', '1', '-b'),
universal_newlines=True)
message = "CPU load is high: %s %s %s\n\n" % (load, load5, long_load)
i = 0
result_list = top_result.split('\n')
......
......@@ -8,7 +8,7 @@ import requests
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
# SR can set custom periodicity
self.setPeriodicity(float(self.getConfig('frequency', 2)))
......
......@@ -10,7 +10,7 @@ from .util import tail_file
class RunPromise(GenericPromise):
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
self.setPeriodicity(minute=2)
def sense(self):
......
from zope import interface as zope_interface
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
......@@ -7,12 +7,10 @@ try:
except ImportError:
import subprocess
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
self.setPeriodicity(minute=int(self.getConfig('frequency', 5)))
def sense(self):
......@@ -31,16 +29,17 @@ class RunPromise(GenericPromise):
self.logger.error("Wrapper %r not supported." % (wrapper,))
return
process = subprocess.Popen(
try:
subprocess.subprocess.check_output(
args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
result = process.communicate()[0].strip()
if process.returncode == 0:
self.logger.info("OK")
)
except subprocess.CalledProcessError as e:
self.logger.error(message, result if str is bytes else
result.decode('utf-8', 'replace'))
else:
self.logger.error(message % (result,))
self.logger.info("OK")
def anomaly(self):
"""
......
from zope import interface as zope_interface
from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise
try:
......@@ -6,12 +6,10 @@ try:
except ImportError:
import subprocess
@implementer(interface.IPromise)
class RunPromise(GenericPromise):
zope_interface.implements(interface.IPromise)
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
# check configuration every 5 minutes (only for anomaly)
self.setPeriodicity(minute=int(self.getConfig('frequency', 5)))
......@@ -23,16 +21,14 @@ class RunPromise(GenericPromise):
validate_script = self.getConfig('verification-script')
if not validate_script:
raise ValueError("'verification-script' was not set in promise parameters.")
process = subprocess.Popen(
[validate_script],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
message = process.communicate()[0]
if process.returncode == 0:
self.logger.info("OK")
try:
subprocess.check_output(validate_script, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
message = e.output
self.logger.error(message if str is bytes else
message.decode('utf-8', 'replace'))
else:
self.logger.error("%s" % message)
self.logger.info("OK")
def anomaly(self):
return self._anomaly(result_count=1, failure_amount=1)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import csv
import datetime
import json
import httplib
from six.moves import http_client as httplib
import os
import shutil
import socket
......@@ -13,8 +14,8 @@ import subprocess
import sys
import time
import traceback
import urllib2
import urlparse
from six.moves.urllib.request import urlopen
from six.moves.urllib.parse import urlparse
import uuid
def createStatusItem(item_directory, instance_name, callback, date, link, status):
......@@ -79,8 +80,7 @@ def main():
saveStatus('STARTED')
if args.max_run <= 0:
print "--max-run argument takes a strictely positive number as argument"
sys.exit(-1)
parser.error("--max-run argument takes a strictly positive number as argument")
while args.max_run > 0:
try:
......@@ -108,7 +108,7 @@ def main():
content.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
))
print content
print(content)
# Write feed safely
error_message = ""
......@@ -128,7 +128,7 @@ def main():
'slapos:%s' % uuid.uuid4(),
])
os.rename(temp_file, args.logfile[0])
except Exception, e:
except Exception as e:
error_message = "ERROR ON WRITING FEED - %s" % str(e)
finally:
try:
......@@ -143,14 +143,14 @@ def main():
if exit_code != 0:
sys.exit(exit_code)
print 'Fetching %s feed...' % args.feed_url[0]
print('Fetching %s feed...' % args.feed_url[0])
feed = urllib2.urlopen(args.feed_url[0])
feed = urlopen(args.feed_url[0])
body = feed.read()
some_notification_failed = False
for notif_url in args.notification_url:
notification_url = urlparse.urlparse(notif_url)
notification_url = urlparse(notif_url)
notification_port = notification_url.port
if notification_port is None:
......
......@@ -26,6 +26,7 @@
#
##############################################################################
from __future__ import print_function
import argparse
import json
import importlib
......@@ -120,7 +121,7 @@ def runTestSuite(test_suite_title, test_suite_arguments, logger):
parsed_arguments = dict(key.split('=') for key in test_suite_arguments)
test_suite_module = importFrom(test_suite_title)
success = test_suite_module.runTestSuite(**parsed_arguments)
except:
except Exception:
logger.exception('Impossible to run resiliency test:')
success = False
return success
......@@ -228,7 +229,7 @@ def runResiliencyTest():
"""
error_message_set, exit_status = ScalabilityLauncher().run()
for error_message in error_message_set:
print >>sys.stderr, 'ERROR: %s' % error_message
print('ERROR: %s' % error_message, file=sys.stderr)
sys.exit(exit_status)
......@@ -284,7 +285,7 @@ def runUnitTest():
test_count=1,
error_count=error_count,
duration=test_duration)
except:
except Exception:
raise
finally:
os.remove(fname)
......@@ -33,8 +33,9 @@ import random
import ssl
import string
import time
import urllib
import urllib2
from six.moves.urllib.parse import quote
from six.moves.urllib.request import HTTPBasicAuthHandler, HTTPSHandler, \
build_opener
class NotHttpOkException(Exception):
pass
......@@ -50,7 +51,7 @@ class ERP5TestSuite(SlaprunnerTestSuite):
Set inside of slaprunner the instance parameter to use to deploy erp5 instance.
"""
p = '<?xml version="1.0" encoding="utf-8"?> <instance> <parameter id="_">{"zodb-zeo": {"backup-periodicity": "*:1/4"}, "mariadb": {"backup-periodicity": "*:1/4"}}</parameter> </instance>'
parameter = urllib2.quote(p)
parameter = quote(p)
self._connectToSlaprunner(
resource='saveParameterXml',
data='software_type=default&parameter=%s' % parameter)
......@@ -109,7 +110,7 @@ class ERP5TestSuite(SlaprunnerTestSuite):
resource='/saveFileContent',
data='file=runner_workdir%%2Finstance%%2F%s%%2Fetc%%2Fhaproxy.cfg&content=%s' % (
haproxy_slappart,
urllib.quote(file_content),
quote(file_content),
)
)
......@@ -133,12 +134,12 @@ class ERP5TestSuite(SlaprunnerTestSuite):
def _connectToERP5(self, url, data=None, password=None):
if password is None:
password = self._getERP5Password()
auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler = HTTPBasicAuthHandler()
auth_handler.add_password(realm='Zope', uri=url, user='zope', passwd=password)
ssl_context = ssl._create_unverified_context()
opener_director = urllib2.build_opener(
opener_director = build_opener(
auth_handler,
urllib2.HTTPSHandler(context=ssl_context)
HTTPSHandler(context=ssl_context)
)
self.logger.info('Calling ERP5 url %s' % url)
......@@ -213,7 +214,7 @@ class ERP5TestSuite(SlaprunnerTestSuite):
try:
if "erp5" == self._getCreatedERP5SiteId():
break
except:
except Exception:
self.logger.info("Fail to connect to erp5.... wait a bit longer")
pass
......
......@@ -167,7 +167,7 @@ class GitlabTestSuite(SlaprunnerTestSuite):
while loop < 3:
try:
self._connectToGitlab(url=self.backend_url)
except Exception, e:
except Exception as e:
if loop == 2:
raise
self.logger.warning(str(e))
......
......@@ -32,7 +32,7 @@ import logging
import random
import string
import time
import urllib
from six.moves.urllib.request import urlopen
logger = logging.getLogger('KVMResiliencyTest')
......@@ -45,7 +45,7 @@ def fetchKey(ip):
new_key = None
for i in range(0, 10):
try:
new_key = urllib.urlopen('http://%s:10080/get' % ip).read().strip()
new_key = urlopen('http://%s:10080/get' % ip).read().strip()
break
except IOError:
logger.error('Server in new KVM does not answer.')
......@@ -148,7 +148,7 @@ class KVMTestSuite(ResiliencyTestSuite):
for i in range(0, 60):
failure = False
try:
connection = urllib.urlopen('http://%s:10080/set?key=%s' % (self.ip, self.key))
connection = urlopen('http://%s:10080/set?key=%s' % (self.ip, self.key))
if connection.getcode() is 200:
break
else:
......
......@@ -34,7 +34,7 @@ import os
import subprocess
import sys
import time
import urllib2
from six.moves.urllib.request import urlopen
UNIT_TEST_ERP5TESTNODE = 'UnitTest'
......@@ -85,13 +85,13 @@ class ResiliencyTestSuite(object):
takeover_url = root_partition_parameter_dict['takeover-%s-%s-url' % (namebase, target_clone)]
takeover_password = root_partition_parameter_dict['takeover-%s-%s-password' % (namebase, target_clone)]
# Connect to takeover web interface
takeover_page_content = urllib2.urlopen(takeover_url).read()
takeover_page_content = urlopen(takeover_url).read()
# Wait for importer script to be not running
while 'Importer script(s) of backup in progress: True' in takeover_page_content:
time.sleep(10)
takeover_page_content = urllib2.urlopen(takeover_url).read()
takeover_page_content = urlopen(takeover_url).read()
# Do takeover
takeover_result = urllib2.urlopen('%s?password=%s' % (takeover_url, takeover_password)).read()
takeover_result = urlopen('%s?password=%s' % (takeover_url, takeover_password)).read()
if 'Error' in takeover_result:
raise Exception('Error while doing takeover: %s' % takeover_result)
......@@ -214,7 +214,8 @@ class ResiliencyTestSuite(object):
if 'monitor' in promise:
continue
try:
process = subprocess.check_output(os.path.join(promise_directory, promise))
subprocess.check_output(os.path.join(promise_directory, promise),
stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
self.logger.error('ERROR : promise "%s" failed with output :\n%s', promise, e.output)
return False
......
......@@ -29,15 +29,16 @@
from .resiliencytestsuite import ResiliencyTestSuite
import base64
import cookielib
from six.moves import http_cookiejar as cookielib
import json
from lxml import etree
import random
import ssl
import string
import time
import urllib2
import urllib
from six.moves.urllib.request import HTTPCookieProcessor, HTTPSHandler, \
build_opener
from six.moves.urllib.error import HTTPError
class NotHttpOkException(Exception):
pass
......@@ -52,9 +53,9 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
cookie_jar = cookielib.CookieJar()
ssl_context = ssl._create_unverified_context()
self._opener_director = urllib2.build_opener(
urllib2.HTTPCookieProcessor(cookie_jar),
urllib2.HTTPSHandler(context=ssl_context)
self._opener_director = build_opener(
HTTPCookieProcessor(cookie_jar),
HTTPSHandler(context=ssl_context)
)
ResiliencyTestSuite.__init__(self, *args, **kwargs)
......@@ -95,7 +96,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
if result.getcode() is not 200:
raise NotHttpOkException(result.getcode())
return result.read()
except urllib2.HTTPError:
except HTTPError:
self.logger.error('Error when contacting slaprunner at URL: {}'.format(url))
raise
......@@ -164,7 +165,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
"""
try:
return self._connectToSlaprunner(resource='isSRReady')
except (NotHttpOkException, urllib2.HTTPError) as error:
except (NotHttpOkException, HTTPError) as error:
# The nginx frontend might timeout before software release is finished.
self.logger.warning('Problem occured when contacting the server: %s' % error)
return -1
......@@ -187,7 +188,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
self.logger.info('Building the Software Release...')
try:
self._connectToSlaprunner(resource='runSoftwareProfile')
except (NotHttpOkException, urllib2.HTTPError):
except (NotHttpOkException, HTTPError):
# The nginx frontend might timeout before software release is finished.
pass
......@@ -197,7 +198,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
self.logger.info('Deploying instance...')
try:
self._connectToSlaprunner(resource='runInstanceProfile')
except (NotHttpOkException, urllib2.HTTPError):
except (NotHttpOkException, HTTPError):
# The nginx frontend might timeout before someftware release is finished.
pass
while True:
......@@ -219,7 +220,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
if data['code'] == 0:
self.logger.warning(data['result'])
except (NotHttpOkException, urllib2.HTTPError):
except (NotHttpOkException, HTTPError):
# cloning can be very long.
# XXX: quite dirty way to check.
while self._connectToSlaprunner('getProjectStatus', data='project=workspace/slapos').find('On branch master') == -1:
......
from __future__ import print_function
from __future__ import division, print_function
import argparse
import itertools
......@@ -11,6 +11,7 @@ import time
from datetime import datetime
from .runner_utils import *
from six.moves import map
os.environ['LC_ALL'] = 'C'
os.umask(0o77)
......@@ -102,7 +103,7 @@ def getBackupFilesModifiedDuringExportList(config, export_start_date):
export_time = time.time() - export_start_date
# find all files that were modified during export
modified_files = subprocess.check_output((
'find', 'instance', '-cmin', str(export_time / 60.), '-type', 'f', '-path', '*/srv/backup/*'
'find', 'instance', '-cmin', str(export_time / 60), '-type', 'f', '-path', '*/srv/backup/*'
))
if not modified_files:
return ()
......@@ -119,7 +120,7 @@ def getBackupFilesModifiedDuringExportList(config, export_start_date):
'--relative',
'--no-implied-dirs'
]
rsync_arg_list += map("--exclude={}".format, getExcludePathList(os.getcwd()))
rsync_arg_list += list(map("--exclude={}".format, getExcludePathList(os.getcwd())))
rsync_arg_list += '.', 'unexisting_dir_or_file_just_to_have_the_output'
process = subprocess.Popen(rsync_arg_list, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = process.communicate(modified_files)[0]
......
......@@ -7,7 +7,7 @@ import md5
import os
import re
import shutil
import urllib
from six.moves.urllib.parse import unquote
import zipfile
import fnmatch
......@@ -22,7 +22,7 @@ class FileBrowser(object):
self.config = config
def _realdir(self, dir):
realdir = realpath(self.config, urllib.unquote(dir))
realdir = realpath(self.config, unquote(dir))
if not realdir:
raise NameError('Could not load directory %s: Permission denied' % dir)
return realdir
......@@ -45,7 +45,7 @@ class FileBrowser(object):
"""List elements of directory 'dir' taken"""
html = 'var gsdirs = [], gsfiles = [];'
dir = urllib.unquote(dir)
dir = unquote(dir)
# XXX-Marco 'dir' and 'all' should not shadow builtin names
realdir = realpath(self.config, dir)
if not realdir:
......@@ -74,7 +74,7 @@ class FileBrowser(object):
return html
def fancylistDirs(self, dir, key, listfiles, all=False):
dir = urllib.unquote(dir)
dir = unquote(dir)
realdir = realpath(self.config, dir)
if not realdir:
raise NameError('Could not load directory %s: Permission denied' % dir)
......@@ -106,7 +106,7 @@ class FileBrowser(object):
realdir = self._realdir(dir)
folder = os.path.join(realdir, filename)
if not os.path.exists(folder):
os.mkdir(folder, 0744)
os.mkdir(folder, 0o744)
return "{result: '1'}"
else:
return "{result: '0'}"
......@@ -125,7 +125,7 @@ class FileBrowser(object):
"""Delete a list of files or directories"""
# XXX-Marco do not shadow 'dir'
realdir = self._realdir(dir)
lfiles = urllib.unquote(files).split(',,,')
lfiles = unquote(files).split(',,,')
try:
# XXX-Marco do not shadow 'file'
for item in lfiles:
......@@ -147,7 +147,7 @@ class FileBrowser(object):
def copyItem(self, dir, files, del_source=False):
"""Copy a list of files or directory to dir"""
realdir = self._realdir(dir)
lfiles = urllib.unquote(files).split(',,,')
lfiles = unquote(files).split(',,,')
try:
# XXX-Marco do not shadow 'file'
for file in lfiles:
......@@ -174,7 +174,7 @@ class FileBrowser(object):
def rename(self, dir, filename, newfilename):
"""Rename file or directory to dir/filename"""
realdir = self._realdir(dir)
realfile = realpath(self.config, urllib.unquote(filename))
realfile = realpath(self.config, unquote(filename))
if not realfile:
raise NameError('Could not load directory %s: Permission denied' % filename)
tofile = os.path.join(realdir, newfilename)
......@@ -208,7 +208,7 @@ class FileBrowser(object):
def downloadFile(self, dir, filename):
"""Download file dir/filename"""
realdir = self._realdir(dir)
file = os.path.join(realdir, urllib.unquote(filename))
file = os.path.join(realdir, unquote(filename))
if not os.path.exists(file):
raise NameError('NOT ALLOWED OPERATION : File or directory does not exist %s'
% os.path.join(dir, filename))
......@@ -255,8 +255,8 @@ class FileBrowser(object):
def readFile(self, dir, filename, truncate=False):
"""Read file dir/filename and return content"""
realfile = realpath(self.config, os.path.join(urllib.unquote(dir),
urllib.unquote(filename)))
realfile = realpath(self.config, os.path.join(unquote(dir),
unquote(filename)))
if not realfile:
raise NameError('Could not load directory %s: Permission denied' % dir)
if not isText(realfile):
......
......@@ -2,13 +2,12 @@
# vim: set et sts=2:
# pylint: disable-msg=W0311,C0301,C0103,C0111,R0904,R0903
import ConfigParser
from six.moves import configparser
import datetime
import flask
import logging
import logging.handlers
import os
import urlparse
from slapos.htpasswd import HtpasswdFile
from slapos.runner.process import setHandler
import sys
......@@ -36,7 +35,7 @@ class Config:
self.configuration_file_path = os.path.abspath(os.getenv('RUNNER_CONFIG'))
# Load configuration file
configuration_parser = ConfigParser.SafeConfigParser()
configuration_parser = configparser.SafeConfigParser()
configuration_parser.read(self.configuration_file_path)
for section in ("slaprunner", "slapos", "slapproxy", "slapformat",
......@@ -144,7 +143,7 @@ def serve(config):
result = cloneRepo(repo_url, repository_path)
if branch_name:
switchBranch(repository_path, branch_name)
except GitCommandError, e:
except GitCommandError as e:
app.logger.warning('Error while cloning default repository: %s' % str(e))
traceback.print_exc()
# Start slapproxy here when runner is starting
......@@ -152,10 +151,10 @@ def serve(config):
startProxy(app.config)
app.logger.info('Running slapgrid...')
if app.config['auto_deploy_instance'] in TRUE_VALUES:
import thread
from six.moves import _thread
# XXX-Nicolas: Hack to be sure that supervisord has started
# before any communication with it, so that gunicorn doesn't exit
thread.start_new_thread(waitForRun, (app.config,))
_thread.start_new_thread(waitForRun, (app.config,))
config.logger.info('Done.')
app.wsgi_app = ProxyFix(app.wsgi_app)
......@@ -166,7 +165,7 @@ def waitForRun(config):
def getUpdatedParameter(self, var):
configuration_parser = ConfigParser.SafeConfigParser()
configuration_parser = configparser.SafeConfigParser()
configuration_file_path = os.path.abspath(os.getenv('RUNNER_CONFIG'))
configuration_parser.read(configuration_file_path)
......
......@@ -70,7 +70,8 @@ def getSession(config):
"""
user_path = os.path.join(config['etc_dir'], '.htpasswd')
if os.path.exists(user_path):
return open(user_path).read().split(';')
with open(user_path) as f:
return f.read().split(';')
def checkUserCredential(config, username, password):
htpasswdfile = os.path.join(config['etc_dir'], '.htpasswd')
......@@ -125,8 +126,8 @@ def getCurrentSoftwareReleaseProfile(config):
Returns used Software Release profile as a string.
"""
try:
software_folder = open(
os.path.join(config['etc_dir'], ".project")).read().rstrip()
with open(os.path.join(config['etc_dir'], ".project")) as f:
software_folder = f.read().rstrip()
return realpath(
config, os.path.join(software_folder, config['software_profile']))
# XXXX No Comments
......@@ -141,9 +142,11 @@ def requestInstance(config, software_type=None):
software_type_path = os.path.join(config['etc_dir'], ".software_type.xml")
if software_type:
# Write it to conf file for later use
open(software_type_path, 'w').write(software_type)
with open(software_type_path, 'w') as f:
f.write(software_type)
elif os.path.exists(software_type_path):
software_type = open(software_type_path).read().rstrip()
with open(software_type_path) as f:
software_type = f.read().rstrip()
else:
software_type = 'default'
......@@ -261,7 +264,8 @@ def slapgridResultToFile(config, step, returncode, datetime):
filename = step + "_info.json"
file = os.path.join(config['runner_workdir'], filename)
result = {'last_build':datetime, 'success':returncode}
open(file, "w").write(json.dumps(result))
with open(file, "w") as f:
f.write(json.dumps(result))
def getSlapgridResult(config, step):
......@@ -359,7 +363,8 @@ def config_SR_folder(config):
for path in os.listdir(config['software_link']):
cfg_path = os.path.join(config['software_link'], path, config_name)
if os.path.exists(cfg_path):
cfg = open(cfg_path).read().split("#")
with open(cfg_path) as f:
cfg = f.read().split("#")
if len(cfg) != 2:
continue # there is a broken config file
software_link_list.append(cfg[1])
......@@ -382,7 +387,8 @@ def loadSoftwareRList(config):
for path in os.listdir(config['software_link']):
cfg_path = os.path.join(config['software_link'], path, config_name)
if os.path.exists(cfg_path):
cfg = open(cfg_path).read().split("#")
with open(cfg_path) as f:
cfg = f.read().split("#")
if len(cfg) != 2:
continue # there is a broken config file
sr_list.append(dict(md5=cfg[1], path=cfg[0], title=path))
......@@ -409,7 +415,8 @@ def getProfilePath(projectDir, profile):
"""
if not os.path.exists(os.path.join(projectDir, ".project")):
return False
projectFolder = open(os.path.join(projectDir, ".project")).read()
with open(os.path.join(projectDir, ".project")) as f:
projectFolder = f.read()
return os.path.join(projectFolder, profile)
......@@ -438,7 +445,7 @@ def svcStopAll(config):
try:
return Popen([config['slapos'], 'node', 'supervisorctl', '--cfg', config['configuration_file_path'],
'stop', 'all']).communicate()[0]
except:
except Exception:
pass
def svcStartAll(config):
......@@ -446,7 +453,7 @@ def svcStartAll(config):
try:
return Popen([config['slapos'], 'node', 'supervisorctl', '--cfg', config['configuration_file_path'],
'start', 'all']).communicate()[0]
except:
except Exception:
pass
def removeInstanceRootDirectory(config):
......@@ -567,7 +574,8 @@ def configNewSR(config, projectpath):
sup_process.stopProcess(config, 'slapgrid-sr')
logger.warning("User opened a new SR. Removing all instances...")
removeCurrentInstance(config)
open(os.path.join(config['etc_dir'], ".project"), 'w').write(projectpath)
with open(os.path.join(config['etc_dir'], ".project"), 'w') as f:
f.write(projectpath)
return True
else:
return False
......@@ -593,12 +601,15 @@ def newSoftware(folder, config, session):
softwareContent = ""
try:
softwareContent = urlopen(software).read()
except:
except Exception:
#Software.cfg and instance.cfg content will be empty
pass
open(os.path.join(folderPath, config['software_profile']), 'w').write(softwareContent)
open(os.path.join(folderPath, config['instance_profile']), 'w').write("")
open(os.path.join(basedir, ".project"), 'w').write(folder + "/")
with open(os.path.join(folderPath, config['software_profile']), 'w') as f:
f.write(softwareContent)
with open(os.path.join(folderPath, config['instance_profile']), 'w') as f:
f.write("")
with open(os.path.join(basedir, ".project"), 'w') as f:
f.write(folder + "/")
#Clean sapproxy Database
stopProxy(config)
removeProxyDb(config)
......@@ -633,7 +644,8 @@ def getProjectTitle(config):
if instance_name:
instance_name = '%s - ' % instance_name
if os.path.exists(conf):
project = open(conf, "r").read().split("/")
with open(conf, "r") as f:
project = f.read().split("/")
software = project[-2]
return '%s%s (%s)' % (instance_name, software, '/'.join(project[:-2]))
return "%sNo Profile" % instance_name
......@@ -643,7 +655,8 @@ def getSoftwareReleaseName(config):
"""Get the name of the current Software Release"""
sr_profile = os.path.join(config['etc_dir'], ".project")
if os.path.exists(sr_profile):
project = open(sr_profile, "r").read().split("/")
with open(sr_profile, "r") as f:
project = f.read().split("/")
software = project[-2]
return software.replace(' ', '_')
return None
......@@ -731,7 +744,7 @@ def readFileFrom(f, lastPosition, limit=20000):
# XXX-Marco do now shadow 'bytes'
bytes = f.tell()
block = -1
data = ""
data = b""
length = bytes
truncated = False # True if a part of log data has been truncated
if (lastPosition <= 0 and length > limit) or (length - lastPosition > limit):
......@@ -753,7 +766,6 @@ def readFileFrom(f, lastPosition, limit=20000):
data = f.read(BUFSIZ - margin) + data
bytes -= BUFSIZ
block -= 1
f.close()
return {
'content': data,
'position': length,
......@@ -761,16 +773,14 @@ def readFileFrom(f, lastPosition, limit=20000):
}
text_range = str2bytes(''.join(map(chr, [7, 8, 9, 10, 12, 13, 27]
+ list(range(0x20, 0x100)))))
def isText(file):
"""Return True if the mimetype of file is Text"""
if not os.path.exists(file):
return False
text_range = str2bytes(''.join(map(chr, [7, 8, 9, 10, 12, 13, 27]
+ list(range(0x20, 0x100)))))
is_binary_string = lambda bytes: bool(bytes.translate(None, text_range))
try:
return not is_binary_string(open(file).read(1024))
except:
with open(file, 'rb') as f:
return not f.read(1024).translate(None, text_range)
except Exception:
return False
......@@ -780,15 +790,15 @@ def md5sum(file):
if os.path.isdir(file):
return False
try:
fh = open(file, 'rb')
m = hashlib.md5()
while True:
data = fh.read(8192)
if not data:
break
m.update(data)
return m.hexdigest()
except:
with open(file, 'rb') as fh:
m = hashlib.md5()
while True:
data = fh.read(8192)
if not data:
break
m.update(data)
return m.hexdigest()
except Exception:
return False
......@@ -949,10 +959,12 @@ def setMiniShellHistory(config, command):
command = command + "\n"
history_file = config['minishell_history_file']
if os.path.exists(history_file):
history = open(history_file, 'r').readlines()
with open(history_file, 'r') as f:
history = f.readlines()
if len(history) >= history_max_size:
del history[0]
else:
history = []
history.append(command)
open(history_file, 'w+').write(''.join(history))
with open(history_file, 'w+') as f:
f.write(''.join(history))
......@@ -15,6 +15,7 @@ from flask import (Flask, request, redirect, url_for, render_template,
g, flash, jsonify, session, abort, send_file)
import slapos
from slapos.util import bytes2str
from slapos.runner.utils import (checkSoftwareFolder, configNewSR, checkUserCredential,
createNewUser, getBuildAndRunParams,
getProfilePath, getSlapgridResult,
......@@ -253,13 +254,13 @@ def getFileLog():
raise IOError
if not isText(file_path):
content = "Can not open binary file, please select a text file!"
if 'truncate' in request.form:
content = tail(open(file_path), int(request.form['truncate']))
else:
with open(file_path) as f:
with open(file_path) as f:
if 'truncate' in request.form:
content = tail(f, int(request.form['truncate']))
else:
content = f.read()
return jsonify(code=1, result=html_escape(content))
except:
except Exception:
return jsonify(code=0, result="Warning: Log file doesn't exist yet or empty log!!")
......@@ -505,8 +506,8 @@ def slapgridResult():
if request.form['log'] in ['software', 'instance']:
log_file = request.form['log'] + "_log"
if os.path.exists(app.config[log_file]):
log_result = readFileFrom(open(app.config[log_file]),
int(request.form['position']))
with open(app.config[log_file], 'rb') as f:
log_result = bytes2str(readFileFrom(f, int(request.form['position'])))
build_result = getSlapgridResult(app.config, 'software')
run_result = getSlapgridResult(app.config, 'instance')
software_info = {'state':software_state,
......@@ -717,7 +718,7 @@ def fileBrowser():
filename)
try:
return send_file(result, attachment_filename=filename, as_attachment=True)
except:
except Exception:
abort(404)
elif opt == 9:
result = file_request.readFile(dir, filename, False)
......
......@@ -4,6 +4,7 @@ import argparse
import sys
import os
import signal
from six.moves import map
def killpidfromfile():
"""deprecated: use below kill() instead"""
......@@ -50,7 +51,7 @@ def kill():
parser.error('Unknown signal name %s' % args.signal)
pid = args.pidfile and [int(open(p).read()) for p in args.pidfile]
exe = args.exe and map(os.path.realpath, args.exe)
exe = args.exe and list(map(os.path.realpath, args.exe))
import psutil
r = 1
......
......@@ -93,7 +93,7 @@ from slapos.grid.promise import GenericPromise
class RunPromise(GenericPromise):
def __init__(self, config):
GenericPromise.__init__(self, config)
super(RunPromise, self).__init__(config)
self.setPeriodicity(minute=%(periodicity)s)
def sense(self):
......
from __future__ import print_function
import ZODB.FileStorage
import ZODB.serialize
import argparse
......@@ -16,24 +17,24 @@ def run():
point = now - (3600 * 24 * args.days)
print 'Now is %s' % time.asctime(time.localtime(now))
print 'Will pack until %s' % time.asctime(time.localtime(point))
print('Now is %s' % time.asctime(time.localtime(now)))
print('Will pack until %s' % time.asctime(time.localtime(point)))
failures = 0
for f in args.files:
b = time.time()
print 'Trying to pack %r' % f
print('Trying to pack %r' % f)
try:
pack(point, f)
except Exception:
print 'Failed to pack %r:' % f
print('Failed to pack %r:' % f)
traceback.print_exc()
failures += 1
print 'Finished %s in %.3fs' % (f, time.time() - b)
print('Finished %s in %.3fs' % (f, time.time() - b))
if failures:
print 'Failed files: %s' % failures
print('Failed files: %s' % failures)
return failures
else:
print 'All files sucessfully packed.'
print('All files sucessfully packed.')
return 0
def pack(point, f):
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment