Commit 5d438bdc authored by Bryton Lacquement's avatar Bryton Lacquement 🚪

wip

parents 78928a11 76b353fa
0.91 (2019-03-28)
=================
* runner: fixed the exporter script
0.90 (2019-03-20)
=================
* fixed wrong upload
0.89 (2019-03-20)
=================
* promise.plugin: Support ATS with traffic_ctl
* resiliencytest: remove some slapparts hardcoded in test code
get ERP5 connection parameter from slappart0
* runner: API-fy inspectInstance
* promise.plugin: fix test ipv6_is_faster failing when ping outside is not possible
0.88 (2019-03-06)
=================
* runner: add logging when we remove all instances inside a webrunner.
* runner: cleanup: do not use list as a variable
0.87 (2019-02-28)
=================
* promise check_port_listening: port parameter should be int or str, unicode is not accepted
* monitor.runpromise: exit with non zero code if a promise has failed
0.86 (2019-02-06)
=================
......
......@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
import glob
import os
version = '0.86'
version = '0.91'
name = 'slapos.toolbox'
long_description = open("README.rst").read() + "\n"
......
......@@ -70,6 +70,9 @@ class AutoSTemp(object):
from .tester import SoftwareReleaseTester
class TestMap(object):
# tell pytest to skip this class (even if name starts with Test)
__test__ = False
def __init__(self, test_dict):
self.ran_test_set = set()
self.test_map_dict = collections.OrderedDict()
......
This diff is collapsed.
......@@ -156,15 +156,15 @@ class MonitorPromiseLauncher(object):
)
self.logger.info("Checking promises...")
exit_code = 0
try:
promise_launcher.run()
except PromiseError as e:
#self.logger.exception(e)
# error was already logged
pass
exit_code = 1
os.remove(self.config.pid_path)
self.logger.info("Finished promises.")
return exit_code
def main():
arg_parser = getArgumentParser()
......
......@@ -19,7 +19,8 @@ class RunPromise(GenericPromise):
Simply test if we can connect to specified host:port.
"""
hostname = self.getConfig('hostname')
port = self.getConfig('port')
# the type of port should be int or str; unicode is not accepted.
port = int(self.getConfig('port'))
addr = (hostname , port)
# in case of any error, we call "logger.error"
......
......@@ -17,10 +17,20 @@ class RunPromise(GenericPromise):
"""
Check trafficserver cache availability
"""
traffic_line = self.getConfig('wrapper-path')
wrapper = self.getConfig('wrapper-path')
if 'traffic_line' in wrapper:
args = [wrapper, '-r', 'proxy.node.cache.percent_free']
message = "Cache not available, availability: %s"
elif 'traffic_ctl' in wrapper:
args = [wrapper, 'metric', 'get', 'proxy.process.cache.percent_full']
message = "Cache not available, occupation: %s"
else:
self.logger.error("Wrapper %r not supported." % (wrapper,))
return
process = subprocess.Popen(
[traffic_line, '-r', 'proxy.node.cache.percent_free'],
args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
......@@ -28,7 +38,7 @@ class RunPromise(GenericPromise):
if process.returncode == 0:
self.logger.info("OK")
else:
self.logger.error("Cache not available, availability: %s" % result)
self.logger.error(message % (result,))
def anomaly(self):
"""
......
......@@ -62,9 +62,9 @@ class ERP5TestSuite(SlaprunnerTestSuite):
but connection parameter of what is inside of webrunner.
"""
data = self._connectToSlaprunner(
resource='getConnectionParameter/slappart7'
resource='getConnectionParameter/slappart0'
)
url = json.loads(json.loads(data)['_'])['default-v6']
url = json.loads(json.loads(data)['_'])['family-default-v6']
self.logger.info('Retrieved erp5 url is:\n%s' % url)
return url
......@@ -76,6 +76,12 @@ class ERP5TestSuite(SlaprunnerTestSuite):
self.logger.info('Retrieved erp5 password is:\n%s' % password)
return password
def _getSlaprunnerServiceInformationList(self):
result = self._connectToSlaprunner(
resource='/inspectInstance',
)
return json.loads(result)
def _editHAProxyconfiguration(self):
"""
XXX pure hack.
......@@ -85,20 +91,31 @@ class ERP5TestSuite(SlaprunnerTestSuite):
"""
self.logger.info('Editing HAProxy configuration...')
service_information_list = self._getSlaprunnerServiceInformationList()
# We expect only one service haproxy
haproxy_service, = [
x['service_name'] for x in service_information_list
if 'haproxy' in x['service_name']
]
haproxy_slappart = haproxy_service.split(':', 1)[0]
result = self._connectToSlaprunner(
resource='/getFileContent',
data='file=runner_workdir%2Finstance%2Fslappart7%2Fetc%2Fhaproxy.cfg'
data='file=runner_workdir%2Finstance%2F{slappart}%2Fetc%2Fhaproxy.cfg'.format(slappart=haproxy_slappart)
)
file_content = json.loads(result)['result']
file_content = file_content.replace('var/run/haproxy.sock', 'ha.sock')
self._connectToSlaprunner(
resource='/saveFileContent',
data='file=runner_workdir%%2Finstance%%2Fslappart7%%2Fetc%%2Fhaproxy.cfg&content=%s' % urllib.quote(file_content)
data='file=runner_workdir%%2Finstance%%2F%s%%2Fetc%%2Fhaproxy.cfg&content=%s' % (
haproxy_slappart,
urllib.quote(file_content),
)
)
# Restart HAProxy
self._connectToSlaprunner(
resource='/startStopProccess/name/slappart7:*/cmd/RESTART'
resource='/startStopProccess/name/%s:*/cmd/RESTART' % haproxy_slappart
)
......
......@@ -102,7 +102,12 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
def _login(self):
self.logger.debug('Logging in...')
b64string = base64.encodestring('%s:%s' % (self.slaprunner_user, self.slaprunner_password))[:-1]
self._opener_director.addheaders = [('Authorization', 'Basic %s'%b64string)]
self._opener_director.addheaders = [
('Authorization', 'Basic %s' % b64string),
# By default we will prefer to receive JSON to simplify
# processing of the response
("Accept", "application/json"),
]
def _retrieveInstanceLogFile(self):
"""
......
......@@ -28,7 +28,7 @@ def parseArgumentList():
return parser.parse_args()
def rsync(rsync_binary, source, destination, extra_args=None, dry=False):
def rsync(rsync_binary, source, destination, exclude_list=None, extra_args=None, dry=False):
arg_list = [
rsync_binary,
'-rlptgov',
......@@ -38,10 +38,12 @@ def rsync(rsync_binary, source, destination, extra_args=None, dry=False):
'--delete',
'--delete-excluded'
]
if isinstance(exclude_list, list):
arg_list.extend(["--exclude={}".format(x) for x in sorted(exclude_list)])
if isinstance(extra_args, list):
arg_list.extend(extra_args)
if isinstance(source, list):
arg_list.extend(source)
arg_list.extend(sorted(source))
else:
arg_list.append(source)
arg_list.append(destination)
......@@ -91,19 +93,46 @@ def synchroniseRunnerWorkingDirectory(config, backup_path):
if file_list:
rsync(
config.rsync_binary, file_list, backup_path,
["--exclude={}".format(x) for x in exclude_list],
exclude_list=exclude_list,
dry=config.dry
)
def backupFilesWereModifiedDuringExport(export_start_date):
def getBackupFilesModifiedDuringExportList(config, export_start_date):
export_time = time.time() - export_start_date
return bool(
subprocess.check_output((
'find', '-cmin', str(export_time / 60.), '-type', 'f', '-path', '*/srv/backup/*'
# find all files that were modified during export
modified_files = subprocess.check_output((
'find', 'instance', '-cmin', str(export_time / 60.), '-type', 'f', '-path', '*/srv/backup/*'
))
)
if not modified_files:
return ()
# filter those modified files through rsync --exclude getExcludePathList.
# Indeed, some modified files may be listed in getExcludePathList and in this
# case, we won't copy them to PBS so it's not really important if they are
# modified.
rsync_arg_list = [
config.rsync_binary,
'-n',
'--out-format=%n',
'--files-from=-',
'--relative',
'--no-implied-dirs'
]
rsync_arg_list += map("--exclude={}".format, getExcludePathList(os.getcwd()))
rsync_arg_list += '.', 'unexisting_dir_or_file_just_to_have_the_output'
process = subprocess.Popen(rsync_arg_list, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = process.communicate(modified_files)[0]
retcode = process.poll()
if retcode:
raise CalledProcessError(retcode, rsync_arg_list[0], output=output)
important_modified_file_list = output.splitlines()
not_important_modified_file_set = set(modified_files.splitlines()).difference(important_modified_file_list)
if not_important_modified_file_set:
print("WARNING: The following files in srv/backup were modified since the exporter started (srv/backup should contain almost static files):", *sorted(not_important_modified_file_set), sep='\n')
return important_modified_file_list
def runExport():
export_start_date = int(time.time())
......@@ -148,9 +177,11 @@ def runExport():
time.sleep(10)
# Check that export didn't happen during backup of instances
with CwdContextManager(backup_runner_path):
if backupFilesWereModifiedDuringExport(export_start_date):
print("ERROR: Some backups are not consistent, exporter should be re-run."
" Let's sleep %s minutes, to let the backup end..." % args.backup_wait_time)
with CwdContextManager(runner_working_path):
modified_file_list = getBackupFilesModifiedDuringExportList(args, export_start_date)
if len(modified_file_list):
print("ERROR: The following files in srv/backup were modified since the exporter started. Since they must be backup, exporter should be re-run."
" Let's sleep %s minutes, to let the backup end.\n%s" % (
args.backup_wait_time, '\n'.join(modified_file_list)))
time.sleep(args.backup_wait_time * 60)
sys.exit(1)
......@@ -127,9 +127,12 @@ def writeSignatureFile(slappart_signature_method_dict, runner_working_path, sign
)
break
# construct list of file path and remove broken symlink
filepath_list = filter(os.path.isfile, [os.path.join(dirpath, filename) for filename in filename_list])
if signature_process:
(output, error_output) = signature_process.communicate(
str2bytes('\0'.join([os.path.join(dirpath, filename) for filename in filename_list]))
str2bytes('\0'.join(filepath_list))
)
if signature_process.returncode != 0:
......@@ -146,10 +149,7 @@ def writeSignatureFile(slappart_signature_method_dict, runner_working_path, sign
signature_list.extend(bytes2str(output).strip('\n').split('\n'))
else:
signature_list.extend(
getSha256Sum([
os.path.join(dirpath, filename)
for filename in filename_list
])
getSha256Sum(filepath_list)
)
# Write the signatures in file
......
......@@ -354,15 +354,14 @@ def config_SR_folder(config):
md5sum = md5digest(profile)
link_to_folder(name, md5sum)
# check other links
# XXX-Marco do not shadow 'list'
list = []
software_link_list = []
for path in os.listdir(config['software_link']):
cfg_path = os.path.join(config['software_link'], path, config_name)
if os.path.exists(cfg_path):
cfg = open(cfg_path).read().split("#")
if len(cfg) != 2:
continue # there is a broken config file
list.append(cfg[1])
software_link_list.append(cfg[1])
if os.path.exists(config['software_root']):
folder_list = os.listdir(config['software_root'])
else:
......@@ -370,14 +369,14 @@ def config_SR_folder(config):
if not folder_list:
return
for folder in folder_list:
if folder in list:
if folder in software_link_list:
continue # this folder is already registered
else:
link_to_folder(folder, folder)
def loadSoftwareRList(config):
"""Return list (of dict) of Software Release from symbolik SR folder"""
list = []
sr_list = []
config_name = 'slaprunner.config'
for path in os.listdir(config['software_link']):
cfg_path = os.path.join(config['software_link'], path, config_name)
......@@ -385,8 +384,8 @@ def loadSoftwareRList(config):
cfg = open(cfg_path).read().split("#")
if len(cfg) != 2:
continue # there is a broken config file
list.append(dict(md5=cfg[1], path=cfg[0], title=path))
return list
sr_list.append(dict(md5=cfg[1], path=cfg[0], title=path))
return sr_list
def isInstanceRunning(config):
......@@ -565,6 +564,7 @@ def configNewSR(config, projectpath):
if folder:
sup_process.stopProcess(config, 'slapgrid-cp')
sup_process.stopProcess(config, 'slapgrid-sr')
logger.warning("User opened a new SR. Removing all instances...")
removeCurrentInstance(config)
open(os.path.join(config['etc_dir'], ".project"), 'w').write(projectpath)
return True
......@@ -603,6 +603,7 @@ def newSoftware(folder, config, session):
removeProxyDb(config)
startProxy(config)
#Stop running process and remove existing instance
logger.warning("User created a new SR. Removing all instances...")
removeCurrentInstance(config)
session['title'] = getProjectTitle(config)
code = 1
......@@ -681,6 +682,7 @@ def removeSoftwareByName(config, md5, folder_name):
return (0, "Software installation or instantiation in progress, cannot remove")
if getSoftwareReleaseName(config) == folder_name:
logger.warning("User removed the SR currently used. Removing all instances...")
removeCurrentInstance(config)
result = removeSoftwareRootDirectory(config, md5, folder_name)
......
......@@ -181,6 +181,14 @@ def inspectInstance():
else:
file_path = ''
supervisor = []
if "application/json" in request.accept_mimetypes.best:
result_list = []
for service in supervisor:
result_list.append({
'service_name': service[0],
'status': service[1],
})
return jsonify(result_list)
return render_template('instanceInspect.html',
file_path=file_path,
supervisor=supervisor,
......@@ -215,6 +223,7 @@ def supervisordStatus():
def removeInstance():
logger.warning("User clicked on 'Destroy All Services'. Removing all instances...")
result = removeCurrentInstance(app.config)
if isinstance(result, str):
flash(result)
......
......@@ -159,6 +159,7 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': False, u'message': u'success', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s/my_promise.py' % self.promise_dir,
}
self.assertEqual(expected_result, my_result)
......@@ -175,6 +176,7 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': False, u'message': u'success', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s/my_second_promise.py' % self.promise_dir,
}
self.assertEqual(expected_result, second_result)
......@@ -197,6 +199,7 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': True, u'message': u'failed', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s/my_promise.py' % self.promise_dir,
}
self.assertEqual(expected_result, my_result)
......@@ -219,6 +222,7 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': True, u'message': u'failed', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s/my_promise.py' % self.promise_dir,
}
self.assertEqual(expected_result, my_result)
......@@ -239,6 +243,7 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': False, u'message': u'success', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s/my_promise.py' % self.promise_dir,
}
self.assertEqual(expected_result, my_result)
......@@ -300,6 +305,7 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': False, u'message': u'success', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s' % promise,
}
......@@ -335,6 +341,7 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': False, u'message': u'success', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s' % promise,
}
self.assertEqual(expected_result, result1)
......@@ -348,6 +355,7 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': False, u'message': u'success', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s' % promise2,
}
self.assertEqual(expected_result, result2)
......@@ -369,6 +377,7 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': True, u'message': u'failed', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s' % promise,
}
self.assertEqual(expected_result, result1)
......@@ -399,6 +408,7 @@ class RunPromise(GenericPromise):
u'result': {
u'failed': False, u'message': u'success', u'type': u'Test Result'
},
u'execution-time': 0.05,
u'path': u'%s' % promise,
}
self.assertEqual(expected_result, result1)
......
......@@ -62,7 +62,13 @@ extra_config_dict = {
result = self.getPromiseResult(self.promise_name)
last_message = result['result']['message'].split('\n')[-1]
self.assertEqual(result['result']['failed'], False)
self.assertEqual(last_message, "OK: IPv4 reachable, IPv6 reachable")
#self.assertEqual(last_message, "OK: IPv4 reachable, IPv6 reachable")
# some testnodes cannot ping because they are qemu VM with Nat network, and
# ICMP is disabled. Expected result is "OK: IPv4 reachable, IPv6 reachable"
# but if ICMP is not working, we will have "IPv4 unreachable" in the message
# so we will test only if the ping returned "OK" and if IPv6 was reachable.
self.assertTrue(last_message.startswith("OK:"))
self.assertTrue("IPv6 reachable" in last_message)
def test_ipv4_is_faster(self):
content = self.base_content % {
......
......@@ -39,7 +39,15 @@ __buildout_signature__ = MarkupSafe-1.0-py2.7-linux-x86_64.egg Jinja2-2.10-py2.7
recipe = slapos.recipe.template:jinja2
rendered = /some/prefix/slappart18/test/srv/exporter.exclude
template = inline:
srv/backup/**"""
srv/backup/*.log
[exclude1]
__buildout_installed__ = {cwd}/instance/slappart1/srv/exporter.exclude
__buildout_signature__ = MarkupSafe-1.0-py2.7-linux-x86_64.egg Jinja2-2.10-py2.7.egg zc.buildout-2.12.2-py2.7.egg slapos.recipe.template-4.3-py2.7.egg setuptools-40.4.3-py2.7.egg
recipe = slapos.recipe.template:jinja2
rendered = /some/prefix/slappart18/test/srv/exporter.exclude
template = inline:
srv/backup/log/**"""
class Config():
......@@ -75,24 +83,37 @@ class TestRunnerExporter(unittest.TestCase):
"""Create data mirroring tested_instance_cfg"""
os.makedirs('instance/slappart0/etc')
os.makedirs('instance/slappart0/srv/backup')
os.makedirs('instance/slappart0/srv/backup/important_logs')
os.makedirs('instance/slappart1/etc')
os.makedirs('instance/slappart1/srv/backup')
os.makedirs('instance/slappart1/srv/backup/log')
self._createFile('instance/slappart0/.installed.cfg',
tested_instance_cfg.format(cwd=os.getcwd()))
self._createFile('instance/slappart0/srv/backup/data.dat',
'all my fortune lays on this secret !')
self._createFile('instance/slappart0/srv/backup/important_logs/this_is_a.log',
'this log is very important !')
self._createFile('instance/slappart0/srv/backup/data.log',
'this log is not so important !')
self._createFile('instance/slappart0/srv/exporter.exclude',
'srv/backup/**')
'srv/backup/*.log')
self._createFile('instance/slappart0/etc/config.json')
self._createFile('instance/slappart0/etc/.parameters.xml')
self._createFile('instance/slappart0/etc/.project',
'workspace/slapos-dev/software/erp5')
self._createFile('instance/slappart1/srv/backup/data.dat',
'This is important data also !')
self._createFile('instance/slappart1/srv/backup/log/log1',
'First log')
self._createFile('instance/slappart1/srv/backup/log/log2',
'Second log')
self._createFile('instance/slappart1/srv/exporter.exclude',
'srv/backup/log/**')
self._createExecutableFile(
'instance/slappart1/srv/.backup_identity_script',
"#!/bin/sh\nexec xargs -0 md5sum"
......@@ -119,8 +140,10 @@ class TestRunnerExporter(unittest.TestCase):
'.installed*.cfg',
'instance/slappart0/etc/nicolas.txt',
'instance/slappart0/etc/rafael.txt',
'instance/slappart0/srv/backup/**',
'instance/slappart0/srv/backup/*.log',
'instance/slappart0/srv/exporter.exclude',
'instance/slappart1/srv/backup/log/**',
'instance/slappart1/srv/exporter.exclude',
]
)
......@@ -137,7 +160,7 @@ class TestRunnerExporter(unittest.TestCase):
)
self.assertEqual(check_output_mock.call_count, 1)
check_output_mock.assert_any_call(
['rsync', '-rlptgov', '--stats', '--safe-links', '--ignore-missing-args', '--delete', '--delete-excluded', 'config.json', '.parameters.xml', '.project', 'backup/runner/etc/']
['rsync', '-rlptgov', '--stats', '--safe-links', '--ignore-missing-args', '--delete', '--delete-excluded', '.parameters.xml', '.project', 'config.json', 'backup/runner/etc/']
)
......@@ -154,7 +177,7 @@ class TestRunnerExporter(unittest.TestCase):
self.assertEqual(check_output_mock.call_count, 1)
check_output_mock.assert_any_call(
['rsync', '-rlptgov', '--stats', '--safe-links', '--ignore-missing-args', '--delete', '--delete-excluded', '--exclude=*.pid', '--exclude=*.sock', '--exclude=*.socket', '--exclude=.installed*.cfg', '--exclude=instance/slappart0/etc/nicolas.txt', '--exclude=instance/slappart0/etc/rafael.txt', '--exclude=instance/slappart0/srv/backup/**', '--exclude=instance/slappart0/srv/exporter.exclude', 'instance', 'project', 'public', 'proxy.db', 'backup/runner/runner']
['rsync', '-rlptgov', '--stats', '--safe-links', '--ignore-missing-args', '--delete', '--delete-excluded', '--exclude=*.pid', '--exclude=*.sock', '--exclude=*.socket', '--exclude=.installed*.cfg', '--exclude=instance/slappart0/etc/nicolas.txt', '--exclude=instance/slappart0/etc/rafael.txt', '--exclude=instance/slappart0/srv/backup/*.log', '--exclude=instance/slappart0/srv/exporter.exclude', '--exclude=instance/slappart1/srv/backup/log/**', '--exclude=instance/slappart1/srv/exporter.exclude', 'instance', 'project', 'proxy.db', 'public', 'backup/runner/runner']
)
def test_getSlappartSignatureMethodDict(self):
......@@ -179,6 +202,10 @@ class TestRunnerExporter(unittest.TestCase):
self._createFile('backup/runner/instance/slappart0/data', 'hello')
self._createFile('backup/runner/instance/slappart1/data', 'world')
os.symlink('data', 'backup/runner/instance/slappart0/good_link')
os.symlink(os.path.abspath('backup/runner/instance/slappart0/data'), 'backup/runner/instance/slappart0/good_abs_link')
os.symlink('unexisting_file', 'backup/runner/instance/slappart0/bad_link')
slappart_signature_method_dict = {
'./instance/slappart1': './instance/slappart1/srv/.backup_identity_script',
}
......@@ -190,15 +217,30 @@ class TestRunnerExporter(unittest.TestCase):
signature_file_content = f.read()
# Slappart1 is using md5sum as signature, others are using sha256sum (default)
self.assertEqual(signature_file_content, """2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824 ./runner/instance/slappart0/data
self.assertEqual(signature_file_content,
"""2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824 ./runner/instance/slappart0/data
2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824 ./runner/instance/slappart0/good_abs_link
2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824 ./runner/instance/slappart0/good_link
49b74873d57ff0307b7c9364e2fe2a3876d8722fbe7ce3a6f1438d47647a86f4 ./etc/.project
7d793037a0760186574b0282f2f435e7 ./runner/instance/slappart1/data""")
def test_backupFilesWereModifiedDuringExport(self):
def test_getBackupFilesModifiedDuringExportList(self):
self._setUpFakeInstanceFolder()
with runner_exporter.CwdContextManager('instance'):
self.assertTrue(runner_exporter.backupFilesWereModifiedDuringExport(time.time() - 5))
time.sleep(2)
self.assertFalse(runner_exporter.backupFilesWereModifiedDuringExport(time.time() - 1))
self._createFile('slappart1/srv/backup/bakckup.data', 'my backup')
self.assertTrue(runner_exporter.backupFilesWereModifiedDuringExport(time.time() - 1))
config = Config()
config.rsync_binary = 'rsync'
self.assertEqual(
runner_exporter.getBackupFilesModifiedDuringExportList(config, time.time() - 5),
['instance/slappart0/srv/backup/data.dat',
'instance/slappart0/srv/backup/important_logs/this_is_a.log',
'instance/slappart1/srv/backup/data.dat']
)
time.sleep(2)
self.assertFalse(
runner_exporter.getBackupFilesModifiedDuringExportList(config, time.time() - 1)
)
self._createFile('instance/slappart1/srv/backup/bakckup.data', 'my backup')
self.assertEqual(
runner_exporter.getBackupFilesModifiedDuringExportList(config, time.time() - 1),
['instance/slappart1/srv/backup/bakckup.data']
)
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment