nexedi / slapos.toolbox
Commit 0c6995b2 authored Oct 19, 2018 by Nicolas Wavrant
resilient: port the export script of the webrunner to python
parent c4d2977f

Showing 3 changed files with 473 additions and 0 deletions:

  setup.py (+2, -0)
  slapos/resilient/runner_exporter.py (+260, -0)
  slapos/test/test_runner_exporter.py (+211, -0)
setup.py

@@ -53,6 +53,7 @@ setup(name=name,
       'dnspython',
       'requests',
       'jsonschema',
+      'zc.buildout',
     ] + additional_install_requires,
     extras_require = {
       'lampconfigure': ["mysqlclient"], #needed for MySQL Database access

@@ -108,6 +109,7 @@ setup(name=name,
       'pubsubserver = slapos.pubsub:main',
       'qemu-qmp-client = slapos.qemuqmpclient:main',
       'rdiffbackup.genstatrss = slapos.resilient.rdiffBackupStat2RSS:main',
+      'runner-exporter = slapos.resilient.runner_exporter:runExport',
       'securedelete = slapos.securedelete:main',
       'slapos-kill = slapos.systool:kill',
       'slaprunnertest = slapos.runner.runnertest:main',
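The second hunk registers a new console script, runner-exporter, which calls runExport() from the new module below. A minimal sketch of how that command line maps onto the parser defined in parseArgumentList(); the paths and values are hypothetical examples, not part of the commit:

  import sys
  from slapos.resilient.runner_exporter import parseArgumentList

  # Simulate the command line the 'runner-exporter' entry point would receive.
  sys.argv = [
    'runner-exporter',
    '--srv-path', '/srv/slapgrid/slappart0/srv',
    '--backup-path', '/srv/slapgrid/slappart0/srv/backup',
    '--etc-path', '/srv/slapgrid/slappart0/etc',
    '--tmp-path', '/srv/slapgrid/slappart0/tmp',
    '--rsync-binary', '/usr/bin/rsync',
    '--backup-wait-time', '30',
    '-n',  # dry run: rsync commands are only printed
  ]
  args = parseArgumentList()
  assert args.dry and args.backup_wait_time == 30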
slapos/resilient/runner_exporter.py  (new file, mode 100644)
import argparse
import errno
import glob
import os
import shutil
import subprocess
import sys
import time

from datetime import datetime
from hashlib import sha256
from zc.buildout.configparser import parse

# TODO: Redirect output to log. Maybe it can be done at slapos level.
os.environ['LC_ALL'] = 'C'
# TODO: check it is the same as umask 077
os.umask(077)


class CwdContextManager:
  # Context manager class for executing code in a given directory.
  # There is no need to provide fallback or basic checks in this code,
  # as these checks should exist in the code invoking this context
  # manager. If someone needs to add checks here, I'm pretty sure it
  # means that they are trying to hide legitimate errors.
  # See the tests for examples of invocation.
  def __init__(self, path):
    self.path = path

  def __enter__(self):
    self.oldpath = os.getcwd()
    os.chdir(self.path)

  def __exit__(self, exc_type, exc_value, traceback):
    os.chdir(self.oldpath)


def parseArgumentList():
  parser = argparse.ArgumentParser()
  base_argument_dict = {
    'action': 'store',
    'type': str,
    'required': True,
  }
  parser.add_argument('--srv-path', **base_argument_dict)
  parser.add_argument('--backup-path', **base_argument_dict)
  parser.add_argument('--etc-path', **base_argument_dict)
  parser.add_argument('--tmp-path', **base_argument_dict)
  parser.add_argument('--rsync-binary', **base_argument_dict)
  parser.add_argument('--backup-wait-time', type=int, action='store', required=True)
  parser.add_argument('-n', action='store_true', dest='dry', default=False)

  return parser.parse_args()


def rsync(rsync_binary, source, destination, extra_args=None, dry=False):
  arg_list = [
    rsync_binary,
    '-rlptgov',
    '--stats',
    '--safe-links',
    '--ignore-missing-args',
    '--delete',
    '--delete-excluded'
  ]
  if isinstance(extra_args, list):
    arg_list.extend(extra_args)
  arg_list.append(source)
  arg_list.append(destination)

  if dry:
    print 'DEBUG: ', arg_list
  else:
    rsync_process = subprocess.Popen(arg_list)
    rsync_process.wait()
    # TODO: pipe stdout into: (egrep -v "$IGNOREOUT" || true) || [ $? = "$IGNOREEXIT" ]
    # with:
    # IGNOREEXIT=24
    # IGNOREOUT='^(file has vanished: |rsync warning: some files vanished before they could be transferred)'
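# A possible follow-up sketch (not part of this commit) for the TODO above:
# instead of the quoted shell pipeline, the rsync output could be captured in
# Python, the "vanished file" warnings dropped, and exit code 24 treated as
# success. The helper name below is hypothetical.
import re

def runRsyncIgnoringVanishedFiles(arg_list):
  ignored_exit_code = 24  # rsync: some files vanished before they could be transferred
  ignored_line_re = re.compile(
    r'^(file has vanished: |rsync warning: some files vanished before they could be transferred)')
  process = subprocess.Popen(arg_list, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
  output, _ = process.communicate()
  for line in output.splitlines():
    if not ignored_line_re.match(line):
      print line
  if process.returncode not in (0, ignored_exit_code):
    raise RuntimeError('rsync failed with exit code %s' % process.returncode)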
def getExcludePathList(path):
  excluded_path_list = [
    "*.sock",
    "*.socket",
    "*.pid",
    ".installed*.cfg",
  ]

  def append_relative(path_list):
    for p in path_list:
      p = p.strip()
      if p:
        excluded_path_list.append(os.path.relpath(p, path))

  for partition in glob.glob(path + "/instance/slappart*"):
    if not (os.path.exists(partition) and os.path.isdir(partition)):
      continue

    with CwdContextManager(partition):
      try:
        with open("srv/exporter.exclude") as f:
          exclude = f.readlines()
      except IOError as e:
        if e.errno != errno.ENOENT:
          raise
      else:
        append_relative(exclude)

      for installed in glob.glob(".installed*.cfg"):
        try:
          with open(installed) as f:
            installed = parse(f, installed)
        except IOError as e:
          if e.errno != errno.ENOENT:
            raise
        else:
          for section in installed.itervalues():
            append_relative(
              section.get('__buildout_installed__', '').splitlines())

  return excluded_path_list


def getSha256Sum(file_path):
  # TODO: introduce reading by chunk, otherwise reading backup files
  # of 5Gb may crash the server.
  with open(file_path, 'rb') as f:
    return sha256(f.read()).hexdigest()
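# A possible chunked variant (not part of this commit) answering the TODO in
# getSha256Sum(): it hashes fixed-size blocks so that a multi-gigabyte backup
# file is never loaded into memory at once. The function name and chunk size
# below are arbitrary.
def getSha256SumByChunk(file_path, chunk_size=1024 * 1024):
  checksum = sha256()
  with open(file_path, 'rb') as f:
    for chunk in iter(lambda: f.read(chunk_size), b''):  # read 1 MiB at a time
      checksum.update(chunk)
  return checksum.hexdigest()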
def getCustomSignature(signature_script_path, file_path):
  try:
    process = subprocess.Popen(
      [signature_script_path, file_path],
      stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    process.wait()
  except OSError:
    raise RuntimeError(process.stderr.read())
  return process.stdout.read()


def synchroniseRunnerConfigurationDirectory(config, backup_path):
  rsync(config.rsync_binary, 'config.json', backup_path, dry=config.dry)

  for hidden_file in [x for x in os.listdir('.') if x[0] == '.']:
    rsync(config.rsync_binary, hidden_file, backup_path, dry=config.dry)


def synchroniseRunnerWorkingDirectory(config, backup_path):
  if os.path.exists('instance') and os.path.isdir('instance'):
    exclude_list = getExcludePathList('.')
    rsync(
      config.rsync_binary, 'instance', backup_path,
      ["--exclude={}".format(x) for x in exclude_list],
      dry=config.dry,
    )

  for file in ('project', 'public', 'proxy.db'):
    if os.path.exists(file):
      rsync(config.rsync_binary, file, backup_path, dry=config.dry)


def getSlappartSignatureMethodDict():
  slappart_signature_method_dict = {}
  for partition in glob.glob("./instance/slappart*"):
    if os.path.isdir(partition):
      script_path = os.path.join(partition, 'srv', '.backup_identity_script')
      if os.path.exists(script_path):
        slappart_signature_method_dict[partition] = script_path
  return slappart_signature_method_dict


def writeSignatureFile(slappart_signature_method_dict, runner_working_path,
                       signature_file_path='../backup.signature'):
  special_slappart_list = slappart_signature_method_dict.keys()
  signature_list = []

  for dirpath, dirname_list, filename_list in os.walk('.'):
    # Find if a special signature function should be applied
    for special_slappart in special_slappart_list:
      if dirpath.startswith(special_slappart):
        signature_function = lambda x: getCustomSignature(
          os.path.join(runner_working_path,
                       slappart_signature_method_dict[special_slappart]),
          x)
        break
    else:
      signature_function = getSha256Sum

    # Calculate all signatures
    for filename in filename_list:
      file_path = os.path.join(dirpath, filename)
      signature_list.append(
        "%s %s" % (signature_function(file_path),
                   os.path.realpath(file_path)))

  # Write the signatures in file
  with open(signature_file_path, 'w+') as signature_file:
    signature_file.write("\n".join(sorted(signature_list)))


def backupFilesWereModifiedDuringExport():
  process = subprocess.Popen(
    ['find', '-mmin', '0.1', '-type', 'f'],
    stdout=subprocess.PIPE)
  process.wait()
  changed_file_output = process.stdout.read()
  for line in changed_file_output.split('\n'):
    if '/srv/backup/' in line:
      return True
  return False


def runExport():
  print datetime.now().isoformat()

  args = parseArgumentList()

  def _rsync(*params):
    return rsync(args.rsync_binary, *params, dry=args.dry)

  runner_working_path = os.path.join(args.srv_path, 'runner')
  backup_runner_path = os.path.join(args.backup_path, 'runner')

  # Synchronise runner's etc directory
  with CwdContextManager(args.etc_path):
    with open('.resilient-timestamp', 'w') as f:
      f.write(datetime.now().strftime("%s"))

    # "+ '/'" is mandatory, otherwise rsyncing the etc directory
    # would create in backup_etc_path only a file called etc
    backup_etc_path = os.path.join(args.backup_path, 'etc') + '/'
    synchroniseRunnerConfigurationDirectory(args, backup_etc_path)

  # Synchronise runner's working directory
  # and aggregate signature functions as we are here
  with CwdContextManager(runner_working_path):
    synchroniseRunnerWorkingDirectory(args, backup_runner_path)
    slappart_signature_method_dict = getSlappartSignatureMethodDict()

  # Calculate signatures of the synchronised files
  with CwdContextManager(backup_runner_path):
    writeSignatureFile(slappart_signature_method_dict, runner_working_path)

  # BBB: clean the software folder if it was synchronised
  # by an old instance
  backup_software_path = os.path.join(backup_runner_path, 'software')
  if os.path.exists(backup_software_path) and os.path.isdir(backup_software_path):
    shutil.rmtree(backup_software_path)

  # Wait a little to increase the probability of detecting an ongoing backup.
  time.sleep(10)

  # Check that no instance backup was running during the export
  with CwdContextManager(backup_runner_path):
    if backupFilesWereModifiedDuringExport():
      print "ERROR: Some backups are not consistent, exporter should be re-run."
      print "Let's sleep %s minutes, to let the backup end..." % args.backup_wait_time
      time.sleep(args.backup_wait_time * 60)
      sys.exit(1)
slapos/test/test_runner_exporter.py  (new file, mode 100644)
import mock
import os
import shutil
import time
import unittest

from slapos.resilient import runner_exporter

tested_instance_cfg = """[buildout]
installed_develop_eggs =
parts = folders hello-nicolas hello-rafael exclude
[folders]
__buildout_installed__ =
__buildout_signature__ = wcwidth-0.1.7-py2.7.egg contextlib2-0.5.5-py2.7.egg ...
etc = /srv/slapgrid/slappart18/test/etc
home = /srv/slapgrid/slappart18/test
recipe = slapos.cookbook:mkdirectory
srv = /srv/slapgrid/slappart18/test/srv
[hello-nicolas]
__buildout_installed__ = ./instance/slappart0/etc/nicolas.txt
__buildout_signature__ = MarkupSafe-1.0-py2.7-linux-x86_64.egg Jinja2-2.10-py2.7.egg zc.buildout-2.12.2-py2.7.egg slapos.recipe.template-4.3-py2.7.egg setuptools-40.4.3-py2.7.egg
mode = 0644
name = Nicolas
output = /srv/slapgrid/slappart18/test/etc/nicolas.txt
recipe = slapos.recipe.template
[hello-rafael]
__buildout_installed__ = ./instance/slappart0/etc//rafael.txt
__buildout_signature__ = MarkupSafe-1.0-py2.7-linux-x86_64.egg Jinja2-2.10-py2.7.egg zc.buildout-2.12.2-py2.7.egg slapos.recipe.template-4.3-py2.7.egg setuptools-40.4.3-py2.7.egg
name = Rafael
output = /srv/slapgrid/slappart18/test/etc/rafael.txt
recipe = slapos.recipe.template
[exclude]
__buildout_installed__ = srv/exporter.exclude
__buildout_signature__ = MarkupSafe-1.0-py2.7-linux-x86_64.egg Jinja2-2.10-py2.7.egg zc.buildout-2.12.2-py2.7.egg slapos.recipe.template-4.3-py2.7.egg setuptools-40.4.3-py2.7.egg
recipe = slapos.recipe.template:jinja2
rendered = /srv/slapgrid/slappart18/test/srv/exporter.exclude
template = inline:
  srv/backup/**"""


class Config():
  pass


class TestRunnerExporter(unittest.TestCase):
  def setUp(self):
    if not os.path.exists('test_folder'):
      os.mkdir('test_folder')
    os.chdir('test_folder')

  def tearDown(self):
    if os.path.basename(os.getcwd()) == 'test_folder':
      os.chdir('..')
      shutil.rmtree('test_folder')
    elif 'test_folder' in os.listdir('.'):
      shutil.rmtree('test_folder')

  def _createFile(self, path, content=''):
    with open(path, 'w') as f:
      f.write(content)

  def _createExecutableFile(self, path, content=''):
    self._createFile(path, content)
    os.chmod(path, 0700)

  def _setUpFakeInstanceFolder(self):
    self._createFile('proxy.db')
    os.makedirs('project')
    os.makedirs('public')

    # Create data mirroring tested_instance_cfg
    os.makedirs('instance/slappart0/etc')
    os.makedirs('instance/slappart0/srv/backup')
    os.makedirs('instance/slappart1/etc')
    os.makedirs('instance/slappart1/srv')

    self._createFile('instance/slappart0/.installed.cfg',
                     tested_instance_cfg)
    self._createFile('instance/slappart0/srv/backup/data.dat',
                     'all my fortune lays on this secret !')
    self._createFile('instance/slappart0/srv/exporter.exclude',
                     'srv/backup/**')
    self._createFile('instance/slappart0/etc/config.json')
    self._createFile('instance/slappart0/etc/.parameters.xml')
    self._createFile('instance/slappart0/etc/.project',
                     'workspace/slapos-dev/software/erp5')

    self._createExecutableFile(
      'instance/slappart1/srv/.backup_identity_script',
      '#!/bin/bash\nmd5sum $1 | cut -d " " -f 1 | tr -d "\n"')

  def test_CwdContextManager(self):
    os.makedirs('a/b')
    with runner_exporter.CwdContextManager('a'):
      self.assertEqual(os.listdir('.'), ['b'])
      os.mkdir('c')
    self.assertEqual(os.listdir('.'), ['a'])
    self.assertEqual(sorted(os.listdir('a')), ['b', 'c'])

  def test_getExcludePathList(self):
    self._setUpFakeInstanceFolder()
    self.assertEqual(
      sorted(runner_exporter.getExcludePathList('.')),
      ['*.pid',
       '*.sock',
       '*.socket',
       '.installed*.cfg',
       'instance/slappart0/etc/nicolas.txt',
       'instance/slappart0/etc/rafael.txt',
       'srv/backup/**',
       'srv/exporter.exclude',
      ]
    )

  @mock.patch('subprocess.Popen')
  def test_synchroniseRunnerConfigurationDirectory(self, popen_mock):
    self._setUpFakeInstanceFolder()
    config = Config()
    config.rsync_binary = 'rsync'
    config.dry = False
    with runner_exporter.CwdContextManager('instance/slappart0/etc'):
      runner_exporter.synchroniseRunnerConfigurationDirectory(
        config, 'backup/runner/etc/')
    self.assertEqual(popen_mock.call_count, 3)
    popen_mock.assert_any_call(
      ['rsync', '-rlptgov', '--stats', '--safe-links',
       '--ignore-missing-args', '--delete', '--delete-excluded',
       'config.json', 'backup/runner/etc/'])
    popen_mock.assert_any_call(
      ['rsync', '-rlptgov', '--stats', '--safe-links',
       '--ignore-missing-args', '--delete', '--delete-excluded',
       '.project', 'backup/runner/etc/'])
    popen_mock.assert_any_call(
      ['rsync', '-rlptgov', '--stats', '--safe-links',
       '--ignore-missing-args', '--delete', '--delete-excluded',
       '.project', 'backup/runner/etc/'])

  @mock.patch('subprocess.Popen')
  def test_synchroniseRunnerWorkingDirectory(self, popen_mock):
    self._setUpFakeInstanceFolder()
    config = Config()
    config.rsync_binary = 'rsync'
    config.dry = False
    with runner_exporter.CwdContextManager('.'):
      runner_exporter.synchroniseRunnerWorkingDirectory(
        config, 'backup/runner/runner')
    popen_mock.assert_any_call(
      ['rsync', '-rlptgov', '--stats', '--safe-links',
       '--ignore-missing-args', '--delete', '--delete-excluded',
       '--exclude=*.sock',
       '--exclude=*.socket',
       '--exclude=*.pid',
       '--exclude=.installed*.cfg',
       '--exclude=srv/backup/**',
       '--exclude=instance/slappart0/etc/nicolas.txt',
       '--exclude=instance/slappart0/etc/rafael.txt',
       '--exclude=srv/exporter.exclude',
       'instance', 'backup/runner/runner'])
    popen_mock.assert_any_call(
      ['rsync', '-rlptgov', '--stats', '--safe-links',
       '--ignore-missing-args', '--delete', '--delete-excluded',
       'proxy.db', 'backup/runner/runner'])

  def test_getSlappartSignatureMethodDict(self):
    self._setUpFakeInstanceFolder()
    slappart_signature_method_dict = \
      runner_exporter.getSlappartSignatureMethodDict()
    self.assertEqual(
      slappart_signature_method_dict,
      {'./instance/slappart1': './instance/slappart1/srv/.backup_identity_script',
      }
    )

  def test_writeSignatureFile(self):
    self._setUpFakeInstanceFolder()
    os.makedirs('backup/instance/etc')
    os.makedirs('backup/instance/slappart0')
    os.makedirs('backup/instance/slappart1')
    self._createFile('backup/instance/etc/.project',
                     'workspace/slapos-dev/software/erp5')
    self._createFile('backup/instance/slappart0/data', 'hello')
    self._createFile('backup/instance/slappart1/data', 'world')

    slappart_signature_method_dict = {
      './instance/slappart1': './instance/slappart1/srv/.backup_identity_script',
    }

    with runner_exporter.CwdContextManager('backup'):
      runner_exporter.writeSignatureFile(
        slappart_signature_method_dict, '..',
        signature_file_path='backup.signature')
      with open('backup.signature', 'r') as f:
        signature_file_content = f.read()

    # Slappart1 is using md5sum as signature, others are using sha256sum (default)
    self.assertEqual(
      signature_file_content,
      """2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824 /srv/slapgrid/slappart18/srv/runner/project/slapos.toolbox/test_folder/backup/instance/slappart0/data
49b74873d57ff0307b7c9364e2fe2a3876d8722fbe7ce3a6f1438d47647a86f4 /srv/slapgrid/slappart18/srv/runner/project/slapos.toolbox/test_folder/backup/instance/etc/.project
7d793037a0760186574b0282f2f435e7 /srv/slapgrid/slappart18/srv/runner/project/slapos.toolbox/test_folder/backup/instance/slappart1/data"""
    )

  def test_backupFilesWereModifiedDuringExport(self):
    self._setUpFakeInstanceFolder()
    with runner_exporter.CwdContextManager('instance'):
      self.assertTrue(
        runner_exporter.backupFilesWereModifiedDuringExport())
      time.sleep(15)
      self.assertFalse(
        runner_exporter.backupFilesWereModifiedDuringExport())


if __name__ == '__main__':
  unittest.main()
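The new tests can also be run on their own with the standard unittest machinery; a minimal sketch, assuming slapos.toolbox and the mock package are importable in a Python 2 environment:

  import unittest
  from slapos.test import test_runner_exporter

  # Load and run every TestRunnerExporter test; equivalent to the __main__ block above.
  suite = unittest.TestLoader().loadTestsFromModule(test_runner_exporter)
  unittest.TextTestRunner(verbosity=2).run(suite)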