slapos commit e3a6f032
authored Jun 23, 2021 by Xavier Thompson

software/theia: Add export script

parent c52d568a

Showing 6 changed files with 411 additions and 14 deletions (+411, -14):

  software/theia/buildout.hash.cfg              +10   -2
  software/theia/instance-export.cfg.jinja.in   +36   -4
  software/theia/instance-theia.cfg.jinja.in    +11   -8
  software/theia/software.cfg                   +31   -0
  software/theia/theia_common.py                +165  -0
  software/theia/theia_export.py                +158  -0

software/theia/buildout.hash.cfg
...
@@ -15,7 +15,7 @@
 [instance-theia]
 _update_hash_filename_ = instance-theia.cfg.jinja.in
-md5sum = 001440693adfb9583d1479aae4bfa970
+md5sum = 6ce2d9f0c32e3b781fab5d59685dc866

 [instance]
 _update_hash_filename_ = instance.cfg.in
...
@@ -27,12 +27,20 @@ md5sum = d1181f788461a31026e36677ab56398d
 [instance-export]
 _update_hash_filename_ = instance-export.cfg.jinja.in
-md5sum = 4f52e4cff8e72369b38e47df297df7e2
+md5sum = 802c13e0cdbdc4dffa606df1344eeb6b

 [instance-resilient]
 _update_hash_filename_ = instance-resilient.cfg.jinja
 md5sum = d78a9f885bdebf6720197209e0c21aa0

+[theia-common]
+_update_hash_filename_ = theia_common.py
+md5sum = e57396473b4b6a17d26a747f0030293c
+
+[theia-export]
+_update_hash_filename_ = theia_export.py
+md5sum = b5f5ac1924b27d3f2be2e5ea291c119e
+
 [yarn.lock]
 _update_hash_filename_ = yarn.lock
 md5sum = 80e7ad91deea54cebcccef5a83fdb380
...
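
Note: each md5sum in buildout.hash.cfg pins the current content of the corresponding template or script, so the value has to be refreshed whenever the file changes. As a minimal sketch (the file path below is an assumption, relative to the repository root), the hash recorded for theia_export.py can be recomputed with Python's hashlib:

import hashlib

# Recompute the md5sum recorded above for theia_export.py
# (hypothetical path, relative to the repository root).
with open('software/theia/theia_export.py', 'rb') as f:
  print(hashlib.md5(f.read()).hexdigest())
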
software/theia/instance-export.cfg.jinja.in
...
@@ -24,10 +24,42 @@ namebase = {{ parameter_dict['namebase'] }}
 # to be pulled from the export instance.
 # All it expects is that a script be available in exporter:wrapper.
 [exporter]
 recipe = slapos.cookbook:wrapper
-command-line = echo "Export Not Implemented Yet"
 wrapper-path = $${directory:bin}/$${slap-parameter:namebase}-exporter
-wrapper = $${:wrapper-path}
+wrapper = $${theia-export-script:rendered}
+
+[theia-export-script]
+recipe = slapos.recipe.template:jinja2
+rendered = $${directory:bin}/theia-export-script
+mode = 0700
+exitcode-file = $${directory:srv}/export-exitcode-file
+error-file = $${directory:srv}/export-errormessage-file
+context =
+  raw python ${software-info:python-with-eggs}
+  raw theia_export ${software-info:theia-export}
+  raw bash ${software-info:bash}
+  raw rsync ${software-info:rsync}
+  raw sqlite3 ${software-info:sqlite3}
+  raw root_path $${buildout:directory}
+  raw backup_path $${directory:backup}
+  raw slapos_cfg $${directory:runner}/etc/slapos.cfg
+  raw project_path $${directory:project}
+  raw public_path $${directory:frontend-static-public}
+  key exitfile :exitcode-file
+  key errorfile :error-file
+{%- raw %}
+template =
+  inline:#!{{ bash }}
+  {{ python }} {{ theia_export }} \
+      --rsync {{ rsync }} \
+      --sqlite3 {{ sqlite3 }} \
+      --root {{ root_path }} \
+      --backup {{ backup_path }} \
+      --cfg {{ slapos_cfg }} \
+      --dirs {{ project_path }} \
+      --dirs {{ public_path }} \
+      --exitfile {{ exitfile }} \
+      --errorfile {{ errorfile }}
+{%- endraw %}

 # Extend resilient parameters with normal theia connection parameters
 [resilient-publish-connection-parameter]
...
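
Note: after buildout renders this template, the exporter wrapper is a small bash script that runs theia_export.py through the python-with-eggs interpreter, passing the instance's own paths. The sketch below shows the equivalent call; every path in it is a hypothetical placeholder, not a value taken from a real instance:

import subprocess

# Hypothetical rendered values; in a real instance buildout substitutes
# the software and partition paths declared in the context above.
subprocess.check_call([
  '/opt/software/bin/python-with-eggs',      # {{ python }}
  '/opt/software/theia_export.py',           # {{ theia_export }}
  '--rsync', '/opt/software/parts/rsync/bin/rsync',
  '--sqlite3', '/opt/software/parts/sqlite3/bin/sqlite3',
  '--root', '/srv/slapgrid/slappart0',       # $${buildout:directory}
  '--backup', '/srv/slapgrid/slappart0/srv/backup/theia',
  '--cfg', '/srv/slapgrid/slappart0/srv/runner/etc/slapos.cfg',
  '--dirs', '/srv/slapgrid/slappart0/srv/project',
  '--dirs', '/srv/slapgrid/slappart0/srv/frontend-static/public',
  '--exitfile', '/srv/slapgrid/slappart0/srv/export-exitcode-file',
  '--errorfile', '/srv/slapgrid/slappart0/srv/export-errormessage-file',
])
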
software/theia/instance-theia.cfg.jinja.in
...
@@ -41,23 +41,26 @@ backend-url = $${frontend-instance:url}
 [directory]
 recipe = slapos.cookbook:mkdirectory
-etc = $${buildout:directory}/etc
-var = $${buildout:directory}/var
-srv = $${buildout:directory}/srv
-bin = $${buildout:directory}/bin
-tmp = $${buildout:directory}/tmp
-dot-theia = $${buildout:directory}/.theia/
+home = $${buildout:directory}
+etc = $${:home}/etc
+var = $${:home}/var
+srv = $${:home}/srv
+bin = $${:home}/bin
+tmp = $${:home}/tmp
+dot-theia = $${:home}/.theia/
 pidfiles = $${:var}/run
 services = $${:etc}/service
 runner = $${:srv}/runner
+backup = $${:srv}/backup/theia
 project = $${:srv}/project
 frontend-static = $${:srv}/frontend-static
 frontend-static-public = $${:frontend-static}/public
 frontend-static-css = $${:frontend-static}/css
-bash-completions = $${buildout:directory}/.local/share/bash-completion/completions/
-fish-completions = $${buildout:directory}/.config/fish/completions/
+bash-completions = $${:home}/.local/share/bash-completion/completions/
+fish-completions = $${:home}/.config/fish/completions/

 # Promises
...
software/theia/software.cfg
...
@@ -23,11 +23,14 @@ extends =
 parts =
   theia-wrapper
   slapos-cookbook
+  python-with-eggs
   instance-theia
   instance
   instance-import
   instance-export
   instance-resilient
+  theia-common
+  theia-export

 # default for slapos-standalone
 shared-part-list =
...
@@ -353,6 +356,18 @@ template =
   #!/bin/sh
   exec ${nodejs:location}/bin/node ${theia:location}/node_modules/.bin/theia-open "$@"

+[python-with-eggs]
+recipe = zc.recipe.egg
+interpreter = ${:_buildout_section_name_}
+eggs =
+  ${slapos-toolbox:eggs}
+  six
+  zc.buildout
+# Only generate the interpreter script to avoid conflicts with scripts
+# for eggs that are also generated by another section, like slapos.toolbox
+scripts = ${:interpreter}
+
 [instance-theia]
 <= template-base
 output = ${buildout:directory}/instance-theia.cfg.jinja
...
@@ -371,3 +386,19 @@ output = ${buildout:directory}/instance-export.cfg.jinja

 [instance-resilient]
 <= download-base
+
+[theia-common]
+<= download-base
+destination = ${buildout:directory}/theia_common.py
+
+[theia-export]
+<= download-base
+destination = ${buildout:directory}/theia_export.py
+
+[software-info]
+python-with-eggs = ${buildout:bin-directory}/${python-with-eggs:interpreter}
+python = ${python:location}/bin/python
+rsync = ${rsync:location}/bin/rsync
+sqlite3 = ${sqlite3:location}/bin/sqlite3
+bash = ${bash:location}/bin/bash
+theia-export = ${theia-export:output}
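
Note: [python-with-eggs] generates only the interpreter script, and [software-info] publishes the absolute paths that instance-export.cfg.jinja.in injects into the export wrapper. A minimal smoke test of the generated interpreter could look like the sketch below; the bin path is a hypothetical example, the real one being ${buildout:bin-directory}/${python-with-eggs:interpreter}:

import subprocess

# Hypothetical interpreter location; the eggs listed in [python-with-eggs]
# should be importable from it.
interpreter = '/opt/software/bin/python-with-eggs'
subprocess.check_call([interpreter, '-c', 'import six, zc.buildout'])
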
software/theia/theia_common.py
new file mode 100644

import contextlib
import errno
import glob
import hashlib
import os
import re
import subprocess as sp
import sqlite3

import six
import zc.buildout.configparser

from slapos.util import bytes2str, str2bytes


RSYNC_FLAGS = ('-rlptgo', '--safe-links', '--stats', '--ignore-missing-args', '--delete', '--delete-excluded')

RSYNC_REGEX = '^(file has vanished: |rsync warning: some files vanished before they could be transferred)'

EXCLUDE_PATTERNS = ('*.sock', '*.socket', '*.pid', '.installed*.cfg')

EXCLUDE_FLAGS = ['--exclude={}'.format(x) for x in sorted(EXCLUDE_PATTERNS)]


def makedirs(path):
  try:
    os.makedirs(path if os.path.isdir(path) else os.path.dirname(path))
  except OSError as e:
    if e.errno != errno.EEXIST:
      raise


def copytree(rsyncbin, src, dst, exclude=[], extrargs=[], verbosity='-v'):
  # Ensure there is a trailing slash in the source directory
  # to avoid creating an additional directory level at the destination
  src = os.path.join(src, '')

  # Compute absolute path of destination
  dst = os.path.abspath(dst)

  # Create destination dir if it doesn't exist
  makedirs(dst)

  command = [rsyncbin]
  command.extend(RSYNC_FLAGS)

  # Exclude destination file from sources
  command.append('--filter=-/ {}'.format(dst))

  command.extend(EXCLUDE_FLAGS)
  command.extend(('--filter=-/ {}'.format(x) for x in sorted(exclude)))
  command.extend(extrargs)
  command.append(verbosity)
  command.append(src)
  command.append(dst)
  try:
    return sp.check_output(command, universal_newlines=True)
  except sp.CalledProcessError as e:
    # Not all rsync errors are to be considered as errors
    if e.returncode != 24 or re.search(RSYNC_REGEX, e.output, re.M) is None:
      raise
    return e.output


def copydb(sqlite3bin, src_db, dst_db):
  makedirs(dst_db)
  sp.check_output((sqlite3bin, src_db, '.backup ' + dst_db))


def remove(path):
  try:
    os.remove(path)
  except OSError:
    if os.path.exists(path):
      raise


def parse_installed(partition):
  paths = []
  custom_script = os.path.join(partition, 'srv', '.backup_identity_script')
  for cfg in glob.glob(os.path.join(partition, '.installed*.cfg')):
    try:
      with open(cfg) as f:
        installed_cfg = zc.buildout.configparser.parse(f, cfg)
    except IOError as e:
      if e.errno != errno.ENOENT:
        raise
    else:
      for section in six.itervalues(installed_cfg):
        for p in section.get('__buildout_installed__', '').splitlines():
          p = p.strip()
          if p and p != custom_script:
            paths.append(p)
  return paths


def sha256sum(file_path, chunk_size=1024 * 1024):
  sha256 = hashlib.sha256()
  with open(file_path, 'rb') as f:
    chunk = f.read(chunk_size)
    while chunk:
      sha256.update(chunk)
      chunk = f.read(chunk_size)
  return sha256.hexdigest()


def hashwalk(backup_dir, mirror_partitions):
  scripts = {}
  for p in mirror_partitions:
    script_path = os.path.join(p, 'srv', '.backup_identity_script')
    if os.path.exists(script_path):
      scripts[os.path.abspath(p)] = script_path
  for dirpath, dirnames, filenames in os.walk(backup_dir):
    filenames.sort()
    for f in filenames:
      filepath = os.path.join(dirpath, f)
      if os.path.isfile(filepath):
        displaypath = os.path.relpath(filepath, start=backup_dir)
        yield '%s %s' % (sha256sum(filepath), displaypath)
    remaining_dirnames = []
    for subdir in dirnames:
      subdirpath = os.path.abspath(os.path.join(dirpath, subdir))
      custom_hashscript = scripts.get(subdirpath)
      if custom_hashscript:
        print('Using custom signature script %s' % custom_hashscript)
        for s in hashcustom(subdirpath, backup_dir, custom_hashscript):
          yield s
      else:
        remaining_dirnames.append(subdir)
    remaining_dirnames.sort()
    dirnames[:] = remaining_dirnames


@contextlib.contextmanager
def cwd(path):
  old_path = os.getcwd()
  try:
    os.chdir(path)
    yield
  finally:
    os.chdir(old_path)


def hashcustom(mirrordir, backup_dir, custom_hashscript):
  workingdir = os.path.join(mirrordir, os.pardir, os.pardir, os.pardir)
  with cwd(os.path.abspath(workingdir)):
    for dirpath, _, filenames in os.walk(mirrordir):
      filepaths = []
      for f in filenames:
        path = os.path.join(dirpath, f)
        if os.path.isfile(path):
          filepaths.append('./' + os.path.relpath(path, start=workingdir))
      if not filepaths:
        continue
      hashprocess = sp.Popen(
        custom_hashscript, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE)
      out, err = hashprocess.communicate(str2bytes('\0'.join(filepaths)))
      if hashprocess.returncode != 0:
        template = "Custom signature script %s failed on inputs:\n%s"
        msg = template % (custom_hashscript, '\n'.join(filepaths))
        msg += "\nwith stdout:\n%s" % bytes2str(out)
        msg += "\nand stderr:\n%s" % bytes2str(err)
        raise Exception(msg)
      signatures = bytes2str(out).strip('\n').split('\n')
      signatures.sort()
      displaypath = os.path.relpath(dirpath, start=backup_dir)
      for s in signatures:
        yield '%s %s/ (custom)' % (s, displaypath)
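
Note: hashwalk() is what produces the backup signature: it yields one '<sha256> <relative path>' line per file under the backup tree, and delegates to a partition's srv/.backup_identity_script when one is present. A minimal usage sketch, with illustrative directory names only (theia_export.py derives the real ones from --backup and the slappart* partitions of the instance root):

# Illustrative paths only; not taken from a real instance.
backup_dir = '/tmp/backup/theia'
mirror_partitions = [
  '/tmp/backup/theia/srv/runner/instance/slappart0',
]
with open('/tmp/backup.signature', 'w') as f:
  for line in hashwalk(backup_dir, mirror_partitions):
    f.write(line + '\n')
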
software/theia/theia_export.py
new file mode 100644

import argparse
import glob
import itertools
import os
import sys
import time
import traceback

import six
from six.moves import configparser

sys.path.append(os.path.dirname(__file__))

from theia_common import copytree, copydb, hashwalk, parse_installed, remove


os.environ['LC_ALL'] = 'C'
os.umask(0o77)

BACKUP_WAIT = 10


def main():
  parser = argparse.ArgumentParser()
  parser.add_argument('--rsync', required=True)
  parser.add_argument('--sqlite3', required=True)
  parser.add_argument('--root', required=True)
  parser.add_argument('--backup', required=True)
  parser.add_argument('--cfg', required=True)
  parser.add_argument('--dirs', action='append')
  parser.add_argument('--exitfile', required=True)
  parser.add_argument('--errorfile', required=True)
  args = parser.parse_args()
  TheiaExport(args)()


class TheiaExport(object):

  def __init__(self, args):
    self.rsync_bin = args.rsync
    self.sqlite3_bin = args.sqlite3
    self.root_dir = args.root
    self.backup_dir = args.backup
    self.slapos_cfg = cfg = args.cfg
    self.dirs = args.dirs
    self.exit_file = args.exitfile
    self.error_file = args.errorfile
    configp = configparser.SafeConfigParser()
    configp.read(cfg)
    self.proxy_db = configp.get('slapproxy', 'database_uri')
    self.instance_dir = configp.get('slapos', 'instance_root')
    partitions = glob.glob(os.path.join(self.instance_dir, 'slappart*'))
    self.partition_dirs = [p for p in partitions if os.path.isdir(p)]
    self.copytree_partitions_args = {}
    self.logs = []

  def mirrorpath(self, src):
    return os.path.abspath(os.path.join(
      self.backup_dir, os.path.relpath(src, start=self.root_dir)))

  def backuptree(self, src, exclude=[], extrargs=[], verbosity='-v'):
    dst = self.mirrorpath(src)
    return copytree(self.rsync_bin, src, dst, exclude, extrargs, verbosity)

  def backupdb(self):
    copydb(self.sqlite3_bin, self.proxy_db, self.mirrorpath(self.proxy_db))

  def backuppartition(self, partition):
    installed = parse_installed(partition)
    rules = os.path.join(partition, 'srv', 'exporter.exclude')
    extrargs = ('--filter=.-/ ' + rules,) if os.path.exists(rules) else ()
    self.backuptree(partition, exclude=installed, extrargs=extrargs)
    self.copytree_partitions_args[partition] = (installed, extrargs)

  def sign(self, signaturefile):
    remove(signaturefile)
    pardir = os.path.abspath(os.path.join(self.backup_dir, os.pardir))
    tmpfile = os.path.join(pardir, 'backup.signature.tmp')
    mirror_partitions = [self.mirrorpath(p) for p in self.partition_dirs]
    with open(tmpfile, 'w') as f:
      for s in hashwalk(self.backup_dir, mirror_partitions):
        f.write(s + '\n')
    os.rename(tmpfile, signaturefile)

  def checkpartition(self, partition, pattern='/srv/backup/'):
    installed, extrargs = self.copytree_partitions_args[partition]
    output = self.backuptree(
      partition,
      exclude=installed,
      extrargs=extrargs + ('--dry-run', '--update'),
      verbosity='--out-format=%n',
    )
    return [path for path in output.splitlines() if pattern in path]

  def loginfo(self, msg):
    print(msg)
    self.logs.append(msg)

  def __call__(self):
    remove(self.error_file)
    exitcode = 0
    try:
      self.export()
    except Exception:
      exitcode = 1
      exc = traceback.format_exc()
      with open(self.error_file, 'w') as f:
        f.write('\n ... OK\n\n'.join(self.logs))
        f.write('\n ... ERROR !\n\n')
        f.write(exc)
      print('\n\nERROR\n\n' + exc)
    finally:
      with open(self.exit_file, 'w') as f:
        f.write(str(exitcode))
      sys.exit(exitcode)

  def export(self):
    export_start_date = int(time.time())

    etc_dir = os.path.join(self.root_dir, 'etc')
    with open(os.path.join(etc_dir, '.resilient_timestamp'), 'w') as f:
      f.write(str(export_start_date))

    self.loginfo('Backup directory ' + etc_dir)
    self.backuptree(etc_dir, extrargs=('--filter=- */', '--filter=-! .*'))

    for d in self.dirs:
      self.loginfo('Backup directory ' + d)
      self.backuptree(d)

    self.loginfo('Backup slapproxy database')
    self.backupdb()

    self.loginfo('Backup partitions')
    for p in self.partition_dirs:
      self.backuppartition(p)

    self.loginfo('Compute backup signature')
    self.sign(os.path.join(self.backup_dir, 'backup.signature'))

    time.sleep(10)
    self.loginfo('Check partitions')
    modified = list(itertools.chain.from_iterable(
      self.checkpartition(p) for p in self.partition_dirs))
    if modified:
      msg = 'Some files have been modified since the backup started'
      self.loginfo(msg + ':')
      self.loginfo('\n'.join(modified))
      self.loginfo("Let's wait %d minutes and try again" % BACKUP_WAIT)
      time.sleep(BACKUP_WAIT * 60)
      raise Exception(msg)

    self.loginfo('Done')


if __name__ == '__main__':
  main()
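
Note: whatever happens, the script reports its status through the two files given as --exitfile and --errorfile: the exit code ('0' or '1') is written to the former, and on failure the accumulated log followed by the traceback goes to the latter. A small sketch of how a caller could inspect the last run; the paths are hypothetical placeholders for the exitcode-file and error-file options rendered in instance-export.cfg.jinja.in:

import os

# Hypothetical paths for the status files written by theia_export.py.
exit_file = '/srv/slapgrid/slappart0/srv/export-exitcode-file'
error_file = '/srv/slapgrid/slappart0/srv/export-errormessage-file'

with open(exit_file) as f:
  succeeded = f.read().strip() == '0'
if not succeeded and os.path.exists(error_file):
  with open(error_file) as f:
    print(f.read())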