slapos.toolbox
Commit 049c42b1
authored Aug 02, 2019 by Bryton Lacquement, committed by Julien Muchembled, Aug 02, 2019

fixup! Add support for Python 3

parent 31532fac

Showing 36 changed files with 207 additions and 186 deletions (+207 −186)
slapos/agent/agent.py (+3, −3)
slapos/container/__init__.py (+2, −1)
slapos/equeue/__init__.py (+15, −8)
slapos/lamp/__init__.py (+5, −5)
slapos/monitor/build_statistic.py (+1, −1)
slapos/monitor/collect.py (+3, −3)
slapos/networkbench/http.py (+1, −1)
slapos/promise/plugin/backupserver_check_backup.py (+1, −1)
slapos/promise/plugin/check_error_on_http_log.py (+1, −1)
slapos/promise/plugin/check_error_on_zope_longrequest_log.py (+1, −1)
slapos/promise/plugin/check_file_state.py (+3, −2)
slapos/promise/plugin/check_free_disk_space.py (+2, −2)
slapos/promise/plugin/check_icmp_packet_lost.py (+1, −1)
slapos/promise/plugin/check_partition_deployment_state.py (+1, −1)
slapos/promise/plugin/check_port_listening.py (+5, −7)
slapos/promise/plugin/check_re6st_optimal_status.py (+1, −1)
slapos/promise/plugin/check_server_cpu_load.py (+5, −4)
slapos/promise/plugin/check_url_available.py (+1, −1)
slapos/promise/plugin/monitor_bootstrap_status.py (+1, −1)
slapos/promise/plugin/trafficserver_cache_availability.py (+11, −11)
slapos/promise/plugin/validate_frontend_configuration.py (+10, −14)
slapos/pubsub/notifier.py (+10, −10)
slapos/resiliencytest/__init__.py (+3, −4)
slapos/resiliencytest/suites/erp5.py (+9, −8)
slapos/resiliencytest/suites/gitlab.py (+1, −1)
slapos/resiliencytest/suites/kvm.py (+3, −3)
slapos/resiliencytest/suites/resiliencytestsuite.py (+6, −5)
slapos/resiliencytest/suites/slaprunner.py (+12, −11)
slapos/resilient/runner_exporter.py (+3, −2)
slapos/runner/fileBrowser.py (+11, −11)
slapos/runner/run.py (+6, −7)
slapos/runner/utils.py (+49, −37)
slapos/runner/views.py (+9, −8)
slapos/systool.py (+2, −1)
slapos/test/monitor/testrunpromise.py (+1, −1)
slapos/zodbpack.py (+8, −7)
slapos/agent/agent.py

@@ -345,8 +345,8 @@ def main():
       # Select an unused computer to run the test.
       group = test_mapping.getNextGroup(
-        ignore_list=[group for _, _, group in \
-                      running_test_dict.itervalues()])
+        ignore_list=[group for _, _, group in
+                      six.itervalues(running_test_dict)])

       # Select a test
       test_line = test_result.start(
@@ -455,7 +455,7 @@ def main():
       logger.info('Sleeping %is...', to_sleep)
       time.sleep(to_sleep)

     if not test_result.isAlive():
-      for _, tester, computer_id in running_test_dict.itervalues():
+      for _, tester, computer_id in six.itervalues(running_test_dict):
         tester.teardown()
       time.sleep(300)
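The change above replaces the Python 2-only dict.itervalues() with six.itervalues(), which dispatches to itervalues() on Python 2 and values() on Python 3. A minimal sketch of the idiom, assuming six is installed (the dictionary contents are made up for illustration):

    import six

    # (revision, tester, group) tuples keyed by test line id -- illustrative data only
    running_test_dict = {1: ('r1', 'tester-a', 'COMP-1'),
                         2: ('r2', 'tester-b', 'COMP-2')}

    # six.itervalues() avoids building an intermediate list on Python 2
    # and simply uses the dict view on Python 3.
    groups = [group for _, _, group in six.itervalues(running_test_dict)]
    print(groups)  # ['COMP-1', 'COMP-2'] (order follows dict iteration order)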
slapos/container/__init__.py

 # -*- coding: utf-8 -*-
+from __future__ import print_function
 from six.moves import configparser
 import argparse
 from six.moves import dbm_gnu as gdbm

@@ -27,7 +28,7 @@ def main():
   if args.pid is not None:
     pid_filename = args.pid[0]
     if os.path.exists(pid_filename):
-      print >> sys.stderr, "Already running"
+      print("Already running", file=sys.stderr)
      return 127
    with open(pid_filename, 'w') as pid_file:
      pid_file.write(str(os.getpid()))
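The `print >> sys.stderr, ...` form is Python 2-only syntax; with `from __future__ import print_function` at the top of the module, the same statement becomes a function call that runs on both interpreters. A standalone sketch of the idiom:

    from __future__ import print_function

    import sys

    def warn_already_running():
        # Python 2 only:        print >> sys.stderr, "Already running"
        # Python 2 and 3:
        print("Already running", file=sys.stderr)

    warn_already_running()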
slapos/equeue/__init__.py

@@ -42,6 +42,14 @@ from six.moves import socketserver
 import io
 import threading

+try:
+  logging_levels = logging._nameToLevel
+  logging_choices = logging_levels.keys()
+except AttributeError:
+  logging_levels = logging._levelNames
+  logging_choices = [i for i in logging_levels if isinstance(i, str)]
+
 # Copied from erp5.util:erp5/util/testnode/ProcessManager.py
 def subprocess_capture(p, log, log_prefix, get_output=True):
   def readerthread(input, output, buffer):
@@ -81,7 +89,7 @@ class EqueueServer(socketserver.ThreadingUnixStreamServer):
   def __init__(self, *args, **kw):
     self.options = kw.pop('equeue_options')
-    super(EqueueServer, self).__init__(self,
+    socketserver.ThreadingUnixStreamServer.__init__(self,
                                        RequestHandlerClass=None,
                                        *args, **kw)
     # Equeue Specific elements
@@ -99,7 +107,7 @@ class EqueueServer(socketserver.ThreadingUnixStreamServer):
     self.logger = logging.getLogger("EQueue")
     handler = logging.handlers.WatchedFileHandler(logfile, mode='a')
     # Natively support logrotate
-    level = logging._levelNames.get(loglevel, logging.INFO)
+    level = logging_levels.get(loglevel, logging.INFO)
     self.logger.setLevel(level)
     formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
     handler.setFormatter(formatter)
@@ -131,7 +139,7 @@ class EqueueServer(socketserver.ThreadingUnixStreamServer):
     try:
       sys.stdout.flush()
       p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE,
-                           stderr=subprocess.PIPE)
+                           stderr=subprocess.PIPE, universal_newlines=True)
       subprocess_capture(p, self.logger.info, '', True)
       if p.returncode == 0:
         self.logger.info("%s finished successfully.", cmd_readable)
@@ -172,7 +180,7 @@ class EqueueServer(socketserver.ThreadingUnixStreamServer):
     try:
       request.send(command)
-    except:
+    except Exception:
       self.logger.warning("Couldn't respond to %r", request.fileno())
     self.close_request(request)
     self._runCommandIfNeeded(command, timestamp)
@@ -193,8 +201,7 @@ def main():
                       "calls are stored")
   parser.add_argument('--loglevel', nargs=1,
                       default='INFO',
-                      choices=[i for i in logging._levelNames
-                               if isinstance(i, str)],
+                      choices=logging_choices,
                       required=False)
   parser.add_argument('-l', '--logfile', nargs=1, required=True,
                       help="Path to the log file.")
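The try/except added at the top of this file papers over the rename of the logging module's private level table: `logging._levelNames` (Python 2) became `logging._nameToLevel` (Python 3). A hedged, self-contained sketch of how such a shim is used; note both attributes are private to the standard library, so this mirrors the module's own workaround rather than a documented API:

    import logging

    try:                                    # Python 3
        logging_levels = logging._nameToLevel
        logging_choices = logging_levels.keys()
    except AttributeError:                  # Python 2
        logging_levels = logging._levelNames
        logging_choices = [i for i in logging_levels if isinstance(i, str)]

    # Resolve a level name to its numeric value, falling back to INFO.
    level = logging_levels.get('DEBUG', logging.INFO)
    logging.basicConfig(level=level)
    logging.getLogger("EQueue").debug("level resolved to %s", level)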
slapos/lamp/__init__.py

@@ -89,19 +89,19 @@ def setup(arguments):
       time.sleep(3)
       continue
   time.sleep(timeout)
-  if arguments.has_key('delete_target'):
+  if 'delete_target' in arguments:
     delete(arguments)
-  if arguments.has_key('source'):
+  if 'source' in arguments:
     rename(arguments)
-  if arguments.has_key('script'):
+  if 'script' in arguments:
     run_script(arguments)
-  if arguments.has_key('sql_script'):
+  if 'sql_script' in arguments:
     run_sql_script(arguments)
-  if arguments.has_key('chmod_target'):
+  if 'chmod_target' in arguments:
     chmod(arguments)
   return
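`dict.has_key()` was removed in Python 3; the `in` operator is the portable replacement and works for any mapping. A small sketch with made-up data:

    arguments = {'delete_target': '/tmp/build', 'script': 'setup.sh'}

    # Python 2 only:   arguments.has_key('script')
    # Python 2 and 3:
    if 'script' in arguments:
        print('will run %s' % arguments['script'])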
slapos/monitor/build_statistic.py

@@ -33,7 +33,7 @@ def buildStatistic(history_folder):
     last_date = None
     if stats_dict["data"]:
-      if stats_dict["data"][-1].has_key("start-date"):
+      if "start-date" in stats_dict["data"][-1]:
         last_date = stats_dict["data"][-1]["start-date"]
       else:
         last_date = stats_dict["data"][-1]["date"]
slapos/monitor/collect.py

@@ -293,17 +293,17 @@ def main():
     if process_result and process_result['total_process'] != 0.0:
       appendToJsonFile(process_file, ", ".join(
-        [str(process_result[key]) for key in label_list if process_result.has_key(key)])
+        str(process_result[key]) for key in label_list if key in process_result)
       )
       resource_status_dict.update(process_result)

     if memory_result and memory_result['memory_rss'] != 0.0:
       appendToJsonFile(mem_file, ", ".join(
-        [str(memory_result[key]) for key in label_list if memory_result.has_key(key)])
+        str(memory_result[key]) for key in label_list if key in memory_result)
       )
       resource_status_dict.update(memory_result)

     if io_result and io_result['io_rw_counter'] != 0.0:
       appendToJsonFile(io_file, ", ".join(
-        [str(io_result[key]) for key in label_list if io_result.has_key(key)])
+        str(io_result[key]) for key in label_list if key in io_result)
       )
       resource_status_dict.update(io_result)
slapos/networkbench/http.py

@@ -14,7 +14,7 @@ def get_curl(buffer, url):
   result = "OK"
   try:
     curl.perform()
-  except:
+  except Exception:
     import traceback
     traceback.print_exc(file=sys.stderr)
     sys.stderr.flush()
slapos/promise/plugin/backupserver_check_backup.py

@@ -16,7 +16,7 @@ from tzlocal import get_localzone
 class RunPromise(GenericPromise):

   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # check backup ran OK every 5 minutes
     self.setPeriodicity(minute=5)
slapos/promise/plugin/check_error_on_http_log.py

@@ -8,7 +8,7 @@ import os
 @implementer(interface.IPromise)
 class RunPromise(GenericPromise):

   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # set periodicity to run the promise twice per day
     self.custom_frequency = int(self.getConfig('frequency', 720))
     self.setPeriodicity(self.custom_frequency)
slapos/promise/plugin/check_error_on_zope_longrequest_log.py

@@ -12,7 +12,7 @@ r = re.compile(br"^([0-9]+\-[0-9]+\-[0-9]+ [0-9]+\:[0-9]+\:[0-9]+)(\,[0-9]+) - (
 @implementer(interface.IPromise)
 class RunPromise(GenericPromise):

   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     self.setPeriodicity(minute=10)

   def sense(self):
slapos/promise/plugin/check_file_state.py

@@ -6,7 +6,7 @@ from slapos.grid.promise.generic import GenericPromise
 @implementer(interface.IPromise)
 class RunPromise(GenericPromise):

   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # SR can set custom periodicity
     self.setPeriodicity(float(self.getConfig('frequency', 2)))
@@ -22,7 +22,8 @@ class RunPromise(GenericPromise):
     url = self.getConfig('url').strip()
     try:
-      result = open(filename).read()
+      with open(filename) as f:
+        result = f.read()
     except Exception as e:
       self.logger.error("ERROR %r during opening and reading file %r" % (e, filename))
slapos/promise/plugin/check_free_disk_space.py

@@ -16,7 +16,7 @@ from slapos.collect.db import Database
 class RunPromise(GenericPromise):

   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # check disk space at least every 3 minutes
     self.setPeriodicity(minute=3)
@@ -131,7 +131,7 @@ class RunPromise(GenericPromise):
         min_free_size = int(min_size_str) * 1024 * 1024
     else:
       with open(disk_threshold_file, 'w') as f:
-        f.write(str(min_free_size/(1024*1024)))
+        f.write(str(min_free_size//(1024*1024)))

     if check_date:
       # testing mode
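On Python 3, `/` is always true division, so `min_free_size/(1024*1024)` would write a float such as "200.0" into the threshold file; `//` keeps the integer result on both versions. A quick illustration (the variable name comes from the hunk, the value is made up):

    min_free_size = 200 * 1024 * 1024  # bytes

    print(min_free_size / (1024 * 1024))   # 200.0 on Python 3, 200 on Python 2
    print(min_free_size // (1024 * 1024))  # 200 on both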
slapos/promise/plugin/check_icmp_packet_lost.py

@@ -8,7 +8,7 @@ from slapos.networkbench.ping import ping, ping6
 @implementer(interface.IPromise)
 class RunPromise(GenericPromise):

   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # set periodicity to run the promise twice per day
     self.custom_frequency = int(self.getConfig('frequency', 720))
     self.setPeriodicity(self.custom_frequency)
slapos/promise/plugin/check_partition_deployment_state.py

@@ -8,7 +8,7 @@ from datetime import datetime
 class RunPromise(GenericPromise):

   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     self.setPeriodicity(minute=1)

   def sense(self):
slapos/promise/plugin/check_port_listening.py

-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise
 import socket
 import sys

+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):

-  zope_interface.implements(interface.IPromise)
-
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # check port is listening at least every 2 minutes
     self.setPeriodicity(minute=2)
@@ -30,9 +28,9 @@ class RunPromise(GenericPromise):
     # self.logger.info("port connection OK")
     try:
       socket.create_connection(addr).close()
-    except (socket.herror, socket.gaierror), e:
+    except (socket.herror, socket.gaierror) as e:
       self.logger.error("ERROR hostname/port ({}) is not correct: {}".format(addr, e))
-    except (socket.error, socket.timeout), e:
+    except (socket.error, socket.timeout) as e:
       self.logger.error("ERROR while connecting to {}: {}".format(addr, e))
     else:
       self.logger.info("port connection OK ({})".format(addr))
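The `except (A, B), e:` form is a syntax error on Python 3; `except (A, B) as e:` is accepted by Python 2.6+ and 3. A minimal sketch using the same socket exceptions as the promise above (the port is an assumed-unused placeholder, so the connection is expected to fail):

    import socket

    addr = ('localhost', 65000)  # placeholder port, chosen only to trigger an error
    try:
        socket.create_connection(addr, timeout=1).close()
    except (socket.herror, socket.gaierror) as e:
        print("ERROR hostname/port ({}) is not correct: {}".format(addr, e))
    except (socket.error, socket.timeout) as e:
        print("ERROR while connecting to {}: {}".format(addr, e))
    else:
        print("port connection OK ({})".format(addr))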
slapos/promise/plugin/check_re6st_optimal_status.py

@@ -8,7 +8,7 @@ from slapos.networkbench.ping import ping, ping6
 @implementer(interface.IPromise)
 class RunPromise(GenericPromise):

   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # set periodicity to run the promise twice per day
     self.custom_frequency = int(self.getConfig('frequency', 720))
     self.setPeriodicity(self.custom_frequency)
slapos/promise/plugin/check_server_cpu_load.py

@@ -9,22 +9,23 @@ import os
 class RunPromise(GenericPromise):

   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # test load every 3 minutes
     self.setPeriodicity(minute=3)

   def checkCPULoad(self, tolerance=2.2):
     # tolerance=1.5 => accept CPU load up to 1.5 =150%
-    uptime_result = subprocess.check_output(['uptime'], universal_newlines=True)
+    uptime_result = subprocess.check_output('uptime', universal_newlines=True)
     line = uptime_result.strip().split(' ')
     load, load5, long_load = line[-3:]
     long_load = float(long_load.replace(',', '.'))
-    core_count = int(subprocess.check_output(['nproc']).strip())
+    core_count = int(subprocess.check_output('nproc').strip())
     max_load = core_count * tolerance
     if long_load > max_load:
       # display top statistics
-      top_result = subprocess.check_output(['top', '-n', '1', '-b'])
+      top_result = subprocess.check_output(('top', '-n', '1', '-b'),
+                                           universal_newlines=True)
       message = "CPU load is high: %s %s %s\n\n" % (load, load5, long_load)
       i = 0
       result_list = top_result.split('\n')
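On Python 3, `subprocess.check_output()` returns bytes unless `universal_newlines=True` (or `text=True` on 3.7+) is passed, so string operations such as `.strip().split(' ')` on the result would fail. A sketch of the difference, assuming the `uptime` utility is available on the machine:

    import subprocess

    raw = subprocess.check_output('uptime')                            # bytes on Python 3
    text = subprocess.check_output('uptime', universal_newlines=True)  # str on 2 and 3

    print(type(raw), type(text))
    load_fields = text.strip().split(' ')[-3:]  # the three load averages, as in the promise
    print(load_fields)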
slapos/promise/plugin/check_url_available.py

@@ -8,7 +8,7 @@ import requests
 @implementer(interface.IPromise)
 class RunPromise(GenericPromise):

   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # SR can set custom periodicity
     self.setPeriodicity(float(self.getConfig('frequency', 2)))
slapos/promise/plugin/monitor_bootstrap_status.py

@@ -10,7 +10,7 @@ from .util import tail_file
 class RunPromise(GenericPromise):

   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     self.setPeriodicity(minute=2)

   def sense(self):
slapos/promise/plugin/trafficserver_cache_availability.py

-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise

@@ -7,12 +7,10 @@ try:
 except ImportError:
   import subprocess

+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):

-  zope_interface.implements(interface.IPromise)
-
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     self.setPeriodicity(minute=int(self.getConfig('frequency', 5)))

   def sense(self):
@@ -31,16 +29,18 @@ class RunPromise(GenericPromise):
       self.logger.error("Wrapper %r not supported." % (wrapper,))
       return

-    process = subprocess.Popen(
+    try:
+      subprocess.check_output(
         args,
-      stdout=subprocess.PIPE,
         stderr=subprocess.STDOUT,
       )
-    result = process.communicate()[0].strip()
-    if process.returncode == 0:
-      self.logger.info("OK")
+    except subprocess.CalledProcessError as e:
+      result = e.output.strip()
+      self.logger.error(message,
+                        result if str is bytes else result.decode('utf-8', 'replace'))
     else:
-      self.logger.error(message % (result,))
+      self.logger.info("OK")

   def anomaly(self):
     """
slapos/promise/plugin/validate_frontend_configuration.py

-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise

 try:
@@ -6,12 +6,10 @@ try:
 except ImportError:
   import subprocess

+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):

-  zope_interface.implements(interface.IPromise)
-
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     # check configuration every 5 minutes (only for anomaly)
     self.setPeriodicity(minute=int(self.getConfig('frequency', 5)))
@@ -23,16 +21,14 @@ class RunPromise(GenericPromise):
     validate_script = self.getConfig('verification-script')
     if not validate_script:
       raise ValueError("'verification-script' was not set in promise parameters.")
-    process = subprocess.Popen(
-      [validate_script],
-      stdout=subprocess.PIPE,
-      stderr=subprocess.STDOUT,
-    )
-    message = process.communicate()[0]
-    if process.returncode == 0:
-      self.logger.info("OK")
+    try:
+      subprocess.check_output(validate_script, stderr=subprocess.STDOUT)
+    except subprocess.CalledProcessError as e:
+      message = e.output
+      self.logger.error(message if str is bytes else message.decode('utf-8', 'replace'))
     else:
-      self.logger.error("%s" % message)
+      self.logger.info("OK")

   def anomaly(self):
     return self._anomaly(result_count=1, failure_amount=1)
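The two promise plugins above swap `Popen` + `communicate()` + a manual `returncode` check for `subprocess.check_output()` and `CalledProcessError`, decoding the captured output when it comes back as bytes on Python 3. A standalone sketch of the pattern (the command is arbitrary and chosen to fail):

    import subprocess

    args = ('ls', '/nonexistent')  # any command that may fail
    message = "command failed with output: %s"

    try:
        subprocess.check_output(args, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        output = e.output.strip()
        # On Python 3 e.output is bytes; `str is bytes` is only True on Python 2.
        print(message % (output if str is bytes else output.decode('utf-8', 'replace')))
    else:
        print("OK")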
slapos/pubsub/notifier.py

 #!/usr/bin/env python
 # -*- coding: utf-8 -*-

+from __future__ import print_function
+
 import argparse
 import csv
 import datetime
 import json
-import httplib
+from six.moves import http_client as httplib
 import os
 import shutil
 import socket
@@ -13,8 +14,8 @@ import subprocess
 import sys
 import time
 import traceback
-import urllib2
-import urlparse
+from six.moves.urllib.request import urlopen
+from six.moves.urllib.parse import urlparse
 import uuid

 def createStatusItem(item_directory, instance_name, callback, date, link, status):
@@ -79,8 +80,7 @@ def main():
   saveStatus('STARTED')

   if args.max_run <= 0:
-    print "--max-run argument takes a strictely positive number as argument"
-    sys.exit(-1)
+    parser.error("--max-run argument takes a strictly positive number as argument")

   while args.max_run > 0:
     try:
@@ -108,7 +108,7 @@ def main():
       content.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
     ))
-  print content
+  print(content)

   # Write feed safely
   error_message = ""
@@ -128,7 +128,7 @@ def main():
       'slapos:%s' % uuid.uuid4(),
     ])
     os.rename(temp_file, args.logfile[0])
-  except Exception, e:
+  except Exception as e:
     error_message = "ERROR ON WRITING FEED - %s" % str(e)
   finally:
     try:
@@ -143,14 +143,14 @@ def main():
   if exit_code != 0:
     sys.exit(exit_code)

-  print 'Fetching %s feed...' % args.feed_url[0]
+  print('Fetching %s feed...' % args.feed_url[0])

-  feed = urllib2.urlopen(args.feed_url[0])
+  feed = urlopen(args.feed_url[0])
   body = feed.read()

   some_notification_failed = False
   for notif_url in args.notification_url:
-    notification_url = urlparse.urlparse(notif_url)
+    notification_url = urlparse(notif_url)
     notification_port = notification_url.port
     if notification_port is None:
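`urllib2` and `urlparse` no longer exist under those names on Python 3; `six.moves.urllib` exposes the same objects under one import path for both interpreters, which is the substitution used throughout this commit. A small sketch (the URL is a placeholder and the urlopen call needs network access):

    from six.moves.urllib.request import urlopen
    from six.moves.urllib.parse import urlparse

    feed_url = 'http://example.com/feed'   # placeholder
    parts = urlparse(feed_url)
    print(parts.hostname, parts.port)      # example.com None

    body = urlopen(feed_url).read()        # bytes on Python 3
    print(len(body))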
slapos/resiliencytest/__init__.py

@@ -26,6 +26,7 @@
 #
 ##############################################################################

+from __future__ import print_function
 import argparse
 import json
 import importlib
@@ -120,7 +121,7 @@ def runTestSuite(test_suite_title, test_suite_arguments, logger):
     parsed_arguments = dict(key.split('=') for key in test_suite_arguments)
     test_suite_module = importFrom(test_suite_title)
     success = test_suite_module.runTestSuite(**parsed_arguments)
-  except:
+  except Exception:
     logger.exception('Impossible to run resiliency test:')
     success = False
   return success
@@ -228,7 +229,7 @@ def runResiliencyTest():
   """
   error_message_set, exit_status = ScalabilityLauncher().run()
   for error_message in error_message_set:
-    print >> sys.stderr, 'ERROR: %s' % error_message
+    print('ERROR: %s' % error_message, file=sys.stderr)
   sys.exit(exit_status)
@@ -284,7 +285,5 @@ def runUnitTest():
                 test_count=1,
                 error_count=error_count,
                 duration=test_duration)
-  except:
-    raise
   finally:
     os.remove(fname)
slapos/resiliencytest/suites/erp5.py

@@ -33,8 +33,9 @@ import random
 import ssl
 import string
 import time
-import urllib
-import urllib2
+from six.moves.urllib.parse import quote
+from six.moves.urllib.request import HTTPBasicAuthHandler, HTTPSHandler, \
+  build_opener

 class NotHttpOkException(Exception):
   pass
@@ -50,7 +51,7 @@ class ERP5TestSuite(SlaprunnerTestSuite):
     Set inside of slaprunner the instance parameter to use to deploy erp5 instance.
     """
     p = '<?xml version="1.0" encoding="utf-8"?> <instance> <parameter id="_">{"zodb-zeo": {"backup-periodicity": "*:1/4"}, "mariadb": {"backup-periodicity": "*:1/4"}}</parameter> </instance>'
-    parameter = urllib2.quote(p)
+    parameter = quote(p)
     self._connectToSlaprunner(
       resource='saveParameterXml',
       data='software_type=default&parameter=%s' % parameter)
@@ -109,7 +110,7 @@ class ERP5TestSuite(SlaprunnerTestSuite):
       resource='/saveFileContent',
       data='file=runner_workdir%%2Finstance%%2F%s%%2Fetc%%2Fhaproxy.cfg&content=%s' % (
         haproxy_slappart,
-        urllib.quote(file_content),
+        quote(file_content),
       )
     )
@@ -133,12 +134,12 @@ class ERP5TestSuite(SlaprunnerTestSuite):
   def _connectToERP5(self, url, data=None, password=None):
     if password is None:
       password = self._getERP5Password()

-    auth_handler = urllib2.HTTPBasicAuthHandler()
+    auth_handler = HTTPBasicAuthHandler()
     auth_handler.add_password(realm='Zope', uri=url, user='zope', passwd=password)
     ssl_context = ssl._create_unverified_context()
-    opener_director = urllib2.build_opener(
+    opener_director = build_opener(
       auth_handler,
-      urllib2.HTTPSHandler(context=ssl_context)
+      HTTPSHandler(context=ssl_context)
     )
     self.logger.info('Calling ERP5 url %s' % url)
@@ -213,7 +214,7 @@ class ERP5TestSuite(SlaprunnerTestSuite):
       try:
         if "erp5" == self._getCreatedERP5SiteId():
           break
-      except:
+      except Exception:
        self.logger.info("Fail to connect to erp5.... wait a bit longer")
        pass
slapos/resiliencytest/suites/gitlab.py

@@ -167,7 +167,7 @@ class GitlabTestSuite(SlaprunnerTestSuite):
     while loop < 3:
       try:
         self._connectToGitlab(url=self.backend_url)
-      except Exception, e:
+      except Exception as e:
        if loop == 2:
          raise
        self.logger.warning(str(e))
slapos/resiliencytest/suites/kvm.py

@@ -32,7 +32,7 @@ import logging
 import random
 import string
 import time
-import urllib
+from six.moves.urllib.request import urlopen

 logger = logging.getLogger('KVMResiliencyTest')
@@ -45,7 +45,7 @@ def fetchKey(ip):
   new_key = None
   for i in range(0, 10):
     try:
-      new_key = urllib.urlopen('http://%s:10080/get' % ip).read().strip()
+      new_key = urlopen('http://%s:10080/get' % ip).read().strip()
       break
     except IOError:
       logger.error('Server in new KVM does not answer.')
@@ -148,7 +148,7 @@ class KVMTestSuite(ResiliencyTestSuite):
     for i in range(0, 60):
       failure = False
       try:
-        connection = urllib.urlopen('http://%s:10080/set?key=%s' % (self.ip, self.key))
+        connection = urlopen('http://%s:10080/set?key=%s' % (self.ip, self.key))
         if connection.getcode() is 200:
           break
         else:
slapos/resiliencytest/suites/resiliencytestsuite.py

@@ -34,7 +34,7 @@ import os
 import subprocess
 import sys
 import time
-import urllib2
+from six.moves.urllib.request import urlopen

 UNIT_TEST_ERP5TESTNODE = 'UnitTest'
@@ -85,13 +85,13 @@ class ResiliencyTestSuite(object):
     takeover_url = root_partition_parameter_dict['takeover-%s-%s-url' % (namebase, target_clone)]
     takeover_password = root_partition_parameter_dict['takeover-%s-%s-password' % (namebase, target_clone)]

     # Connect to takeover web interface
-    takeover_page_content = urllib2.urlopen(takeover_url).read()
+    takeover_page_content = urlopen(takeover_url).read()

     # Wait for importer script to be not running
     while 'Importer script(s) of backup in progress: True' in takeover_page_content:
       time.sleep(10)
-      takeover_page_content = urllib2.urlopen(takeover_url).read()
+      takeover_page_content = urlopen(takeover_url).read()

     # Do takeover
-    takeover_result = urllib2.urlopen('%s?password=%s' % (takeover_url, takeover_password)).read()
+    takeover_result = urlopen('%s?password=%s' % (takeover_url, takeover_password)).read()
     if 'Error' in takeover_result:
       raise Exception('Error while doing takeover: %s' % takeover_result)
@@ -214,7 +214,8 @@ class ResiliencyTestSuite(object):
       if 'monitor' in promise:
         continue
       try:
-        process = subprocess.check_output(os.path.join(promise_directory, promise))
+        subprocess.check_output(os.path.join(promise_directory, promise),
+                                stderr=subprocess.STDOUT)
       except subprocess.CalledProcessError as e:
         self.logger.error('ERROR : promise "%s" failed with output :\n%s', promise, e.output)
         return False
slapos/resiliencytest/suites/slaprunner.py

@@ -29,15 +29,16 @@
 from .resiliencytestsuite import ResiliencyTestSuite

 import base64
-import cookielib
+from six.moves import http_cookiejar as cookielib
 import json
 from lxml import etree
 import random
 import ssl
 import string
 import time
-import urllib2
-import urllib
+from six.moves.urllib.request import HTTPCookieProcessor, HTTPSHandler, \
+  build_opener
+from six.moves.urllib.error import HTTPError

 class NotHttpOkException(Exception):
   pass
@@ -52,9 +53,9 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
     cookie_jar = cookielib.CookieJar()
     ssl_context = ssl._create_unverified_context()
-    self._opener_director = urllib2.build_opener(
-      urllib2.HTTPCookieProcessor(cookie_jar),
-      urllib2.HTTPSHandler(context=ssl_context)
+    self._opener_director = build_opener(
+      HTTPCookieProcessor(cookie_jar),
+      HTTPSHandler(context=ssl_context)
     )
     ResiliencyTestSuite.__init__(self, *args, **kwargs)
@@ -95,7 +96,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
       if result.getcode() is not 200:
         raise NotHttpOkException(result.getcode())
       return result.read()
-    except urllib2.HTTPError:
+    except HTTPError:
       self.logger.error('Error when contacting slaprunner at URL: {}'.format(url))
       raise
@@ -164,7 +165,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
     """
     try:
       return self._connectToSlaprunner(resource='isSRReady')
-    except (NotHttpOkException, urllib2.HTTPError) as error:
+    except (NotHttpOkException, HTTPError) as error:
       # The nginx frontend might timeout before software release is finished.
       self.logger.warning('Problem occured when contacting the server: %s' % error)
       return -1
@@ -187,7 +188,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
     self.logger.info('Building the Software Release...')
     try:
       self._connectToSlaprunner(resource='runSoftwareProfile')
-    except (NotHttpOkException, urllib2.HTTPError):
+    except (NotHttpOkException, HTTPError):
       # The nginx frontend might timeout before software release is finished.
       pass
@@ -197,7 +198,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
     self.logger.info('Deploying instance...')
     try:
       self._connectToSlaprunner(resource='runInstanceProfile')
-    except (NotHttpOkException, urllib2.HTTPError):
+    except (NotHttpOkException, HTTPError):
       # The nginx frontend might timeout before someftware release is finished.
       pass
     while True:
@@ -219,7 +220,7 @@ class SlaprunnerTestSuite(ResiliencyTestSuite):
       if data['code'] == 0:
         self.logger.warning(data['result'])
-    except (NotHttpOkException, urllib2.HTTPError):
+    except (NotHttpOkException, HTTPError):
      # cloning can be very long.
      # XXX: quite dirty way to check.
      while self._connectToSlaprunner('getProjectStatus', data='project=workspace/slapos').find('On branch master') == -1:
slapos/resilient/runner_exporter.py

-from __future__ import print_function
+from __future__ import division, print_function

 import argparse
 import itertools
@@ -11,6 +11,7 @@ import time
 from datetime import datetime

 from .runner_utils import *
+from six.moves import map

 os.environ['LC_ALL'] = 'C'
 os.umask(0o77)
@@ -102,7 +103,7 @@ def getBackupFilesModifiedDuringExportList(config, export_start_date):
   export_time = time.time() - export_start_date
   # find all files that were modified during export
   modified_files = subprocess.check_output((
-    'find', 'instance', '-cmin', str(export_time / 60.), '-type', 'f', '-path', '*/srv/backup/*'
+    'find', 'instance', '-cmin', str(export_time / 60), '-type', 'f', '-path', '*/srv/backup/*'
   ))
   if not modified_files:
     return ()
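With `from __future__ import division`, `/` behaves as true division on Python 2 as well, which is why the explicit `60.` float literal can be simplified to `60` without changing the value passed to find's -cmin option. A short sketch (the number is made up):

    from __future__ import division

    export_time = 90  # seconds, illustrative value

    # Without the __future__ import, Python 2 would compute 90 / 60 == 1.
    print(export_time / 60)        # 1.5 on both Python 2 and 3
    print(str(export_time / 60))   # the string handed to find's -cmin above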
slapos/runner/fileBrowser.py

@@ -7,7 +7,7 @@ import md5
 import os
 import re
 import shutil
-import urllib
+from six.moves.urllib.parse import unquote
 import zipfile
 import fnmatch
@@ -22,7 +22,7 @@ class FileBrowser(object):
     self.config = config

   def _realdir(self, dir):
-    realdir = realpath(self.config, urllib.unquote(dir))
+    realdir = realpath(self.config, unquote(dir))
     if not realdir:
       raise NameError('Could not load directory %s: Permission denied' % dir)
     return realdir
@@ -45,7 +45,7 @@ class FileBrowser(object):
     """List elements of directory 'dir' taken"""
     html = 'var gsdirs = [], gsfiles = [];'
-    dir = urllib.unquote(dir)
+    dir = unquote(dir)
     # XXX-Marco 'dir' and 'all' should not shadow builtin names
     realdir = realpath(self.config, dir)
     if not realdir:
@@ -74,7 +74,7 @@ class FileBrowser(object):
     return html

   def fancylistDirs(self, dir, key, listfiles, all=False):
-    dir = urllib.unquote(dir)
+    dir = unquote(dir)
     realdir = realpath(self.config, dir)
     if not realdir:
       raise NameError('Could not load directory %s: Permission denied' % dir)
@@ -106,7 +106,7 @@ class FileBrowser(object):
     realdir = self._realdir(dir)
     folder = os.path.join(realdir, filename)
     if not os.path.exists(folder):
-      os.mkdir(folder, 0744)
+      os.mkdir(folder, 0o744)
       return "{result: '1'}"
     else:
       return "{result: '0'}"
@@ -125,7 +125,7 @@ class FileBrowser(object):
     """Delete a list of files or directories"""
     # XXX-Marco do not shadow 'dir'
     realdir = self._realdir(dir)
-    lfiles = urllib.unquote(files).split(',,,')
+    lfiles = unquote(files).split(',,,')
     try:
       # XXX-Marco do not shadow 'file'
       for item in lfiles:
@@ -147,7 +147,7 @@ class FileBrowser(object):
   def copyItem(self, dir, files, del_source=False):
     """Copy a list of files or directory to dir"""
     realdir = self._realdir(dir)
-    lfiles = urllib.unquote(files).split(',,,')
+    lfiles = unquote(files).split(',,,')
     try:
       # XXX-Marco do not shadow 'file'
       for file in lfiles:
@@ -174,7 +174,7 @@ class FileBrowser(object):
   def rename(self, dir, filename, newfilename):
     """Rename file or directory to dir/filename"""
     realdir = self._realdir(dir)
-    realfile = realpath(self.config, urllib.unquote(filename))
+    realfile = realpath(self.config, unquote(filename))
     if not realfile:
       raise NameError('Could not load directory %s: Permission denied' % filename)
     tofile = os.path.join(realdir, newfilename)
@@ -208,7 +208,7 @@ class FileBrowser(object):
   def downloadFile(self, dir, filename):
     """Download file dir/filename"""
     realdir = self._realdir(dir)
-    file = os.path.join(realdir, urllib.unquote(filename))
+    file = os.path.join(realdir, unquote(filename))
     if not os.path.exists(file):
       raise NameError('NOT ALLOWED OPERATION : File or directory does not exist %s'
                       % os.path.join(dir, filename))
@@ -255,8 +255,8 @@ class FileBrowser(object):
   def readFile(self, dir, filename, truncate=False):
     """Read file dir/filename and return content"""
-    realfile = realpath(self.config, os.path.join(urllib.unquote(dir),
-                                                  urllib.unquote(filename)))
+    realfile = realpath(self.config, os.path.join(unquote(dir),
+                                                  unquote(filename)))
     if not realfile:
       raise NameError('Could not load directory %s: Permission denied' % dir)
     if not isText(realfile):
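Two small incompatibilities recur in this file: the Python 2-only octal literal `0744` (Python 3 requires the `0o744` spelling) and `urllib.unquote`, which moved to `urllib.parse` and is reachable on both versions through `six.moves`. A hedged sketch, using a temporary directory so it is safe to run:

    import os
    import tempfile
    from six.moves.urllib.parse import unquote

    print(unquote('my%20folder'))        # 'my folder'

    path = os.path.join(tempfile.mkdtemp(), 'sub')
    os.mkdir(path, 0o744)                # writing 0744 here is a SyntaxError on Python 3
    # The effective mode is still subject to the process umask.
    print(oct(os.stat(path).st_mode & 0o777))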
slapos/runner/run.py

@@ -2,13 +2,12 @@
 # vim: set et sts=2:
 # pylint: disable-msg=W0311,C0301,C0103,C0111,R0904,R0903

-import ConfigParser
+from six.moves import configparser
 import datetime
 import flask
 import logging
 import logging.handlers
 import os
-import urlparse

 from slapos.htpasswd import HtpasswdFile
 from slapos.runner.process import setHandler
 import sys
@@ -36,7 +35,7 @@ class Config:
     self.configuration_file_path = os.path.abspath(os.getenv('RUNNER_CONFIG'))

     # Load configuration file
-    configuration_parser = ConfigParser.SafeConfigParser()
+    configuration_parser = configparser.SafeConfigParser()
     configuration_parser.read(self.configuration_file_path)

     for section in ("slaprunner", "slapos", "slapproxy", "slapformat",
@@ -144,7 +143,7 @@ def serve(config):
       result = cloneRepo(repo_url, repository_path)
       if branch_name:
         switchBranch(repository_path, branch_name)
-    except GitCommandError, e:
+    except GitCommandError as e:
       app.logger.warning('Error while cloning default repository: %s' % str(e))
       traceback.print_exc()

   # Start slapproxy here when runner is starting
@@ -152,10 +151,10 @@ def serve(config):
   startProxy(app.config)

   app.logger.info('Running slapgrid...')

   if app.config['auto_deploy_instance'] in TRUE_VALUES:
-    import thread
+    from six.moves import _thread
     # XXX-Nicolas: Hack to be sure that supervisord has started
     # before any communication with it, so that gunicorn doesn't exit
-    thread.start_new_thread(waitForRun, (app.config,))
+    _thread.start_new_thread(waitForRun, (app.config,))

   config.logger.info('Done.')
   app.wsgi_app = ProxyFix(app.wsgi_app)
@@ -166,7 +165,7 @@ def waitForRun(config):

 def getUpdatedParameter(self, var):
-  configuration_parser = ConfigParser.SafeConfigParser()
+  configuration_parser = configparser.SafeConfigParser()
   configuration_file_path = os.path.abspath(os.getenv('RUNNER_CONFIG'))
   configuration_parser.read(configuration_file_path)
slapos/runner/utils.py

@@ -55,7 +55,7 @@ def saveBuildAndRunParams(config, params):
   about how you got the parameters"""
   json_file = os.path.join(config['etc_dir'], 'config.json')
   with open(json_file, "w") as f:
-    f.write(json.dumps(params))
+    json.dump(params, f)

 def html_escape(text):
@@ -70,7 +70,8 @@ def getSession(config):
   """
   user_path = os.path.join(config['etc_dir'], '.htpasswd')
   if os.path.exists(user_path):
-    return open(user_path).read().split(';')
+    with open(user_path) as f:
+      return f.read().split(';')

 def checkUserCredential(config, username, password):
   htpasswdfile = os.path.join(config['etc_dir'], '.htpasswd')
@@ -125,8 +126,8 @@ def getCurrentSoftwareReleaseProfile(config):
   Returns used Software Release profile as a string.
   """
   try:
-    software_folder = open(
-        os.path.join(config['etc_dir'], ".project")).read().rstrip()
+    with open(os.path.join(config['etc_dir'], ".project")) as f:
+      software_folder = f.read().rstrip()
     return realpath(
         config, os.path.join(software_folder, config['software_profile']))
   # XXXX No Comments
@@ -141,9 +142,11 @@ def requestInstance(config, software_type=None):
   software_type_path = os.path.join(config['etc_dir'], ".software_type.xml")
   if software_type:
     # Write it to conf file for later use
-    open(software_type_path, 'w').write(software_type)
+    with open(software_type_path, 'w') as f:
+      f.write(software_type)
   elif os.path.exists(software_type_path):
-    software_type = open(software_type_path).read().rstrip()
+    with open(software_type_path) as f:
+      software_type = f.read().rstrip()
   else:
     software_type = 'default'
@@ -261,7 +264,8 @@ def slapgridResultToFile(config, step, returncode, datetime):
   filename = step + "_info.json"
   file = os.path.join(config['runner_workdir'], filename)
   result = {'last_build': datetime, 'success': returncode}
-  open(file, "w").write(json.dumps(result))
+  with open(file, "w") as f:
+    json.dump(result, f)

 def getSlapgridResult(config, step):
@@ -359,7 +363,8 @@ def config_SR_folder(config):
   for path in os.listdir(config['software_link']):
     cfg_path = os.path.join(config['software_link'], path, config_name)
     if os.path.exists(cfg_path):
-      cfg = open(cfg_path).read().split("#")
+      with open(cfg_path) as f:
+        cfg = f.read().split("#")
       if len(cfg) != 2:
         continue  # there is a broken config file
       software_link_list.append(cfg[1])
@@ -382,7 +387,8 @@ def loadSoftwareRList(config):
   for path in os.listdir(config['software_link']):
     cfg_path = os.path.join(config['software_link'], path, config_name)
     if os.path.exists(cfg_path):
-      cfg = open(cfg_path).read().split("#")
+      with open(cfg_path) as f:
+        cfg = f.read().split("#")
      if len(cfg) != 2:
        continue  # there is a broken config file
      sr_list.append(dict(md5=cfg[1], path=cfg[0], title=path))
@@ -409,7 +415,8 @@ def getProfilePath(projectDir, profile):
   """
   if not os.path.exists(os.path.join(projectDir, ".project")):
     return False
-  projectFolder = open(os.path.join(projectDir, ".project")).read()
+  with open(os.path.join(projectDir, ".project")) as f:
+    projectFolder = f.read()
   return os.path.join(projectFolder, profile)
@@ -438,7 +445,7 @@ def svcStopAll(config):
   try:
     return Popen([config['slapos'], 'node', 'supervisorctl', '--cfg',
                   config['configuration_file_path'], 'stop', 'all']).communicate()[0]
-  except:
+  except Exception:
     pass

 def svcStartAll(config):
@@ -446,7 +453,7 @@ def svcStartAll(config):
   try:
     return Popen([config['slapos'], 'node', 'supervisorctl', '--cfg',
                   config['configuration_file_path'], 'start', 'all']).communicate()[0]
-  except:
+  except Exception:
     pass

 def removeInstanceRootDirectory(config):
@@ -567,7 +574,8 @@ def configNewSR(config, projectpath):
     sup_process.stopProcess(config, 'slapgrid-sr')
     logger.warning("User opened a new SR. Removing all instances...")
     removeCurrentInstance(config)
-    open(os.path.join(config['etc_dir'], ".project"), 'w').write(projectpath)
+    with open(os.path.join(config['etc_dir'], ".project"), 'w') as f:
+      f.write(projectpath)
     return True
   else:
     return False
@@ -593,12 +601,15 @@ def newSoftware(folder, config, session):
     softwareContent = ""
     try:
       softwareContent = urlopen(software).read()
-    except:
+    except Exception:
       #Software.cfg and instance.cfg content will be empty
       pass
-    open(os.path.join(folderPath, config['software_profile']), 'w').write(softwareContent)
-    open(os.path.join(folderPath, config['instance_profile']), 'w').write("")
-    open(os.path.join(basedir, ".project"), 'w').write(folder + "/")
+    with open(os.path.join(folderPath, config['software_profile']), 'w') as f:
+      f.write(softwareContent)
+    with open(os.path.join(folderPath, config['instance_profile']), 'w') as f:
+      pass
+    with open(os.path.join(basedir, ".project"), 'w') as f:
+      f.write(folder + "/")
     #Clean sapproxy Database
     stopProxy(config)
     removeProxyDb(config)
@@ -633,7 +644,8 @@ def getProjectTitle(config):
   if instance_name:
     instance_name = '%s - ' % instance_name
   if os.path.exists(conf):
-    project = open(conf, "r").read().split("/")
+    with open(conf, "r") as f:
+      project = f.read().split("/")
     software = project[-2]
     return '%s%s (%s)' % (instance_name, software, '/'.join(project[:-2]))
   return "%sNo Profile" % instance_name
@@ -643,7 +655,8 @@ def getSoftwareReleaseName(config):
   """Get the name of the current Software Release"""
   sr_profile = os.path.join(config['etc_dir'], ".project")
   if os.path.exists(sr_profile):
-    project = open(sr_profile, "r").read().split("/")
+    with open(sr_profile, "r") as f:
+      project = f.read().split("/")
     software = project[-2]
     return software.replace(' ', '_')
   return None
@@ -731,7 +744,7 @@ def readFileFrom(f, lastPosition, limit=20000):
   # XXX-Marco do now shadow 'bytes'
   bytes = f.tell()
   block = -1
-  data = ""
+  data = b""
   length = bytes
   truncated = False  # True if a part of log data has been truncated
   if (lastPosition <= 0 and length > limit) or (length - lastPosition > limit):
@@ -753,7 +766,6 @@ def readFileFrom(f, lastPosition, limit=20000):
       data = f.read(BUFSIZ - margin) + data
       bytes -= BUFSIZ
       block -= 1
-  f.close()
   return {
     'content': data,
     'position': length,
@@ -761,16 +773,14 @@ def readFileFrom(f, lastPosition, limit=20000):
   }

+text_range = str2bytes(''.join(map(chr, [7, 8, 9, 10, 12, 13, 27]
+                                        + list(range(0x20, 0x100)))))

 def isText(file):
   """Return True if the mimetype of file is Text"""
   if not os.path.exists(file):
     return False
-  text_range = str2bytes(''.join(map(chr, [7, 8, 9, 10, 12, 13, 27]
-                                          + list(range(0x20, 0x100)))))
-  is_binary_string = lambda bytes: bool(bytes.translate(None, text_range))
   try:
-    return not is_binary_string(open(file).read(1024))
-  except:
+    with open(file, 'rb') as f:
+      return not f.read(1024).translate(None, text_range)
+  except Exception:
     return False
@@ -780,15 +790,15 @@ def md5sum(file):
   if os.path.isdir(file):
     return False
   try:
-    fh = open(file, 'rb')
     m = hashlib.md5()
+    with open(file, 'rb') as fh:
       while True:
         data = fh.read(8192)
         if not data:
           break
         m.update(data)
     return m.hexdigest()
-  except:
+  except Exception:
     return False
@@ -949,10 +959,12 @@ def setMiniShellHistory(config, command):
   command = command + "\n"
   history_file = config['minishell_history_file']
   if os.path.exists(history_file):
-    history = open(history_file, 'r').readlines()
+    with open(history_file, 'r') as f:
+      history = f.readlines()
     if len(history) >= history_max_size:
       del history[0]
   else:
     history = []
   history.append(command)
-  open(history_file, 'w+').write(''.join(history))
+  with open(history_file, 'w') as f:
+    f.write(''.join(history))
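Most of the changes in this file wrap bare `open(...).read()` / `.write()` calls in `with` blocks, so the file handle is closed deterministically on Python 3, where it is otherwise only closed when the object is garbage-collected. A sketch of the before/after idiom with made-up paths:

    import json
    import os
    import tempfile

    json_file = os.path.join(tempfile.mkdtemp(), 'config.json')
    params = {'run': True}

    # Old style: relies on garbage collection to flush and close the handle.
    # open(json_file, 'w').write(json.dumps(params))

    # New style: closes the handle as soon as the block exits.
    with open(json_file, 'w') as f:
        json.dump(params, f)

    with open(json_file) as f:
        print(json.load(f))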
slapos/runner/views.py

@@ -15,6 +15,7 @@ from flask import (Flask, request, redirect, url_for, render_template,
                    g, flash, jsonify, session, abort, send_file)

 import slapos
+from slapos.util import bytes2str
 from slapos.runner.utils import (checkSoftwareFolder, configNewSR, checkUserCredential,
                                  createNewUser, getBuildAndRunParams,
                                  getProfilePath, getSlapgridResult,
@@ -253,13 +254,13 @@ def getFileLog():
       raise IOError
     if not isText(file_path):
       content = "Can not open binary file, please select a text file!"
-    if 'truncate' in request.form:
-      content = tail(open(file_path), int(request.form['truncate']))
-    else:
-      with open(file_path) as f:
+    with open(file_path) as f:
+      if 'truncate' in request.form:
+        content = tail(f, int(request.form['truncate']))
+      else:
         content = f.read()
     return jsonify(code=1, result=html_escape(content))
-  except:
+  except Exception:
     return jsonify(code=0, result="Warning: Log file doesn't exist yet or empty log!!")
@@ -505,8 +506,8 @@ def slapgridResult():
   if request.form['log'] in ['software', 'instance']:
     log_file = request.form['log'] + "_log"
     if os.path.exists(app.config[log_file]):
-      log_result = readFileFrom(open(app.config[log_file]),
-                                int(request.form['position']))
+      with open(app.config[log_file], 'rb') as f:
+        log_result = bytes2str(readFileFrom(f, int(request.form['position'])))
   build_result = getSlapgridResult(app.config, 'software')
   run_result = getSlapgridResult(app.config, 'instance')
   software_info = {'state': software_state,
@@ -717,7 +718,7 @@ def fileBrowser():
                                           filename)
     try:
       return send_file(result, attachment_filename=filename, as_attachment=True)
-    except:
+    except Exception:
       abort(404)
   elif opt == 9:
     result = file_request.readFile(dir, filename, False)
slapos/systool.py

@@ -4,6 +4,7 @@ import argparse
 import sys
 import os
 import signal
+from six.moves import map

 def killpidfromfile():
   """deprecated: use below kill() instead"""
@@ -50,7 +51,7 @@ def kill():
       parser.error('Unknown signal name %s' % args.signal)

   pid = args.pidfile and [int(open(p).read()) for p in args.pidfile]
-  exe = args.exe and map(os.path.realpath, args.exe)
+  exe = args.exe and list(map(os.path.realpath, args.exe))
   import psutil
   r = 1
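On Python 3, `map()` returns a lazy iterator, so code that expects a list (indexing, repeated iteration, truthiness after an `and` chain as above) needs an explicit `list()`; importing `map` from `six.moves` makes Python 2 behave the same way. A small sketch with placeholder paths:

    from six.moves import map

    import os.path

    exe = ['/bin/ls', '/usr/bin/env']   # placeholder executables

    lazy = map(os.path.realpath, exe)          # an iterator on both versions now
    resolved = list(map(os.path.realpath, exe))

    print(resolved)     # a real list, safe to reuse and index
    print(list(lazy))   # the bare iterator can only be consumed once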
slapos/test/monitor/testrunpromise.py

@@ -93,7 +93,7 @@ from slapos.grid.promise import GenericPromise
 class RunPromise(GenericPromise):
   def __init__(self, config):
-    GenericPromise.__init__(self, config)
+    super(RunPromise, self).__init__(config)
     self.setPeriodicity(minute=%(periodicity)s)

   def sense(self):
slapos/zodbpack.py

+from __future__ import print_function
+
 import ZODB.FileStorage
 import ZODB.serialize
 import argparse

@@ -16,24 +17,24 @@ def run():
   point = now - (3600 * 24 * args.days)
-  print 'Now is %s' % time.asctime(time.localtime(now))
-  print 'Will pack until %s' % time.asctime(time.localtime(point))
+  print('Now is', time.asctime(time.localtime(now)))
+  print('Will pack until', time.asctime(time.localtime(point)))
   failures = 0
   for f in args.files:
     b = time.time()
-    print 'Trying to pack %r' % f
+    print('Trying to pack %r' % f)
     try:
       pack(point, f)
     except Exception:
-      print 'Failed to pack %r:' % f
+      print('Failed to pack %r:' % f)
       traceback.print_exc()
       failures += 1
-    print 'Finished %s in %.3fs' % (f, time.time() - b)
+    print('Finished %s in %.3fs' % (f, time.time() - b))
   if failures:
-    print 'Failed files: %s' % failures
+    print('Failed files: %s' % failures)
     return failures
   else:
-    print 'All files sucessfully packed.'
+    print('All files sucessfully packed.')
     return 0

 def pack(point, f):