Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Support
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
S
slapos.toolbox
Project overview
Project overview
Details
Activity
Releases
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Issues
0
Issues
0
List
Boards
Labels
Milestones
Merge Requests
6
Merge Requests
6
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Analytics
Analytics
CI / CD
Repository
Value Stream
Wiki
Wiki
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
nexedi
slapos.toolbox
Commits
718c323d
Commit
718c323d
authored
Oct 10, 2019
by
Rafael Monnerat
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
wip
parent
ea3d289c
Changes
2
Show whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
101 additions
and
25 deletions
+101
-25
slapos/monitor/collect.py
slapos/monitor/collect.py
+0
-25
slapos/monitor/collect_csv_dump.py
slapos/monitor/collect_csv_dump.py
+101
-0
No files found.
slapos/monitor/collect.py
View file @
718c323d
...
@@ -65,22 +65,6 @@ class ResourceCollect(ConsumptionReportBase):
...
@@ -65,22 +65,6 @@ class ResourceCollect(ConsumptionReportBase):
# Do not try to created or update tables, access will be refused
# Do not try to created or update tables, access will be refused
self
.
db
=
Database
(
db_path
,
create
=
False
,
timeout
=
15
)
self
.
db
=
Database
(
db_path
,
create
=
False
,
timeout
=
15
)
def appendToJsonFile(file_path, content, stepback=2):
    """Append *content* as one more quoted element of the JSON list stored
    in *file_path*.

    The file is expected to end with `]}` (a list closing inside an
    object): we step back *stepback* bytes from the end, overwrite that
    tail with `,"<content>"]}` and leave the rest of the file untouched.
    """
    with open(file_path, mode="r+") as jfile:
        # Seek to end-of-file to measure the size, then back up over the
        # closing `]}` so the new element can be spliced in.
        jfile.seek(0, 2)
        jfile.seek(jfile.tell() - stepback)
        jfile.write(',"{}"]}}'.format(content))
def initDataFile(data_file, column_list):
    """(Re)create *data_file* with the initial JSON payload.

    Writes ``{"date": <now>, "data": column_list}`` so that later calls to
    appendToJsonFile() can splice new rows into the "data" list.

    BUG FIX: the original body opened the global ``process_file`` instead
    of the ``data_file`` parameter, so initialising any other file (mem,
    io, ...) clobbered the process file (or raised NameError).
    """
    with open(data_file, 'w') as fdata:
        data_dict = {
            "date": time.time(),
            "data": column_list
        }
        fdata.write(json.dumps(data_dict))
def
main
():
def
main
():
parser
=
parseArguments
()
parser
=
parseArguments
()
if
not
os
.
path
.
exists
(
parser
.
output_folder
)
and
os
.
path
.
isdir
(
parser
.
output_folder
):
if
not
os
.
path
.
exists
(
parser
.
output_folder
)
and
os
.
path
.
isdir
(
parser
.
output_folder
):
...
@@ -141,15 +125,6 @@ def main():
...
@@ -141,15 +125,6 @@ def main():
'memory_percent'
,
'memory_rss'
,
'io_rw_counter'
,
'io_cycles_counter'
,
'memory_percent'
,
'memory_rss'
,
'io_rw_counter'
,
'io_cycles_counter'
,
'disk_used'
]
'disk_used'
]
resource_status_dict
=
{}
resource_status_dict
=
{}
if
not
os
.
path
.
exists
(
process_file
)
or
os
.
stat
(
process_file
).
st_size
==
0
:
initDataFile
(
process_file
,
[
"date, total process, CPU percent, CPU time, CPU threads"
])
if
not
os
.
path
.
exists
(
mem_file
)
or
os
.
stat
(
mem_file
).
st_size
==
0
:
initDataFile
(
mem_file
,
[
"date, memory used percent, memory used"
])
if
not
os
.
path
.
exists
(
io_file
)
or
os
.
stat
(
io_file
).
st_size
==
0
:
initDataFile
(
io_file
,
[
"date, io rw counter, io cycles counter, disk used"
])
if
process_result
and
process_result
[
'total_process'
]
!=
0.0
:
if
process_result
and
process_result
[
'total_process'
]
!=
0.0
:
appendToJsonFile
(
process_file
,
", "
.
join
(
appendToJsonFile
(
process_file
,
", "
.
join
(
str
(
process_result
[
key
])
for
key
in
label_list
if
key
in
process_result
)
str
(
process_result
[
key
])
for
key
in
label_list
if
key
in
process_result
)
...
...
slapos/monitor/collect_csv_dump.py
0 → 100644
View file @
718c323d
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010-2016 Vifib SARL and Contributors.
# All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import
os
import
argparse
import
csv
from
slapos.util
import
mkdir_p
from
slapos.collect.db
import
Database
def skip_bootstrap(self):
    """No-op stand-in for Database._bootstrap.

    This dump script only reads the collect database; skipping the
    bootstrap avoids any attempt to create or update tables (the same
    read-only precaution collect.py takes — confirm against slapos.collect.db).
    """
    return None

# Monkey-patch the schema bootstrap away for every Database instance.
Database._bootstrap = skip_bootstrap
def parseArguments():
    """Parse arguments for monitor collector instance.

    Returns the argparse.Namespace carrying:
      output_folder -- destination folder for the CSV dumps (no default)
      collector_db  -- folder holding the slapos collect database
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        '--output_folder',
        help='Path of the folder where output files should be written.')
    arg_parser.add_argument(
        '--collector_db',
        default='/srv/slapgrid/var/data-log/',
        help='The path of slapos collect database is located.')
    return arg_parser.parse_args()
# slapos/monitor/collect_csv_dump.py
def writeFile(name, folder, date_scope, rows):
    """Dump *rows* into <folder>/<date_scope>/dump_<name>.csv.

    Existing dump files are never rewritten, so repeated runs are
    idempotent and cheap.

    Fixes over the original: the dump path is computed once instead of
    twice, and the file handle is managed with ``with`` so it is closed
    even if csv writing raises (the original leaked it on error).
    """
    dump_path = os.path.join(folder, "%s/dump_%s.csv" % (date_scope, name))
    if os.path.exists(dump_path):
        # File already exists, no reason to recreate it.
        return
    mkdir_p(os.path.join(folder, date_scope), 0o755)
    with open(dump_path, "w") as file_io:
        csv.writer(file_io).writerows(rows)
def dump_table_into_csv(db, folder):
    """Dump every already-reported date scope of every table in *db* into
    per-day CSV files under *folder* (see writeFile for the layout).
    """
    db.connect()
    table_list = db.getTableList()
    # Materialize all dates first, as db.select() below may switch the
    # cursor out from under a lazy iteration.  (The original spelled this
    # as an identity comprehension; list() says the same thing directly.)
    date_list = list(db.getDateScopeList(reported=1))
    for date_scope, amount in date_list:
        for table in table_list:
            if os.path.exists(os.path.join(folder, "%s/dump_%s.csv" % (date_scope, table))):
                # File already exists, no reason to recreate it.  Checking
                # here (in addition to writeFile) also skips the
                # potentially expensive db.select() call entirely.
                continue
            writeFile(table, folder, date_scope, db.select(table, date_scope))
    db.close()
if __name__ == "__main__":
    parser = parseArguments()
    if parser.output_folder is None:
        raise Exception("Invalid output folder: %s" % parser.output_folder)
    if parser.collector_db is None:
        raise Exception("Invalid collector database folder: %s" % parser.collector_db)
    # BUG FIX: the original tested `not exists(p) and isdir(p)`, which can
    # never be true (a missing path is never a directory), so bad output
    # folders were silently accepted.  Reject unless it exists AND is a
    # directory.
    if not (os.path.exists(parser.output_folder) and
            os.path.isdir(parser.output_folder)):
        raise Exception("Invalid output folder: %s" % parser.output_folder)
    if not os.path.exists(parser.collector_db):
        # Best effort as in the original: warn, then still attempt the dump.
        # (print() form works on both Python 2 and 3.)
        print("Collector database not found...")
    dump_table_into_csv(Database(parser.collector_db), parser.output_folder)
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment