Cédric Le Ninivin / slapos

Commit 515e9daf
authored Jan 14, 2015 by Alain Takoudjou

monitor: Allow to generate rss for errorlog

parent 68bae853
Showing 5 changed files with 288 additions and 2 deletions

stack/monitor/buildout.cfg        +18  -1
stack/monitor/errorlog2rss.py.in  +69  -0
stack/monitor/logTools.py         +147 -0
stack/monitor/monitor.cfg.in      +47  -1
stack/monitor/monitor.py.in       +7   -0
stack/monitor/buildout.cfg
...
@@ -30,6 +30,7 @@ eggs =
  PyRSS2Gen
  Jinja2
  APacheDEX
  pyparsing
[make-rss-script]
recipe = slapos.recipe.template
...
@@ -43,7 +44,7 @@ recipe = slapos.recipe.template
url = ${:_profile_base_location_}/monitor.cfg.in
output = ${buildout:directory}/monitor.cfg
filename = monitor.cfg
md5sum = af5c10c05b03b59a49187fd3e151eb43
md5sum = 377112bd0e7e5f17f2d69950fac872ed
mode = 0644
[monitor-bin]
...
@@ -134,6 +135,22 @@ md5sum = 00b230e1dee6e7f25d2050f6f0ae39e2
filename = run-apachedex.py.in
mode = 0644
[log-tools]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename}
download-only = true
md5sum = 8add29b36060967209e54732967e8d80
filename = logTools.py
mode = 0644
[errorlog-2rss]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename}
download-only = true
md5sum = 9617c1a06ef81c5141c8b8d08a985939
filename = errorlog2rss.py.in
mode = 0644
[dcron-service]
recipe = slapos.recipe.template
url = ${template-dcron-service:output}
...
stack/monitor/errorlog2rss.py.in
new file mode 100644
#!{{ python_executable }}
# BEWARE: This file is operated by slapgrid
# BEWARE: It will be overwritten automatically

import sys
import os
import subprocess
from datetime import datetime, timedelta

type = "{{ type }}".strip()
log_file_list = {{ log_file_list }}
module_folder = "{{ log_tool_folder }}".strip()
output_folder = "{{ log_as_rss_ouptut }}".strip()
db_path = "{{ db_location }}".strip()
base_link = "{{ base_link }}".strip()
interval = {{ hour_interval }}
rotate_time = {{ rotate_time }}
limit = {{ item_limit }}

PARSE_TYPE_LIST = ['zope']

if not os.path.exists(module_folder) or not os.path.isdir(module_folder):
  print "ERROR: Could not load log parsing module. %s is not a directory. Exiting..." % module_folder
  exit(1)

if not os.path.exists(output_folder) or not os.path.isdir(output_folder):
  print "ERROR: %s is not a directory. Exiting..." % output_folder
  exit(1)

if not type in PARSE_TYPE_LIST:
  print "ERROR: Could not parse specified log file type. Exiting..."
  exit(1)

sys.path.append(module_folder)
import logTools

for name, log_path in log_file_list.items():
  if not os.path.exists(log_path):
    print "WARNING: File %s not found..." % log_path
    continue
  rss_path = os.path.join(output_folder, name)
  if not rss_path.endswith('.html'):
    rss_path += '.html'
  start_date = (datetime.now() -
                timedelta(hours=interval)).strftime('%Y-%m-%d %H:%M:%S')
  to_date = (datetime.now() -
             timedelta(hours=rotate_time)).strftime('%Y-%m-%d %H:%M:%S')
  item_list = []
  if type == 'zope':
    method = logTools.isZopeLogBeginLine
    zope_parser = logTools.getZopeParser()
    item_list = logTools.parseLog(log_path, zope_parser, method,
                                  filter_with="ERROR", start_date=start_date)
  if item_list:
    start = 0
    size = len(item_list)
    if limit and size > limit:
      start = size - limit - 1
    print "Found %s log entries newer than %s..." % (size, start_date)
    logTools.insertRssDb(db_path, item_list[start:-1], name)
    print "%s items inserted into database..." % limit
  logTools.truncateRssDb(db_path, to_date)
  print "Generating RSS entries with %s items..." % limit
  logTools.generateRSS(db_path, name, rss_path, start_date, base_link + name, limit=limit)
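Note: the {{ ... }} placeholders above are filled in by the [error-log-rss] sections added to monitor.cfg.in further down in this commit. As a rough sketch of what a rendered header could look like, where every concrete path and URL is a hypothetical example and not a value taken from this commit:

#!/opt/slapgrid/software/bin/python2.7
# Hypothetical rendering -- real values come from the buildout context below
type = "zope".strip()
log_file_list = {'zope-error-log': '/srv/slapgrid/slappart0/var/log/zope-error.log'}
module_folder = "/opt/slapgrid/software/parts/log-tools".strip()
output_folder = "/srv/slapgrid/slappart0/srv/ERRORLogAsRSS".strip()
db_path = "/srv/slapgrid/slappart0/etc/fileLog_AsRSS.db".strip()
base_link = "https://monitor.example.com/private/".strip()
interval = 1
rotate_time = 24
limit = 10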
stack/monitor/logTools.py
new file mode 100644
from pyparsing import Word, alphas, Suppress, Combine, nums, string, Optional, Regex
import os, re
import datetime
import uuid
import base64
import sqlite3
import PyRSS2Gen


def init_db(db_path):
  db = sqlite3.connect(db_path)
  c = db.cursor()
  c.executescript("""
CREATE TABLE IF NOT EXISTS rss_entry (
  name VARCHAR(25),
  datetime VARCHAR(15),
  status VARCHAR(20),
  method VARCHAR(25),
  title VARCHAR(255),
  content VARCHAR(255));
""")
  db.commit()
  db.close()


def getZopeParser():
  integer = Word(nums)
  serverDateTime = Combine(integer + "-" + integer + "-" + integer + " " +
                           integer + ":" + integer + ":" + integer + "," + integer)
  status = Word(string.uppercase, max=7, min=3)
  word = Word(alphas + nums + "@._-")
  message = Regex(".*")
  bnf = serverDateTime.setResultsName("timestamp") + status.setResultsName("statusCode") + \
        word.setResultsName("method") + message.setResultsName("content")
  return bnf


def isZopeLogBeginLine(line):
  # This expression will check if line start with a date string
  # XXX - if line match expression, then regex.group() return the date
  if not line or line.strip() == "------":
    return None
  regex = re.match(r"(^\d{2,4}-\d{2}-\d{1,2}\s+\d{2}:\d{2}:\d{2}?[,\d]+)", line)
  return regex


def parseLog(path, parserbnf, method, filter_with="ERROR", start_date="", date_format=""):
  if not os.path.exists(path):
    print "ERROR: cannot get file: %s" % path
    return []
  log_result = []
  if not date_format:
    date_format = "%Y-%m-%d %H:%M:%S,%f"
  with open(path, 'r') as logfile:
    index = 0
    for line in logfile:
      regex = method(line)
      if not regex:
        if index == 0 or line.strip() == "------":
          continue
        # Add this line to log content
        log_result[index - 1]['content'] += ("\n" + line)
      else:
        try:
          fields = parserbnf.parseString(line)
          if filter_with and not fields.statusCode == filter_with:
            continue
          if start_date and regex.group() < start_date:
            continue
          log_result.append(dict(datetime=datetime.datetime.strptime(
                                     fields.timestamp, date_format),
                                 status=fields.get('statusCode', ''),
                                 method=fields.get('method', ''),
                                 title=fields.content,
                                 content=fields.content))
          index += 1
        except Exception:
          raise
          # print "WARNING: Could not parse log line. %s\n<<%s>>" % (str(e), line)
  return log_result


def insertRssDb(db_path, entry_list, rss_name):
  init_db(db_path)
  db = sqlite3.connect(db_path)
  for entry in entry_list:
    date = entry['datetime'].strftime('%Y-%m-%d %H:%M:%S')
    db.execute("insert into rss_entry(name, datetime, status, method, title, content) values (?, ?, ?, ?, ?, ?)",
               (rss_name, date, entry['status'], entry['method'], entry['title'], entry['content']))
  db.commit()
  db.close()


def truncateRssDb(db_path, to_date):
  db = sqlite3.connect(db_path)
  db.execute("delete from rss_entry where datetime < ?", (to_date,))
  db.commit()
  db.close()


def selectRssDb(db_path, rss_name, start_date, limit=0):
  db = sqlite3.connect(db_path)
  query = "select name, datetime, status, method, title, content from rss_entry "
  query += "where name = ? and datetime >= ? order by datetime DESC "
  if limit:
    query += "limit ?"
    rows = db.execute(query, (rss_name, start_date, limit))
  else:
    rows = db.execute(query, (rss_name, start_date))
  #db.close()
  if rows:
    return rows
  return []


def generateRSS(db_path, name, rss_path, start_date, url_link, limit=0):
  items = []
  db = sqlite3.connect(db_path)
  query = "select name, datetime, status, method, title, content from rss_entry "
  query += "where name = ? and datetime >= ? order by datetime DESC "
  if limit:
    query += "limit ?"
    entry_list = db.execute(query, (name, start_date, limit))
  else:
    entry_list = db.execute(query, (name, start_date))
  for entry in entry_list:
    name, rss_date, status, method, title, content = entry
    if method:
      title = "[%s] %s" % (method, title)
    title = "[%s] %s" % (status, title)
    rss_item = PyRSS2Gen.RSSItem(
      title = title,
      link = "",
      description = content.replace('\n', '<br/>'),
      pubDate = rss_date,
      guid = PyRSS2Gen.Guid(base64.b64encode("%s, %s" % (rss_date, url_link)))
    )
    items.append(rss_item)
  db.close()
  ### Build the rss feed
  items.reverse()
  rss_feed = PyRSS2Gen.RSS2(
    title = name,
    link = url_link,
    description = name,
    lastBuildDate = datetime.datetime.utcnow(),
    items = items
  )
  with open(rss_path, 'w') as rss_ouput:
    rss_ouput.write(rss_feed.to_xml())
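As a quick illustration of what the grammar above is meant to consume, here is a minimal, hypothetical usage sketch; the sample log line is invented and is not part of this commit:

# Hypothetical Zope error-log line in the format getZopeParser() expects
sample = "2015-01-14 10:02:35,451 ERROR Zope.SiteErrorLog Exception while publishing /erp5/foo"

if isZopeLogBeginLine(sample):
  fields = getZopeParser().parseString(sample)
  print fields.timestamp    # 2015-01-14 10:02:35,451
  print fields.statusCode   # ERROR
  print fields.method       # Zope.SiteErrorLog
  print fields.content      # Exception while publishing /erp5/foo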
stack/monitor/monitor.cfg.in
...
@@ -19,6 +19,7 @@ index-filename = index.cgi
index-path = $${monitor-directory:www}/$${:index-filename}
db-path = $${monitor-directory:etc}/monitor.db
monitor-password-path = $${monitor-directory:etc}/.monitor.shadow
log-rss-directory = $${monitor-directory:log-rss-directory}
[monitor-directory]
recipe = slapos.cookbook:mkdirectory
...
@@ -53,6 +54,7 @@ monitor-result = $${:var}/monitor
apachedex-result = $${:srv}/apachedex
private-directory = $${:srv}/monitor-private
log-rss-directory = $${:srv}/ERRORLogAsRSS
[public-symlink]
recipe = cns.recipe.symlink
...
@@ -189,7 +191,51 @@ context =
recipe = plone.recipe.command
command = ln -s $${:source} $${monitor-directory:private-directory}
source =
# ErrorLog URL As RSS
[error-log-rss-base]
recipe = slapos.recipe.template:jinja2
template = ${errorlog-2rss:location}/${errorlog-2rss:filename}
rendered = $${monitor-directory:bin}/$${:script-name}
mode = 0700
extensions = jinja2.ext.do
base-link = $${monitor-parameters:url}/private/
extra-context =
context =
  raw python_executable ${buildout:directory}/bin/${extra-eggs:interpreter}
  raw log_tool_folder ${log-tools:location}
  raw db_location $${monitor-directory:etc}/$${:script-name}.db
  key log_as_rss_ouptut monitor-directory:log-rss-directory
  key base_link :base-link
  $${:extra-context}
[log-as-rss-symlink]
<= monitor-directory-access
source = $${monitor-directory:log-rss-directory}
[error-log-rss]
<= error-log-rss-base
script-name = fileLog_AsRSS
# rotate_time: Max time (in hours) before an rss entry is removed from the database
# item_limit: Max number of rss entries to generate per cycle
extra-context =
  section log_file_list error-log-list
  raw item_limit 10
  raw hour_interval 1
  raw rotate_time 24
  raw type zope
[error-log-list]
# XXX - error-log-name = FILE_PATH
[cron-entry-logAsRss]
<= cron
recipe = slapos.cookbook:cron.d
name = $${error-log-rss:script-name}
frequency = 30 * * * *
command = $${error-log-rss:rendered}
# Apache Dex
[apachedex-entries-base]
recipe = slapos.recipe.template:jinja2
template = ${run-apachedex:location}/${run-apachedex:filename}
...
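The [error-log-list] section above is left empty by this commit and is meant to be overridden by the consuming instance profile with one `name = log-file-path` entry per feed. A hypothetical example (both the key and the path below are invented):

[error-log-list]
zope-error-log = /srv/slapgrid/slappart0/var/log/zope-error.log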
stack/monitor/monitor.py.in
...
@@ -63,6 +63,13 @@ def init_db():
CREATE TABLE IF NOT EXISTS status (
  timestamp INTEGER UNIQUE,
  status VARCHAR(255));
CREATE TABLE IF NOT EXISTS rss_entry (
  name VARCHAR(255),
  timestamp INTEGER,
  status VARCHAR(255),
  method VARCHAR(255),
  title VARCHAR(255),
  content VARCHAR(255));
CREATE TABLE IF NOT EXISTS individual_status (
  timestamp INTEGER,
  status VARCHAR(255),
...