Commit 3c581963 authored by Alain Takoudjou

move slapos monitor script to slapos.toolbox

parent 5e334c04
@@ -45,6 +45,7 @@ setup(name=name,
'GitPython', #needed for git manipulation into slaprunner
'passlib',
'netifaces',
'PyRSS2Gen',
] + additional_install_requires,
extras_require = {
'lampconfigure': ["mysqlclient"], #needed for MySQL Database access
@@ -72,6 +73,12 @@ setup(name=name,
'is-local-tcp-port-opened = slapos.promise.is_local_tcp_port_opened:main',
'is-process-older-than-dependency-set = slapos.promise.is_process_older_than_dependency_set:main',
'killpidfromfile = slapos.systool:killpidfromfile', # BBB
'monitor.bootstrap = slapos.monitor.monitor:main',
'monitor.collect = slapos.monitor.collect:main',
'monitor.runpromise = slapos.monitor.runpromise:main',
'monitor.genstatus = slapos.monitor.globalstate:main',
'monitor.genrss = slapos.monitor.status2rss:main',
'monitor.genconfig = slapos.monitor.monitor_gen_update_config:main',
'runResiliencyUnitTestTestNode = slapos.resiliencytest:runUnitTest',
'runResiliencyScalabilityTestNode = slapos.resiliencytest:runResiliencyTest',
'runStandaloneResiliencyTest = slapos.resiliencytest:runStandaloneResiliencyTest',
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010-2014 Vifib SARL and Contributors.
# All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import sqlite3
import os
import pwd
import time
import json
import argparse
import psutil
from time import strftime
from datetime import datetime, timedelta
def parseArguments():
"""
Parse arguments for monitor collector instance.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--output_folder',
help='Path of the folder where output files should be written.')
parser.add_argument('--partition_id',
help='ID of the computer partition to collect data from.')
parser.add_argument('--collector_db',
help='The path of slapos collect database.')
return parser.parse_args()
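# Example invocation via the console script declared in setup.py
# (all paths below are hypothetical; adjust to the actual partition layout):
#   monitor.collect --output_folder /srv/monitor/private/data \
#     --partition_id slappart0 --collector_db /srv/slapos/var/data-log/collector.db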
class ResourceCollect:
def __init__(self, db_path = None):
assert os.path.exists(db_path) and os.path.isfile(db_path)
self.uri = db_path
self.connection = None
self.cursor = None
def connect(self):
self.connection = sqlite3.connect(self.uri)
self.cursor = self.connection.cursor()
def close(self):
assert self.connection is not None
self.cursor.close()
self.connection.close()
def _execute(self, sql):
assert self.connection is not None
return self.cursor.execute(sql)
def select(self, table, date=None, columns="*", where=None):
""" Query database for a full table information """
if date is not None:
where_clause = " WHERE date = '%s' " % date
else:
where_clause = ""
if where is not None:
if where_clause == "":
where_clause += " WHERE 1 = 1 "
where_clause += " AND %s " % where
select_sql = "SELECT %s FROM %s %s " % (columns, table, where_clause)
return self._execute(select_sql)
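# For illustration, select("user", date="2015-06-01", columns="SUM(cpu_percent)",
# where="partition = 'slappart0'") issues roughly:
#   SELECT SUM(cpu_percent) FROM user WHERE date = '2015-06-01' AND partition = 'slappart0'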
def has_table(self, name):
self.connect()
check_result_cursor = self.select(
table="sqlite_master",
columns='name',
where="type='table' AND name='%s'" % name)
table_exists_result = zip(*check_result_cursor)
if not len(table_exists_result) or table_exists_result[0][0] is None:
return False
return True
def getPartitionCPULoadAverage(self, partition_id, date_scope):
self.connect()
query_result_cursor = self.select("user", date_scope,
columns="SUM(cpu_percent)",
where="partition = '%s'" % partition_id)
cpu_percent_sum = zip(*query_result_cursor)
if len(cpu_percent_sum) and cpu_percent_sum[0][0] is None:
return
query_result_cursor = self.select("user", date_scope,
columns="COUNT(DISTINCT time)",
where="partition = '%s'" % partition_id)
sample_amount = zip(*query_result_cursor)
self.close()
if len(sample_amount) and len(cpu_percent_sum):
return round(cpu_percent_sum[0][0]/sample_amount[0][0], 2)
def getPartitionUsedMemoryAverage(self, partition_id, date_scope):
self.connect()
query_result_cursor = self.select("user", date_scope,
columns="SUM(memory_rss)",
where="partition = '%s'" % partition_id)
memory_sum = zip(*query_result_cursor)
if len(memory_sum) and memory_sum[0][0] is None:
return
query_result_cursor = self.select("user", date_scope,
columns="COUNT(DISTINCT time)",
where="partition = '%s'" % partition_id)
sample_amount = zip(*query_result_cursor)
self.close()
if len(sample_amount) and len(memory_sum):
return round(memory_sum[0][0]/(sample_amount[0][0]*1024*1024.0), 2)
def getPartitionDiskUsedAverage(self, partition_id, date_scope):
if not self.has_table('folder'):
return
self.connect()
query_result_cursor = self.select("folder", date_scope,
columns="SUM(disk_used)",
where="partition = '%s'" % partition_id)
disk_used_sum = zip(*query_result_cursor)
if len(disk_used_sum) and disk_used_sum[0][0] is None:
return
query_result_cursor = self.select("folder", date_scope,
columns="COUNT(DISTINCT time)",
where="partition = '%s'" % partition_id)
collect_amount = zip(*query_result_cursor)
self.close()
if len(collect_amount) and len(disk_used_sum):
return round(disk_used_sum[0][0]/(collect_amount[0][0]*1024.0), 2)
def getPartitionConsumption(self, partition_id, where=""):
"""
Query the collector db for the resources consumed during the last minute
"""
self.connect()
consumption_list = []
if where != "":
where = "and %s" % where
date_scope = datetime.now().strftime('%Y-%m-%d')
min_time = (datetime.now() - timedelta(minutes=1)).strftime('%H:%M:00')
max_time = (datetime.now() - timedelta(minutes=1)).strftime('%H:%M:59')
sql_query = """select count(pid), SUM(cpu_percent) as cpu_result, SUM(cpu_time),
MAX(cpu_num_threads), SUM(memory_percent), SUM(memory_rss), pid, SUM(io_rw_counter),
SUM(io_cycles_counter) from user
where date='%s' and partition='%s' and (time between '%s' and '%s') %s
group by pid order by cpu_result desc""" % (
date_scope, partition_id, min_time, max_time, where)
query_result = self._execute(sql_query)
for result in query_result:
count = int(result[0])
if not count > 0:
continue
resource_dict = {
'pid': result[6],
'cpu_percent': round(result[1]/count, 2),
'cpu_time': round((result[2] or 0)/(60.0), 2),
'cpu_num_threads': round(result[3]/count, 2),
'memory_percent': round(result[4]/count, 2),
'memory_rss': round((result[5] or 0)/(1024*1024.0), 2),
'io_rw_counter': round(result[7]/count, 2),
'io_cycles_counter': round(result[8]/count, 2)
}
try:
pprocess = psutil.Process(int(result[6]))
except psutil.NoSuchProcess:
pass
else:
resource_dict['name'] = pprocess.name()
resource_dict['command'] = pprocess.cmdline()
resource_dict['user'] = pprocess.username()
resource_dict['date'] = datetime.fromtimestamp(pprocess.create_time()).strftime("%Y-%m-%d %H:%M:%S")
consumption_list.append(resource_dict)
self.close()
return consumption_list
def getPartitionConsumptionStatus(self, partition_id, where=""):
self.connect()
if where != "":
where = " and %s" % where
date_scope = datetime.now().strftime('%Y-%m-%d')
min_time = (datetime.now() - timedelta(minutes=1)).strftime('%H:%M:00')
max_time = (datetime.now() - timedelta(minutes=1)).strftime('%H:%M:59')
sql_query = """select count(pid), SUM(cpu_percent), SUM(cpu_time),
SUM(cpu_num_threads), SUM(memory_percent), SUM(memory_rss), SUM(io_rw_counter),
SUM(io_cycles_counter) from user where
date='%s' and partition='%s' and (time between '%s' and '%s') %s""" % (
date_scope, partition_id, min_time, max_time, where)
query_result = self._execute(sql_query)
result_list = zip(*query_result)
process_dict, memory_dict, io_dict = {}, {}, {}
if len(result_list):
result = result_list
process_dict = {'total_process': result[0][0],
'cpu_percent': round((result[1][0] or 0), 2),
'cpu_time': round((result[2][0] or 0)/(60.0), 2),
'cpu_num_threads': round((result[3][0] or 0), 2),
'date': '%s %s' % (date_scope, min_time)
}
memory_dict = {'memory_percent': round((result[4][0] or 0), 2),
'memory_rss': round((result[5][0] or 0)/(1024*1024.0), 2),
'date': '%s %s' % (date_scope, min_time)
}
io_dict = {'io_rw_counter': round((result[6][0] or 0), 2),
'io_cycles_counter': round((result[7][0] or 0), 2),
'disk_used': 0,
'date': '%s %s' % (date_scope, min_time)
}
if self.has_table('folder'):
disk_result_cursor = self.select(
"folder", date_scope,
columns="SUM(disk_used)",
where="partition='%s' and (time between '%s' and '%s') %s" % (
partition_id, min_time, max_time, where
)
)
disk_used_sum = zip(*disk_result_cursor)
if len(disk_used_sum) and disk_used_sum[0][0] is not None:
io_dict['disk_used'] = round(disk_used_sum[0][0]/1024.0, 2)
self.close()
return (process_dict, memory_dict, io_dict)
def appendToJsonFile(file_path, content, stepback=2):
with open (file_path, mode="r+") as jfile:
jfile.seek(0, 2)
position = jfile.tell() - stepback
jfile.seek(position)
jfile.write(',"%s"]}' % content)
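# The seek/write pair above appends to a JSON document of the form
# {"date": ..., "data": [...]} in place: jump to EOF, step back over the
# trailing ']}' (stepback=2), then overwrite it with ',"<content>"]}'.
# Sketch of the transformation on a hypothetical file:
#   before: {"date": 1, "data": ["header"]}
#   after appendToJsonFile(path, "a, b"): {"date": 1, "data": ["header","a, b"]}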
def initProcessDataFile(file_path):
with open(file_path, 'w') as fprocess:
data_dict = {
"date": time.time(),
"data": ["date, total process, CPU percent, CPU time, CPU threads"]
}
fprocess.write(json.dumps(data_dict))
def initMemoryDataFile(file_path):
with open(file_path, 'w') as fmem:
data_dict = {
"date": time.time(),
"data": ["date, memory used percent, memory used"]
}
fmem.write(json.dumps(data_dict))
def initIODataFile(file_path):
with open(file_path, 'w') as fio:
data_dict = {
"date": time.time(),
"data": ["date, io rw counter, io cycles counter, disk used"]
}
fio.write(json.dumps(data_dict))
def main():
parser = parseArguments()
if not os.path.isdir(parser.output_folder):
raise Exception("Invalid output folder: %s" % parser.output_folder)
# Consumption global status
process_file = os.path.join(parser.output_folder, 'monitor_resource_process.data.json')
mem_file = os.path.join(parser.output_folder, 'monitor_resource_memory.data.json')
io_file = os.path.join(parser.output_folder, 'monitor_resource_io.data.json')
resource_file = os.path.join(parser.output_folder, 'monitor_process_resource.status.json')
status_file = os.path.join(parser.output_folder, 'monitor_resource.status.json')
if not os.path.exists(parser.collector_db):
print "Collector database not found..."
initProcessDataFile(process_file)
initMemoryDataFile(mem_file)
initIODataFile(io_file)
with open(status_file, "w") as sfile:
sfile.write('{"cpu_time": 0, "cpu_percent": 0, "memory_rss": 0, "memory_percent": 0, "io_rw_counter": 0, "date": "", "total_process": 0, "disk_used": 0, "io_cycles_counter": 0, "cpu_num_threads": 0}')
with open(resource_file, "w") as rfile:
rfile.write('[]')
exit(1)
collector = ResourceCollect(parser.collector_db)
date_scope = datetime.now().strftime('%Y-%m-%d')
stat_info = os.stat(parser.output_folder)
partition_user = pwd.getpwuid(stat_info.st_uid)[0]
process_result, memory_result, io_result = collector.getPartitionConsumptionStatus(partition_user)
label_list = ['date', 'total_process', 'cpu_percent', 'cpu_time', 'cpu_num_threads',
'memory_percent', 'memory_rss', 'io_rw_counter', 'io_cycles_counter',
'disk_used']
resource_status_dict = {}
if not os.path.exists(process_file):
initProcessDataFile(process_file)
if not os.path.exists(mem_file):
initMemoryDataFile(mem_file)
if not os.path.exists(io_file):
initIODataFile(io_file)
if process_result and process_result['total_process'] != 0.0:
appendToJsonFile(process_file, ", ".join(
[str(process_result[key]) for key in label_list if process_result.has_key(key)])
)
resource_status_dict.update(process_result)
if memory_result and memory_result['memory_rss'] != 0.0:
appendToJsonFile(mem_file, ", ".join(
[str(memory_result[key]) for key in label_list if memory_result.has_key(key)])
)
resource_status_dict.update(memory_result)
if io_result and io_result['io_rw_counter'] != 0.0:
appendToJsonFile(io_file, ", ".join(
[str(io_result[key]) for key in label_list if io_result.has_key(key)])
)
resource_status_dict.update(io_result)
with open(status_file, 'w') as fp:
fp.write(json.dumps(resource_status_dict))
# Consumption Resource
resource_process_status_list = collector.getPartitionConsumption(partition_user)
if resource_process_status_list:
with open(resource_file, 'w') as rf:
rf.write(json.dumps(resource_process_status_list))
#!/usr/bin/env python
import sys
import os
import glob
import json
import ConfigParser
import time
from datetime import datetime
def softConfigGet(config, *args, **kwargs):
try:
return config.get(*args, **kwargs)
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
return ""
def generateStatisticsData(stat_file_path, content):
# csv document for statistics
if not os.path.exists(stat_file_path):
with open(stat_file_path, 'w') as fstat:
data_dict = {
"date": time.time(),
"data": ["Date, Success, Error, Warning"]
}
fstat.write(json.dumps(data_dict))
current_state = ''
if content.has_key('state'):
current_state = '%s, %s, %s, %s' % (
content['date'],
content['state']['success'],
content['state']['error'],
content['state']['warning'])
# append to file
if current_state:
with open (stat_file_path, mode="r+") as fstat:
fstat.seek(0,2)
position = fstat.tell() -2
fstat.seek(position)
fstat.write(',"%s"]}' % current_state)
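# Same in-place append trick as appendToJsonFile in collect.py: step back over
# the trailing ']}' and overwrite it with ',"<row>"]}'. An illustrative row is
# '2015-06-01 10:00:00, 5, 1, 0' (date, success, error, warning).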
def run(args_list):
monitor_file, instance_file = args_list
monitor_config = ConfigParser.ConfigParser()
monitor_config.read(monitor_file)
base_folder = monitor_config.get('monitor', 'private-folder')
status_folder = monitor_config.get('monitor', 'public-folder')
base_url = monitor_config.get('monitor', 'base-url')
related_monitor_list = monitor_config.get("monitor", "monitor-url-list").split()
statistic_folder = os.path.join(base_folder, 'data', '.jio_documents')
parameter_file = os.path.join(base_folder, 'config', '.jio_documents', 'config.json')
report_date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
if not os.path.exists(statistic_folder):
try:
os.makedirs(statistic_folder)
except OSError, e:
if e.errno == os.errno.EEXIST and os.path.isdir(statistic_folder):
pass
else: raise
# search for all status files
file_list = filter(os.path.isfile,
glob.glob("%s/*.status.json" % status_folder)
)
error = warning = success = 0
status = 'OK'
promise_list = []
global_state_file = os.path.join(base_folder, 'monitor.global.json')
public_state_file = os.path.join(status_folder, 'monitor.global.json')
for file in file_list:
try:
with open(file, 'r') as temp_file:
tmp_json = json.loads(temp_file.read())
except ValueError:
# bad json file ?
continue
if tmp_json['status'] == 'ERROR':
error += 1
elif tmp_json['status'] == 'OK':
success += 1
elif tmp_json['status'] == 'WARNING':
warning += 1
tmp_json['time'] = tmp_json['start-date'].split(' ')[1]
promise_list.append(tmp_json)
if error:
status = 'ERROR'
elif warning:
status = 'WARNING'
global_state_dict = dict(
status=status,
state={
'error': error,
'success': success,
'warning': warning,
},
type='global',
date=report_date,
_links={"rss_url": {"href": "%s/public/feed" % base_url},
"public_url": {"href": "%s/share/jio_public/" % base_url},
"private_url": {"href": "%s/share/jio_private/" % base_url}
},
data={'state': 'monitor_state.data',
'process_state': 'monitor_process_resource.status',
'process_resource': 'monitor_resource_process.data',
'memory_resource': 'monitor_resource_memory.data',
'io_resource': 'monitor_resource_io.data',
'monitor_process_state': 'monitor_resource.status'}
)
global_state_dict['_embedded'] = {'promises': promise_list}
if os.path.exists(instance_file):
config = ConfigParser.ConfigParser()
config.read(instance_file)
if 'instance' in config.sections():
instance_dict = {}
global_state_dict['title'] = config.get('instance', 'name')
global_state_dict['hosting-title'] = config.get('instance', 'root-name')
if not global_state_dict['title']:
global_state_dict['title'] = 'Instance Monitoring'
instance_dict['computer'] = config.get('instance', 'computer')
instance_dict['ipv4'] = config.get('instance', 'ipv4')
instance_dict['ipv6'] = config.get('instance', 'ipv6')
instance_dict['software-release'] = config.get('instance', 'software-release')
instance_dict['software-type'] = config.get('instance', 'software-type')
instance_dict['partition'] = config.get('instance', 'partition')
global_state_dict['_embedded'].update({'instance' : instance_dict})
if related_monitor_list:
global_state_dict['_links']['related_monitor'] = [{'href': "%s/share/jio_public" % url}
for url in related_monitor_list]
if os.path.exists(parameter_file):
with open(parameter_file) as cfile:
global_state_dict['parameters'] = json.loads(cfile.read())
# Public information with the link to private folder
public_state_dict = dict(
status=status,
date=report_date,
_links={'monitor': {'href': '%s/share/jio_private/' % base_url}},
title=global_state_dict.get('title', '')
)
public_state_dict['hosting-title'] = global_state_dict.get('hosting-title', '')
public_state_dict['_links']['related_monitor'] = global_state_dict['_links'].get('related_monitor', [])
with open(global_state_file, 'w') as fglobal:
fglobal.write(json.dumps(global_state_dict))
with open(public_state_file, 'w') as fpglobal:
fpglobal.write(json.dumps(public_state_dict))
generateStatisticsData(
os.path.join(statistic_folder, 'monitor_state.data.json'),
global_state_dict)
return 0
def main():
if len(sys.argv) < 3:
print("Usage: %s <monitor_conf_path> <instance_conf_path>" % sys.argv[0])
sys.exit(2)
sys.exit(run(sys.argv[1:]))
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import stat
import json
import ConfigParser
import traceback
import argparse
import urllib2
import ssl
import glob
from datetime import datetime
OPML_START = """<?xml version="1.0" encoding="UTF-8"?>
<!-- OPML generated by SlapOS -->
<opml version="1.1">
<head>
<title>%(root_title)s</title>
<dateCreated>%(creation_date)s</dateCreated>
<dateModified>%(modification_date)s</dateModified>
</head>
<body>
<outline text="%(outline_title)s">"""
OPML_END = """ </outline>
</body>
</opml>"""
OPML_OUTLINE_FEED = '<outline text="%(title)s" title="%(title)s" type="rss" version="RSS" htmlUrl="%(html_url)s" xmlUrl="%(xml_url)s" url="%(global_url)s" />'
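# Rendered sample (illustrative values): OPML_OUTLINE_FEED filled with
#   title='my-instance', html_url/xml_url='https://monitor.example/public/feed',
#   global_url='https://monitor.example/share/jio_private/'
# yields:
#   <outline text="my-instance" title="my-instance" type="rss" version="RSS"
#    htmlUrl="https://monitor.example/public/feed"
#    xmlUrl="https://monitor.example/public/feed"
#    url="https://monitor.example/share/jio_private/" />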
def parseArguments():
"""
Parse arguments for monitor instance.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--config_file',
default='monitor.cfg',
help='Monitor Configuration file')
return parser.parse_args()
def mkdirAll(path):
try:
os.makedirs(path)
except OSError, e:
if e.errno == os.errno.EEXIST and os.path.isdir(path):
pass
else: raise
def softConfigGet(config, *args, **kwargs):
try:
return config.get(*args, **kwargs)
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
return None
def createSymlink(source, destination):
try:
os.symlink(source, destination)
except OSError, e:
if e.errno != os.errno.EEXIST:
raise
class Monitoring(object):
def __init__(self, configuration_file):
config = self.loadConfig([configuration_file])
# Set Monitor variables
self.title = config.get("monitor", "title")
self.root_title = config.get("monitor", "root-title")
self.service_pid_folder = config.get("monitor", "service-pid-folder")
self.crond_folder = config.get("monitor", "crond-folder")
self.logrotate_d = config.get("monitor", "logrotate-folder")
self.promise_runner = config.get("monitor", "promise-runner")
self.promise_folder_list = config.get("monitor", "promise-folder-list").split()
self.public_folder = config.get("monitor", "public-folder")
self.private_folder = config.get("monitor", "private-folder")
self.collector_db = config.get("monitor", "collector-db")
self.collect_script = config.get("monitor", "collect-script")
self.webdav_folder = config.get("monitor", "webdav-folder")
self.report_script_folder = config.get("monitor", "report-folder")
self.webdav_url = '%s/share' % config.get("monitor", "base-url")
self.public_url = '%s/public' % config.get("monitor", "base-url")
self.python = config.get("monitor", "python") or "python"
self.public_path_list = config.get("monitor", "public-path-list").split()
self.private_path_list = config.get("monitor", "private-path-list").split()
self.monitor_url_list = config.get("monitor", "monitor-url-list").split()
self.parameter_list = [param.strip() for param in config.get("monitor", "parameter-list").split('\n') if param]
# Use this file to write knowledge0_cfg required by webrunner
self.parameter_cfg_file = config.get("monitor", "parameter-file-path").strip()
self.config_folder = os.path.join(self.private_folder, 'config')
self.report_folder = self.private_folder
self.promise_output_file = config.get("monitor", "promise-output-file")
self.bootstrap_is_ok = True
self.promise_dict = {}
for promise_folder in self.promise_folder_list:
self.setupPromiseDictFromFolder(promise_folder)
def loadConfig(self, pathes, config=None):
if config is None:
config = ConfigParser.ConfigParser()
try:
config.read(pathes)
except ConfigParser.MissingSectionHeaderError:
traceback.print_exc()
return config
def readInstanceConfiguration(self):
type_list = ['raw', 'file', 'htpasswd', 'httpdcors']
configuration_list = []
if not self.parameter_list:
return []
for config in self.parameter_list:
config_list = config.strip().split(' ')
# type: config_list[0]
if len(config_list) >= 3 and config_list[0] in type_list:
if config_list[0] == 'raw':
configuration_list.append(dict(
key='',
title=config_list[1],
value=' '.join(config_list[2:])
))
elif (config_list[0] == 'file' or config_list[0] == 'htpasswd') and \
os.path.exists(config_list[2]) and os.path.isfile(config_list[2]):
try:
with open(config_list[2]) as cfile:
parameter = dict(
key=config_list[1],
title=config_list[1],
value=cfile.read(),
description={
"type": config_list[0],
"file": config_list[2]
}
)
if config_list[0] == 'htpasswd':
if len(config_list) != 5 or not os.path.exists(config_list[4]):
print 'htpasswd file is not specified: %s' % str(config_list)
continue
parameter['description']['user'] = config_list[3]
parameter['description']['htpasswd'] = config_list[4]
configuration_list.append(parameter)
except (IOError, OSError), e:
print 'Cannot read file %s, Error is: %s' % (config_list[2], str(e))
elif config_list[0] == 'httpdcors' and os.path.exists(config_list[2]) and \
os.path.exists(config_list[3]):
old_cors_file = os.path.join(
os.path.dirname(config_list[2]),
'prev_%s' % os.path.basename(config_list[2])
)
try:
cors_content = ""
if os.path.exists(old_cors_file):
with open(old_cors_file) as cfile:
cors_content = cfile.read()
else:
# Create empty file
with open(old_cors_file, 'w') as cfile:
cfile.write("")
parameter = dict(
key=config_list[1],
title=config_list[1],
value=cors_content,
description={
"type": config_list[0],
"cors_file": config_list[2],
"gracefull_bin": config_list[3]
}
)
configuration_list.append(parameter)
except (IOError, OSError), e:
print 'Cannot read file at %s, Error is: %s' % (old_cors_file, str(e))
return configuration_list
def setupPromiseDictFromFolder(self, folder):
for filename in os.listdir(folder):
path = os.path.join(folder, filename)
if os.path.isfile(path) and os.access(path, os.X_OK):
self.promise_dict[filename] = {"path": path,
"configuration": ConfigParser.ConfigParser()}
def createSymlinksFromConfig(self, destination_folder, source_path_list, name=""):
if destination_folder:
if source_path_list:
for path in source_path_list:
path = path.rstrip('/')
dirname = os.path.join(destination_folder, name)
try:
mkdirAll(dirname) # could also raise OSError
os.symlink(path, os.path.join(dirname, os.path.basename(path)))
except OSError, e:
if e.errno != os.errno.EEXIST:
raise
def getMonitorTitleFromUrl(self, monitor_url):
# This file should be generated
if not monitor_url.startswith('https://') and not monitor_url.startswith('http://'):
return 'Unknown Instance'
if not monitor_url.endswith('/'):
monitor_url = monitor_url + '/'
url = monitor_url + '.jio_documents/monitor.global.json' # XXX Hard Coded path
try:
# XXX - working here with public url
if hasattr(ssl, '_create_unverified_context'):
context = ssl._create_unverified_context()
response = urllib2.urlopen(url, context=context)
else:
response = urllib2.urlopen(url)
except urllib2.HTTPError:
self.bootstrap_is_ok = False
return 'Unknown Instance'
else:
try:
monitor_dict = json.loads(response.read())
return monitor_dict.get('title', 'Unknown Instance')
except ValueError, e:
print "Bad Json file at %s" % url
self.bootstrap_is_ok = False
return 'Unknown Instance'
def getReportInfoFromFilename(self, filename):
splited_filename = filename.split('_every_')
possible_time_list = ['hour', 'minute']
if len(splited_filename) == 1:
return (filename, "* * * * *")
run_time = splited_filename[1].split('_')
report_name = splited_filename[0]
if len(run_time) != 2 or not run_time[1] in possible_time_list:
return (report_name, "* * * * *")
try:
value = int(run_time[0])
except ValueError:
print "Warning: Bad report filename: %s" % filename
return (report_name, "* * * * *")
if run_time[1] == 'hour':
# run once, at minute 0 of every Nth hour
return (report_name, "0 */%s * * *" % value)
if run_time[1] == 'minute':
return (report_name, "*/%s * * * *" % value)
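# Illustrative mappings (hypothetical filenames):
#   'disk_report_every_2_hour'   -> ('disk_report', '0 */2 * * *')
#   'disk_report_every_5_minute' -> ('disk_report', '*/5 * * * *')
#   'disk_report'                -> ('disk_report', '* * * * *')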
def configureFolders(self):
# configure public and private folder
self.createSymlinksFromConfig(self.webdav_folder, [self.public_folder])
self.createSymlinksFromConfig(self.webdav_folder, [self.private_folder])
#configure jio_documents folder
jio_public = os.path.join(self.webdav_folder, 'jio_public')
jio_private = os.path.join(self.webdav_folder, 'jio_private')
mkdirAll(jio_public)
mkdirAll(jio_private)
createSymlink(self.public_folder,
os.path.join(jio_public, '.jio_documents'))
createSymlink(self.private_folder,
os.path.join(jio_private, '.jio_documents'))
self.data_folder = os.path.join(self.private_folder, 'data', '.jio_documents')
self.document_folder = os.path.join(self.private_folder, 'documents')
config_folder = os.path.join(self.config_folder, '.jio_documents')
mkdirAll(self.data_folder)
mkdirAll(config_folder)
createSymlink(os.path.join(self.private_folder, 'data'),
os.path.join(jio_private, 'data'))
createSymlink(self.config_folder, os.path.join(jio_private, 'config'))
createSymlink(self.data_folder, self.document_folder)
# Cleanup private folder
for file in glob.glob("%s/*.history.json" % self.private_folder):
try:
os.unlink(file)
except OSError:
print "failed to remove file %s. Ignoring..." % file
def makeConfigurationFiles(self):
config_folder = os.path.join(self.config_folder, '.jio_documents')
parameter_config_file = os.path.join(config_folder, 'config.parameters.json')
parameter_file = os.path.join(config_folder, 'config.json')
#mkdirAll(config_folder)
parameter_list = self.readInstanceConfiguration()
description_dict = {}
if parameter_list:
for i in range(0, len(parameter_list)):
key = parameter_list[i]['key']
if key:
description_dict[key] = parameter_list[i].pop('description')
with open(parameter_config_file, 'w') as config_file:
config_file.write(json.dumps(description_dict))
with open(parameter_file, 'w') as config_file:
config_file.write(json.dumps(parameter_list))
try:
with open(self.parameter_cfg_file, 'w') as pfile:
pfile.write('[public]\n')
for parameter in parameter_list:
if parameter['key']:
pfile.write('%s = %s\n' % (parameter['key'], parameter['value']))
except (IOError, OSError), e:
print "Error: failed to create file %s: %s" % (self.parameter_cfg_file, str(e))
def generateOpmlFile(self, feed_url_list, output_file):
if os.path.exists(output_file):
creation_date = datetime.utcfromtimestamp(os.path.getctime(output_file)).strftime("%a, %d %b %Y %H:%M:%S +0000")
modification_date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S +0000")
else:
creation_date = modification_date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S +0000")
opml_content = OPML_START % {'creation_date': creation_date,
'modification_date': modification_date,
'outline_title': 'Monitoring RSS Feed list',
'root_title': self.root_title}
opml_content += OPML_OUTLINE_FEED % {'title': self.title,
'html_url': self.public_url + '/feed',
'xml_url': self.public_url + '/feed',
'global_url': "%s/jio_private/" % self.webdav_url}
for feed_url in feed_url_list:
opml_content += OPML_OUTLINE_FEED % {'title': self.getMonitorTitleFromUrl(feed_url + "/share/jio_public/"),
'html_url': feed_url + '/public/feed',
'xml_url': feed_url + '/public/feed',
'global_url': "%s/share/jio_private/" % feed_url}
opml_content += OPML_END
with open(output_file, 'w') as wfile:
wfile.write(opml_content)
def generateLogrotateEntry(self, name, file_list, option_list):
"""
Add a new entry to the logrotate.d folder, so that data files can be rotated daily.
"""
content = "%(logfiles)s {\n%(options)s\n}\n" % {
'logfiles': ' '.join(file_list),
'options': '\n'.join(option_list)
}
file_path = os.path.join(self.logrotate_d, name)
with open(file_path, 'w') as flog:
flog.write(content)
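# Example rendered entry (illustrative paths), as written to
# <logrotate-folder>/monitor.data by bootstrapMonitor below:
#   /srv/monitor/private/*.data.json /srv/monitor/private/data/.jio_documents/*.data.json {
#   daily
#   nocreate
#   olddir /srv/monitor/private/data/.jio_documents
#   rotate 5
#   ...
#   }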
def generateReportCronEntries(self):
cron_line_list = []
report_name_list = [name.replace('.report.json', '')
for name in os.listdir(self.report_folder) if name.endswith('.report.json')]
for filename in os.listdir(self.report_script_folder):
report_script = os.path.join(self.report_script_folder, filename)
if os.path.isfile(report_script) and os.access(report_script, os.X_OK):
report_name, frequency = self.getReportInfoFromFilename(filename)
# report_name = os.path.splitext(filename)[0]
report_json_path = "%s.report.json" % report_name
report_cmd_line = [
frequency,
self.promise_runner,
'--pid_path "%s"' % os.path.join(self.service_pid_folder,
"%s.pid" % filename),
'--output "%s"' % os.path.join(self.report_folder,report_json_path),
'--promise_script "%s"' % report_script,
'--promise_name "%s"' % report_name,
'--monitor_url "%s/jio_private/"' % self.webdav_url, # XXX hardcoded,
'--history_folder "%s"' % self.data_folder,
'--instance_name "%s"' % self.title,
'--hosting_name "%s"' % self.root_title,
'--promise_type "report"']
cron_line_list.append(' '.join(report_cmd_line))
if report_name in report_name_list:
report_name_list.remove(report_name)
# cleanup removed report json result
if report_name_list:
for report_name in report_name_list:
result_path = os.path.join(self.public_folder, '%s.report.json' % report_name)
if os.path.exists(result_path):
try:
os.unlink(result_path)
except OSError, e:
print "Error: Failed to delete %s" % result_path, str(e)
pass
with open(self.crond_folder + "/monitor-reports", "w") as freport:
freport.write("\n".join(cron_line_list))
def generateServiceCronEntries(self):
# XXX should write to the cron file only if at least one configuration file was modified
#cron_line_list = ['PATH=%s\n' % os.environ['PATH']]
cron_line_list = []
service_name_list = [name.replace('.status.json', '')
for name in os.listdir(self.public_folder) if name.endswith('.status.json')]
for service_name, promise in self.promise_items:
service_config = promise["configuration"]
service_status_path = "%s/%s.status.json" % (self.public_folder, service_name)
mkdirAll(os.path.dirname(service_status_path))
promise_cmd_line = [
softConfigGet(service_config, "service", "frequency") or "* * * * *",
self.promise_runner,
'--pid_path "%s"' % os.path.join(self.service_pid_folder,
"%s.pid" % service_name),
'--output "%s"' % service_status_path,
'--promise_script "%s"' % promise["path"],
'--promise_name "%s"' % service_name,
'--monitor_url "%s/jio_private/"' % self.webdav_url, # XXX hardcoded,
'--history_folder "%s"' % self.public_folder,
'--instance_name "%s"' % self.title,
'--hosting_name "%s"' % self.root_title]
cron_line_list.append(' '.join(promise_cmd_line))
if service_name in service_name_list:
service_name_list.remove(service_name)
if service_name_list:
# XXX some services were removed; delete their status files so the monitor no longer considers them
for service_name in service_name_list:
status_path = os.path.join(self.public_folder, '%s.status.json' % service_name)
if os.path.exists(status_path):
try:
os.unlink(status_path)
except OSError, e:
print "Error: Failed to delete %s" % status_path, str(e)
pass
with open(self.crond_folder + "/monitor-promises", "w") as fp:
fp.write("\n".join(cron_line_list))
def addCronEntry(self, name, frequency, command):
entry_line = '%s %s' % (frequency, command)
cron_entry_file = os.path.join(self.crond_folder, name)
with open(cron_entry_file, "w") as cronf:
cronf.write(entry_line)
def bootstrapMonitor(self):
if os.path.exists(self.promise_output_file):
os.unlink(self.promise_output_file)
# create symlinks from monitor.conf
self.createSymlinksFromConfig(self.public_folder, self.public_path_list)
self.createSymlinksFromConfig(self.private_folder, self.private_path_list)
self.configureFolders()
# create symlinks from service configurations
self.promise_items = self.promise_dict.items()
for service_name, promise in self.promise_items:
service_config = promise["configuration"]
public_path_list = softConfigGet(service_config, "service", "public-path-list")
private_path_list = softConfigGet(service_config, "service", "private-path-list")
if public_path_list:
self.createSymlinksFromConfig(self.public_folder,
public_path_list.split(),
service_name)
if private_path_list:
self.createSymlinksFromConfig(self.private_folder,
private_path_list.split(),
service_name)
# Generate OPML file
self.generateOpmlFile(self.monitor_url_list,
os.path.join(self.public_folder, 'feeds'))
# put promises to a cron file
self.generateServiceCronEntries()
# put report script to cron
self.generateReportCronEntries()
# Generate parameters files and scripts
self.makeConfigurationFiles()
# Rotate monitor data files
option_list = [
'daily', 'nocreate', 'olddir %s' % self.data_folder, 'rotate 5',
'nocompress', 'extension .json', 'dateext',
'dateformat -%Y-%m-%d', 'notifempty'
]
file_list = [
"%s/*.data.json" % self.private_folder,
"%s/*.data.json" % self.data_folder]
self.generateLogrotateEntry('monitor.data', file_list, option_list)
# Rotate public history status file, delete data of previous days
option_list = [
'daily', 'nocreate', 'rotate 0',
'nocompress', 'notifempty'
]
file_list = ["%s/*.history.json" % self.public_folder]
self.generateLogrotateEntry('monitor.service.status', file_list, option_list)
# Add cron entry for SlapOS Collect
command = "%s %s --output_folder %s --collector_db %s" % (self.python,
self.collect_script, self.data_folder, self.collector_db)
self.addCronEntry('monitor_collect', '* * * * *', command)
# Write an empty file when monitor bootstrap ran to completion
if self.bootstrap_is_ok:
with open(self.promise_output_file, 'w') as promise_file:
promise_file.write("")
return 0
def main():
parser = parseArguments()
monitor = Monitoring(parser.config_file)
sys.exit(monitor.bootstrapMonitor())
#!/usr/bin/env python
import sys
import os
import re
import json
import argparse
import subprocess
from datetime import datetime
import time
def parseArguments():
"""
Parse arguments for monitor instance.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--config_folder',
help='Path where the json configuration/documents will be read and written.')
parser.add_argument('--htpasswd_bin',
help='Path of the Apache htpasswd binary. Needed to write the htpasswd file.')
parser.add_argument('--output_cfg_file',
help='Output parameters to a cfg file.')
return parser.parse_args()
def fileWrite(file_path, content):
if os.path.exists(file_path):
try:
with open(file_path, 'w') as wf:
wf.write(content)
return True
except (IOError, OSError), e:
print "ERROR while writing changes to %s.\n %s" % (file_path, str(e))
return False
def htpasswdWrite(htpasswd_bin, parameter_dict, value):
if not os.path.exists(parameter_dict['file']):
return False
command = [htpasswd_bin, '-cb', parameter_dict['htpasswd'], parameter_dict['user'], value]
process = subprocess.Popen(
command,
stdin=None,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
result = process.communicate()[0]
if process.returncode != 0:
print result
return False
with open(parameter_dict['file'], 'w') as pfile:
pfile.write(value)
return True
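# The command run above is equivalent to (illustrative values):
#   htpasswd -cb /path/to/htpasswd monitor_user new_password
# where -c (re)creates the htpasswd file and -b reads the password from the
# command line; the clear-text value is then mirrored into parameter_dict['file'].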
def httpdCorsDomainWrite(httpd_cors_file, httpd_gracefull_bin, cors_domain):
cors_string = ""
cors_domain_list = cors_domain.split()
old_httpd_cors_file = os.path.join(
os.path.dirname(httpd_cors_file),
'prev_%s' % os.path.basename(httpd_cors_file)
)
if os.path.exists(old_httpd_cors_file) and os.path.isfile(old_httpd_cors_file):
try:
with open(old_httpd_cors_file, 'r') as cors_file:
if cors_file.read() == cors_domain:
if os.path.exists(httpd_cors_file) and (os.stat(httpd_cors_file).st_size > 0
or (cors_domain == "" and os.stat(httpd_cors_file).st_size == 0)):
# Skip if cors file is not empty
return True
except (IOError, OSError), e:
print "Failed to open file at %s. \n%s" % (old_httpd_cors_file, str(e))
for domain in cors_domain_list:
if cors_string:
cors_string += '|'
cors_string += re.escape(domain)
try:
with open(httpd_cors_file, 'w') as file:
file.write('SetEnvIf Origin "^http(s)?://(.+\.)?(%s)$" origin_is=$0\n' % cors_string)
file.write('Header always set Access-Control-Allow-Origin %{origin_is}e env=origin_is')
except (IOError, OSError), e:
print "ERROR while writing CORS changes to %s.\n %s" % (httpd_cors_file, str(e))
return False
# Save current cors domain list
try:
with open(old_httpd_cors_file, 'w') as cors_file:
cors_file.write(cors_domain)
except (IOError, OSError), e:
print "Failed to open file at %s. \n%s" % (old_httpd_cors_file, str(e))
return False
# Restart httpd process
try:
subprocess.call(httpd_gracefull_bin)
except OSError, e:
print "Failed to execute command %s.\n %s" % (httpd_gracefull_bin, str(e))
return False
return True
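# For cors_domain = "example.com www.example.net" (illustrative), the generated
# httpd_cors_file contains:
#   SetEnvIf Origin "^http(s)?://(.+\.)?(example\.com|www\.example\.net)$" origin_is=$0
#   Header always set Access-Control-Allow-Origin %{origin_is}e env=origin_is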
def applyEditChange(parser):
parameter_tmp_file = os.path.join(parser.config_folder, 'config.tmp.json')
config_file = os.path.join(parser.config_folder, 'config.json')
parameter_config_file = os.path.join(parser.config_folder, 'config.parameters.json')
if not os.path.exists(parameter_tmp_file) or not os.path.isfile(parameter_tmp_file):
return {}
if not os.path.exists(config_file):
print "ERROR: Config file doesn't exist... Exiting"
return {}
new_parameter_list = []
parameter_list = []
description_dict = {}
result_dict = {}
try:
with open(parameter_tmp_file) as tmpfile:
new_parameter_list = json.loads(tmpfile.read())
except ValueError:
print "Error: Couldn't parse json file %s" % parameter_tmp_file
with open(parameter_config_file) as tmpfile:
description_dict = json.loads(tmpfile.read())
for i in range(0, len(new_parameter_list)):
key = new_parameter_list[i]['key']
if key != '':
description_entry = description_dict[key]
if description_entry['type'] == 'file':
result_dict[key] = fileWrite(description_entry['file'], new_parameter_list[i]['value'])
elif description_entry['type'] == 'htpasswd':
result_dict[key] = htpasswdWrite(parser.htpasswd_bin, description_entry, new_parameter_list[i]['value'])
elif description_entry['type'] == 'httpdcors':
result_dict[key] = httpdCorsDomainWrite(description_entry['cors_file'], description_entry['gracefull_bin'], new_parameter_list[i]['value'])
if (parser.output_cfg_file):
try:
with open(parser.output_cfg_file, 'w') as pfile:
pfile.write('[public]\n')
for parameter in new_parameter_list:
if parameter['key']:
pfile.write('%s = %s\n' % (parameter['key'], parameter['value']))
except (IOError, OSError), e:
print "Error: failed to create file %s: %s" % (parser.output_cfg_file, str(e))
return result_dict
def main():
parser = parseArguments()
parameter_tmp_file = os.path.join(parser.config_folder, 'config.tmp.json')
config_file = os.path.join(parser.config_folder, 'config.json')
# Run 4 times with sleep
run_counter = 1
max_run = 4
sleep_time = 15
while True:
result_dict = applyEditChange(parser)
if result_dict != {}:
status = True
for key in result_dict:
if not result_dict[key]:
status = False
if status and os.path.exists(parameter_tmp_file):
try:
os.unlink(config_file)
except OSError, e:
print "ERROR cannot remove file: %s" % parameter_tmp_file
else:
os.rename(parameter_tmp_file, config_file)
if run_counter == max_run:
break
else:
run_counter += 1
time.sleep(sleep_time)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import subprocess
import json
import psutil
import time
from shutil import copyfile
import glob
import argparse
import traceback
def parseArguments():
"""
Parse arguments for monitor collector instance.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--pid_path',
help='Path where the pid of this process will be written.')
parser.add_argument('--output',
help='Path of the file where the JSON result of this promise will be saved.')
parser.add_argument('--promise_script',
help='Promise script to execute.')
parser.add_argument('--promise_name',
help='Title to give to this promise.')
parser.add_argument('--promise_type',
default='status',
help='Type of promise to execute. [status, report].')
parser.add_argument('--monitor_url',
help='Monitor Instance website URL.')
parser.add_argument('--history_folder',
help='Path where the old result file will be placed before generating a new JSON result file.')
parser.add_argument('--instance_name',
default='UNKNOWN Software Instance',
help='Software Instance name.')
parser.add_argument('--hosting_name',
default='UNKNOWN Hosting Subscription',
help='Hosting Subscription name.')
return parser
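# Example invocation via the monitor.runpromise console script (paths and
# names are hypothetical):
#   monitor.runpromise --pid_path /run/monitor/check_disk.pid \
#     --output /srv/monitor/public/check_disk.status.json \
#     --promise_script /srv/monitor/promise/check_disk \
#     --promise_name check_disk --monitor_url https://monitor.example/share/ \
#     --history_folder /srv/monitor/public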
def runpromise(parser):
if os.path.exists(parser.pid_path):
with open(parser.pid_path, "r") as pidfile:
try:
pid = int(pidfile.read(6))
except ValueError:
pid = None
if pid and os.path.exists("/proc/" + str(pid)):
print("A process is already running with pid " + str(pid))
return 1
start_date = ""
with open(parser.pid_path, "w") as pidfile:
process = executeCommand(parser.promise_script)
ps_process = psutil.Process(process.pid)
start_date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ps_process.create_time()))
pidfile.write(str(process.pid))
status_json = generateStatusJsonFromProcess(process, start_date=start_date)
status_json['_links'] = {"monitor": {"href": parser.monitor_url}}
status_json['title'] = parser.promise_name
status_json['instance'] = parser.instance_name
status_json['hosting_subscription'] = parser.hosting_name
status_json['type'] = parser.promise_type
# Save the latest status change date (needed for rss)
status_json['change-time'] = ps_process.create_time()
if os.path.exists(parser.output):
with open(parser.output) as f:
try:
last_result = json.loads(f.read())
if status_json['status'] == last_result['status'] and last_result.has_key('change-time'):
status_json['change-time'] = last_result['change-time']
except ValueError:
pass
updateStatusHistoryFolder(
parser.promise_name,
parser.output,
parser.history_folder,
parser.promise_type
)
with open(parser.output, "w") as outputfile:
json.dump(status_json, outputfile)
os.remove(parser.pid_path)
def updateStatusHistoryFolder(name, status_file, history_folder, promise_type):
history_path = history_folder
if not os.path.exists(status_file):
return
if not os.path.exists(history_folder):
return
if not os.path.exists(history_path):
try:
os.makedirs(history_path)
except OSError, e:
if e.errno == os.errno.EEXIST and os.path.isdir(history_path):
pass
else: raise
with open(status_file, 'r') as sf:
try:
status_dict = json.loads(sf.read())
except ValueError:
traceback.print_exc()
return
if promise_type == 'status':
filename = '%s.history.json' % name
history_file = os.path.join(history_path, filename)
# Remove links from history (not needed)
status_dict.pop('_links', None)
if not os.path.exists(history_file):
with open(history_file, 'w') as f_history:
data_dict = {
"date": time.time(),
"data": [status_dict]
}
f_history.write(json.dumps(data_dict))
else:
# Remove useless information
status_dict.pop('hosting_subscription', '')
status_dict.pop('title', '')
status_dict.pop('instance', '')
status_dict.pop('type', '')
with open (history_file, mode="r+") as f_history:
f_history.seek(0,2)
position = f_history.tell() -2
f_history.seek(position)
#f_history.write(',%s]}' % str(status_dict))
f_history.write(',%s]}' % json.dumps(status_dict))
elif promise_type == 'report':
# keep_item_amount = 3
filename = '%s.history.json' % (
name)
copyfile(status_file, os.path.join(history_path, filename))
"""# Don't let history foler grow too much, keep xx files
file_list = filter(os.path.isfile,
glob.glob("%s/*.%s.history.json" % (history_path, promise_type))
)
file_count = len(file_list)
if file_count > keep_item_amount:
file_list.sort(key=lambda x: os.path.getmtime(x))
while file_count > keep_item_amount:
to_delete = file_list.pop(0)
try:
os.unlink(to_delete)
file_count -= 1
except OSError:
raise"""
def generateStatusJsonFromProcess(process, start_date=None, title=None):
stdout, stderr = process.communicate()
status_json = {}
if process.returncode != 0:
status_json["status"] = "ERROR"
else:
status_json["status"] = "OK"
if stderr:
status_json["message"] = stderr
elif stdout:
status_json["message"] = stdout
if start_date:
status_json["start-date"] = start_date
if title:
status_json["title"] = title
return status_json
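# Shape of the returned dictionary (illustrative values):
#   {"status": "OK", "message": "<stdout of the promise>",
#    "start-date": "2015-06-01 10:00:00"}
# "status" is "ERROR" whenever the promise exited non-zero, and stderr takes
# precedence over stdout for "message".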
def executeCommand(args):
return subprocess.Popen(
args,
#cwd=instance_path,
#env=None if sys.platform == 'cygwin' else {},
stdin=None,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
def main():
arg_parser = parseArguments()
sys.exit(runpromise(arg_parser.parse_args()))
import sys
import os
import json
from datetime import datetime
import base64
import hashlib
import PyRSS2Gen
import argparse
def parseArguments():
"""
Parse arguments for monitor Rss Generator.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--items_folder',
help='Path of the folder containing *.status.json files with promise results.')
parser.add_argument('--output',
help='Path of the file where the feed will be saved.')
parser.add_argument('--feed_url',
help='Url of this feed file.')
parser.add_argument('--public_url',
help='Monitor Instance public URL.')
parser.add_argument('--private_url',
help='Monitor Instance private URL.')
parser.add_argument('--instance_name',
default='UNKNOWN Software Instance',
help='Software Instance name.')
parser.add_argument('--hosting_name',
default='',
help='Hosting Subscription name.')
return parser.parse_args()
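# Example invocation via the monitor.genrss console script (URLs and paths are
# hypothetical):
#   monitor.genrss --items_folder /srv/monitor/public \
#     --output /srv/monitor/public/feed \
#     --feed_url https://monitor.example/public/feed \
#     --public_url https://monitor.example/public \
#     --private_url https://monitor.example/private/ \
#     --instance_name my-instance --hosting_name my-hosting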
def getKey(item):
return item.pubDate
def main():
parser = parseArguments()
rss_item_list = []
report_date = datetime.utcnow()
for filename in os.listdir(parser.items_folder):
if filename.endswith(".status.json"):
filepath = os.path.join(parser.items_folder, filename)
result_dict = None
try:
result_dict = json.load(open(filepath, "r"))
except ValueError:
print "Failed to load json file: %s" % filepath
continue
description = result_dict.get('message', '')
event_time = datetime.fromtimestamp(result_dict['change-time'])
rss_item = PyRSS2Gen.RSSItem(
categories = [result_dict['status']],
source = PyRSS2Gen.Source(result_dict['title'], parser.public_url),
title = '[%s] %s' % (result_dict['status'], result_dict['title']),
comments = description,
description = "%s: %s\n%s" % (event_time, result_dict['status'], description),
link = parser.private_url,
pubDate = event_time,
guid = PyRSS2Gen.Guid(base64.b64encode("%s, %s" % (parser.hosting_name, result_dict['title'])))
)
rss_item_list.append(rss_item)
### Build the rss feed
rss_item_list.sort(key=getKey)
rss_feed = PyRSS2Gen.RSS2 (
title = parser.instance_name,
link = parser.feed_url,
description = parser.hosting_name,
lastBuildDate = report_date,
items = rss_item_list
)
with open(parser.output, 'w') as frss:
frss.write(rss_feed.to_xml())
if __name__ == "__main__":
main()