Commit 944a959b authored by Rafael Monnerat

monitor/globalstate: Refactor.

parent c1925550
...
@@ -62,154 +62,55 @@ class MonitorFeed(object):
     with open(output_file, 'w') as frss:
       frss.write(rss_feed.to_xml())
 
-def generateStatisticsData(stat_file_path, content):
-  # csv document for success/error statictics
-  if not os.path.exists(stat_file_path) or os.stat(stat_file_path).st_size == 0:
-    with open(stat_file_path, 'w') as fstat:
-      data_dict = {
-        "date": time.time(),
-        "data": ["Date, Success, Error, Warning"]
-      }
-      fstat.write(json.dumps(data_dict))
-
-  current_state = ''
-  if 'state' in content:
-    current_state = '%s, %s, %s, %s' % (
-      content['date'],
-      content['state']['success'],
-      content['state']['error'],
-      '')
-
-  # append to file
-  if current_state:
-    with open (stat_file_path, mode="r+") as fstat:
-      fstat.seek(0,2)
-      position = fstat.tell() -2
-      fstat.seek(position)
-      fstat.write('%s}' % ',"{}"]'.format(current_state))
-
-def writeDocumentList(folder_path):
-  # Save document list in a file called _document_list
-  public_document_list = [os.path.splitext(file)[0]
-    for file in os.listdir(folder_path) if file.endswith('.json')]
-
-  with open(os.path.join(folder_path, '_document_list'), 'w') as lfile:
-    lfile.write('\n'.join(public_document_list))
-
-def generateMonitoringData(config, public_folder, private_folder, public_url,
-    private_url, feed_url):
-  feed_output = os.path.join(public_folder, 'feed')
-  # search for all status files
-  file_list = filter(
-    os.path.isfile,
-    glob.glob("%s/promise/*.status.json" % public_folder)
-  )
-
-  promises_status_file = os.path.join(private_folder, '_promise_status')
-  previous_state_dict = {}
-  new_state_dict = {}
-  error = success = 0
-  monitor_feed = MonitorFeed(
-    config.get('monitor', 'title'),
-    config.get('monitor', 'root-title'),
-    public_url,
-    private_url,
-    feed_url)
-
-  if os.path.exists(promises_status_file):
-    with open(promises_status_file) as f:
-      try:
-        previous_state_dict = json.loads(f.read())
-      except ValueError:
-        pass
-
-  for file in file_list:
-    try:
-      with open(file, 'r') as temp_file:
-        tmp_json = json.loads(temp_file.read())
-
-      if tmp_json['result']['failed']:
-        promise_status = "ERROR"
-        error += 1
-      else:
-        promise_status = "OK"
-        success += 1
-      tmp_json['result']['change-date'] = tmp_json['result']['date']
-      if tmp_json['name'] in previous_state_dict:
-        status, change_date, _ = previous_state_dict[tmp_json['name']]
-        if promise_status == status:
-          tmp_json['result']['change-date'] = change_date
-
-      tmp_json['status'] = promise_status
-      message_hash = hashlib.md5(
-        str2bytes(tmp_json['result'].get('message', ''))).hexdigest()
-      new_state_dict[tmp_json['name']] = [
-        promise_status,
-        tmp_json['result']['change-date'],
-        message_hash
-      ]
-      monitor_feed.appendItem(tmp_json, message_hash)
-      savePromiseHistory(
-        tmp_json['title'],
-        tmp_json,
-        previous_state_dict.get(tmp_json['name']),
-        public_folder
-      )
-    except ValueError as e:
-      # bad json file
-      print("ERROR: Bad json file at: %s\n%s" % (file, e))
-      continue
-
-  with open(promises_status_file, "w") as f:
-    json.dump(new_state_dict, f)
-
-  monitor_feed.generateRSS(feed_output)
-  return error, success
-
-def savePromiseHistory(promise_name, state_dict, previous_state_list,
-    history_folder):
-  if not os.path.exists(history_folder) and os.path.isdir(history_folder):
-    self.logger.warning('Bad promise history folder, history is not saved...')
-    return
-
-  history_file = os.path.join(
-    history_folder,
-    '%s.history.json' % promise_name
-  )
-
-  # Remove useless informations
-  result = state_dict.pop('result')
-  state_dict.update(result)
-  state_dict.pop('path', '')
-  state_dict.pop('type', '')
-
-  if not os.path.exists(history_file) or not os.stat(history_file).st_size:
-    with open(history_file, 'w') as f:
-      data_dict = {
-        "date": time.time(),
-        "data": [state_dict]
-      }
-      json.dump(data_dict, f)
-  else:
-    if previous_state_list is not None:
-      _, change_date, checksum = previous_state_list
-      current_sum = hashlib.md5(str2bytes(state_dict.get('message', ''))).hexdigest()
-      if state_dict['change-date'] == change_date and \
-          current_sum == checksum:
-        # Only save the changes and not the same info
-        return
-    state_dict.pop('title', '')
-    state_dict.pop('name', '')
-    with open (history_file, mode="r+") as f:
-      f.seek(0,2)
-      f.seek(f.tell() -2)
-      f.write('%s}' % ',{}]'.format(json.dumps(state_dict)))
+  def generateMonitoringRSS(self, public_folder, private_folder):
+    feed_output = os.path.join(public_folder, 'feed')
+    file_list = filter(
+      os.path.isfile,
+      glob.glob("%s/promise/*.status.json" % public_folder)
+    )
+
+    promises_status_file = os.path.join(private_folder, '_promise_status')
+    current_state_dict = {}
+    if os.path.exists(promises_status_file):
+      with open(promises_status_file) as f:
+        try:
+          current_state_dict = json.load(f)
+        except ValueError:
+          pass
+
+    for file in file_list:
+      try:
+        with open(file, 'r') as temp_file:
+          tmp_json = json.load(temp_file)
+
+        name = tmp_json['name']
+        if name not in current_state_dict:
+          continue
+        promise_status, change_date, message_hash = current_state_dict[tmp_json['name']]
+        tmp_json['result']['change-date'] = change_date
+        tmp_json['status'] = promise_status
+        self.appendItem(tmp_json, message_hash)
+      except ValueError as e:
+        # bad json file
+        print("ERROR: Bad json file at: %s\n%s" % (file, e))
+        continue
+
+    self.generateRSS(feed_output)
+
+def writeDocumentList(folder_path):
+  # Save document list in a file called _document_list
+  public_document_list = [os.path.splitext(file)[0]
+    for file in os.listdir(folder_path) if file.endswith('.json')]
+
+  with open(os.path.join(folder_path, '_document_list'), 'w') as lfile:
+    lfile.write('\n'.join(public_document_list))
 
 def run(monitor_conf_file):
   config = configparser.ConfigParser()
   config.read(monitor_conf_file)
 
+  partition_folder = config.get('promises', 'partition-folder')
   base_folder = config.get('monitor', 'private-folder')
   status_folder = config.get('monitor', 'public-folder')
   base_url = config.get('monitor', 'base-url')
...
@@ -221,83 +122,80 @@ def run(monitor_conf_file):
   public_url = "%s/share/public/" % base_url
   private_url = "%s/share/private/" % base_url
   feed_url = "%s/public/feed" % base_url
-  status = 'OK'
+  slapgrid_global_state_file = os.path.join(partition_folder,
+                                    ".slapgrid/promise/global.json")
+  slapgrid_public_state_file = os.path.join(partition_folder,
+                                    ".slapgrid/promise/public.json")
   global_state_file = os.path.join(base_folder, 'monitor.global.json')
   public_state_file = os.path.join(status_folder, 'monitor.global.json')
   report_date = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S+0000')
 
-  error, success = generateMonitoringData(config, status_folder, base_folder,
-                                          public_url, private_url, feed_url)
-  if error:
-    status = 'ERROR'
-
-  global_state_dict = dict(
-    status=status,
-    state={
-      'error': error,
-      'success': success
-    },
-    type='global', # bwd compatibility
-    portal_type='Software Instance',
-    date=report_date,
+  monitor_feed = MonitorFeed(
+    config.get('monitor', 'title'),
+    config.get('monitor', 'root-title'),
+    public_url,
+    private_url,
+    feed_url)
+  monitor_feed.generateMonitoringRSS(status_folder, base_folder)
+
+  with open(slapgrid_global_state_file) as f:
+    global_state_dict = json.load(f)
+
+  with open(slapgrid_public_state_file) as f:
+    public_state_dict = json.load(f)
+
+  global_state_dict.update(dict(
     _links={"rss_url": {"href": feed_url},
             "public_url": {"href": public_url},
             "private_url": {"href": private_url},
-            "related_monitor": []
+            "related_monitor": [{'href': "%s/share/public" % url}
+                                for url in related_monitor_list]
            },
-    data={'state': 'monitor_state.data',
-          'process_state': 'monitor_process_resource.status',
-          'process_resource': 'monitor_resource_process.data',
-          'memory_resource': 'monitor_resource_memory.data',
-          'io_resource': 'monitor_resource_io.data',
-          'monitor_process_state': 'monitor_resource.status'},
     title=config.get('monitor', 'title'),
     specialise_title=config.get('monitor', 'root-title'),
-    aggregate_reference=config.get('promises', 'computer-id'),
     ipv4=config.get('promises', 'ipv4'),
     ipv6=config.get('promises', 'ipv6'),
     software_release=config.get('promises', 'software-release'),
     software_type=config.get('promises', 'software-type'),
-    partition_id=config.get('promises', 'partition-id'),
-  )
+  ))
 
   if not global_state_dict['title']:
     global_state_dict['title'] = 'Instance Monitoring'
 
-  if related_monitor_list:
-    global_state_dict['_links']['related_monitor'] = [{'href': "%s/share/public" % url}
-                                                      for url in related_monitor_list]
-
   if os.path.exists(parameter_file):
     with open(parameter_file) as cfile:
-      global_state_dict['parameters'] = json.loads(cfile.read())
+      global_state_dict['parameters'] = json.load(cfile)
 
   # Public information with the link to private folder
-  public_state_dict = dict(
-    status=status,
-    date=report_date,
-    _links={'monitor': {'href': '%s/share/private/' % base_url}},
-    title=global_state_dict.get('title', ''),
-    specialise_title=global_state_dict.get('specialise_title', ''),
-  )
-  public_state_dict['_links']['related_monitor'] = global_state_dict['_links'].get('related_monitor', [])
+  public_state_dict.update(dict(
+    _links={'monitor': {'href': '%s/share/private/' % base_url},
+            'related_monitor': global_state_dict['_links']['related_monitor']},
+    title=global_state_dict['title'],
+    specialise_title=global_state_dict['specialise_title'],
+  ))
 
   with open(global_state_file, 'w') as fglobal:
-    fglobal.write(json.dumps(global_state_dict))
+    json.dump(global_state_dict, fglobal)
   with open(public_state_file, 'w') as fpglobal:
-    fpglobal.write(json.dumps(public_state_dict))
+    json.dump(public_state_dict, fpglobal)
+
+  # Implement backward compatibility with older UI.
+  for hfile in glob.glob(os.path.join(status_folder, "history/*.history.json")):
+    hfile_name = os.path.basename(hfile)
+    hfile_link = os.path.join(status_folder, hfile_name)
+    if not os.path.exists(hfile_link):
+      os.symlink(hfile, hfile_link)
 
   # write list of files
   writeDocumentList(status_folder)
   writeDocumentList(base_folder)
   writeDocumentList(statistic_folder)
 
-  generateStatisticsData(
-    os.path.join(statistic_folder, 'monitor_state.data.json'),
-    global_state_dict
-  )
-
   return 0
 
 def main():
...
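For context, the new generateMonitoringRSS consumes two inputs that the diff above shows being matched against each other: the per-promise *.status.json files under the public folder, and the _promise_status index written to the private folder (a mapping of promise name to [status, change-date, message-hash]). The standalone sketch below (not part of the commit) mimics that lookup on in-memory data; the promise name, dates and hash are invented for illustration, and the real method passes the matched entry to self.appendItem instead of printing it.

import json

# Hypothetical content of <private_folder>/_promise_status:
# promise name -> [status, change-date, message-hash]
current_state_dict = {
  "apache-port-listening": ["OK", "2018-05-17T09:00:00",
                            "d41d8cd98f00b204e9800998ecf8427e"],
}

# Hypothetical content of one <public_folder>/promise/<name>.status.json file
tmp_json = {
  "name": "apache-port-listening",
  "result": {"message": "port is listening", "date": "2018-05-17T10:00:00"},
}

name = tmp_json['name']
if name in current_state_dict:
  # same unpacking as in the refactored loop above
  promise_status, change_date, message_hash = current_state_dict[name]
  tmp_json['result']['change-date'] = change_date
  tmp_json['status'] = promise_status
  # the real method calls self.appendItem(tmp_json, message_hash) here
  print(json.dumps(tmp_json, indent=2))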