Commit 15a22114 authored by Alain Takoudjou

monitor2: new stack working with erp5 renderjs web interface

parent e4adff53
......@@ -33,6 +33,12 @@ on-update = true
<= monitor-download-base
url = ${:_profile_base_location_}/templates/${:filename}
[monitor-template-script]
<= monitor-download-base
url = ${:_profile_base_location_}/scripts/${:filename}
destination = ${buildout:parts-directory}/monitor-scripts
on-update = true
[eggs]
recipe = zc.recipe.egg
eggs +=
......@@ -40,9 +46,10 @@ eggs +=
cns.recipe.symlink
[extra-eggs]
recipe = zc.recipe.egg
<= eggs
interpreter = pythonwitheggs
eggs =
eggs +=
psutil
PyRSS2Gen
Jinja2
......@@ -85,7 +92,7 @@ md5sum = 04b664dfb47bfd3d01502768311aa239
# Monitor templates files
[monitor-httpd-conf]
<= monitor-template-base
md5sum = 8a1aa7cba281877d6cf63cb8ade64b5e
md5sum = 08137be9b80e0e13d9a906c264a2f51f
filename = monitor-httpd.conf.in
[monitor-service-conf-template]
......@@ -101,7 +108,17 @@ md5sum = 8cde04bfd0c0e9bd56744b988275cfd8
[monitor-conf]
<= monitor-template-base
filename = monitor.conf.in
md5sum = 2db5c08c7e8658981b4b1e3f27fd5967
md5sum = c8f024d741c6494d7c9ba01601d0b917
[monitor-instance-info]
<= monitor-template-base
filename = instance-info.conf.in
md5sum = 1bdb4e05c6be04f4e5766c64467fbcec
[monitor-httpd-cors]
<= monitor-template-base
filename = httpd-cors.cfg.in
md5sum = 5afad2bb6e088e080e907f1d837effbb
# End templates files
......@@ -110,12 +127,17 @@ recipe = slapos.recipe.template:jinja2
filename = template-monitor.cfg
template = ${:_profile_base_location_}/instance-monitor.cfg.jinja2.in
rendered = ${buildout:directory}/template-monitor.cfg
md5sum = bf0578bb6863fea73c1c40bc009c7654
md5sum = c26a0f81d67dd28b7150b97eb21b4f37
context =
key apache_location apache:location
key gzip_location gzip:location
raw monitor_bin ${monitor2-bin:location}/${monitor2-bin:filename}
raw monitor_collect ${monitor-collect:location}/${monitor-collect:filename}
raw monitor_conf_template ${monitor-conf:location}/${monitor-conf:filename}
raw monitor_document_edit ${monitor-document-edit:location}/${monitor-document-edit:filename}
raw monitor_https_cors ${monitor-httpd-cors:location}/${monitor-httpd-cors:filename}
raw monitor_instance_info ${monitor-instance-info:location}/${monitor-instance-info:filename}
raw monitor_globalstate ${monitor-globalstate:location}/${monitor-globalstate:filename}
raw monitor_password_promise_template ${monitor-password-promise:location}/${monitor-password-promise:filename}
raw monitor_password_cgi_template ${monitor-password-py-cgi:location}/${monitor-password-py-cgi:filename}
raw monitor_password_promise_interface_template ${monitor-password-promise-interface:location}/${monitor-password-promise-interface:filename}
......@@ -135,35 +157,49 @@ context =
raw monitor_service_conf_template ${monitor-service-conf-template:location}/${monitor-service-conf-template:filename}
raw openssl_executable_location ${openssl:location}/bin/openssl
raw python_executable ${buildout:executable}
raw promise_executor_py ${run-promise-py:location}/${run-promise-py:filename}
raw python_with_eggs ${buildout:directory}/bin/${extra-eggs:interpreter}
raw promise_executor_py ${run-promise-py:rendered}
raw template_wrapper ${template-wrapper:output}
raw status2rss_executable_path ${status2rss-executable:location}/${status2rss-executable:filename}
[monitor2-bin]
<= monitor-download-base
<= monitor-template-script
filename = monitor.py
md5sum = 7e1f2210a87b2212d11524e06985dd49
url = ${:_profile_base_location_}/scripts/${:filename}
md5sum = af9eba3b4e7f265f9779874df6b94afb
[run-promise-py]
<= monitor-download-base
filename = run-promise.py
md5sum = 8a46adcbc126ec9589d1810ba291d048
recipe = slapos.recipe.template:jinja2
template = ${:_profile_base_location_}/scripts/run-promise.py
rendered = ${buildout:parts-directory}/monitor-scripts/run-promise.py
md5sum = 8ba8b661c55f2c5a379e9e42573be486
mode = 0755
url = ${:_profile_base_location_}/scripts/${:filename}
context =
raw python ${buildout:directory}/bin/${extra-eggs:interpreter}
[monitor-password-promise]
<= monitor-download-base
<= monitor-template-script
filename = monitor-password-promise.py
md5sum = f7e937d6619eb674f39f34718928d91d
url = ${:_profile_base_location_}/scripts/${:filename}
[status2rss-executable]
<= monitor-download-base
<= monitor-template-script
filename = status2rss.py
md5sum = 65315ded80cd72f54f6e12d06ce813c4
url = ${:_profile_base_location_}/scripts/${:filename}
md5sum = f297779d0881f4bd48081506efb492a4
[monitor-globalstate]
<= monitor-template-script
filename = globalstate.py
md5sum = 384a1148cb3da9cf353a108fe70709c5
[monitor-collect]
<= monitor-template-script
filename = collect.py
md5sum = cc65aebd4c35b3172a7ca83abde761bc
[monitor-document-edit]
<= monitor-template-script
filename = monitor-document.py
md5sum = e9a41ff86b3d598e8f72d22d2ff3f838
[make-rss-script]
......
This diff is collapsed.
#!/usr/bin/env python
import sys
import os
import glob
import json
import ConfigParser
import time
from datetime import datetime
def softConfigGet(config, *args, **kwargs):
try:
return config.get(*args, **kwargs)
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
return ""
def generateStatisticsData(stat_file_path, content):
# CSV document for statistics
if not os.path.exists(stat_file_path):
with open(stat_file_path, 'w') as fstat:
data_dict = {
"date": time.time(),
"data": ["Date, Success, Error, Warning"]
}
fstat.write(json.dumps(data_dict))
current_state = ''
if content.has_key('state'):
current_state = '%s, %s, %s, %s' % (
content['date'],
content['state']['success'],
content['state']['error'],
content['state']['warning'])
# append to file
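# In-place append: assuming the document ends with ']}' (the "data" array is the
# last key), seek two bytes back and rewrite the closing characters after the new entry.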
if current_state:
with open (stat_file_path, mode="r+") as fstat:
fstat.seek(0,2)
position = fstat.tell() -2
fstat.seek(position)
fstat.write('%s}' % ',"{}"]'.format(current_state))
def main(args_list):
monitor_file, instance_file = args_list
monitor_config = ConfigParser.ConfigParser()
monitor_config.read(monitor_file)
base_folder = monitor_config.get('monitor', 'private-folder')
status_folder = monitor_config.get('monitor', 'public-folder')
base_url = monitor_config.get('monitor', 'base-url')
related_monitor_list = monitor_config.get("monitor", "monitor-url-list").split()
statistic_folder = os.path.join(base_folder, 'data', '.jio_documents')
parameter_file = os.path.join(base_folder, 'config', '.jio_documents', 'config.json')
if not os.path.exists(statistic_folder):
try:
os.makedirs(statistic_folder)
except OSError, e:
if e.errno == os.errno.EEXIST and os.path.isdir(statistic_folder):
pass
else: raise
# search for all status files
file_list = filter(os.path.isfile,
glob.glob("%s/*.status.json" % status_folder)
)
error = warning = success = 0
latest_date = ''
status = 'OK'
promise_list = []
global_state_file = os.path.join(base_folder, 'monitor.global.json')
public_state_file = os.path.join(status_folder, 'monitor.global.json')
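# Aggregate every promise result file (*.status.json) from the public folder
# into a single global state, keeping the most recent start date seen.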
for file in file_list:
try:
with open(file, 'r') as temp_file:
tmp_json = json.loads(temp_file.read())
except ValueError:
# bad json file ?
continue
if tmp_json['status'] == 'ERROR':
error += 1
elif tmp_json['status'] == 'OK':
success += 1
elif tmp_json['status'] == 'WARNING':
warning += 1
if tmp_json['start-date'] > latest_date:
latest_date = tmp_json['start-date']
tmp_json['time'] = tmp_json['start-date'].split(' ')[1]
del tmp_json['start-date']
promise_list.append(tmp_json)
if error:
status = 'ERROR'
elif warning:
status = 'WARNING'
if not latest_date:
latest_date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
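# The global state is published as a HAL-like JSON document (_links / _embedded)
# so the renderjs web interface can consume it.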
global_state_dict = dict(
status=status,
state={
'error': error,
'success': success,
'warning': warning,
},
date=latest_date,
_links={"rss_url": {"href": "%s/public/feed" % base_url},
"public_url": {"href": "%s/share/jio_public/" % base_url},
"private_url": {"href": "%s/share/jio_private/" % base_url}
},
data={'state': 'monitor_state.data',
'process_state': 'monitor_process_resource.status',
'process_resource': 'monitor_resource_process.data',
'memory_resource': 'monitor_resource_memory.data',
'io_resource': 'monitor_resource_io.data',
'monitor_process_state': 'monitor_resource.status'}
)
global_state_dict['_embedded'] = {'promises': promise_list}
if os.path.exists(instance_file):
config = ConfigParser.ConfigParser()
config.read(instance_file)
if 'instance' in config.sections():
instance_dict = {}
global_state_dict['title'] = config.get('instance', 'name')
global_state_dict['hosting-title'] = config.get('instance', 'root-name')
if not global_state_dict['title']:
global_state_dict['title'] = 'Instance Monitoring'
instance_dict['computer'] = config.get('instance', 'computer')
instance_dict['ipv4'] = config.get('instance', 'ipv4')
instance_dict['ipv6'] = config.get('instance', 'ipv6')
instance_dict['software-release'] = config.get('instance', 'software-release')
instance_dict['software-type'] = config.get('instance', 'software-type')
instance_dict['partition'] = config.get('instance', 'partition')
global_state_dict['_embedded'].update({'instance' : instance_dict})
if related_monitor_list:
global_state_dict['_links']['related_monitor'] = [{'href': "%s/share/jio_public" % url}
for url in related_monitor_list]
if os.path.exists(parameter_file):
with open(parameter_file) as cfile:
global_state_dict['parameters'] = json.loads(cfile.read())
# Public information with the link to private folder
public_state_dict = dict(
status=status,
date=latest_date,
_links={'monitor': {'href': '%s/share/jio_private/' % base_url}},
title=global_state_dict.get('title', '')
)
public_state_dict['hosting-title'] = global_state_dict.get('hosting-title', '')
public_state_dict['_links']['related_monitor'] = global_state_dict['_links'].get('related_monitor', [])
with open(global_state_file, 'w') as fglobal:
fglobal.write(json.dumps(global_state_dict))
with open(public_state_file, 'w') as fpglobal:
fpglobal.write(json.dumps(public_state_dict))
generateStatisticsData(
os.path.join(statistic_folder, 'monitor_state.data.json'),
global_state_dict)
return 0
if __name__ == "__main__":
if len(sys.argv) < 3:
print("Usage: %s <monitor_conf_path> <instance_conf_path>" % sys.argv[0])
sys.exit(2)
sys.exit(main(sys.argv[1:]))
#!/usr/bin/env python
import sys
import os
import json
import argparse
import subprocess
from datetime import datetime
def parseArguments():
"""
Parse arguments for the monitor document editor.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--config_folder',
help='Path where the JSON configuration/documents will be read and written.')
parser.add_argument('--htpasswd_bin',
help='Path of the Apache htpasswd binary. Needed to write the htpasswd file.')
parser.add_argument('--output_cfg_file',
help='Output parameters in a cfg file.')
return parser.parse_args()
def fileWrite(file_path, content):
if os.path.exists(file_path):
try:
with open(file_path, 'w') as wf:
wf.write(content)
return True
except OSError, e:
print "ERROR while writing changes to %s.\n %s" % (file_path, str(e))
return False
def htpasswdWrite(htpasswd_bin, parameter_dict, value):
if not os.path.exists(parameter_dict['file']):
return False
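# htpasswd -cb (re)creates the password file with a single user entry, taking the
# password from the command line; the clear-text value is also saved to
# parameter_dict['file'] below.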
command = [htpasswd_bin, '-cb', parameter_dict['htpasswd'], parameter_dict['user'], value]
process = subprocess.Popen(
command,
stdin=None,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
result = process.communicate()[0]
if process.returncode != 0:
print result
return False
with open(parameter_dict['file'], 'w') as pfile:
pfile.write(value)
return True
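# Apply the parameter changes found in config.tmp.json: each key is dispatched
# according to its type declared in config.parameters.json, either rewriting a
# plain file or regenerating an htpasswd entry.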
def applyEditChage(parser):
parameter_tmp_file = os.path.join(parser.config_folder, 'config.tmp.json')
config_file = os.path.join(parser.config_folder, 'config.json')
parameter_config_file = os.path.join(parser.config_folder, 'config.parameters.json')
if not os.path.exists(parameter_tmp_file) or not os.path.isfile(parameter_tmp_file):
return {}
if not os.path.exists(config_file):
print "ERROR: Config file doesn't exist... Exiting"
return {}
new_parameter_list = []
parameter_list = []
description_dict = {}
result_dict = {}
try:
with open(parameter_tmp_file) as tmpfile:
new_parameter_list = json.loads(tmpfile.read())
except ValueError:
print "Error: Couldn't parse json file %s" % parameter_tmp_file
with open(parameter_config_file) as tmpfile:
description_dict = json.loads(tmpfile.read())
for i in range(0, len(new_parameter_list)):
key = new_parameter_list[i]['key']
if key != '':
description_entry = description_dict[key]
if description_entry['type'] == 'file':
result_dict[key] = fileWrite(description_entry['file'], new_parameter_list[i]['value'])
elif description_entry['type'] == 'htpasswd':
result_dict[key] = htpasswdWrite(parser.htpasswd_bin, description_entry, new_parameter_list[i]['value'])
if (parser.output_cfg_file):
try:
with open(parser.output_cfg_file, 'w') as pfile:
pfile.write('[public]\n')
for parameter in new_parameter_list:
if parameter['key']:
pfile.write('%s = %s\n' % (parameter['key'], parameter['value']))
except OSError, e:
print "Error failed to create file %s" % parser.output_cfg_file
pass
return result_dict
if __name__ == "__main__":
parser = parseArguments()
parameter_tmp_file = os.path.join(parser.config_folder, 'config.tmp.json')
config_file = os.path.join(parser.config_folder, 'config.json')
result_dict = applyEditChage(parser)
if result_dict != {}:
status = True
for key in result_dict:
if not result_dict[key]:
status = False
if status and os.path.exists(parameter_tmp_file):
try:
os.unlink(config_file)
except OSError, e:
print "ERROR cannot remove file: %s" % parameter_tmp_file
else:
os.rename(parameter_tmp_file, config_file)
This diff is collapsed.
#!/usr/bin/env python
import sys
import json
import os
import time
from datetime import datetime
OPML_START = """<?xml version="1.0" encoding="UTF-8"?>
<!-- OPML generated by SlapOS -->
<opml version="1.1">
<head>
<title>SlapOS Monitoring Status Lists</title>
<dateCreated>%(creation_date)s</dateCreated>
<dateModified>%(modification_date)s</dateModified>
</head>
<body>
<outline text="%(outline_title)s">"""
OPML_END = """ </outline>
</body>
</opml>"""
OPML_OUTLINE_FEED = '<outline text="%(title)s" title="%(title)s" type="rss" version="RSS" htmlUrl="%(html_url)s" xmlUrl="%(xml_url)s" />'
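# Build an OPML document listing the RSS feed of every related monitor instance,
# from the JSON list found in the configuration file.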
def main(config_file, output_file):
feed_url_list = []
if os.path.exists(output_file):
# keep the original creation date of the OPML file, only refresh the modification date
creation_date = datetime.utcfromtimestamp(os.path.getctime(output_file)).strftime("%a, %d %b %Y %H:%M:%S +0000")
modification_date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S +0000")
else:
creation_date = modification_date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S +0000")
with open(config_file, 'r') as fconfig:
feed_url_list = json.loads(fconfig.read())
opml_content = OPML_START % {
'creation_date': creation_date,
'modification_date': modification_date,
# outline label: value assumed, the original leaves it unspecified
'outline_title': 'Monitoring RSS Feed list'
}
for feed_line in feed_url_list:
opml_content += OPML_OUTLINE_FEED % {'title': feed_line['title'], 'html_url': feed_line['url'], 'xml_url': feed_line['url']}
opml_content += OPML_END
with open(output_file, 'w') as wfile:
wfile.write(opml_content)
if __name__ == "__main__":
if len(sys.argv) < 3:
print("Usage: %s <rss_conf_file> <output_path>" % sys.argv[0])
sys.exit(2)
config_file = sys.argv[1]
output_file = sys.argv[2]
main(config_file, output_file)
\ No newline at end of file
#!/usr/bin/env python
#!{{ python }}
# -*- coding: utf-8 -*-
import sys
import os
import subprocess
import json
import psutil
import time
from shutil import copyfile
import glob
import argparse
def parseArguments():
"""
Parse arguments for the monitor promise runner.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--pid_path',
help='Path where the pid of this process will be written.')
parser.add_argument('--output',
help='Path of the file where the JSON result of this promise will be saved.')
parser.add_argument('--promise_script',
help='Promise script to execute.')
parser.add_argument('--promise_name',
help='Title to give to this promise.')
parser.add_argument('--monitor_url',
help='Monitor Instance website URL.')
parser.add_argument('--history_folder',
help='Path where old result files will be placed before generating a new JSON result file.')
parser.add_argument('--instance_name',
default='UNKNOWN Software Instance',
help='Software Instance name.')
parser.add_argument('--hosting_name',
default='UNKNOWN Hosting Subscription',
help='Hosting Subscription name.')
return parser.parse_args()
def main():
if len(sys.argv) < 4:
print("Usage: %s <pid_path> <output_path> <command...>" % sys.argv[0])
return 2
pid_path=sys.argv[1]
output_path=sys.argv[2]
if os.path.exists(pid_path):
with open(pid_path, "r") as pidfile:
parser = parseArguments()
if os.path.exists(parser.pid_path):
with open(parser.pid_path, "r") as pidfile:
try:
pid = int(pidfile.read(6))
except ValueError:
......@@ -21,27 +49,88 @@ def main():
if pid and os.path.exists("/proc/" + str(pid)):
print("A process is already running with pid " + str(pid))
return 1
with open(pid_path, "w") as pidfile:
process = executeCommand(sys.argv[3:])
start_date = ""
with open(parser.pid_path, "w") as pidfile:
process = executeCommand(parser.promise_script)
ps_process = psutil.Process(process.pid)
start_date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ps_process.create_time()))
pidfile.write(str(process.pid))
status_json = generateStatusJsonFromProcess(process)
with open(output_path, "w") as outputfile:
status_json = generateStatusJsonFromProcess(process, start_date=start_date)
status_json['_links'] = {"monitor": {"href": parser.monitor_url}}
status_json['title'] = parser.promise_name
status_json['instance'] = parser.instance_name
status_json['hosting_subscription'] = parser.hosting_name
# Save the latest status change date (needed for RSS)
status_json['change-time'] = ps_process.create_time()
if os.path.exists(parser.output):
with open(parser.output) as f:
last_result = json.loads(f.read())
if status_json['status'] == last_result['status'] and last_result.has_key('change-time'):
status_json['change-time'] = last_result['change-time']
updateStatusHistoryFolder(
parser.promise_name,
parser.output,
parser.history_folder
)
with open(parser.output, "w") as outputfile:
json.dump(status_json, outputfile)
os.remove(pid_path)
os.remove(parser.pid_path)
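# Archive each promise result in the history folder (one file per run, named
# after the promise start date), keeping at most 40 entries per promise.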
def updateStatusHistoryFolder(name, status_file, history_folder):
old_history_list = []
history_path = os.path.join(history_folder, name, '.jio_documents')
if not os.path.exists(status_file):
return
if not os.path.exists(history_folder):
return
if not os.path.exists(history_path):
try:
os.makedirs(history_path)
except OSError, e:
if e.errno == os.errno.EEXIST and os.path.isdir(history_path):
pass
else: raise
with open(status_file, 'r') as sf:
status_dict = json.loads(sf.read())
filename = '%s.status.json' % (
status_dict['start-date'].replace(' ', '_').replace(':', ''))
copyfile(status_file, os.path.join(history_path, filename))
# Don't let the history folder grow too much, keep 40 files
file_list = filter(os.path.isfile,
glob.glob("%s/*.status.json" % history_path)
)
file_count = len(file_list)
if file_count > 40:
file_list.sort(key=lambda x: os.path.getmtime(x))
while file_count > 40:
to_delete = file_list.pop(0)
try:
os.unlink(to_delete)
file_count -= 1
except OSError:
raise
def generateStatusJsonFromProcess(process):
def generateStatusJsonFromProcess(process, start_date=None, title=None):
stdout, stderr = process.communicate()
try:
status_json = json.loads(stdout)
except ValueError:
status_json = {}
if process.returncode != 0:
status_json["status"] = "error"
status_json["status"] = "ERROR"
elif not status_json.get("status"):
status_json["status"] = "OK"
if stderr:
status_json["error"] = stderr
status_json["message"] = stderr
if start_date:
status_json["start-date"] = start_date
if title:
status_json["title"] = title
return status_json
......
......@@ -4,104 +4,73 @@ import json
import datetime
import base64
import hashlib
import PyRSS2Gen
import argparse
def parseArguments():
"""
Parse arguments for the monitor RSS generator.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--items_folder',
help='Path where to find the *.status.json files which contain the promise results.')
parser.add_argument('--output',
help='Path of the file where the RSS feed will be saved.')
parser.add_argument('--public_url',
help='Monitor Instance public URL.')
parser.add_argument('--private_url',
help='Monitor Instance private URL.')
parser.add_argument('--instance_name',
default='UNKNOWN Software Instance',
help='Software Instance name.')
parser.add_argument('--hosting_name',
default='',
help='Hosting Subscription name.')
return parser.parse_args()
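# Each *.status.json promise result becomes one RSS item; items are sorted by
# publication date and rendered with PyRSS2Gen.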
def getKey(item):
return item.pubDate
def main():
_, title, link, public_folder, previous_status_path, output_path = sys.argv
final_status = "OK";
# getting status
for filename in os.listdir(public_folder):
parser = parseArguments()
rss_item_list = []
for filename in os.listdir(parser.items_folder):
if filename.endswith(".status.json"):
filepath = os.path.join(public_folder, filename)
status = None
filepath = os.path.join(parser.items_folder, filename)
result_dict = None
try:
status = json.load(open(filepath, "r"))
result_dict = json.load(open(filepath, "r"))
except ValueError:
print "Failed to load json file: %s" % filepath
continue
try:
if status["status"] != "OK":
final_status = "BAD"
break
except KeyError:
final_status = "BAD"
break
# checking previous status
try:
status = open(previous_status_path, "r").readline(4)
if status == final_status:
return 0
except IOError:
pass
# update status
open(previous_status_path, "w").write(final_status)
# generating RSS
utcnow = datetime.datetime.utcnow()
open(output_path, "w").write(
newRssString(
title,
title,
link,
utcnow,
utcnow,
"60",
[
newRssItemString(
"Status is %s" % final_status,
"Status is %s" % final_status,
link,
newGuid("%s, %s" % (utcnow, final_status)),
utcnow,
description = result_dict.get('message', '')
event_time = datetime.datetime.fromtimestamp(result_dict['change-time'])
rss_item = PyRSS2Gen.RSSItem(
categories = [result_dict['status']],
source = PyRSS2Gen.Source(result_dict['title'], parser.public_url),
title = '[%s] %s' % (result_dict['status'], result_dict['title']),
comments = description,
description = "%s: %s\n%s" % (event_time, result_dict['status'], description),
link = parser.private_url,
pubDate = event_time,
guid = PyRSS2Gen.Guid(base64.b64encode("%s, %s" % (event_time, result_dict['status'])))
)
],
)
)
rss_item_list.append(rss_item)
def escapeHtml(string):
return string.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;").replace("\"", "&quot;")
def newGuid(string):
sha256 = hashlib.sha256()
sha256.update(string)
return sha256.hexdigest()
def newRssItemString(title, description, link, guid, pub_date, guid_is_perma_link=True):
return """<item>
<title>%(title)s</title>
<description>%(description)s</description>
<link>%(link)s</link>
<guid isPermaLink="%(guid_is_perma_link)s">%(guid)s</guid>
<pubDate>%(pub_date)s</pubDate>
</item>""" % {
"title": escapeHtml(title),
"description": escapeHtml(description),
"link": escapeHtml(link),
"guid": escapeHtml(guid),
"pub_date": escapeHtml(pub_date.strftime("%a, %d %b %Y %H:%M:%S +0000")),
"guid_is_perma_link": escapeHtml(repr(guid_is_perma_link).lower()),
}
def newRssString(title, description, link, last_build_date, pub_date, ttl, rss_item_string_list):
return """<?xml version="1.0" encoding="UTF-8" ?>
<rss version="2.0">
<channel>
<title>%(title)s</title>
<description>%(description)s</description>
<link>%(link)s</link>
<lastBuildDate>%(last_build_date)s</lastBuildDate>
<pubDate>%(pub_date)s</pubDate>
<ttl>%(ttl)s</ttl>
%(items)s
</channel>
</rss>
""" % {
"title": escapeHtml(title),
"description": escapeHtml(description),
"link": escapeHtml(link),
"last_build_date": escapeHtml(last_build_date.strftime("%a, %d %b %Y %H:%M:%S +0000")),
"pub_date": escapeHtml(pub_date.strftime("%a, %d %b %Y %H:%M:%S +0000")),
"ttl": escapeHtml(str(ttl)),
"items": "\n\n".join([" " + item.replace("\n", "\n ") for item in rss_item_string_list]),
}
### Build the rss feed
rss_item_list.sort(key=getKey)
rss_feed = PyRSS2Gen.RSS2(
title = parser.instance_name,
link = parser.public_url,
description = parser.hosting_name,
lastBuildDate = datetime.datetime.utcnow(),
items = rss_item_list
)
with open(parser.output, 'w') as frss:
frss.write(rss_feed.to_xml())
if __name__ == "__main__":
exit(main())
{% if domain -%}
{% set allow_domain = '|'.join(domain.replace('.', '\.').split()) -%}
SetEnvIf Origin "^http(s)?://(.+\.)?({{ allow_domain }})$" ORIGIN_DOMAIN=$0
Header always set Access-Control-Allow-Origin "%{ORIGIN_DOMAIN}e" env=ORIGIN_DOMAIN
{% endif -%}
\ No newline at end of file
[instance]
name = {{ instance_dict['name'] }}
root-name = {{ instance_dict['root-name'] }}
computer = {{ instance_dict['computer-id'] }}
ipv4 = {{ instance_dict['ipv4'] }}
ipv6 = {{ instance_dict['ipv6'] }}
software-release = {{ instance_dict['software-release'] }}
software-type = {{ instance_dict['software-type'] }}
partition = {{ instance_dict['partition-id'] }}
\ No newline at end of file
......@@ -11,7 +11,7 @@ ServerAdmin someone@email
Listen [{{ parameter_dict.get('listening-ip') }}]:{{ parameter_dict.get('port') }}
Define MonitorPort
</IfDefine>
DocumentRoot "{{ directory.get('www') }}"
DocumentRoot "{{ directory.get('webdav') }}"
ErrorLog "{{ parameter_dict.get('error-log') }}"
LoadModule unixd_module modules/mod_unixd.so
LoadModule access_compat_module modules/mod_access_compat.so
......@@ -31,6 +31,10 @@ LoadModule proxy_module modules/mod_proxy.so
LoadModule proxy_http_module modules/mod_proxy_http.so
LoadModule rewrite_module modules/mod_rewrite.so
LoadModule headers_module modules/mod_headers.so
LoadModule dav_module modules/mod_dav.so
LoadModule dav_fs_module modules/mod_dav_fs.so
LoadModule env_module modules/mod_env.so
LoadModule setenvif_module modules/mod_setenvif.so
# SSL Configuration
<IfDefine !SSLConfigured>
......@@ -49,6 +53,11 @@ SSLCipherSuite RC4-SHA:HIGH:!ADH
AddType application/hal+json .haljson
SSLEngine On
Include {{ parameter_dict.get('httpd-cors-config-file') }}
Header set Access-Control-Allow-Credentials "true"
Header set Access-Control-Allow-Methods "PROPFIND, PROPPATCH, COPY, MOVE, DELETE, MKCOL, LOCK, UNLOCK, PUT, GETLIB, VERSION-CONTROL, CHECKIN, CHECKOUT, UNCHECKOUT, REPORT, UPDATE, CANCELUPLOAD, HEAD, OPTIONS, GET, POST"
Header set Access-Control-Allow-Headers "Overwrite, Destination, Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Authorization"
{% if parameter_dict.has_key('monitor-url-list') -%}
RewriteEngine on
SSLProxyEngine on
......@@ -66,6 +75,27 @@ RewriteRule /monitor{{ index }}/(.*) {{ url }}/$1 [L,P]
{% endfor -%}
{% endif -%}
DavLockDB {{ directory.get('monitor-var') }}/DavLock
Alias /share {{ directory.get('webdav') }}
<Directory {{ directory.get('webdav') }}>
DirectoryIndex disabled
DAV On
Options Indexes FollowSymLinks
AuthType Basic
AuthName "webdav"
AuthUserFile "{{ parameter_dict.get('htpasswd-file') }}"
<LimitExcept OPTIONS>
Require valid-user
</LimitExcept>
</Directory>
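# Anonymous read-only access is only allowed on the public share; everything
# else under /share requires a valid htpasswd user.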
<LocationMatch "/share/(jio_)?public">
<Limit GET HEAD OPTIONS REPORT PROPFIND>
Allow from all
Satisfy any
</Limit>
</LocationMatch>
ScriptSock {{ parameter_dict.get('cgid-pid-file') }}
<Directory {{ directory.get('www') }}>
SSLVerifyDepth 1
......@@ -123,3 +153,6 @@ Alias /cgi-bin {{ directory.get('cgi-bin') }}
Options Indexes FollowSymLinks
Satisfy all
</Directory>
{% if parameter_dict.get('httpd-include-file', '') -%}
Include {{ parameter_dict.get('httpd-include-file') }}
{% endif -%}