Commit 15a22114 authored by Alain Takoudjou

monitor2: new stack working with erp5 renderjs web interface

parent e4adff53
...@@ -33,6 +33,12 @@ on-update = true
<= monitor-download-base
url = ${:_profile_base_location_}/templates/${:filename}
[monitor-template-script]
<= monitor-download-base
url = ${:_profile_base_location_}/scripts/${:filename}
destination = ${buildout:parts-directory}/monitor-scripts
on-update = true
[eggs]
recipe = zc.recipe.egg
eggs +=
...@@ -40,9 +46,10 @@ eggs +=
cns.recipe.symlink
[extra-eggs]
<= eggs
interpreter = pythonwitheggs
eggs +=
psutil
PyRSS2Gen
Jinja2
...@@ -85,7 +92,7 @@ md5sum = 04b664dfb47bfd3d01502768311aa239
# Monitor templates files
[monitor-httpd-conf]
<= monitor-template-base
md5sum = 08137be9b80e0e13d9a906c264a2f51f
filename = monitor-httpd.conf.in
[monitor-service-conf-template]
...@@ -101,7 +108,17 @@ md5sum = 8cde04bfd0c0e9bd56744b988275cfd8
[monitor-conf]
<= monitor-template-base
filename = monitor.conf.in
md5sum = c8f024d741c6494d7c9ba01601d0b917
[monitor-instance-info]
<= monitor-template-base
filename = instance-info.conf.in
md5sum = 1bdb4e05c6be04f4e5766c64467fbcec
[monitor-httpd-cors]
<= monitor-template-base
filename = httpd-cors.cfg.in
md5sum = 5afad2bb6e088e080e907f1d837effbb
# End templates files
...@@ -110,12 +127,17 @@ recipe = slapos.recipe.template:jinja2
filename = template-monitor.cfg
template = ${:_profile_base_location_}/instance-monitor.cfg.jinja2.in
rendered = ${buildout:directory}/template-monitor.cfg
md5sum = c26a0f81d67dd28b7150b97eb21b4f37
context =
key apache_location apache:location
key gzip_location gzip:location
raw monitor_bin ${monitor2-bin:location}/${monitor2-bin:filename}
raw monitor_collect ${monitor-collect:location}/${monitor-collect:filename}
raw monitor_conf_template ${monitor-conf:location}/${monitor-conf:filename}
raw monitor_document_edit ${monitor-document-edit:location}/${monitor-document-edit:filename}
raw monitor_https_cors ${monitor-httpd-cors:location}/${monitor-httpd-cors:filename}
raw monitor_instance_info ${monitor-instance-info:location}/${monitor-instance-info:filename}
raw monitor_globalstate ${monitor-globalstate:location}/${monitor-globalstate:filename}
raw monitor_password_promise_template ${monitor-password-promise:location}/${monitor-password-promise:filename}
raw monitor_password_cgi_template ${monitor-password-py-cgi:location}/${monitor-password-py-cgi:filename}
raw monitor_password_promise_interface_template ${monitor-password-promise-interface:location}/${monitor-password-promise-interface:filename}
...@@ -135,35 +157,49 @@ context =
raw monitor_service_conf_template ${monitor-service-conf-template:location}/${monitor-service-conf-template:filename}
raw openssl_executable_location ${openssl:location}/bin/openssl
raw python_executable ${buildout:executable}
raw python_with_eggs ${buildout:directory}/bin/${extra-eggs:interpreter}
raw promise_executor_py ${run-promise-py:rendered}
raw template_wrapper ${template-wrapper:output}
raw status2rss_executable_path ${status2rss-executable:location}/${status2rss-executable:filename}
[monitor2-bin]
<= monitor-template-script
filename = monitor.py
md5sum = af9eba3b4e7f265f9779874df6b94afb
[run-promise-py]
recipe = slapos.recipe.template:jinja2
template = ${:_profile_base_location_}/scripts/run-promise.py
rendered = ${buildout:parts-directory}/monitor-scripts/run-promise.py
md5sum = 8ba8b661c55f2c5a379e9e42573be486
mode = 0755
context =
raw python ${buildout:directory}/bin/${extra-eggs:interpreter}
[monitor-password-promise]
<= monitor-template-script
filename = monitor-password-promise.py
md5sum = f7e937d6619eb674f39f34718928d91d
[status2rss-executable]
<= monitor-template-script
filename = status2rss.py
md5sum = f297779d0881f4bd48081506efb492a4
[monitor-globalstate]
<= monitor-template-script
filename = globalstate.py
md5sum = 384a1148cb3da9cf353a108fe70709c5
[monitor-collect]
<= monitor-template-script
filename = collect.py
md5sum = cc65aebd4c35b3172a7ca83abde761bc
[monitor-document-edit]
<= monitor-template-script
filename = monitor-document.py
md5sum = e9a41ff86b3d598e8f72d22d2ff3f838
[make-rss-script]
...
# Default names for the instance and the hosting subscription
[slap-configuration]
root-instance-title = UNKNOWN Hosting Subscription
instance-title = UNKNOWN Instance
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = ${slap-connection:computer-id}
partition = ${slap-connection:partition-id}
url = ${slap-connection:server-url}
key = ${slap-connection:key-file}
cert = ${slap-connection:cert-file}
[cron]
recipe = slapos.cookbook:cron
cron-entries = ${logrotate-directory:cron-entries}
...@@ -56,8 +69,10 @@ bin = ${directory:bin}
etc = ${directory:etc}
run = ${directory:monitor}/run
#run = ${directory:scripts}
promises = ${directory:monitor-promise}
pids = ${directory:run}/monitor
cgi-bin = ${directory:monitor}/cgi-bin
webdav = ${directory:monitor}/webdav
public = ${directory:monitor}/public
private = ${directory:monitor}/private
services = ${directory:services}
...@@ -65,7 +80,6 @@ services-conf = ${directory:etc}/monitor.conf.d
www = ${directory:monitor}/web
web-dir = ${directory:monitor}/web
log = ${directory:log}/monitor
monitor-var = ${directory:var}/monitor
monitor-password-var = ${monitor-directory:monitor-var}/password
monitor-password-interface = ${monitor-directory:monitor-password-var}/password/interface
...@@ -110,28 +124,45 @@ ca-crl = ${ca-directory:crl}
recipe = slapos.cookbook:certificate_authority.request
key-file = ${monitor-httpd-conf-parameter:key-file}
cert-file = ${monitor-httpd-conf-parameter:cert-file}
executable = ${monitor-httpd-wrapper:wrapper-path}
wrapper = ${directory:services}/monitor-httpd
[monitor-conf-parameters]
title = ${monitor-instance-parameter:monitor-title}
root-title = ${monitor-instance-parameter:root-instance-title}
public-folder = ${monitor-directory:public}
private-folder = ${monitor-directory:private}
webdav-folder = ${monitor-directory:webdav}
web-folder = ${monitor-directory:web-dir}
base-url = ${monitor-frontend-promise:url}
#base-url = ${monitor-httpd-conf-parameter:url}
monitor-hal-json = ${monitor-directory:public}/monitor.hal.json
service-pid-folder = ${monitor-directory:pids}
crond-folder = ${logrotate-directory:cron-entries}
logrotate-folder = ${logrotate:logrotate-entries}
promise-runner = {{ promise_executor_py }}
promise-folder-list =
${directory:promises}
${directory:monitor-promise}
public-path-list =
${directory:log}
private-path-list =
${directory:log}
#
monitor-url-list =
${monitor-instance-parameter:opml-url-list}
parameter-file-path = ${monitor-instance-parameter:configuration-file-path}
parameter-list =
raw monitor-user ${monitor-instance-parameter:username}
htpasswd monitor-password ${monitor-htpassword-file:password-file} ${monitor-instance-parameter:username} ${httpd-monitor-htpasswd:htpasswd-path}
${monitor-instance-parameter:instance-configuration}
# htpasswd entry: htpasswd key password-file username htpasswd-file
collector-db = ${monitor-instance-parameter:collector-db}
collect-script = {{ monitor_collect }}
python = {{ python_with_eggs }}
[monitor-conf]
recipe = slapos.recipe.template:jinja2
...@@ -140,10 +171,28 @@ rendered = ${directory:etc}/${:filename}
filename = monitor.conf
context = section parameter_dict monitor-conf-parameters
[instance-info-parameters]
name = ${monitor-instance-parameter:monitor-title}
root-name = ${monitor-instance-parameter:root-instance-title}
computer-id = ${slap-connection:computer-id}
ipv4 = ${slap-configuration:ipv4-random}
ipv6 = ${slap-configuration:ipv6-random}
software-release = ${slap-connection:software-release-url}
software-type = ${slap-configuration:slap-software-type}
partition-id = ${slap-connection:partition-id}
[monitor-instance-info]
recipe = slapos.recipe.template:jinja2
template = {{ monitor_instance_info }}
rendered = ${directory:etc}/${:filename}
filename = instance-info.conf
context =
section instance_dict instance-info-parameters
[python-symlink]
recipe = plone.recipe.command
target = ${directory:bin}
command = ln -sf {{ python_with_eggs }} ${:target}/python
update-command = ${:command}
[start-monitor]
...@@ -153,13 +202,27 @@ wrapper-path = ${directory:scripts}/bootstrap-monitor
environment =
PATH=${python-symlink:target}:/usr/local/bin:/usr/bin:/bin
[monitor-htpasswd]
recipe = slapos.cookbook:generate.password
storage-path = ${directory:etc}/.monitor_user
bytes = 8
username = admin
[monitor-htpassword-file]
recipe = plone.recipe.command
stop-on-error = true
password-file = ${directory:etc}/.monitor_pwd
command =
if [ ! -f "${:password-file}" ]; then echo "${monitor-instance-parameter:password}" > ${:password-file}; fi
update-command = ${:command}
[httpd-monitor-htpasswd]
recipe = plone.recipe.command
stop-on-error = true
htpasswd-path = ${monitor-directory:etc}/monitor-htpasswd
command = {{ apache_location }}/bin/htpasswd -cb ${:htpasswd-path} ${:user} ${:password}
user = ${monitor-instance-parameter:username}
password = ${monitor-instance-parameter:password}
[monitor-httpd-conf-parameter]
listening-ip = ${monitor-instance-parameter:monitor-httpd-ipv6}
...@@ -171,7 +234,9 @@ error-log = ${monitor-directory:log}/httpd-error.log
cert-file = ${ca-directory:certs}/httpd.crt
key-file = ${ca-directory:certs}/httpd.key
htpasswd-file = ${httpd-monitor-htpasswd:htpasswd-path}
url = https://[${monitor-instance-parameter:monitor-httpd-ipv6}]:${:port}
httpd-cors-config-file = ${monitor-httpd-cors:rendered}
httpd-include-file =
[monitor-httpd-conf]
recipe = slapos.recipe.template:jinja2
...@@ -182,19 +247,27 @@ context =
section directory monitor-directory
section parameter_dict monitor-httpd-conf-parameter
[monitor-httpd-cors]
recipe = slapos.recipe.template:jinja2
template = {{ monitor_https_cors }}
rendered = ${directory:etc}/httpd-cors.cfg
mode = 0600
context =
key domain monitor-instance-parameter:cors-domains
[monitor-httpd-wrapper]
recipe = slapos.cookbook:wrapper
command-line = {{ apache_location }}/bin/httpd -f ${monitor-httpd-conf:rendered} -DFOREGROUND
wrapper-path = ${directory:bin}/monitor-httpd
wait-for-files =
${ca-directory:certs}/httpd.key
${ca-directory:certs}/httpd.crt
${monitor-httpd-graceful-wrapper:rendered}
[monitor-httpd-graceful-wrapper]
recipe = slapos.recipe.template:jinja2
template = {{ template_wrapper }}
rendered = ${directory:scripts}/monitor-httpd-graceful
mode = 0700
context =
key content :command
...@@ -202,7 +275,9 @@ command = kill -USR1 $(cat ${monitor-httpd-conf-parameter:pid-file})
[monitor-status2rss-wrapper]
recipe = slapos.cookbook:wrapper
# XXX - hard-coded URLs
command-line = {{ python_with_eggs }} {{ status2rss_executable_path }} --output '${monitor-directory:public}/feed' --items_folder '${monitor-directory:public}' --public_url '${monitor-conf-parameters:base-url}/share/jio_public/' --private_url '${monitor-conf-parameters:base-url}/share/jio_private/' --instance_name '${monitor-conf-parameters:title}' --hosting_name '${monitor-conf-parameters:root-title}'
wrapper-path = ${directory:bin}/monitor-status2rss.py
[monitor-status2rss-cron-entry]
...@@ -212,6 +287,31 @@ name = monitor-status2rss
frequency = * * * * *
command = ${monitor-status2rss-wrapper:wrapper-path}
[monitor-globalstate-wrapper]
recipe = slapos.cookbook:wrapper
command-line = {{ python_with_eggs }} {{ monitor_globalstate }} '${monitor-conf:rendered}' '${monitor-instance-info:rendered}'
wrapper-path = ${directory:bin}/monitor-globalstate
[monitor-configurator-wrapper]
recipe = slapos.cookbook:wrapper
# XXX - hard coded path
command-line = {{ python_with_eggs }} {{ monitor_document_edit }} --config_folder '${monitor-conf-parameters:private-folder}/config/.jio_documents' --output_cfg_file '${monitor-instance-parameter:configuration-file-path}' --htpasswd_bin '{{ apache_location }}/bin/htpasswd'
wrapper-path = ${directory:bin}/monitor-configurator
[monitor-globalstate-cron-entry]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = monitor-globalstate
frequency = * * * * *
command = ${monitor-globalstate-wrapper:wrapper-path}
[monitor-configurator-cron-entry]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = monitor-configurator
frequency = * * * * *
command = ${monitor-configurator-wrapper:wrapper-path}
[monitor-web-directory]
recipe = plone.recipe.command
command = cp -f {{ monitor_web_directory }}/* ${monitor-directory:web-dir}
...@@ -252,8 +352,8 @@ context = section parameter_dict monitor-httpd-promise-conf-parameter
[monitor-httpd-promise-conf-parameter]
title = Monitor httpd listening
# frequency: minute hour day month weekday (run every 5 minutes)
frequency = */5 * * * *
public-path-list = ${monitor-httpd-conf-parameter:access-log} ${monitor-httpd-conf-parameter:error-log}
#private-path-list =
...@@ -299,27 +399,67 @@ rendered = ${monitor-directory:monitor-password-interface}/index.html
context =
[publish]
<= monitor-base
monitor-base-url = ${monitor-conf-parameters:base-url}
monitor-url = ${:monitor-base-url}/public/feeds
monitor-user = ${monitor-instance-parameter:username}
monitor-password = ${monitor-instance-parameter:password}
[monitor-instance-parameter]
monitor-title = ${slap-configuration:instance-title}
monitor-httpd-ipv6 = ${slap-configuration:ipv6-random}
monitor-httpd-port = 8196
root-instance-title = ${slap-configuration:root-instance-title}
opml-url-list =
cors-domains =
# XXX Hard coded parameter
collector-db = /srv/slapgrid/var/data-log/collector.db
# Credentials
password = ${monitor-htpasswd:passwd}
username = ${monitor-htpasswd:username}
# XXX: type key value
# ex raw monitor-password resqdsdsd34
instance-configuration =
configuration-file-path = ${monitor-directory:etc}/monitor_knowledge0.cfg
[monitor-frontend]
<= slap-connection
recipe = slapos.cookbook:requestoptional
name = Monitor Frontend ${monitor-instance-parameter:monitor-title}
# XXX We have hardcoded SR URL here.
software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
slave = true
config-url = ${monitor-httpd-conf-parameter:url}
config-https-only = true
#software-type = custom-personal
return = domain secure_access
[monitor-frontend-promise]
recipe = slapos.cookbook:check_url_available
path = ${directory:promises}/monitor-http-frontend
url = ${monitor-frontend:connection-secure_access}
dash_path = {{ dash_executable_location }}
curl_path = {{ curl_executable_location }}
check-secure = 1
[monitor-base]
# create dependencies between required monitor parts
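# buildout installs every part whose values are referenced in ${:depends} below, so requesting monitor-base pulls in the whole monitor stack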
recipe = plone.recipe.command
command = true
update-command =
depends =
${monitor-globalstate-cron-entry:name}
${monitor-configurator-cron-entry:name}
${cron-entry-logrotate:name}
${certificate-authority:wrapper}
${monitor-conf:rendered}
${start-monitor:wrapper-path}
${ca-httpd:wrapper}
${monitor-httpd-promise:filename}
${monitor-status2rss-cron-entry:name}
#[buildout]
#parts =
# monitor-base
# publish
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010-2014 Vifib SARL and Contributors.
# All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import sqlite3
import os
import pwd
import time
import json
import argparse
import psutil
from time import strftime
from datetime import datetime, timedelta
def parseArguments():
"""
Parse arguments for monitor collector instance.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--output_folder',
help='Path of the folder where output files should be written.')
parser.add_argument('--partition_id',
help='ID of the computer partition to collect data from.')
parser.add_argument('--collector_db',
help='The path of slapos collect database.')
return parser.parse_args()
class RessourceCollect:
def __init__(self, db_path = None):
assert os.path.exists(db_path) and os.path.isfile(db_path)
self.uri = db_path
self.connection = None
self.cursor = None
def connect(self):
self.connection = sqlite3.connect(self.uri)
self.cursor = self.connection.cursor()
def close(self):
assert self.connection is not None
self.cursor.close()
self.connection.close()
def _execute(self, sql):
assert self.connection is not None
return self.cursor.execute(sql)
def select(self, table, date=None, columns="*", where=None):
""" Query database for a full table information """
if date is not None:
where_clause = " WHERE date = '%s' " % date
else:
where_clause = ""
if where is not None:
if where_clause == "":
where_clause += " WHERE 1 = 1 "
where_clause += " AND %s " % where
select_sql = "SELECT %s FROM %s %s " % (columns, table, where_clause)
return self._execute(select_sql)
def has_table(self, name):
self.connect()
check_result_cursor = self.select(
table="sqlite_master",
columns='name',
where="type='table' AND name='%s'" % name)
table_exists_result = zip(*check_result_cursor)
if not len(table_exists_result) or table_exists_result[0][0] is None:
return False
return True
def getPartitionCPULoadAverage(self, partition_id, date_scope):
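    # Average per-sample CPU usage: SUM(cpu_percent) over every process of the
    # partition for date_scope, divided by the number of distinct collection times.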
self.connect()
query_result_cursor = self.select("user", date_scope,
columns="SUM(cpu_percent)",
where="partition = '%s'" % partition_id)
cpu_percent_sum = zip(*query_result_cursor)
if len(cpu_percent_sum) and cpu_percent_sum[0][0] is None:
return
query_result_cursor = self.select("user", date_scope,
columns="COUNT(DISTINCT time)",
where="partition = '%s'" % partition_id)
sample_amount = zip(*query_result_cursor)
self.close()
if len(sample_amount) and len(cpu_percent_sum):
return round(cpu_percent_sum[0][0]/sample_amount[0][0], 2)
def getPartitionUsedMemoryAverage(self, partition_id, date_scope):
self.connect()
query_result_cursor = self.select("user", date_scope,
columns="SUM(memory_rss)",
where="partition = '%s'" % partition_id)
memory_sum = zip(*query_result_cursor)
if len(memory_sum) and memory_sum[0][0] is None:
return
query_result_cursor = self.select("user", date_scope,
columns="COUNT(DISTINCT time)",
where="partition = '%s'" % partition_id)
sample_amount = zip(*query_result_cursor)
self.close()
if len(sample_amount) and len(memory_sum):
return round(memory_sum[0][0]/(sample_amount[0][0]*1024*1024.0), 2)
def getPartitionDiskUsedAverage(self, partition_id, date_scope):
if not self.has_table('folder'):
return
    self.connect()
query_result_cursor = self.select("folder", date_scope,
columns="SUM(disk_used)",
where="partition = '%s'" % partition_id)
disk_used_sum = zip(*query_result_cursor)
if len(disk_used_sum) and disk_used_sum[0][0] is None:
return
query_result_cursor = self.select("folder", date_scope,
columns="COUNT(DISTINCT time)",
where="partition = '%s'" % partition_id)
collect_amount = zip(*query_result_cursor)
    self.close()
if len(collect_amount) and len(disk_used_sum):
return round(disk_used_sum[0][0]/(collect_amount[0][0]*1024.0), 2)
def getPartitionConsumption(self, partition_id, where=""):
"""
Query collector db to get consumed ressource for last minute
"""
self.connect()
comsumption_list = []
if where != "":
where = "and %s" % where
date_scope = datetime.now().strftime('%Y-%m-%d')
min_time = (datetime.now() - timedelta(minutes=1)).strftime('%H:%M:00')
max_time = (datetime.now() - timedelta(minutes=1)).strftime('%H:%M:59')
sql_query = """select count(pid), SUM(cpu_percent) as cpu_result, SUM(cpu_time),
MAX(cpu_num_threads), SUM(memory_percent), SUM(memory_rss), pid, SUM(io_rw_counter),
SUM(io_cycles_counter) from user
where date='%s' and partition='%s' and (time between '%s' and '%s') %s
group by pid order by cpu_result desc""" % (
date_scope, partition_id, min_time, max_time, where)
query_result = self._execute(sql_query)
for result in query_result:
count = int(result[0])
if not count > 0:
continue
resource_dict = {
'pid': result[6],
'cpu_percent': round(result[1]/count, 2),
'cpu_time': round((result[2] or 0)/(60.0), 2),
'cpu_num_threads': round(result[3]/count, 2),
'memory_percent': round(result[4]/count, 2),
'memory_rss': round((result[5] or 0)/(1024*1024.0), 2),
'io_rw_counter': round(result[7]/count, 2),
'io_cycles_counter': round(result[8]/count, 2)
}
try:
pprocess = psutil.Process(int(result[6]))
except psutil.NoSuchProcess:
pass
else:
resource_dict['name'] = pprocess.name()
resource_dict['command'] = pprocess.cmdline()
resource_dict['user'] = pprocess.username()
resource_dict['date'] = datetime.fromtimestamp(pprocess.create_time()).strftime("%Y-%m-%d %H:%M:%S")
comsumption_list.append(resource_dict)
self.close()
return comsumption_list
def getPartitionComsumptionStatus(self, partition_id, where=""):
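    # Return (process_dict, memory_dict, io_dict): aggregate CPU, memory and I/O
    # figures for the partition over the previous minute, plus disk usage from the
    # 'folder' table when it exists.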
self.connect()
if where != "":
where = " and %s" % where
date_scope = datetime.now().strftime('%Y-%m-%d')
min_time = (datetime.now() - timedelta(minutes=1)).strftime('%H:%M:00')
max_time = (datetime.now() - timedelta(minutes=1)).strftime('%H:%M:59')
sql_query = """select count(pid), SUM(cpu_percent), SUM(cpu_time),
SUM(cpu_num_threads), SUM(memory_percent), SUM(memory_rss), SUM(io_rw_counter),
SUM(io_cycles_counter) from user where
date='%s' and partition='%s' and (time between '%s' and '%s') %s""" % (
date_scope, partition_id, min_time, max_time, where)
query_result = self._execute(sql_query)
result_list = zip(*query_result)
process_dict = memory_dict = io_dict = {}
if len(result_list):
result = result_list
process_dict = {'total_process': result[0][0],
'cpu_percent': round((result[1][0] or 0), 2),
'cpu_time': round((result[2][0] or 0)/(60.0), 2),
'cpu_num_threads': round((result[3][0] or 0), 2),
'date': '%s %s' % (date_scope, min_time)
}
memory_dict = {'memory_percent': round((result[4][0] or 0), 2),
'memory_rss': round((result[5][0] or 0)/(1024*1024.0), 2),
'date': '%s %s' % (date_scope, min_time)
}
io_dict = {'io_rw_counter': round((result[6][0] or 0), 2),
'io_cycles_counter': round((result[7][0] or 0), 2),
'disk_used': 0,
'date': '%s %s' % (date_scope, min_time)
}
if self.has_table('folder'):
disk_result_cursor = self.select(
"folder", date_scope,
columns="SUM(disk_used)",
where="partition='%s' and (time between '%s' and '%s') %s" % (
partition_id, min_time, max_time, where
)
)
disk_used_sum = zip(*disk_result_cursor)
if len(disk_used_sum) and disk_used_sum[0][0] is not None:
io_dict['disk_used'] = round(disk_used_sum[0][0]/1024.0, 2)
self.close()
return (process_dict, memory_dict, io_dict)
def appendToJsonFile(file_path, content, stepback=2):
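  # The target file holds a JSON object whose "data" value is a list; instead of
  # re-serialising the whole document, seek `stepback` bytes before EOF (past the
  # trailing ']}'), append the new entry as a quoted string and close the list again.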
with open (file_path, mode="r+") as jfile:
jfile.seek(0, 2)
position = jfile.tell() - stepback
jfile.seek(position)
jfile.write('%s}' % ',"{}"]'.format(content))
if __name__ == "__main__":
parser = parseArguments()
  if not (os.path.exists(parser.output_folder) and os.path.isdir(parser.output_folder)):
    raise Exception("Invalid output folder: %s" % parser.output_folder)
collector = RessourceCollect(parser.collector_db)
date_scope = datetime.now().strftime('%Y-%m-%d')
stat_info = os.stat(parser.output_folder)
partition_user = pwd.getpwuid(stat_info.st_uid)[0]
# Consumption global status
process_file = os.path.join(parser.output_folder, 'monitor_resource_process.data.json')
mem_file = os.path.join(parser.output_folder, 'monitor_resource_memory.data.json')
io_file = os.path.join(parser.output_folder, 'monitor_resource_io.data.json')
process_result, memory_result, io_result = collector.getPartitionComsumptionStatus(partition_user)
resource_file = os.path.join(parser.output_folder, 'monitor_process_resource.status.json')
label_list = ['date', 'total_process', 'cpu_percent', 'cpu_time', 'cpu_num_threads',
'memory_percent', 'memory_rss', 'io_rw_counter', 'io_cycles_counter',
'disk_used']
resource_status_dict = {}
if not os.path.exists(process_file):
with open(process_file, 'w') as fprocess:
data_dict = {
"date": time.time(),
"data": ["date, total process, CPU percent, CPU time, CPU threads"]
}
fprocess.write(json.dumps(data_dict))
if not os.path.exists(mem_file):
with open(mem_file, 'w') as fmem:
data_dict = {
"date": time.time(),
"data": ["date, memory used percent, memory used"]
}
fmem.write(json.dumps(data_dict))
if not os.path.exists(io_file):
with open(io_file, 'w') as fio:
data_dict = {
"date": time.time(),
"data": ["date, io rw counter, io cycles counter, disk used"]
}
fio.write(json.dumps(data_dict))
if process_result and process_result['total_process'] != 0.0:
appendToJsonFile(process_file, ", ".join(
[str(process_result[key]) for key in label_list if process_result.has_key(key)])
)
resource_status_dict.update(process_result)
if memory_result and memory_result['memory_rss'] != 0.0:
appendToJsonFile(mem_file, ", ".join(
[str(memory_result[key]) for key in label_list if memory_result.has_key(key)])
)
resource_status_dict.update(memory_result)
if io_result and io_result['io_rw_counter'] != 0.0:
appendToJsonFile(io_file, ", ".join(
[str(io_result[key]) for key in label_list if io_result.has_key(key)])
)
resource_status_dict.update(io_result)
with open(os.path.join(parser.output_folder, 'monitor_resource.status.json'), 'w') as fp:
fp.write(json.dumps(resource_status_dict))
# Consumption Ressource
resource_process_status_list = collector.getPartitionConsumption(partition_user)
if resource_process_status_list:
with open(resource_file, 'w') as rf:
rf.write(json.dumps(resource_process_status_list))
#!/usr/bin/env python
import sys
import os
import glob
import json
import ConfigParser
import time
from datetime import datetime
def softConfigGet(config, *args, **kwargs):
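  # ConfigParser.get() raises when the section or option is missing; fall back to an empty string instead.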
try:
return config.get(*args, **kwargs)
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
return ""
def generateStatisticsData(stat_file_path, content):
  # csv document for statistics
if not os.path.exists(stat_file_path):
with open(stat_file_path, 'w') as fstat:
data_dict = {
"date": time.time(),
"data": ["Date, Success, Error, Warning"]
}
fstat.write(json.dumps(data_dict))
current_state = ''
if content.has_key('state'):
current_state = '%s, %s, %s, %s' % (
content['date'],
content['state']['success'],
content['state']['error'],
content['state']['warning'])
# append to file
if current_state:
with open (stat_file_path, mode="r+") as fstat:
fstat.seek(0,2)
position = fstat.tell() -2
fstat.seek(position)
fstat.write('%s}' % ',"{}"]'.format(current_state))
def main(args_list):
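  # Aggregate every *.status.json promise result from the public folder into a
  # global state: monitor.global.json is written to both the private folder (full
  # detail) and the public folder (summary), and the statistics file is updated.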
monitor_file, instance_file = args_list
monitor_config = ConfigParser.ConfigParser()
monitor_config.read(monitor_file)
base_folder = monitor_config.get('monitor', 'private-folder')
status_folder = monitor_config.get('monitor', 'public-folder')
base_url = monitor_config.get('monitor', 'base-url')
related_monitor_list = monitor_config.get("monitor", "monitor-url-list").split()
statistic_folder = os.path.join(base_folder, 'data', '.jio_documents')
parameter_file = os.path.join(base_folder, 'config', '.jio_documents', 'config.json')
if not os.path.exists(statistic_folder):
try:
os.makedirs(statistic_folder)
except OSError, e:
if e.errno == os.errno.EEXIST and os.path.isdir(statistic_folder):
pass
else: raise
# search for all status files
file_list = filter(os.path.isfile,
glob.glob("%s/*.status.json" % status_folder)
)
error = warning = success = 0
latest_date = ''
status = 'OK'
promise_list = []
global_state_file = os.path.join(base_folder, 'monitor.global.json')
public_state_file = os.path.join(status_folder, 'monitor.global.json')
for file in file_list:
try:
with open(file, 'r') as temp_file:
tmp_json = json.loads(temp_file.read())
except ValueError:
# bad json file ?
continue
if tmp_json['status'] == 'ERROR':
error += 1
elif tmp_json['status'] == 'OK':
success += 1
elif tmp_json['status'] == 'WARNING':
warning += 1
if tmp_json['start-date'] > latest_date:
latest_date = tmp_json['start-date']
tmp_json['time'] = tmp_json['start-date'].split(' ')[1]
del tmp_json['start-date']
promise_list.append(tmp_json)
if error:
status = 'ERROR'
elif warning:
status = 'WARNING'
if not latest_date:
latest_date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
global_state_dict = dict(
status=status,
state={
'error': error,
'success': success,
'warning': warning,
},
date=latest_date,
_links={"rss_url": {"href": "%s/public/feed" % base_url},
"public_url": {"href": "%s/share/jio_public/" % base_url},
"private_url": {"href": "%s/share/jio_private/" % base_url}
},
data={'state': 'monitor_state.data',
'process_state': 'monitor_process_resource.status',
'process_resource': 'monitor_resource_process.data',
'memory_resource': 'monitor_resource_memory.data',
'io_resource': 'monitor_resource_io.data',
'monitor_process_state': 'monitor_resource.status'}
)
global_state_dict['_embedded'] = {'promises': promise_list}
if os.path.exists(instance_file):
config = ConfigParser.ConfigParser()
config.read(instance_file)
if 'instance' in config.sections():
instance_dict = {}
global_state_dict['title'] = config.get('instance', 'name')
global_state_dict['hosting-title'] = config.get('instance', 'root-name')
if not global_state_dict['title']:
global_state_dict['title'] = 'Instance Monitoring'
instance_dict['computer'] = config.get('instance', 'computer')
instance_dict['ipv4'] = config.get('instance', 'ipv4')
instance_dict['ipv6'] = config.get('instance', 'ipv6')
instance_dict['software-release'] = config.get('instance', 'software-release')
instance_dict['software-type'] = config.get('instance', 'software-type')
instance_dict['partition'] = config.get('instance', 'partition')
global_state_dict['_embedded'].update({'instance' : instance_dict})
if related_monitor_list:
global_state_dict['_links']['related_monitor'] = [{'href': "%s/share/jio_public" % url}
for url in related_monitor_list]
if os.path.exists(parameter_file):
with open(parameter_file) as cfile:
global_state_dict['parameters'] = json.loads(cfile.read())
# Public information with the link to private folder
public_state_dict = dict(
status=status,
date=latest_date,
_links={'monitor': {'href': '%s/share/jio_private/' % base_url}},
title=global_state_dict.get('title', '')
)
public_state_dict['hosting-title'] = global_state_dict.get('hosting-title', '')
public_state_dict['_links']['related_monitor'] = global_state_dict['_links'].get('related_monitor', [])
with open(global_state_file, 'w') as fglobal:
fglobal.write(json.dumps(global_state_dict))
with open(public_state_file, 'w') as fpglobal:
fpglobal.write(json.dumps(public_state_dict))
generateStatisticsData(
os.path.join(statistic_folder, 'monitor_state.data.json'),
global_state_dict)
return 0
if __name__ == "__main__":
if len(sys.argv) < 3:
print("Usage: %s <monitor_conf_path> <instance_conf_path>" % sys.argv[0])
sys.exit(2)
sys.exit(main(sys.argv[1:]))
#!/usr/bin/env python
import sys
import os
import json
import argparse
import subprocess
from datetime import datetime
def parseArguments():
"""
Parse arguments for monitor instance.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--config_folder',
                      help='Path where json configuration/documents will be read and written')
parser.add_argument('--htpasswd_bin',
                      help='Path to the Apache htpasswd binary. Needed to write the htpasswd file.')
parser.add_argument('--output_cfg_file',
                      help='Output parameters to a cfg file.')
return parser.parse_args()
def fileWrite(file_path, content):
if os.path.exists(file_path):
try:
with open(file_path, 'w') as wf:
wf.write(content)
return True
except OSError, e:
print "ERROR while writing changes to %s.\n %s" % (file_path, str(e))
return False
def htpasswdWrite(htpasswd_bin, parameter_dict, value):
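  # Only applies the change when the clear-text password file already exists:
  # regenerate the htpasswd entry with Apache's `htpasswd -cb`, then keep the
  # clear-text copy in sync with the new value.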
if not os.path.exists(parameter_dict['file']):
return False
command = [htpasswd_bin, '-cb', parameter_dict['htpasswd'], parameter_dict['user'], value]
process = subprocess.Popen(
command,
stdin=None,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
result = process.communicate()[0]
if process.returncode != 0:
print result
return False
with open(parameter_dict['file'], 'w') as pfile:
pfile.write(value)
return True
def applyEditChange(parser):
parameter_tmp_file = os.path.join(parser.config_folder, 'config.tmp.json')
config_file = os.path.join(parser.config_folder, 'config.json')
parameter_config_file = os.path.join(parser.config_folder, 'config.parameters.json')
if not os.path.exists(parameter_tmp_file) or not os.path.isfile(parameter_tmp_file):
return {}
if not os.path.exists(config_file):
print "ERROR: Config file doesn't exist... Exiting"
return {}
new_parameter_list = []
parameter_list = []
description_dict = {}
result_dict = {}
try:
with open(parameter_tmp_file) as tmpfile:
new_parameter_list = json.loads(tmpfile.read())
except ValueError:
print "Error: Couldn't parse json file %s" % parameter_tmp_file
with open(parameter_config_file) as tmpfile:
description_dict = json.loads(tmpfile.read())
for i in range(0, len(new_parameter_list)):
key = new_parameter_list[i]['key']
if key != '':
description_entry = description_dict[key]
if description_entry['type'] == 'file':
result_dict[key] = fileWrite(description_entry['file'], new_parameter_list[i]['value'])
elif description_entry['type'] == 'htpasswd':
result_dict[key] = htpasswdWrite(parser.htpasswd_bin, description_entry, new_parameter_list[i]['value'])
if (parser.output_cfg_file):
try:
with open(parser.output_cfg_file, 'w') as pfile:
pfile.write('[public]\n')
for parameter in new_parameter_list:
if parameter['key']:
pfile.write('%s = %s\n' % (parameter['key'], parameter['value']))
except OSError, e:
print "Error failed to create file %s" % parser.output_cfg_file
pass
return result_dict
if __name__ == "__main__":
parser = parseArguments()
parameter_tmp_file = os.path.join(parser.config_folder, 'config.tmp.json')
config_file = os.path.join(parser.config_folder, 'config.json')
  result_dict = applyEditChange(parser)
if result_dict != {}:
status = True
for key in result_dict:
if not result_dict[key]:
status = False
if status and os.path.exists(parameter_tmp_file):
try:
os.unlink(config_file)
except OSError, e:
print "ERROR cannot remove file: %s" % parameter_tmp_file
else:
os.rename(parameter_tmp_file, config_file)
...@@ -3,12 +3,32 @@
import sys
import os
import stat
import subprocess
import threading
import json
import ConfigParser
import traceback
import argparse
import time
import glob
import urllib2
import ssl
from datetime import datetime
OPML_START = """<?xml version="1.0" encoding="UTF-8"?>
<!-- OPML generated by SlapOS -->
<opml version="1.1">
<head>
<title>%(root_title)s</title>
<dateCreated>%(creation_date)s</dateCreated>
<dateModified>%(modification_date)s</dateModified>
</head>
<body>
<outline text="%(outline_title)s">"""
OPML_END = """ </outline>
</body>
</opml>"""
OPML_OUTLINE_FEED = '<outline text="%(title)s" title="%(title)s" type="rss" version="RSS" htmlUrl="%(html_url)s" xmlUrl="%(xml_url)s" url="%(global_url)s" />'
def parseArguments():
  """
...@@ -62,16 +82,29 @@ class Monitoring(object):
    # Set Monitor variables
    self.monitor_hal_json = config.get("monitor", "monitor-hal-json")
    self.title = config.get("monitor", "title")
self.root_title = config.get("monitor", "root-title")
    self.service_pid_folder = config.get("monitor", "service-pid-folder")
    self.crond_folder = config.get("monitor", "crond-folder")
    self.logrotate_d = config.get("monitor", "logrotate-folder")
    self.promise_runner = config.get("monitor", "promise-runner")
    self.promise_folder_list = config.get("monitor", "promise-folder-list").split()
    self.public_folder = config.get("monitor", "public-folder")
    self.private_folder = config.get("monitor", "private-folder")
self.collector_db = config.get("monitor", "collector-db")
self.collect_script = config.get("monitor", "collect-script")
self.webdav_folder = config.get("monitor", "webdav-folder")
self.webdav_url = '%s/share' % config.get("monitor", "base-url")
self.public_url = '%s/public' % config.get("monitor", "base-url")
self.status_history_folder = os.path.join(self.public_folder, 'history')
self.python = config.get("monitor", "python") or "python"
    self.public_path_list = config.get("monitor", "public-path-list").split()
    self.private_path_list = config.get("monitor", "private-path-list").split()
    self.monitor_url_list = config.get("monitor", "monitor-url-list").split()
self.parameter_list = [param.strip() for param in config.get("monitor", "parameter-list").split('\n') if param]
# Use this file to write knowledge0_cfg required by webrunner
self.parameter_cfg_file = config.get("monitor", "parameter-file-path").strip()
self.config_folder = os.path.join(self.private_folder, 'config')
    self.promise_dict = {}
    for promise_folder in self.promise_folder_list:
...@@ -86,6 +119,49 @@ class Monitoring(object):
      traceback.print_exc()
    return config
def readInstanceConfiguration(self):
type_list = ['raw', 'file', 'htpasswd']
configuration_list = []
if not self.parameter_list:
return []
for config in self.parameter_list:
config_list = config.strip().split(' ')
# type: config_list[0]
if len(config_list) >= 3 and config_list[0] in type_list:
if config_list[0] == 'raw':
configuration_list.append(dict(
key='',
title=config_list[1],
value=' '.join(config_list[2:])
))
elif (config_list[0] == 'file' or config_list[0] == 'htpasswd') and \
os.path.exists(config_list[2]) and os.path.isfile(config_list[2]):
try:
with open(config_list[2]) as cfile:
parameter = dict(
key=config_list[1],
title=config_list[1],
value=cfile.read(),
description={
"type": config_list[0],
"file": config_list[2]
}
)
if config_list[0] == 'htpasswd':
if len(config_list) != 5 or not os.path.exists(config_list[4]):
print 'htpasswd file is not specified: %s' % str(config_list)
return
parameter['description']['user'] = config_list[3]
parameter['description']['htpasswd'] = config_list[4]
configuration_list.append(parameter)
except OSError, e:
print 'Cannot read file %s, Error is: %s' % (config_list[2], str(e))
pass
return configuration_list
  def setupPromiseDictFromFolder(self, folder):
    for filename in os.listdir(folder):
      path = os.path.join(folder, filename)
...@@ -101,11 +177,12 @@ class Monitoring(object):
      # if promise_name in promise_dict:
      #   loadConfig([path], promise_dict[promise_name]["configuration"])

  def createSymlinksFromConfig(self, destination_folder, source_path_list, name=""):
    if destination_folder:
      if source_path_list:
        for path in source_path_list:
          path = path.rstrip('/')
          dirname = os.path.join(destination_folder, name)
          try:
            mkdirAll(dirname) # could also raise OSError
            os.symlink(path, os.path.join(dirname, os.path.basename(path)))
...@@ -113,27 +190,160 @@ class Monitoring(object):
            if e.errno != os.errno.EEXIST:
              raise
def getMonitorTitleFromUrl(self, monitor_url):
# This file should be generated
    if not monitor_url.startswith('https://') and not monitor_url.startswith('http://'):
      return 'Unknown Instance'
if not monitor_url.endswith('/'):
monitor_url = monitor_url + '/'
context = ssl._create_unverified_context()
url = monitor_url + '/.jio_documents/monitor.global.json' # XXX Hard Coded path
try:
response = urllib2.urlopen(url, context=context)
except urllib2.HTTPError:
      return 'Unknown Instance'
else:
try:
monitor_dict = json.loads(response.read())
        return monitor_dict.get('title', 'Unknown Instance')
except ValueError, e:
print "Bad Json file at %s" % url
    return 'Unknown Instance'
def configureFolders(self):
# configure public and private folder
self.createSymlinksFromConfig(self.webdav_folder, [self.public_folder])
self.createSymlinksFromConfig(self.webdav_folder, [self.private_folder])
#configure jio_documents folder
jio_public = os.path.join(self.webdav_folder, 'jio_public')
jio_private = os.path.join(self.webdav_folder, 'jio_private')
mkdirAll(jio_public)
mkdirAll(jio_private)
mkdirAll(self.status_history_folder)
try:
os.symlink(self.public_folder, os.path.join(jio_public, '.jio_documents'))
except OSError, e:
if e.errno != os.errno.EEXIST:
raise
try:
os.symlink(self.private_folder, os.path.join(jio_private, '.jio_documents'))
except OSError, e:
if e.errno != os.errno.EEXIST:
raise
self.data_folder = os.path.join(self.private_folder, 'data', '.jio_documents')
config_folder = os.path.join(self.config_folder, '.jio_documents')
mkdirAll(self.data_folder)
mkdirAll(config_folder)
try:
os.symlink(os.path.join(self.private_folder, 'data'),
os.path.join(jio_private, 'data'))
except OSError, e:
if e.errno != os.errno.EEXIST:
raise
try:
os.symlink(self.config_folder, os.path.join(jio_private, 'config'))
except OSError, e:
if e.errno != os.errno.EEXIST:
raise
def makeConfigurationFiles(self):
config_folder = os.path.join(self.config_folder, '.jio_documents')
parameter_config_file = os.path.join(config_folder, 'config.parameters.json')
parameter_file = os.path.join(config_folder, 'config.json')
#mkdirAll(config_folder)
parameter_list = self.readInstanceConfiguration()
description_dict = {}
if parameter_list:
for i in range(0, len(parameter_list)):
key = parameter_list[i]['key']
if key:
description_dict[key] = parameter_list[i].pop('description')
with open(parameter_config_file, 'w') as config_file:
config_file.write(json.dumps(description_dict))
with open(parameter_file, 'w') as config_file:
config_file.write(json.dumps(parameter_list))
try:
with open(self.parameter_cfg_file, 'w') as pfile:
pfile.write('[public]\n')
for parameter in parameter_list:
if parameter['key']:
pfile.write('%s = %s\n' % (parameter['key'], parameter['value']))
except OSError, e:
print "Error failed to create file %s" % self.parameter_cfg_file
pass
def generateOpmlFile(self, feed_url_list, output_file):
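    # Build an OPML 1.1 document listing this monitor's RSS feed first, then one
    # <outline> entry per related monitor URL, and write it to output_file.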
if os.path.exists(output_file):
      creation_date = datetime.utcfromtimestamp(os.path.getctime(output_file)).strftime("%a, %d %b %Y %H:%M:%S +0000")
modification_date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S +0000")
else:
creation_date = modification_date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S +0000")
opml_content = OPML_START % {'creation_date': creation_date,
'modification_date': modification_date,
'outline_title': 'Monitoring RSS Feed list',
'root_title': self.root_title}
opml_content += OPML_OUTLINE_FEED % {'title': self.title,
'html_url': self.public_url + '/feed',
'xml_url': self.public_url + '/feed',
'global_url': "%s/jio_public/" % self.webdav_url}
for feed_url in feed_url_list:
opml_content += OPML_OUTLINE_FEED % {'title': self.getMonitorTitleFromUrl(feed_url + "/share/jio_public/"),
'html_url': feed_url + '/public/feed',
'xml_url': feed_url + '/public/feed',
'global_url': "%s/share/jio_public/" % feed_url}
opml_content += OPML_END
with open(output_file, 'w') as wfile:
wfile.write(opml_content)
def generateLogrotateEntry(self, name, file_list, option_list):
"""
    Add a new entry in the logrotate.d folder. This can help to rotate data files daily.
"""
content = "%(logfiles)s {\n%(options)s\n}\n" % {
'logfiles': ' '.join(file_list),
'options': '\n'.join(option_list)
}
file_path = os.path.join(self.logrotate_d, name)
with open(file_path, 'w') as flog:
flog.write(content)
  def generateMonitorHalJson(self):
monitor_link_dict = {"webdav": {"href": self.webdav_url},
"public": {"href": "%s/public" % self.webdav_url},
"private": {"href": "%s/private" % self.webdav_url},
"rss": {"href": "%s/feed" % self.public_url},
"jio_public": {"href": "%s/jio_public/" % self.webdav_url},
"jio_private": {"href": "%s/jio_private/" % self.webdav_url}
}
    if self.title:
      self.monitor_dict["title"] = self.title
    if self.monitor_url_list:
      monitor_link_dict["related_monitor"] = [{"href": url}
                                              for url in self.monitor_url_list]
    self.monitor_dict["_links"] = monitor_link_dict
    if self.promise_items:
      service_list = []
      for service_name, promise in self.promise_items:
        service_config = promise["configuration"]
        service_dict = {}
        service_dict["id"] = service_name
        service_dict["_links"] = {"status": {"href": "%s/public/%s.status.json" % (self.webdav_url, service_name)}} # hardcoded
        tmp = softConfigGet(service_config, "service", "title")
        if tmp:
          service_dict["title"] = tmp
        interface_path = os.path.join(self.private_folder, service_name, "interface/index.html") # hardcoded
        if os.path.isfile(interface_path):
          service_dict["_links"]["interface"] = {"href": "/private/%s/interface/" % service_name} # hardcoded
        else:
          service_dict["_links"]["interface"] = {"href": "/default-promise-interface.html?service_name=%s" % service_name} # XXX hardcoded
        service_list.append(service_dict)
      self.monitor_dict["_embedded"] = {"service": service_list}
...@@ -143,27 +353,60 @@ class Monitoring(object):
  def generateServiceCronEntries(self):
    # XXX only if at least one configuration file is modified, then write in the cron
    #cron_line_list = ['PATH=%s\n' % os.environ['PATH']]
    cron_line_list = []
    service_name_list = [name.replace('.status.json', '')
      for name in os.listdir(self.public_folder) if name.endswith('.status.json')]
    for service_name, promise in self.promise_items:
      service_config = promise["configuration"]
      service_status_path = "%s/%s.status.json" % (self.public_folder, service_name) # hardcoded
      mkdirAll(os.path.dirname(service_status_path))
command = ("%s %s %s " % (
self.promise_runner, promise_cmd_line = [
os.path.join(self.service_pid_folder, "%s.pid" % service_name),
service_status_path,)
) + promise["path"]
cron_line_list.append("%s %s" % (
softConfigGet(service_config, "service", "frequency") or "* * * * *", softConfigGet(service_config, "service", "frequency") or "* * * * *",
command.replace("%", "\\%"), self.promise_runner,
)) '--pid_path %s' % os.path.join(self.service_pid_folder,
wrapper_path = os.path.join(self.wraper_folder, service_name) "%s.pid" % service_name),
'--output %s' % service_status_path,
'--promise_script %s' % promise["path"],
'--promise_name "%s"' % service_name,
'--monitor_url "%s/jio_private/"' % self.webdav_url, # XXX hardcoded,
'--history_folder %s' % self.status_history_folder,
'--instance_name "%s"' % self.title,
'--hosting_name "%s"' % self.root_title]
cron_line_list.append(' '.join(promise_cmd_line))
if service_name in service_name_list:
service_name_list.pop(service_name_list.index(service_name))
"""wrapper_path = os.path.join(self.wraper_folder, service_name)
with open(wrapper_path, "w") as fp: with open(wrapper_path, "w") as fp:
fp.write("#!/bin/sh\n%s" % command) # XXX hardcoded, use dash, sh or bash binary! fp.write("#!/bin/sh\n%s" % command) # XXX hardcoded, use dash, sh or bash binary!
os.chmod(wrapper_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IROTH ) os.chmod(wrapper_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IROTH )"""
if service_name_list != []:
# XXX Some services were removed: delete their status files so the monitor no longer reports their status
for service_name in service_name_list:
status_path = os.path.join(self.public_folder, '%s.status.json' % service_name)
if os.path.exists(status_path):
try:
os.unlink(status_path)
except OSError, e:
print "Error: Failed to delete %s" % status_path, str(e)
pass
with open(self.crond_folder + "/monitor-promises", "w") as fp: with open(self.crond_folder + "/monitor-promises", "w") as fp:
fp.write("\n".join(cron_line_list)) fp.write("\n".join(cron_line_list))
def addCronEntry(self, name, frequency, command):
entry_line = '%s %s' % (frequency, command)
cron_entry_file = os.path.join(self.crond_folder, name)
with open(cron_entry_file, "w") as cronf:
cronf.write(entry_line)
def bootstrapMonitor(self): def bootstrapMonitor(self):
# create symlinks from service configurations # create symlinks from service configurations
self.promise_items = self.promise_dict.items() self.promise_items = self.promise_dict.items()
...@@ -184,13 +427,36 @@ class Monitoring(object): ...@@ -184,13 +427,36 @@ class Monitoring(object):
self.createSymlinksFromConfig(self.public_folder, self.public_path_list) self.createSymlinksFromConfig(self.public_folder, self.public_path_list)
self.createSymlinksFromConfig(self.private_folder, self.private_path_list) self.createSymlinksFromConfig(self.private_folder, self.private_path_list)
self.configureFolders()
# generate monitor.json # generate monitor.json
self.monitor_dict = {} self.monitor_dict = {}
self.generateMonitorHalJson() self.generateMonitorHalJson()
# Generate OPML file
self.generateOpmlFile(self.monitor_url_list,
os.path.join(self.public_folder, 'feeds'))
# put promises to a cron file # put promises to a cron file
self.generateServiceCronEntries() self.generateServiceCronEntries()
# Generate parameters files and scripts
self.makeConfigurationFiles()
# Rotate monitor data files
option_list = [
'daily', 'nocreate', 'noolddir', 'rotate 30',
'nocompress', 'extension .json', 'dateext',
'dateformat -%Y-%m-%d', 'notifempty'
]
file_list = ["%s/*.data.json" % self.data_folder]
self.generateLogrotateEntry('monitor.data', file_list, option_list)
# Add cron entry for SlapOS Collect
command = "%s %s --output_folder %s --collector_db %s" % (self.python,
self.collect_script, self.data_folder, self.collector_db)
self.addCronEntry('monitor_collect', '* * * * *', command)
return 0 return 0
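With placeholder paths, the crond/monitor_collect entry written by addCronEntry at the end of bootstrapMonitor would read roughly:

* * * * * <python> <monitor_collect_script> --output_folder <data_folder> --collector_db <collector_db_path>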
......
#!/usr/bin/env python
import sys
import json
import os
import time
from datetime import datetime
OPML_START = """<?xml version="1.0" encoding="UTF-8"?>
<!-- OPML generated by SlapOS -->
<opml version="1.1">
<head>
<title>SlapOS Monitoring Status Lists</title>
<dateCreated>%(creation_date)s</dateCreated>
<dateModified>%(modification_date)s</dateModified>
</head>
<body>
<outline text="%(outline_title)s">"""
OPML_END = """ </outline>
</body>
</opml>"""
OPML_OUTLINE_FEED = '<outline text="%(title)s" title="%(title)s" type="rss" version="RSS" htmlUrl="%(html_url)s" xmlUrl="%(xml_url)s" />'
def main(config_file, output_file):
feed_url_list = []
if os.path.exists(output_file):
creation_date = datetime.utcfromtimestamp(os.path.getctime(output_file)).strftime("%a, %d %b %Y %H:%M:%S +0000")
modification_date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S +0000")
else:
creation_date = modification_date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S +0000")
with open(config_file, 'r') as fconfig:
feed_url_list = json.loads(fconfig.read())
# Note: the header placeholders must be interpolated; no outline title is passed to this script, so a generic value is assumed here
opml_content = OPML_START % {'creation_date': creation_date,
'modification_date': modification_date, 'outline_title': 'Monitoring RSS Feed list'}
for feed_line in feed_url_list:
opml_content += OPML_OUTLINE_FEED % {'title': feed_line['title'], 'html_url': feed_line['url'], 'xml_url': feed_line['url']}
opml_content += OPML_END
with open(output_file, 'w') as wfile:
wfile.write(opml_content)
if __name__ == "__main__":
if len(sys.argv) < 3:
print("Usage: %s <rss_conf_file> <output_path>" % sys.argv[0])
sys.exit(2)
config_file = sys.argv[1]
output_file = sys.argv[2]
main(config_file, output_file)
\ No newline at end of file
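For example (values below are illustrative, not from the commit), with an rss configuration file containing

[{"title": "monitor-1", "url": "https://monitor-1.example/public/feed"}]

the script would write an OPML file roughly like:

<?xml version="1.0" encoding="UTF-8"?>
<!-- OPML generated by SlapOS -->
<opml version="1.1">
  <head>
    <title>SlapOS Monitoring Status Lists</title>
    <dateCreated>Fri, 01 Jan 2016 00:00:00 +0000</dateCreated>
    <dateModified>Fri, 01 Jan 2016 00:00:00 +0000</dateModified>
  </head>
  <body>
    <outline text="Monitoring RSS Feed list">
      <outline text="monitor-1" title="monitor-1" type="rss" version="RSS" htmlUrl="https://monitor-1.example/public/feed" xmlUrl="https://monitor-1.example/public/feed" />
    </outline>
  </body>
</opml>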
#!/usr/bin/env python #!{{ python }}
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import sys import sys
import os import os
import subprocess import subprocess
import json import json
import psutil
import time
from shutil import copyfile
import glob
import argparse
def parseArguments():
"""
Parse arguments for monitor collector instance.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--pid_path',
help='Path where the pid of this process will be written.')
parser.add_argument('--output',
help='Path of the file where the JSON result of this promise will be saved.')
parser.add_argument('--promise_script',
help='Promise script to execute.')
parser.add_argument('--promise_name',
help='Title to give to this promise.')
parser.add_argument('--monitor_url',
help='Monitor Instance website URL.')
parser.add_argument('--history_folder',
help='Path where the previous result file is archived before a new JSON result file is generated.')
parser.add_argument('--instance_name',
default='UNKNOWN Software Instance',
help='Software Instance name.')
parser.add_argument('--hosting_name',
default='UNKNOWN Hosting Subscription',
help='Hosting Subscription name.')
return parser.parse_args()
def main(): def main():
if len(sys.argv) < 4: parser = parseArguments()
print("Usage: %s <pid_path> <output_path> <command...>" % sys.argv[0])
return 2 if os.path.exists(parser.pid_path):
pid_path=sys.argv[1] with open(parser.pid_path, "r") as pidfile:
output_path=sys.argv[2]
if os.path.exists(pid_path):
with open(pid_path, "r") as pidfile:
try: try:
pid = int(pidfile.read(6)) pid = int(pidfile.read(6))
except ValueError: except ValueError:
...@@ -21,27 +49,88 @@ def main(): ...@@ -21,27 +49,88 @@ def main():
if pid and os.path.exists("/proc/" + str(pid)): if pid and os.path.exists("/proc/" + str(pid)):
print("A process is already running with pid " + str(pid)) print("A process is already running with pid " + str(pid))
return 1 return 1
with open(pid_path, "w") as pidfile: start_date = ""
process = executeCommand(sys.argv[3:]) with open(parser.pid_path, "w") as pidfile:
process = executeCommand(parser.promise_script)
ps_process = psutil.Process(process.pid)
start_date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ps_process.create_time()))
pidfile.write(str(process.pid)) pidfile.write(str(process.pid))
status_json = generateStatusJsonFromProcess(process)
with open(output_path, "w") as outputfile: status_json = generateStatusJsonFromProcess(process, start_date=start_date)
status_json['_links'] = {"monitor": {"href": parser.monitor_url}}
status_json['title'] = parser.promise_name
status_json['instance'] = parser.instance_name
status_json['hosting_subscription'] = parser.hosting_name
# Save the latest status change date (needed for the RSS feed)
status_json['change-time'] = ps_process.create_time()
if os.path.exists(parser.output):
with open(parser.output) as f:
last_result = json.loads(f.read())
if status_json['status'] == last_result['status'] and last_result.has_key('change-time'):
status_json['change-time'] = last_result['change-time']
updateStatusHistoryFolder(
parser.promise_name,
parser.output,
parser.history_folder
)
with open(parser.output, "w") as outputfile:
json.dump(status_json, outputfile) json.dump(status_json, outputfile)
os.remove(pid_path) os.remove(parser.pid_path)
def updateStatusHistoryFolder(name, status_file, history_folder):
old_history_list = []
history_path = os.path.join(history_folder, name, '.jio_documents')
if not os.path.exists(status_file):
return
if not os.path.exists(history_folder):
return
if not os.path.exists(history_path):
try:
os.makedirs(history_path)
except OSError, e:
if e.errno == os.errno.EEXIST and os.path.isdir(history_path):
pass
else: raise
with open(status_file, 'r') as sf:
status_dict = json.loads(sf.read())
filename = '%s.status.json' % (
status_dict['start-date'].replace(' ', '_').replace(':', ''))
copyfile(status_file, os.path.join(history_path, filename))
# Don't let the history folder grow too much, keep 40 files
file_list = filter(os.path.isfile,
glob.glob("%s/*.status.json" % history_path)
)
file_count = len(file_list)
if file_count > 40:
file_list.sort(key=lambda x: os.path.getmtime(x))
while file_count > 40:
to_delete = file_list.pop(0)
try:
os.unlink(to_delete)
file_count -= 1
except OSError:
raise
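As an illustration with placeholder names and dates, successive runs for a promise called apache-promise would accumulate history files such as:

<history_folder>/apache-promise/.jio_documents/2016-01-01_000000.status.json
<history_folder>/apache-promise/.jio_documents/2016-01-01_000100.status.json
...

with only the 40 most recent files kept.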
def generateStatusJsonFromProcess(process): def generateStatusJsonFromProcess(process, start_date=None, title=None):
stdout, stderr = process.communicate() stdout, stderr = process.communicate()
try: try:
status_json = json.loads(stdout) status_json = json.loads(stdout)
except ValueError: except ValueError:
status_json = {} status_json = {}
if process.returncode != 0: if process.returncode != 0:
status_json["status"] = "error" status_json["status"] = "ERROR"
elif not status_json.get("status"): elif not status_json.get("status"):
status_json["status"] = "OK" status_json["status"] = "OK"
if stderr: if stderr:
status_json["error"] = stderr status_json["message"] = stderr
if start_date:
status_json["start-date"] = start_date
if title:
status_json["title"] = title
return status_json return status_json
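Putting main() and generateStatusJsonFromProcess together, the <service>.status.json file written for a promise would look roughly like this (all values are placeholders):

{
  "status": "OK",
  "start-date": "2016-01-01 00:00:00",
  "title": "apache-promise",
  "instance": "my-instance",
  "hosting_subscription": "my-hosting-subscription",
  "change-time": 1451606400.0,
  "_links": {"monitor": {"href": "https://[2001:db8::1]:8196/share/jio_private/"}}
}

A "message" key is added only when the promise printed something on stderr.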
......
...@@ -4,104 +4,73 @@ import json ...@@ -4,104 +4,73 @@ import json
import datetime import datetime
import base64 import base64
import hashlib import hashlib
import PyRSS2Gen
import argparse
def parseArguments():
"""
Parse arguments for monitor Rss Generator.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--items_folder',
help='Path where to get *.status.json files which contain result of promises.')
parser.add_argument('--output',
help='The Path of file where feed file will be saved.')
parser.add_argument('--public_url',
help='Monitor Instance public URL.')
parser.add_argument('--private_url',
help='Monitor Instance private URL.')
parser.add_argument('--instance_name',
default='UNKNOWN Software Instance',
help='Software Instance name.')
parser.add_argument('--hosting_name',
default='',
help='Hosting Subscription name.')
return parser.parse_args()
def getKey(item):
return item.pubDate
def main(): def main():
_, title, link, public_folder, previous_status_path, output_path = sys.argv parser = parseArguments()
final_status = "OK";
# getting status rss_item_list = []
for filename in os.listdir(public_folder): for filename in os.listdir(parser.items_folder):
if filename.endswith(".status.json"): if filename.endswith(".status.json"):
filepath = os.path.join(public_folder, filename) filepath = os.path.join(parser.items_folder, filename)
status = None result_dict = None
try: try:
status = json.load(open(filepath, "r")) result_dict = json.load(open(filepath, "r"))
except ValueError: except ValueError:
print "Failed to load json file: %s" % filepath
continue continue
try: description = result_dict.get('message', '')
if status["status"] != "OK": event_time = datetime.datetime.fromtimestamp(result_dict['change-time'])
final_status = "BAD" rss_item = PyRSS2Gen.RSSItem(
break categories = [result_dict['status']],
except KeyError: source = PyRSS2Gen.Source(result_dict['title'], parser.public_url),
final_status = "BAD" title = '[%s] %s' % (result_dict['status'], result_dict['title']),
break comments = description,
# checking previous status description = "%s: %s\n%s" % (event_time, result_dict['status'], description),
try: link = parser.private_url,
status = open(previous_status_path, "r").readline(4) pubDate = event_time,
if status == final_status: guid = PyRSS2Gen.Guid(base64.b64encode("%s, %s" % (event_time, result_dict['status'])))
return 0
except IOError:
pass
# update status
open(previous_status_path, "w").write(final_status)
# generating RSS
utcnow = datetime.datetime.utcnow()
open(output_path, "w").write(
newRssString(
title,
title,
link,
utcnow,
utcnow,
"60",
[
newRssItemString(
"Status is %s" % final_status,
"Status is %s" % final_status,
link,
newGuid("%s, %s" % (utcnow, final_status)),
utcnow,
) )
], rss_item_list.append(rss_item)
)
)
def escapeHtml(string):
return string.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;").replace("\"", "&quot;")
def newGuid(string): ### Build the rss feed
sha256 = hashlib.sha256() sorted(rss_item_list, key=getKey)
sha256.update(string) rss_feed = PyRSS2Gen.RSS2 (
return sha256.hexdigest() title = parser.instance_name,
link = parser.public_url,
def newRssItemString(title, description, link, guid, pub_date, guid_is_perma_link=True): description = parser.hosting_name,
return """<item> lastBuildDate = datetime.datetime.utcnow(),
<title>%(title)s</title> items = rss_item_list
<description>%(description)s</description> )
<link>%(link)s</link> with open(parser.output, 'w') as frss:
<guid isPermaLink="%(guid_is_perma_link)s">%(guid)s</guid> frss.write(rss_feed.to_xml())
<pubDate>%(pub_date)s</pubDate>
</item>""" % {
"title": escapeHtml(title),
"description": escapeHtml(description),
"link": escapeHtml(link),
"guid": escapeHtml(guid),
"pub_date": escapeHtml(pub_date.strftime("%a, %d %b %Y %H:%M:%S +0000")),
"guid_is_perma_link": escapeHtml(repr(guid_is_perma_link).lower()),
}
def newRssString(title, description, link, last_build_date, pub_date, ttl, rss_item_string_list):
return """<?xml version="1.0" encoding="UTF-8" ?>
<rss version="2.0">
<channel>
<title>%(title)s</title>
<description>%(description)s</description>
<link>%(link)s</link>
<lastBuildDate>%(last_build_date)s</lastBuildDate>
<pubDate>%(pub_date)s</pubDate>
<ttl>%(ttl)s</ttl>
%(items)s
</channel>
</rss>
""" % {
"title": escapeHtml(title),
"description": escapeHtml(description),
"link": escapeHtml(link),
"last_build_date": escapeHtml(last_build_date.strftime("%a, %d %b %Y %H:%M:%S +0000")),
"pub_date": escapeHtml(pub_date.strftime("%a, %d %b %Y %H:%M:%S +0000")),
"ttl": escapeHtml(str(ttl)),
"items": "\n\n".join([" " + item.replace("\n", "\n ") for item in rss_item_string_list]),
}
if __name__ == "__main__": if __name__ == "__main__":
exit(main()) exit(main())
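With placeholder values, one entry of the generated feed would come out of PyRSS2Gen roughly as:

<item>
  <title>[ERROR] apache-promise</title>
  <link>https://[2001:db8::1]:8196/share/private/</link>
  <description>2016-01-01 00:00:00: ERROR
promise stderr output</description>
  <category>ERROR</category>
  <guid isPermaLink="true">...base64 of "2016-01-01 00:00:00, ERROR"...</guid>
  <pubDate>Fri, 01 Jan 2016 00:00:00 GMT</pubDate>
  <source url="https://[2001:db8::1]:8196/public">apache-promise</source>
</item>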
{% if domain -%}
{% set allow_domain = '|'.join(domain.replace('.', '\.').split()) -%}
SetEnvIf Origin "^http(s)?://(.+\.)?({{ allow_domain }})$" ORIGIN_DOMAIN=$0
Header always set Access-Control-Allow-Origin "%{ORIGIN_DOMAIN}e" env=ORIGIN_DOMAIN
{% endif -%}
\ No newline at end of file
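Rendered with, say, domain = "example.com monitor.example.org" (illustrative values), this template expands to:

SetEnvIf Origin "^http(s)?://(.+\.)?(example\.com|monitor\.example\.org)$" ORIGIN_DOMAIN=$0
Header always set Access-Control-Allow-Origin "%{ORIGIN_DOMAIN}e" env=ORIGIN_DOMAIN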
[instance]
name = {{ instance_dict['name'] }}
root-name = {{ instance_dict['root-name'] }}
computer = {{ instance_dict['computer-id'] }}
ipv4 = {{ instance_dict['ipv4'] }}
ipv6 = {{ instance_dict['ipv6'] }}
software-release = {{ instance_dict['software-release'] }}
software-type = {{ instance_dict['software-type'] }}
partition = {{ instance_dict['partition-id'] }}
\ No newline at end of file
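Rendered with placeholder instance parameters, the resulting instance-info file would read for example:

[instance]
name = my-software-instance
root-name = my-hosting-subscription
computer = COMP-1234
ipv4 = 10.0.0.2
ipv6 = 2001:db8::1
software-release = https://example.com/software.cfg
software-type = default
partition = slappart0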
...@@ -11,7 +11,7 @@ ServerAdmin someone@email ...@@ -11,7 +11,7 @@ ServerAdmin someone@email
Listen [{{ parameter_dict.get('listening-ip') }}]:{{ parameter_dict.get('port') }} Listen [{{ parameter_dict.get('listening-ip') }}]:{{ parameter_dict.get('port') }}
Define MonitorPort Define MonitorPort
</IfDefine> </IfDefine>
DocumentRoot "{{ directory.get('www') }}" DocumentRoot "{{ directory.get('webdav') }}"
ErrorLog "{{ parameter_dict.get('error-log') }}" ErrorLog "{{ parameter_dict.get('error-log') }}"
LoadModule unixd_module modules/mod_unixd.so LoadModule unixd_module modules/mod_unixd.so
LoadModule access_compat_module modules/mod_access_compat.so LoadModule access_compat_module modules/mod_access_compat.so
...@@ -31,6 +31,10 @@ LoadModule proxy_module modules/mod_proxy.so ...@@ -31,6 +31,10 @@ LoadModule proxy_module modules/mod_proxy.so
LoadModule proxy_http_module modules/mod_proxy_http.so LoadModule proxy_http_module modules/mod_proxy_http.so
LoadModule rewrite_module modules/mod_rewrite.so LoadModule rewrite_module modules/mod_rewrite.so
LoadModule headers_module modules/mod_headers.so LoadModule headers_module modules/mod_headers.so
LoadModule dav_module modules/mod_dav.so
LoadModule dav_fs_module modules/mod_dav_fs.so
LoadModule env_module modules/mod_env.so
LoadModule setenvif_module modules/mod_setenvif.so
# SSL Configuration # SSL Configuration
<IfDefine !SSLConfigured> <IfDefine !SSLConfigured>
...@@ -49,6 +53,11 @@ SSLCipherSuite RC4-SHA:HIGH:!ADH ...@@ -49,6 +53,11 @@ SSLCipherSuite RC4-SHA:HIGH:!ADH
AddType application/hal+json .haljson AddType application/hal+json .haljson
SSLEngine On SSLEngine On
Include {{ parameter_dict.get('httpd-cors-config-file') }}
Header set Access-Control-Allow-Credentials "true"
Header set Access-Control-Allow-Methods "PROPFIND, PROPPATCH, COPY, MOVE, DELETE, MKCOL, LOCK, UNLOCK, PUT, GETLIB, VERSION-CONTROL, CHECKIN, CHECKOUT, UNCHECKOUT, REPORT, UPDATE, CANCELUPLOAD, HEAD, OPTIONS, GET, POST"
Header set Access-Control-Allow-Headers "Overwrite, Destination, Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Authorization"
{% if parameter_dict.has_key('monitor-url-list') -%} {% if parameter_dict.has_key('monitor-url-list') -%}
RewriteEngine on RewriteEngine on
SSLProxyEngine on SSLProxyEngine on
...@@ -66,6 +75,27 @@ RewriteRule /monitor{{ index }}/(.*) {{ url }}/$1 [L,P] ...@@ -66,6 +75,27 @@ RewriteRule /monitor{{ index }}/(.*) {{ url }}/$1 [L,P]
{% endfor -%} {% endfor -%}
{% endif -%} {% endif -%}
DavLockDB {{ directory.get('monitor-var') }}/DavLock
Alias /share {{ directory.get('webdav') }}
<Directory {{ directory.get('webdav') }}>
DirectoryIndex disabled
DAV On
Options Indexes FollowSymLinks
AuthType Basic
AuthName "webdav"
AuthUserFile "{{ parameter_dict.get('htpasswd-file') }}"
<LimitExcept OPTIONS>
Require valid-user
</LimitExcept>
</Directory>
<LocationMatch "/share/(jio_)?public">
<Limit GET HEAD OPTIONS REPORT PROPFIND>
Allow from all
Satisfy any
</Limit>
</LocationMatch>
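As a quick sketch of the access rules above (host, port and credentials are placeholders): the (jio_)public folders answer anonymously, while the rest of /share requires the htpasswd user, e.g.

curl -k https://[2001:db8::1]:8196/share/public/apache-promise.status.json
curl -k -u admin:password -X PROPFIND -H "Depth: 1" https://[2001:db8::1]:8196/share/private/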
ScriptSock {{ parameter_dict.get('cgid-pid-file') }} ScriptSock {{ parameter_dict.get('cgid-pid-file') }}
<Directory {{ directory.get('www') }}> <Directory {{ directory.get('www') }}>
SSLVerifyDepth 1 SSLVerifyDepth 1
...@@ -123,3 +153,6 @@ Alias /cgi-bin {{ directory.get('cgi-bin') }} ...@@ -123,3 +153,6 @@ Alias /cgi-bin {{ directory.get('cgi-bin') }}
Options Indexes FollowSymLinks Options Indexes FollowSymLinks
Satisfy all Satisfy all
</Directory> </Directory>
{% if parameter_dict.get('httpd-include-file', '') -%}
Include {{ parameter_dict.get('httpd-include-file') }}
{% endif -%}