Commit fd078caf authored by Marco Mariani

Merge remote-tracking branch 'origin/resiliency_annotated'

parents 3cf3f4fa af114351
0.31.2 (unreleased)
===================
* pubsub: support multiple notifications and callbacks. [Marco Mariani]
* pubsub: print/return errors from subprocess or notifications. [Marco Mariani]

0.3.1 (2012-10-02)
==================
......
# -*- coding: utf-8 -*-
# vim: set et sts=2:
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
@@ -24,23 +26,23 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import argparse
import gdbm
import json
import logging
import logging.handlers
import os
import Queue
import select
import StringIO
import socket
import signal
import subprocess
cleanup_data = {}
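
# cleanup_data maps a resource category ('sockets', 'subprocesses', ...) to the
# list of live resources of that kind.  cleanup() below has the (signum, frame)
# signature of a signal handler; it releases every registered resource and
# deliberately swallows individual failures.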
def cleanup(signum=None, frame=None):
  global cleanup_data
  cleanup_functions = dict(
      sockets=lambda sock: sock.close(),
      subprocesses=lambda process: process.terminate(),
@@ -48,6 +50,7 @@ def cleanup(signum=None, frame=None):
  )
  for data, function in cleanup_functions.iteritems():
    for item in cleanup_data.get(data, []):
      # XXX will these lists ever have more than 1 element??
      # Swallow everything!
      try:
        function(item)
@@ -89,7 +92,6 @@ class TaskRunner(object):
    self._command = None

  def run(self, command, time):
    self._time = time
    self._command = command
    self._task = subprocess.Popen([command], stdin=subprocess.PIPE,
@@ -106,8 +108,6 @@ class TaskRunner(object):
    return self._task.stdout.fileno()


def main():
  parser = argparse.ArgumentParser(
      description="Run a single threaded execution queue.")
  parser.add_argument('--database', nargs=1, required=True,
@@ -169,7 +169,7 @@ def main():
    conn.settimeout(args.timeout)
    request_string = StringIO.StringIO()
    segment = None
    try:
      while segment != '':
@@ -180,7 +180,7 @@ def main():
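    # Each accepted connection is expected to carry one JSON object, e.g.
    # {"command": "/path/to/script", "timestamp": 1349160000}; the command is
    # queued for the single execution thread, with '127' left as the fallback
    # value if the request cannot be parsed.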
    command = '127'
    try:
      request = json.loads(request_string.getvalue())
      timestamp = request['timestamp']
      command = str(request['command'])
      task_queue.put([command, timestamp])
@@ -231,3 +231,4 @@ def main():
if __name__ == '__main__':
  main()
......
from datetime import datetime
import csv
import feedparser
import io
import socket
import json
import time
@@ -82,16 +83,16 @@ def notify():
  except AttributeError:
    abort(httplib.BAD_REQUEST)
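
  # Each line of the callback file is an independent command: forward every one
  # to the execution queue daemon over its UNIX socket, together with the
  # timestamp of the latest feed update.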
  abort_it = False
  for callback in io.open(callback_filepath, 'r', encoding='utf8'):
    timestamp = int(math.floor(time.mktime(feed.feed.updated_parsed)))
    equeue_request = json.dumps({
      'command': callback,
      'timestamp': timestamp,
    })
    equeue_socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    equeue_socket.connect(app.config['EQUEUE_SOCKET'])
@@ -100,6 +101,10 @@ def notify():
    equeue_socket.close()
    if result != callback:
      abort_it = True

  if abort_it:
    # XXX if possible, communicate info about the failed callbacks
    abort(httplib.INTERNAL_SERVER_ERROR)

  return '', httplib.NO_CONTENT
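
# A possible follow-up to the XXX note above (not part of this commit; it
# assumes flask.make_response is available here): collect the callbacks whose
# result did not match and return them in the 500 body instead of aborting with
# an empty one, e.g.
#
#   failed_callbacks = []
#   ...
#   if result != callback:
#     failed_callbacks.append(callback)
#   ...
#   if failed_callbacks:
#     abort(make_response('failed callbacks:\n%s' % ''.join(failed_callbacks),
#                         httplib.INTERNAL_SERVER_ERROR))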
......
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import csv
import httplib
import math
import os
import socket
import subprocess
import sys
import time
import urllib2
import urlparse
import uuid
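
# Overall flow: run the wrapped command, append a status entry to the CSV file
# that backs the feed, then fetch the feed and POST it to every notification
# URL given on the command line, exiting non-zero on any failure.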

def main():
  parser = argparse.ArgumentParser()
@@ -40,11 +40,14 @@ def main():
  command.stdin.flush()
  command.stdin.close()

  command_failed = (command.wait() != 0)
  command_stderr = command.stderr.read()

  if command_failed:
    content = ("<p>Failed with returncode <em>%d</em>.</p>"
               "<p>Standard error output is :</p><pre>%s</pre>") % (
        command.poll(),
        command_stderr.replace('&', '&amp;')\
                      .replace('<', '&lt;')\
                      .replace('>', '&gt;'),
    )
@@ -54,15 +57,22 @@ def main():
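
  # Append one row per run to the CSV file backing the status feed:
  # timestamp, title, HTML body and a unique entry id.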
  with open(args.logfile[0], 'a') as file_:
    cvsfile = csv.writer(file_)
    cvsfile.writerow([
      int(time.time()),
      args.title[0],
      content,
      'slapos:%s' % uuid.uuid4(),
    ])

  if command_failed:
    sys.stderr.write('%s\n' % command_stderr)
    sys.exit(1)
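
  # Fetch the feed once and POST its body to every notification URL; remember
  # failures so the remaining URLs are still tried before exiting non-zero.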
  feed = urllib2.urlopen(args.feed_url[0])
  body = feed.read()

  some_notification_failed = False
  for notif_url in args.notification_url:
    notification_url = urlparse.urlparse(notif_url)
    notification_port = notification_url.port
    if notification_port is None:
      notification_port = socket.getservbyname(notification_url.scheme)
@@ -70,14 +80,17 @@ def main():
    headers = {'Content-Type': feed.info().getheader('Content-Type')}
    notification = httplib.HTTPConnection(notification_url.hostname,
                                          notification_port)
    notification.request('POST', notification_url.path, body, headers)
    response = notification.getresponse()
    if not (200 <= response.status < 300):
      sys.stderr.write("The remote server at %s didn't send a successful response.\n" % notif_url)
      sys.stderr.write("Its response was %r\n" % response.reason)
      some_notification_failed = True

  if some_notification_failed:
    sys.exit(1)

if __name__ == '__main__':
  main()