root / ganeti / ganeti-eventd.py @ 348f53de
History | View | Annotate | Download (6.8 kB)
1 |
#!/usr/bin/env python
|
---|---|
2 |
#
|
3 |
# Copyright (c) 2010 Greek Research and Technology Network
|
4 |
#
|
5 |
"""Ganeti notification daemon with amqp
|
6 |
|
7 |
A daemon to monitor the Ganeti job queue and publish job progress
|
8 |
and Ganeti VM state notifications over an AMQP exchange.
|
9 |
|
10 |
"""
|
11 |
|
12 |
from django.core.management import setup_environ |
13 |
|
14 |
import sys |
15 |
import os |
16 |
path = os.path.normpath(os.path.join(os.getcwd(), '..'))
|
17 |
sys.path.append(path) |
18 |
import synnefo.settings as settings |
19 |
|
20 |
setup_environ(settings) |
21 |
|
22 |
import time |
23 |
import json |
24 |
import logging |
25 |
import pyinotify |
26 |
import daemon |
27 |
import daemon.pidlockfile |
28 |
import socket |
29 |
from signal import signal, SIGINT, SIGTERM |
30 |
|
31 |
from amqplib import client_0_8 as amqp |
32 |
|
33 |
from threading import Thread, Event, currentThread |
34 |
|
35 |
from ganeti import utils |
36 |
from ganeti import jqueue |
37 |
from ganeti import constants |
38 |
from ganeti import serializer |
39 |
|
40 |
class JobFileHandler(pyinotify.ProcessEvent):
    """Handle IN_CLOSE_WRITE inotify events on Ganeti job-queue files.

    For every opcode in a (re)written job file, publish a JSON status
    message to the "ganeti" AMQP exchange with routing key "eventd".
    The AMQP channel is opened lazily and re-opened if the broker
    connection is lost.
    """
    def __init__(self, logger):
        pyinotify.ProcessEvent.__init__(self)
        self.logger = logger
        # AMQP channel; opened lazily on the first processed event.
        self.chan = None

    def open_channel(self):
        """Connect to the AMQP broker, retrying forever, and return a channel.

        The original code let a connection failure propagate and kill the
        daemon even though it was written as a retry loop; connection
        errors are now caught so the loop really does retry.
        """
        conn = None
        while conn is None:
            self.logger.info("Attempting to connect to %s",
                             settings.RABBIT_HOST)
            try:
                conn = amqp.Connection(host=settings.RABBIT_HOST,
                                       userid=settings.RABBIT_USERNAME,
                                       password=settings.RABBIT_PASSWORD,
                                       virtual_host=settings.RABBIT_VHOST)
            except socket.error:
                # Broker unreachable: wait and retry instead of crashing.
                conn = None
                time.sleep(1)

        self.logger.info("Connection successful, opening channel")
        return conn.channel()

    def process_IN_CLOSE_WRITE(self, event):
        """Parse the closed job file and publish one message per opcode."""
        if self.chan is None:
            self.chan = self.open_channel()

        # Only files named job-* belong to the Ganeti job queue.
        if not event.name.startswith("job-"):
            self.logger.debug("Not a job file: %s", event.path)
            return
        jobfile = os.path.join(event.path, event.name)

        try:
            data = utils.ReadFile(jobfile)
        except IOError:
            # The job file may have been archived/removed meanwhile.
            return

        data = serializer.LoadJson(data)
        job = jqueue._QueuedJob.Restore(None, data)

        for op in job.ops:
            # Opcodes carry either an "instances" list or a single
            # "instance_name"; try both, preferring the latter.
            instances = ""
            try:
                instances = " ".join(op.input.instances)
            except AttributeError:
                pass

            try:
                instances = op.input.instance_name
            except AttributeError:
                pass

            # Get the last line of the op log as message
            try:
                logmsg = op.log[-1][-1]
            except IndexError:
                logmsg = None

            self.logger.debug("%d: %s(%s) %s %s",
                              int(job.id), op.input.OP_ID, instances,
                              op.status, logmsg)

            # Construct message
            msg = {
                "type": "ganeti-op-status",
                "instance": instances,
                "operation": op.input.OP_ID,
                "jobId": int(job.id),
                "status": op.status,
                "logmsg": logmsg,
            }
            if logmsg:
                msg["message"] = logmsg

            self.logger.debug("PUSHing msg: %s", json.dumps(msg))
            amqp_msg = amqp.Message(json.dumps(msg))
            amqp_msg.properties["delivery_mode"] = 2  # Persistent
            try:
                self.chan.basic_publish(amqp_msg, exchange="ganeti",
                                        routing_key="eventd")
            except socket.error:
                # Broker went away: reconnect and retry the publish once.
                self.logger.error("Server went away, reconnecting...")
                self.chan = self.open_channel()
                self.chan.basic_publish(amqp_msg, exchange="ganeti",
                                        routing_key="eventd")
            except Exception:
                self.logger.error("Unknown error (msg: %s)", amqp_msg)
                raise
|
121 |
|
122 |
handler_logger = None  # shared logger for signal handlers; set up in main()


def fatal_signal_handler(signum, frame):
    """Signal handler for fatal signals: log *signum*, then terminate.

    Raising SystemExit lets main()'s try/finally run its cleanup path.
    """
    global handler_logger

    message = "Caught fatal signal %d, will raise SystemExit"
    handler_logger.info(message, signum)
    raise SystemExit
129 |
|
130 |
def parse_arguments(args):
    """Parse command-line arguments for the daemon.

    Returns the ``(options, args)`` pair produced by optparse.
    The stray trailing comma after the ``--log`` option (which turned
    that statement into a throwaway tuple) has been removed.
    """
    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option("-d", "--debug", action="store_true", dest="debug",
                      help="Enable debugging information")
    parser.add_option("-l", "--log", dest="log_file",
                      default=settings.GANETI_EVENTD_LOG_FILE,
                      metavar="FILE",
                      help="Write log to FILE instead of %s" %
                           settings.GANETI_EVENTD_LOG_FILE)
    parser.add_option('--pid-file', dest="pid_file",
                      default=settings.GANETI_EVENTD_PID_FILE,
                      metavar='PIDFILE',
                      help="Save PID to file (default: %s)" %
                           settings.GANETI_EVENTD_PID_FILE)

    return parser.parse_args(args)
|
148 |
|
149 |
def main():
    """Daemon entry point: daemonize, then watch the Ganeti job queue.

    Sets up file logging, becomes a daemon (python-daemon), installs
    SIGINT/SIGTERM handlers, and runs the pyinotify event loop over
    constants.QUEUE_DIR until a signal or error terminates it.
    """
    global handler_logger

    (opts, args) = parse_arguments(sys.argv[1:])

    # Create pidfile (10-second timeout when acquiring the lock)
    pidf = daemon.pidlockfile.TimeoutPIDLockFile(opts.pid_file, 10)

    # Initialize logger
    lvl = logging.DEBUG if opts.debug else logging.INFO
    logger = logging.getLogger("ganeti-amqpd")
    logger.setLevel(lvl)
    formatter = logging.Formatter("%(asctime)s %(module)s[%(process)d] %(levelname)s: %(message)s",
            "%Y-%m-%d %H:%M:%S")
    handler = logging.FileHandler(opts.log_file)
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    # Expose the logger to fatal_signal_handler via the module global.
    handler_logger = logger

    # Become a daemon:
    # Redirect stdout and stderr to handler.stream to catch
    # early errors in the daemonization process [e.g., pidfile creation]
    # which will otherwise go to /dev/null.
    daemon_context = daemon.DaemonContext(
            pidfile=pidf,
            umask=022,
            stdout=handler.stream,
            stderr=handler.stream,
            files_preserve=[handler.stream])
    daemon_context.open()
    logger.info("Became a daemon")

    # Catch signals to ensure graceful shutdown
    signal(SIGINT, fatal_signal_handler)
    signal(SIGTERM, fatal_signal_handler)

    # Monitor the Ganeti job queue, create and push notifications
    # NOTE: "handler" is deliberately rebound here from the log handler
    # to the inotify event handler; the log handler is no longer needed.
    wm = pyinotify.WatchManager()
    mask = pyinotify.EventsCodes.ALL_FLAGS["IN_CLOSE_WRITE"]
    handler = JobFileHandler(logger)
    notifier = pyinotify.Notifier(wm, handler)

    try:
        # Fail if adding the inotify() watch fails for any reason
        res = wm.add_watch(constants.QUEUE_DIR, mask)
        if res[constants.QUEUE_DIR] < 0:
            raise Exception("pyinotify add_watch returned negative watch descriptor")

        logger.info("Now watching %s" % constants.QUEUE_DIR)

        while True:    # loop forever
            # process the queue of events as explained above
            notifier.process_events()
            if notifier.check_events():
                # read notified events and enqueue them
                notifier.read_events()
    except SystemExit:
        logger.info("SystemExit")
    except:
        logger.exception("Caught exception, terminating")
    finally:
        # destroy the inotify's instance on this interrupt (stop monitoring)
        notifier.stop()
        # Python 2 bare raise: re-raises the exception last handled above
        # (e.g. SystemExit from a signal), so the process still exits
        # with it after cleanup.
        raise
|
214 |
|
215 |
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
217 |
|
218 |
# vim: set sta sts=4 shiftwidth=4 sw=4 et ai :
|