+ assert 0 <= seconds, \
+ "Seconds must be larger than or equal to 0, but are %s" % seconds
+ assert 0 <= microseconds <= 999999, \
+ "Microseconds must be 0-999999, but are %s" % microseconds
+
+ return float(seconds) + (float(microseconds) * 0.000001)
+
+
def GetNodeDaemonPort():
  """Get the node daemon port for this cluster.

  Note that this routine does not read a ganeti-specific file, but
  instead uses C{socket.getservbyname} to allow pre-customization of
  this parameter outside of Ganeti.

  @rtype: int

  """
  # Prefer a locally configured service entry; fall back to the
  # compiled-in default when the service database has no entry
  try:
    return socket.getservbyname("ganeti-noded", "tcp")
  except socket.error:
    return constants.DEFAULT_NODED_PORT
+
+
def SetupLogging(logfile, debug=False, stderr_logging=False, program="",
                 multithreaded=False):
  """Configures the logging module.

  @type logfile: str
  @param logfile: the filename to which we should log
  @type debug: boolean
  @param debug: whether to enable debug messages too or
      only those at C{INFO} and above level
  @type stderr_logging: boolean
  @param stderr_logging: whether we should also log to the standard error
  @type program: str
  @param program: the name under which we should log messages
  @type multithreaded: boolean
  @param multithreaded: if True, will add the thread name to the log file
  @raise EnvironmentError: if we can't open the log file and
      stderr logging is disabled

  """
  fmt = "%(asctime)s: " + program + " pid=%(process)d"
  if multithreaded:
    fmt += "/%(threadName)s"
  if debug:
    fmt += " %(module)s:%(lineno)s"
  fmt += " %(levelname)s %(message)s"
  formatter = logging.Formatter(fmt)

  root_logger = logging.getLogger("")
  root_logger.setLevel(logging.NOTSET)

  # Remove all previously setup handlers; iterate over a copy of the
  # list, since removeHandler() mutates it and iterating the live list
  # would skip every other handler
  for handler in root_logger.handlers[:]:
    handler.close()
    root_logger.removeHandler(handler)

  if stderr_logging:
    stderr_handler = logging.StreamHandler()
    stderr_handler.setFormatter(formatter)
    if debug:
      stderr_handler.setLevel(logging.NOTSET)
    else:
      stderr_handler.setLevel(logging.CRITICAL)
    root_logger.addHandler(stderr_handler)

  # this can fail, if the logging directories are not setup or we have
  # a permission problem; in this case, it's best to log but ignore
  # the error if stderr_logging is True, and if false we re-raise the
  # exception since otherwise we could run but without any logs at all
  try:
    logfile_handler = logging.FileHandler(logfile)
    logfile_handler.setFormatter(formatter)
    if debug:
      logfile_handler.setLevel(logging.DEBUG)
    else:
      logfile_handler.setLevel(logging.INFO)
    root_logger.addHandler(logfile_handler)
  except EnvironmentError:
    if stderr_logging:
      logging.exception("Failed to enable logging to file '%s'", logfile)
    else:
      # we need to re-raise the exception
      raise
+
def IsNormAbsPath(path):
  """Check whether a path is absolute and also normalized.

  This avoids things like /dir/../../other/path to be valid.

  """
  if not os.path.isabs(path):
    return False
  # A normalized path is its own normpath (no "..", no trailing slash, ...)
  return os.path.normpath(path) == path
+
def TailFile(fname, lines=20):
  """Return the last lines from a file.

  @note: this function will only read and parse the last 4KB of
      the file; if the lines are very long, it could be that less
      than the requested number of lines are returned

  @param fname: the file name
  @type lines: int
  @param lines: the (maximum) number of lines to return

  """
  handle = open(fname, "r")
  try:
    # Seek to the end, then back up at most 4KB and read the tail
    handle.seek(0, 2)
    offset = max(0, handle.tell() - 4096)
    handle.seek(offset, 0)
    tail_data = handle.read()
  finally:
    handle.close()

  return tail_data.splitlines()[-lines:]
+
+
def SafeEncode(text):
  """Return a 'safe' version of a source string.

  This function mangles the input string and returns a version that
  should be safe to display/encode as ASCII. To this end, we first
  convert it to ASCII using the 'backslashreplace' encoding which
  should get rid of any non-ASCII chars, and then we again encode it
  via 'string_escape' which converts newline characters into the
  two-character sequence backslash-n so that log messages remain
  one-line.

  @type text: str or unicode
  @param text: input data
  @rtype: str
  @return: a safe version of text

  """
  # NOTE(review): the 'string_escape' codec only exists on Python 2;
  # this function must be rewritten when porting to Python 3 -- confirm
  # the supported interpreter versions.
  text = text.encode('ascii', 'backslashreplace')
  text = text.encode('string_escape')
  return text
+
+
def CommaJoin(names):
  """Nicely join a set of identifiers.

  @param names: set, list or tuple
  @return: a string with the formatted results

  """
  quoted = ["'%s'" % name for name in names]
  return ", ".join(quoted)
+
+
def LockedMethod(fn):
  """Synchronized object access decorator.

  This decorator is intended to protect access to an object using the
  object's own lock which is hardcoded to '_lock'.

  """
  def _LockDebug(*args, **kwargs):
    # Lock tracing is only emitted when the module-level flag is set
    if debug_locks:
      logging.debug(*args, **kwargs)

  def wrapper(self, *args, **kwargs):
    assert hasattr(self, '_lock')
    owned_lock = self._lock
    _LockDebug("Waiting for %s", owned_lock)
    owned_lock.acquire()
    try:
      _LockDebug("Acquired %s", owned_lock)
      retval = fn(self, *args, **kwargs)
    finally:
      # Always release, even if fn() raised
      _LockDebug("Releasing %s", owned_lock)
      owned_lock.release()
      _LockDebug("Released %s", owned_lock)
    return retval

  return wrapper
+
+
def LockFile(fd):
  """Locks a file using POSIX locks.

  The lock is exclusive and non-blocking.

  @type fd: int
  @param fd: the file descriptor we need to lock
  @raise errors.LockError: if the file is already locked by someone else

  """
  try:
    fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
  # "except E as err" works on Python 2.6+ and 3.x; the old comma form
  # is a syntax error on modern interpreters
  except IOError as err:
    if err.errno == errno.EAGAIN:
      raise errors.LockError("File already locked")
    raise
+
+
class FileLock(object):
  """Utility class for file locks.

  """
  def __init__(self, filename):
    """Constructor for FileLock.

    This will open the file denoted by the C{filename} argument.

    @type filename: str
    @param filename: path to the file to be locked

    """
    self.filename = filename
    self.fd = open(self.filename, "w")

  def __del__(self):
    # If __init__ failed before or while opening the file, the fd
    # attribute doesn't exist; don't mask the original error with an
    # AttributeError from the destructor
    if hasattr(self, "fd"):
      self.Close()

  def Close(self):
    """Close the file and release the lock.

    """
    if self.fd:
      self.fd.close()
      self.fd = None

  def _flock(self, flag, blocking, timeout, errmsg):
    """Wrapper for fcntl.flock.

    @type flag: int
    @param flag: operation flag
    @type blocking: bool
    @param blocking: whether the operation should be done in blocking mode.
    @type timeout: None or float
    @param timeout: for how long the operation should be retried (implies
        non-blocking mode).
    @type errmsg: string
    @param errmsg: error message in case operation fails.
    @raise errors.LockError: if the lock could not be acquired

    """
    assert self.fd, "Lock was closed"
    assert timeout is None or timeout >= 0, \
      "If specified, timeout must not be negative"

    # Always defined, so the EAGAIN branch below can't hit a
    # referenced-before-assignment error in plain blocking mode
    timeout_end = None

    if timeout is not None:
      # Blocking doesn't have effect with timeout
      flag |= fcntl.LOCK_NB
      timeout_end = time.time() + timeout
    elif not blocking:
      flag |= fcntl.LOCK_NB

    retry = True
    while retry:
      try:
        fcntl.flock(self.fd, flag)
        retry = False
      except IOError as err:
        if err.errno in (errno.EAGAIN, ):
          if timeout_end is not None and time.time() < timeout_end:
            # Wait before trying again
            time.sleep(max(0.1, min(1.0, timeout)))
          else:
            raise errors.LockError(errmsg)
        else:
          logging.exception("fcntl.flock failed")
          raise

  def Exclusive(self, blocking=False, timeout=None):
    """Locks the file in exclusive mode.

    @type blocking: boolean
    @param blocking: whether to block and wait until we
        can lock the file or return immediately
    @type timeout: int or None
    @param timeout: if not None, the duration to wait for the lock
        (in blocking mode)

    """
    self._flock(fcntl.LOCK_EX, blocking, timeout,
                "Failed to lock %s in exclusive mode" % self.filename)

  def Shared(self, blocking=False, timeout=None):
    """Locks the file in shared mode.

    @type blocking: boolean
    @param blocking: whether to block and wait until we
        can lock the file or return immediately
    @type timeout: int or None
    @param timeout: if not None, the duration to wait for the lock
        (in blocking mode)

    """
    self._flock(fcntl.LOCK_SH, blocking, timeout,
                "Failed to lock %s in shared mode" % self.filename)

  def Unlock(self, blocking=True, timeout=None):
    """Unlocks the file.

    According to C{flock(2)}, unlocking can also be a nonblocking
    operation::

      To make a non-blocking request, include LOCK_NB with any of the above
      operations.

    @type blocking: boolean
    @param blocking: whether to block and wait until we
        can lock the file or return immediately
    @type timeout: int or None
    @param timeout: if not None, the duration to wait for the lock
        (in blocking mode)

    """
    self._flock(fcntl.LOCK_UN, blocking, timeout,
                "Failed to unlock %s" % self.filename)
+
+
class SignalHandler(object):
  """Generic signal handler class.

  It automatically restores the original handler when deconstructed or
  when L{Reset} is called. You can either pass your own handler
  function in or query the L{called} attribute to detect whether the
  signal was sent.

  @type signum: list
  @ivar signum: the signals we handle
  @type called: boolean
  @ivar called: tracks whether any of the signals have been raised

  """
  def __init__(self, signum):
    """Constructs a new SignalHandler instance.

    @type signum: int or list of ints
    @param signum: Single signal number or set of signal numbers

    """
    # A plain int check suffices: signal numbers are small integers, and
    # the Python 2 "long" type no longer exists on modern interpreters
    if isinstance(signum, int):
      self.signum = set([signum])
    else:
      self.signum = set(signum)

    self.called = False

    self._previous = {}
    try:
      for signum in self.signum:
        # Setup handler
        prev_handler = signal.signal(signum, self._HandleSignal)
        try:
          self._previous[signum] = prev_handler
        except:
          # Restore previous handler
          signal.signal(signum, prev_handler)
          raise
    except:
      # Reset all handlers
      self.Reset()
      # Here we have a race condition: a handler may have already been called,
      # but there's not much we can do about it at this point.
      raise

  def __del__(self):
    self.Reset()

  def Reset(self):
    """Restore previous handler.

    This will reset all the signals to their previous handlers.

    """
    # Iterate over a snapshot: we delete entries while walking the dict,
    # which is not allowed on a live view
    for signum, prev_handler in list(self._previous.items()):
      signal.signal(signum, prev_handler)
      # If successful, remove from dict
      del self._previous[signum]

  def Clear(self):
    """Unsets the L{called} flag.

    This function can be used in case a signal may arrive several times.

    """
    self.called = False

  def _HandleSignal(self, signum, frame):
    """Actual signal handling function.

    """
    # This is not nice and not absolutely atomic, but it appears to be the only
    # solution in Python -- there are no atomic types.
    self.called = True
+
+
class FieldSet(object):
  """A simple field set.

  Among the features are:
    - checking if a string is among a list of static string or regex objects
    - checking if a whole list of string matches
    - returning the matching groups from a regex match

  Internally, all fields are held as regular expression objects.

  """
  def __init__(self, *items):
    # Anchor each field so that only full-string matches count
    self.items = [re.compile("^%s$" % value) for value in items]

  def Extend(self, other_set):
    """Extend the field set with the items from another one"""
    self.items.extend(other_set.items)

  def Matches(self, field):
    """Checks if a field matches the current set

    @type field: str
    @param field: the string to match
    @return: either False or a regular expression match object

    """
    # Plain first-match scan; itertools.ifilter, which the original code
    # used here, was removed in Python 3 and a loop is clearer anyway
    for regex in self.items:
      match = regex.match(field)
      if match:
        return match
    return False

  def NonMatching(self, items):
    """Returns the list of fields not matching the current set

    @type items: list
    @param items: the list of fields to check
    @rtype: list
    @return: list of non-matching fields

    """
    return [val for val in items if not self.Matches(val)]