X-Git-Url: https://code.grnet.gr/git/ganeti-local/blobdiff_plain/f65f63ef8d43adbec08450e4ca7e77e7ba0629f5..e8022bef81d07552d7c5b430050f6901bea60942:/lib/utils.py diff --git a/lib/utils.py b/lib/utils.py index 8a4087a..6877dc7 100644 --- a/lib/utils.py +++ b/lib/utils.py @@ -27,9 +27,7 @@ the command line scripts. """ -import sys import os -import sha import time import subprocess import re @@ -43,10 +41,17 @@ import select import fcntl import resource import logging +import logging.handlers import signal from cStringIO import StringIO +try: + from hashlib import sha1 +except ImportError: + import sha + sha1 = sha.new + from ganeti import errors from ganeti import constants @@ -54,12 +59,13 @@ from ganeti import constants _locksheld = [] _re_shell_unquoted = re.compile('^[-.,=:/_+@A-Za-z0-9]+$') -debug = False debug_locks = False #: when set to True, L{RunCmd} is disabled no_fork = False +_RANDOM_UUID_FILE = "/proc/sys/kernel/random/uuid" + class RunResult(object): """Holds the result of running external programs. @@ -112,13 +118,13 @@ class RunResult(object): output = property(_GetOutput, None, None, "Return full output") -def RunCmd(cmd, env=None, output=None, cwd='/'): +def RunCmd(cmd, env=None, output=None, cwd='/', reset_env=False): """Execute a (shell) command. The command should not read from its standard input, as it will be closed. - @type cmd: string or list + @type cmd: string or list @param cmd: Command to run @type env: dict @param env: Additional environment @@ -129,9 +135,11 @@ def RunCmd(cmd, env=None, output=None, cwd='/'): @type cwd: string @param cwd: if specified, will be used as the working directory for the command; the default will be / + @type reset_env: boolean + @param reset_env: whether to reset or keep the default os environment @rtype: L{RunResult} @return: RunResult instance - @raise erors.ProgrammerError: if we call this when forks are disabled + @raise errors.ProgrammerError: if we call this when forks are disabled """ if no_fork: @@ -146,16 +154,27 @@ def RunCmd(cmd, env=None, output=None, cwd='/'): shell = True logging.debug("RunCmd '%s'", strcmd) - cmd_env = os.environ.copy() - cmd_env["LC_ALL"] = "C" + if not reset_env: + cmd_env = os.environ.copy() + cmd_env["LC_ALL"] = "C" + else: + cmd_env = {} + if env is not None: cmd_env.update(env) - if output is None: - out, err, status = _RunCmdPipe(cmd, cmd_env, shell, cwd) - else: - status = _RunCmdFile(cmd, cmd_env, shell, output, cwd) - out = err = "" + try: + if output is None: + out, err, status = _RunCmdPipe(cmd, cmd_env, shell, cwd) + else: + status = _RunCmdFile(cmd, cmd_env, shell, output, cwd) + out = err = "" + except OSError, err: + if err.errno == errno.ENOENT: + raise errors.OpExecError("Can't execute '%s': not found (%s)" % + (strcmd, err)) + else: + raise if status >= 0: exitcode = status @@ -166,6 +185,7 @@ def RunCmd(cmd, env=None, output=None, cwd='/'): return RunResult(exitcode, signal_, out, err, strcmd) + def _RunCmdPipe(cmd, env, via_shell, cwd): """Run a command and return its output. 
@@ -268,6 +288,43 @@ def _RunCmdFile(cmd, env, via_shell, output, cwd): return status +def RunParts(dir_name, env=None, reset_env=False): + """Run Scripts or programs in a directory + + @type dir_name: string + @param dir_name: absolute path to a directory + @type env: dict + @param env: The environment to use + @type reset_env: boolean + @param reset_env: whether to reset or keep the default os environment + @rtype: list of tuples + @return: list of (name, (one of RUNDIR_STATUS), RunResult) + + """ + rr = [] + + try: + dir_contents = ListVisibleFiles(dir_name) + except OSError, err: + logging.warning("RunParts: skipping %s (cannot list: %s)", dir_name, err) + return rr + + for relname in sorted(dir_contents): + fname = os.path.join(dir_name, relname) + if not (os.path.isfile(fname) and os.access(fname, os.X_OK) and + constants.EXT_PLUGIN_MASK.match(relname) is not None): + rr.append((relname, constants.RUNPARTS_SKIP, None)) + else: + try: + result = RunCmd([fname], env=env, reset_env=reset_env) + except Exception, err: # pylint: disable-msg=W0703 + rr.append((relname, constants.RUNPARTS_ERR, str(err))) + else: + rr.append((relname, constants.RUNPARTS_RUN, result)) + + return rr + + def RemoveFile(filename): """Remove a file ignoring some errors. @@ -306,11 +363,43 @@ def RenameFile(old, new, mkdir=False, mkdir_mode=0750): # as efficient. if mkdir and err.errno == errno.ENOENT: # Create directory and try again - os.makedirs(os.path.dirname(new), mkdir_mode) + dirname = os.path.dirname(new) + try: + os.makedirs(dirname, mode=mkdir_mode) + except OSError, err: + # Ignore EEXIST. This is only handled in os.makedirs as included in + # Python 2.5 and above. + if err.errno != errno.EEXIST or not os.path.exists(dirname): + raise + return os.rename(old, new) + raise +def ResetTempfileModule(): + """Resets the random name generator of the tempfile module. + + This function should be called after C{os.fork} in the child process to + ensure it creates a newly seeded random generator. Otherwise it would + generate the same random parts as the parent process. If several processes + race for the creation of a temporary file, this could lead to one not getting + a temporary name. + + """ + # pylint: disable-msg=W0212 + if hasattr(tempfile, "_once_lock") and hasattr(tempfile, "_name_sequence"): + tempfile._once_lock.acquire() + try: + # Reset random name generator + tempfile._name_sequence = None + finally: + tempfile._once_lock.release() + else: + logging.critical("The tempfile module misses at least one of the" + " '_once_lock' and '_name_sequence' attributes") + + def _FingerprintFile(filename): """Compute the fingerprint of a file. @@ -329,7 +418,7 @@ def _FingerprintFile(filename): f = open(filename) - fp = sha.sha() + fp = sha1() while True: data = f.read(4096) if not data: @@ -360,30 +449,71 @@ def FingerprintFiles(files): return ret -def CheckDict(target, template, logname=None): - """Ensure a dictionary has a required set of keys. - - For the given dictionaries I{target} and I{template}, ensure - I{target} has all the keys from I{template}. Missing keys are added - with values from template. +def ForceDictType(target, key_types, allowed_values=None): + """Force the values of a dict to have certain types. 
@type target: dict - @param target: the dictionary to update - @type template: dict - @param template: the dictionary holding the default values - @type logname: str or None - @param logname: if not None, causes the missing keys to be - logged with this name + @param target: the dict to update + @type key_types: dict + @param key_types: dict mapping target dict keys to types + in constants.ENFORCEABLE_TYPES + @type allowed_values: list + @keyword allowed_values: list of specially allowed values """ - missing = [] - for k in template: - if k not in target: - missing.append(k) - target[k] = template[k] + if allowed_values is None: + allowed_values = [] + + if not isinstance(target, dict): + msg = "Expected dictionary, got '%s'" % target + raise errors.TypeEnforcementError(msg) - if missing and logname: - logging.warning('%s missing keys %s', logname, ', '.join(missing)) + for key in target: + if key not in key_types: + msg = "Unknown key '%s'" % key + raise errors.TypeEnforcementError(msg) + + if target[key] in allowed_values: + continue + + ktype = key_types[key] + if ktype not in constants.ENFORCEABLE_TYPES: + msg = "'%s' has non-enforceable type %s" % (key, ktype) + raise errors.ProgrammerError(msg) + + if ktype == constants.VTYPE_STRING: + if not isinstance(target[key], basestring): + if isinstance(target[key], bool) and not target[key]: + target[key] = '' + else: + msg = "'%s' (value %s) is not a valid string" % (key, target[key]) + raise errors.TypeEnforcementError(msg) + elif ktype == constants.VTYPE_BOOL: + if isinstance(target[key], basestring) and target[key]: + if target[key].lower() == constants.VALUE_FALSE: + target[key] = False + elif target[key].lower() == constants.VALUE_TRUE: + target[key] = True + else: + msg = "'%s' (value %s) is not a valid boolean" % (key, target[key]) + raise errors.TypeEnforcementError(msg) + elif target[key]: + target[key] = True + else: + target[key] = False + elif ktype == constants.VTYPE_SIZE: + try: + target[key] = ParseUnit(target[key]) + except errors.UnitParseError, err: + msg = "'%s' (value %s) is not a valid size. error: %s" % \ + (key, target[key], err) + raise errors.TypeEnforcementError(msg) + elif ktype == constants.VTYPE_INT: + try: + target[key] = int(target[key]) + except (ValueError, TypeError): + msg = "'%s' (value %s) is not a valid integer" % (key, target[key]) + raise errors.TypeEnforcementError(msg) def IsProcessAlive(pid): @@ -415,27 +545,27 @@ def ReadPidFile(pidfile): @type pidfile: string @param pidfile: path to the file containing the pid @rtype: int - @return: The process id, if the file exista and contains a valid PID, + @return: The process id, if the file exists and contains a valid PID, otherwise 0 """ try: - pf = open(pidfile, 'r') + raw_data = ReadFile(pidfile) except EnvironmentError, err: if err.errno != errno.ENOENT: - logging.exception("Can't read pid file?!") + logging.exception("Can't read pid file") return 0 try: - pid = int(pf.read()) - except ValueError, err: + pid = int(raw_data) + except (TypeError, ValueError), err: logging.info("Can't parse pid file contents", exc_info=True) return 0 return pid -def MatchNameComponent(key, name_list): +def MatchNameComponent(key, name_list, case_sensitive=True): """Try to match a name against a list. This function will try to match a name like test1 against a list @@ -443,23 +573,42 @@ def MatchNameComponent(key, name_list): this list, I{'test1'} as well as I{'test1.example'} will match, but not I{'test1.ex'}. A multiple match will be considered as no match at all (e.g. 
I{'test1'} against C{['test1.example.com', - 'test1.example.org']}). + 'test1.example.org']}), except when the key fully matches an entry + (e.g. I{'test1'} against C{['test1', 'test1.example.com']}). @type key: str @param key: the name to be searched @type name_list: list @param name_list: the list of strings against which to search the key + @type case_sensitive: boolean + @param case_sensitive: whether to provide a case-sensitive match @rtype: None or str @return: None if there is no match I{or} if there are multiple matches, otherwise the element from the list which matches """ - mo = re.compile("^%s(\..*)?$" % re.escape(key)) - names_filtered = [name for name in name_list if mo.match(name) is not None] - if len(names_filtered) != 1: - return None - return names_filtered[0] + if key in name_list: + return key + + re_flags = 0 + if not case_sensitive: + re_flags |= re.IGNORECASE + key = key.upper() + mo = re.compile("^%s(\..*)?$" % re.escape(key), re_flags) + names_filtered = [] + string_matches = [] + for name in name_list: + if mo.match(name) is not None: + names_filtered.append(name) + if not case_sensitive and key == name.upper(): + string_matches.append(name) + + if len(string_matches) == 1: + return string_matches[0] + if len(names_filtered) == 1: + return names_filtered[0] + return None class HostInfo: @@ -517,6 +666,16 @@ class HostInfo: return result +def GetHostInfo(name=None): + """Lookup host name and raise an OpPrereqError for failures""" + + try: + return HostInfo(name) + except errors.ResolverError, err: + raise errors.OpPrereqError("The given name (%s) does not resolve: %s" % + (err[0], err[2]), errors.ECODE_RESOLVER) + + def ListVolumeGroups(): """List volume groups and their size @@ -557,37 +716,6 @@ def BridgeExists(bridge): return os.path.isdir("/sys/class/net/%s/bridge" % bridge) -def CheckBEParams(beparams): - """Checks whether the user-supplied be-params are valid, - and converts them from string format where appropriate. - - @type beparams: dict - @param beparams: new params dict - - """ - if beparams: - for item in beparams: - if item not in constants.BES_PARAMETERS: - raise errors.OpPrereqError("Unknown backend parameter %s" % item) - if item in (constants.BE_MEMORY, constants.BE_VCPUS): - val = beparams[item] - if val != constants.VALUE_DEFAULT: - try: - val = int(val) - except ValueError, err: - raise errors.OpPrereqError("Invalid %s size: %s" % (item, str(err))) - beparams[item] = val - if item in (constants.BE_AUTO_BALANCE): - val = beparams[item] - if not isinstance(val, bool): - if val == constants.VALUE_TRUE: - beparams[item] = True - elif val == constants.VALUE_FALSE: - beparams[item] = False - else: - raise errors.OpPrereqError("Invalid %s value: %s" % (item, val)) - - def NiceSort(name_list): """Sort a list of strings based on digit and non-digit groupings. @@ -642,7 +770,7 @@ def TryConvert(fn, val): """ try: nv = fn(val) - except (ValueError, TypeError), err: + except (ValueError, TypeError): nv = val return nv @@ -656,7 +784,7 @@ def IsValidIP(ip): @type ip: str @param ip: the address to be checked @rtype: a regular expression match object - @return: a regular epression match object, or None if the + @return: a regular expression match object, or None if the address is not valid """ @@ -689,7 +817,7 @@ def BuildShellCmd(template, *args): This function will check all arguments in the args list so that they are valid shell parameters (i.e. they don't contain shell - metacharaters). If everything is ok, it will return the result of + metacharacters). 
If everything is ok, it will return the result of template % args. @type template: str @@ -750,7 +878,7 @@ def ParseUnit(input_string): is always an int in MiB. """ - m = re.match('^([.\d]+)\s*([a-zA-Z]+)?$', input_string) + m = re.match('^([.\d]+)\s*([a-zA-Z]+)?$', str(input_string)) if not m: raise errors.UnitParseError("Invalid format") @@ -862,6 +990,7 @@ def SetEtcHostsEntry(file_name, ip, hostname, aliases): @param aliases: the list of aliases to add for the hostname """ + # FIXME: use WriteFile + fn rather than duplicating its efforts # Ensure aliases are unique aliases = UniqueSequence([hostname] + aliases)[1:] @@ -884,6 +1013,7 @@ def SetEtcHostsEntry(file_name, ip, hostname, aliases): out.flush() os.fsync(out) + os.chmod(tmpname, 0644) os.rename(tmpname, file_name) finally: f.close() @@ -917,6 +1047,7 @@ def RemoveEtcHostsEntry(file_name, hostname): @param hostname: the hostname to be removed """ + # FIXME: use WriteFile + fn rather than duplicating its efforts fd, tmpname = tempfile.mkstemp(dir=os.path.dirname(file_name)) try: out = os.fdopen(fd, 'w') @@ -938,6 +1069,7 @@ def RemoveEtcHostsEntry(file_name, hostname): out.flush() os.fsync(out) + os.chmod(tmpname, 0644) os.rename(tmpname, file_name) finally: f.close() @@ -1014,7 +1146,7 @@ def ShellQuoteArgs(args): @type args: list @param args: list of arguments to be quoted @rtype: str - @return: the quoted arguments concatenaned with spaces + @return: the quoted arguments concatenated with spaces """ return ' '.join([ShellQuote(i) for i in args]) @@ -1031,7 +1163,7 @@ def TcpPing(target, port, timeout=10, live_port_needed=False, source=None): @type port: int @param port: the port to connect to @type timeout: int - @param timeout: the timeout on the connection attemp + @param timeout: the timeout on the connection attempt @type live_port_needed: boolean @param live_port_needed: whether a closed port will cause the function to return failure, as if there was a timeout @@ -1048,7 +1180,7 @@ def TcpPing(target, port, timeout=10, live_port_needed=False, source=None): if source is not None: try: sock.bind((source, 0)) - except socket.error, (errcode, errstring): + except socket.error, (errcode, _): if errcode == errno.EADDRNOTAVAIL: success = False @@ -1060,7 +1192,7 @@ def TcpPing(target, port, timeout=10, live_port_needed=False, source=None): success = True except socket.timeout: success = False - except socket.error, (errcode, errstring): + except socket.error, (errcode, _): success = (not live_port_needed) and (errcode == errno.ECONNREFUSED) return success @@ -1073,7 +1205,7 @@ def OwnIpAddress(address): address. @type address: string - @param address: the addres to check + @param address: the address to check @rtype: bool @return: True if we own the address @@ -1125,41 +1257,54 @@ def NewUUID(): @rtype: str """ - f = open("/proc/sys/kernel/random/uuid", "r") - try: - return f.read(128).rstrip("\n") - finally: - f.close() + return ReadFile(_RANDOM_UUID_FILE, size=128).rstrip("\n") -def GenerateSecret(): +def GenerateSecret(numbytes=20): """Generates a random secret. - This will generate a pseudo-random secret, and return its sha digest + This will generate a pseudo-random secret returning an hex string (so that it can be used where an ASCII string is needed). 
+ @param numbytes: the number of bytes which will be represented by the returned + string (defaulting to 20, the length of a SHA1 hash) @rtype: str - @return: a sha1 hexdigest of a block of 64 random bytes + @return: an hex representation of the pseudo-random sequence """ - return sha.new(os.urandom(64)).hexdigest() + return os.urandom(numbytes).encode('hex') + + +def EnsureDirs(dirs): + """Make required directories, if they don't exist. + @param dirs: list of tuples (dir_name, dir_mode) + @type dirs: list of (string, integer) -def ReadFile(file_name, size=None): + """ + for dir_name, dir_mode in dirs: + try: + os.mkdir(dir_name, dir_mode) + except EnvironmentError, err: + if err.errno != errno.EEXIST: + raise errors.GenericError("Cannot create needed directory" + " '%s': %s" % (dir_name, err)) + if not os.path.isdir(dir_name): + raise errors.GenericError("%s is not a directory" % dir_name) + + +def ReadFile(file_name, size=-1): """Reads a file. - @type size: None or int - @param size: Read at most size bytes + @type size: int + @param size: Read at most size bytes (if negative, entire file) @rtype: str - @return: the (possibly partial) conent of the file + @return: the (possibly partial) content of the file """ f = open(file_name, "r") try: - if size is None: - return f.read() - else: - return f.read(size) + return f.read(size) finally: f.close() @@ -1178,7 +1323,7 @@ def WriteFile(file_name, fn=None, data=None, mtime/atime of the file. If the function doesn't raise an exception, it has succeeded and the - target file has the new contents. If the file has raised an + target file has the new contents. If the function has raised an exception, an existing target file should be unmodified and the temporary file should be removed. @@ -1187,7 +1332,7 @@ def WriteFile(file_name, fn=None, data=None, @type fn: callable @param fn: content writing function, called with file descriptor as parameter - @type data: sr + @type data: str @param data: contents of the file @type mode: int @param mode: file mode @@ -1210,7 +1355,7 @@ def WriteFile(file_name, fn=None, data=None, @return: None if the 'close' parameter evaluates to True, otherwise the file descriptor - @raise errors.ProgrammerError: if an of the arguments are not valid + @raise errors.ProgrammerError: if any of the arguments are not valid """ if not os.path.isabs(file_name): @@ -1229,6 +1374,7 @@ def WriteFile(file_name, fn=None, data=None, dir_name, base_name = os.path.split(file_name) fd, new_name = tempfile.mkstemp('.new', base_name, dir_name) + do_remove = True # here we need to make sure we remove the temp file, if any error # leaves it in place try: @@ -1249,13 +1395,15 @@ def WriteFile(file_name, fn=None, data=None, os.utime(new_name, (atime, mtime)) if not dry_run: os.rename(new_name, file_name) + do_remove = False finally: if close: os.close(fd) result = None else: result = fd - RemoveFile(new_name) + if do_remove: + RemoveFile(new_name) return result @@ -1288,16 +1436,16 @@ def FirstFree(seq, base=0): return None -def all(seq, pred=bool): +def all(seq, pred=bool): # pylint: disable-msg=W0622 "Returns True if pred(x) is True for every element in the iterable" - for elem in itertools.ifilterfalse(pred, seq): + for _ in itertools.ifilterfalse(pred, seq): return False return True -def any(seq, pred=bool): +def any(seq, pred=bool): # pylint: disable-msg=W0622 "Returns True if pred(x) is True for at least one element in the iterable" - for elem in itertools.ifilter(pred, seq): + for _ in itertools.ifilter(pred, seq): return True return 
False @@ -1308,7 +1456,7 @@ def UniqueSequence(seq): Element order is preserved. @type seq: sequence - @param seq: the sequence with the source elementes + @param seq: the sequence with the source elements @rtype: list @return: list of unique elements from seq @@ -1317,20 +1465,26 @@ def UniqueSequence(seq): return [i for i in seq if i not in seen and not seen.add(i)] -def IsValidMac(mac): - """Predicate to check if a MAC address is valid. +def NormalizeAndValidateMac(mac): + """Normalizes and check if a MAC address is valid. - Checks wether the supplied MAC address is formally correct, only - accepts colon separated format. + Checks whether the supplied MAC address is formally correct, only + accepts colon separated format. Normalize it to all lower. @type mac: str @param mac: the MAC to be validated - @rtype: boolean - @return: True is the MAC seems valid + @rtype: str + @return: returns the normalized and validated MAC. + + @raise errors.OpPrereqError: If the MAC isn't valid """ - mac_check = re.compile("^([0-9a-f]{2}(:|$)){6}$") - return mac_check.match(mac) is not None + mac_check = re.compile("^([0-9a-f]{2}(:|$)){6}$", re.I) + if not mac_check.match(mac): + raise errors.OpPrereqError("Invalid MAC address specified: %s" % + mac, errors.ECODE_INVAL) + + return mac.lower() def TestDelay(duration): @@ -1343,9 +1497,9 @@ def TestDelay(duration): """ if duration < 0: - return False + return False, "Invalid sleep duration" time.sleep(duration) - return True + return True, None def _CloseFDNoErr(fd, retries=5): @@ -1409,9 +1563,11 @@ def Daemonize(logfile): @type logfile: str @param logfile: the logfile to which we should redirect stdout/stderr @rtype: int - @returns: the value zero + @return: the value zero """ + # pylint: disable-msg=W0212 + # yes, we really want os._exit UMASK = 077 WORKDIR = "/" @@ -1454,6 +1610,19 @@ def DaemonPidFileName(name): return os.path.join(constants.RUN_GANETI_DIR, "%s.pid" % name) +def EnsureDaemon(name): + """Check for and start daemon if not alive. + + """ + result = RunCmd([constants.DAEMON_UTIL, "check-and-start", name]) + if result.failed: + logging.error("Can't start daemon '%s', failure %s, output: %s", + name, result.fail_reason, result.output) + return False + + return True + + def WritePidFile(name): """Write the current process pidfile. 
@@ -1482,12 +1651,11 @@ def RemovePidFile(name): @param name: the daemon name used to derive the pidfile name """ - pid = os.getpid() pidfilename = DaemonPidFileName(name) # TODO: we could check here that the file contains our pid try: RemoveFile(pidfilename) - except: + except: # pylint: disable-msg=W0702 pass @@ -1524,24 +1692,31 @@ def KillProcess(pid, signal_=signal.SIGTERM, timeout=30, if not IsProcessAlive(pid): return + _helper(pid, signal_, waitpid) + if timeout <= 0: return - # Wait up to $timeout seconds - end = time.time() + timeout - wait = 0.01 - while time.time() < end and IsProcessAlive(pid): + def _CheckProcess(): + if not IsProcessAlive(pid): + return + try: (result_pid, _) = os.waitpid(pid, os.WNOHANG) - if result_pid > 0: - break except OSError: - pass - time.sleep(wait) - # Make wait time longer for next try - if wait < 0.1: - wait *= 1.5 + raise RetryAgain() + + if result_pid > 0: + return + + raise RetryAgain() + + try: + # Wait up to $timeout seconds + Retry(_CheckProcess, (0.01, 1.5, 0.1), timeout) + except RetryTimeout: + pass if IsProcessAlive(pid): # Kill process if it's still alive @@ -1566,9 +1741,17 @@ def FindFile(name, search_path, test=os.path.exists): @return: full path to the object if found, None otherwise """ + # validate the filename mask + if constants.EXT_PLUGIN_MASK.match(name) is None: + logging.critical("Invalid value passed for external script name: '%s'", + name) + return None + for dir_name in search_path: item_name = os.path.sep.join([dir_name, name]) - if test(item_name): + # check the user test and that we're indeed resolving to the given + # basename + if test(item_name) and os.path.basename(item_name) == name: return item_name return None @@ -1634,47 +1817,68 @@ def MergeTime(timetuple): return float(seconds) + (float(microseconds) * 0.000001) -def GetNodeDaemonPort(): - """Get the node daemon port for this cluster. +def GetDaemonPort(daemon_name): + """Get the daemon port for this cluster. Note that this routine does not read a ganeti-specific file, but instead uses C{socket.getservbyname} to allow pre-customization of this parameter outside of Ganeti. + @type daemon_name: string + @param daemon_name: daemon name (in constants.DAEMONS_PORTS) @rtype: int """ + if daemon_name not in constants.DAEMONS_PORTS: + raise errors.ProgrammerError("Unknown daemon: %s" % daemon_name) + + (proto, default_port) = constants.DAEMONS_PORTS[daemon_name] try: - port = socket.getservbyname("ganeti-noded", "tcp") + port = socket.getservbyname(daemon_name, proto) except socket.error: - port = constants.DEFAULT_NODED_PORT + port = default_port return port -def SetupLogging(logfile, debug=False, stderr_logging=False, program=""): +def SetupLogging(logfile, debug=0, stderr_logging=False, program="", + multithreaded=False, syslog=constants.SYSLOG_USAGE): """Configures the logging module. 
@type logfile: str @param logfile: the filename to which we should log - @type debug: boolean - @param debug: whether to enable debug messages too or + @type debug: integer + @param debug: if greater than zero, enable debug messages, otherwise only those at C{INFO} and above level @type stderr_logging: boolean @param stderr_logging: whether we should also log to the standard error @type program: str @param program: the name under which we should log messages + @type multithreaded: boolean + @param multithreaded: if True, will add the thread name to the log file + @type syslog: string + @param syslog: one of 'no', 'yes', 'only': + - if no, syslog is not used + - if yes, syslog is used (in addition to file-logging) + - if only, only syslog is used @raise EnvironmentError: if we can't open the log file and - stderr logging is disabled + syslog/stderr logging is disabled """ - fmt = "%(asctime)s: " + program + " " + fmt = "%(asctime)s: " + program + " pid=%(process)d" + sft = program + "[%(process)d]:" + if multithreaded: + fmt += "/%(threadName)s" + sft += " (%(threadName)s)" if debug: - fmt += ("pid=%(process)d/%(threadName)s %(levelname)s" - " %(module)s:%(lineno)s %(message)s") - else: - fmt += "pid=%(process)d %(levelname)s %(message)s" + fmt += " %(module)s:%(lineno)s" + # no debug info for syslog loggers + fmt += " %(levelname)s %(message)s" + # yes, we do want the textual level, as remote syslog will probably + # lose the error level, and it's easier to grep for it + sft += " %(levelname)s %(message)s" formatter = logging.Formatter(fmt) + sys_fmt = logging.Formatter(sft) root_logger = logging.getLogger("") root_logger.setLevel(logging.NOTSET) @@ -1693,24 +1897,43 @@ def SetupLogging(logfile, debug=False, stderr_logging=False, program=""): stderr_handler.setLevel(logging.CRITICAL) root_logger.addHandler(stderr_handler) - # this can fail, if the logging directories are not setup or we have - # a permisssion problem; in this case, it's best to log but ignore - # the error if stderr_logging is True, and if false we re-raise the - # exception since otherwise we could run but without any logs at all - try: - logfile_handler = logging.FileHandler(logfile) - logfile_handler.setFormatter(formatter) - if debug: - logfile_handler.setLevel(logging.DEBUG) - else: - logfile_handler.setLevel(logging.INFO) - root_logger.addHandler(logfile_handler) - except EnvironmentError, err: - if stderr_logging: - logging.exception("Failed to enable logging to file '%s'", logfile) - else: - # we need to re-raise the exception - raise + if syslog in (constants.SYSLOG_YES, constants.SYSLOG_ONLY): + facility = logging.handlers.SysLogHandler.LOG_DAEMON + syslog_handler = logging.handlers.SysLogHandler(constants.SYSLOG_SOCKET, + facility) + syslog_handler.setFormatter(sys_fmt) + # Never enable debug over syslog + syslog_handler.setLevel(logging.INFO) + root_logger.addHandler(syslog_handler) + + if syslog != constants.SYSLOG_ONLY: + # this can fail, if the logging directories are not setup or we have + # a permisssion problem; in this case, it's best to log but ignore + # the error if stderr_logging is True, and if false we re-raise the + # exception since otherwise we could run but without any logs at all + try: + logfile_handler = logging.FileHandler(logfile) + logfile_handler.setFormatter(formatter) + if debug: + logfile_handler.setLevel(logging.DEBUG) + else: + logfile_handler.setLevel(logging.INFO) + root_logger.addHandler(logfile_handler) + except EnvironmentError: + if stderr_logging or syslog == 
constants.SYSLOG_YES: + logging.exception("Failed to enable logging to file '%s'", logfile) + else: + # we need to re-raise the exception + raise + + +def IsNormAbsPath(path): + """Check whether a path is absolute and also normalized + + This avoids things like /dir/../../other/path to be valid. + + """ + return os.path.normpath(path) == path and os.path.isabs(path) def TailFile(fname, lines=20): @@ -1739,6 +1962,189 @@ def TailFile(fname, lines=20): return rows[-lines:] +def SafeEncode(text): + """Return a 'safe' version of a source string. + + This function mangles the input string and returns a version that + should be safe to display/encode as ASCII. To this end, we first + convert it to ASCII using the 'backslashreplace' encoding which + should get rid of any non-ASCII chars, and then we process it + through a loop copied from the string repr sources in the python; we + don't use string_escape anymore since that escape single quotes and + backslashes too, and that is too much; and that escaping is not + stable, i.e. string_escape(string_escape(x)) != string_escape(x). + + @type text: str or unicode + @param text: input data + @rtype: str + @return: a safe version of text + + """ + if isinstance(text, unicode): + # only if unicode; if str already, we handle it below + text = text.encode('ascii', 'backslashreplace') + resu = "" + for char in text: + c = ord(char) + if char == '\t': + resu += r'\t' + elif char == '\n': + resu += r'\n' + elif char == '\r': + resu += r'\'r' + elif c < 32 or c >= 127: # non-printable + resu += "\\x%02x" % (c & 0xff) + else: + resu += char + return resu + + +def UnescapeAndSplit(text, sep=","): + """Split and unescape a string based on a given separator. + + This function splits a string based on a separator where the + separator itself can be escape in order to be an element of the + elements. The escaping rules are (assuming coma being the + separator): + - a plain , separates the elements + - a sequence \\\\, (double backslash plus comma) is handled as a + backslash plus a separator comma + - a sequence \, (backslash plus comma) is handled as a + non-separator comma + + @type text: string + @param text: the string to split + @type sep: string + @param text: the separator + @rtype: string + @return: a list of strings + + """ + # we split the list by sep (with no escaping at this stage) + slist = text.split(sep) + # next, we revisit the elements and if any of them ended with an odd + # number of backslashes, then we join it with the next + rlist = [] + while slist: + e1 = slist.pop(0) + if e1.endswith("\\"): + num_b = len(e1) - len(e1.rstrip("\\")) + if num_b % 2 == 1: + e2 = slist.pop(0) + # here the backslashes remain (all), and will be reduced in + # the next step + rlist.append(e1 + sep + e2) + continue + rlist.append(e1) + # finally, replace backslash-something with something + rlist = [re.sub(r"\\(.)", r"\1", v) for v in rlist] + return rlist + + +def CommaJoin(names): + """Nicely join a set of identifiers. + + @param names: set, list or tuple + @return: a string with the formatted results + + """ + return ", ".join([str(val) for val in names]) + + +def BytesToMebibyte(value): + """Converts bytes to mebibytes. + + @type value: int + @param value: Value in bytes + @rtype: int + @return: Value in mebibytes + + """ + return int(round(value / (1024.0 * 1024.0), 0)) + + +def CalculateDirectorySize(path): + """Calculates the size of a directory recursively. 
+ + @type path: string + @param path: Path to directory + @rtype: int + @return: Size in mebibytes + + """ + size = 0 + + for (curpath, _, files) in os.walk(path): + for filename in files: + st = os.lstat(os.path.join(curpath, filename)) + size += st.st_size + + return BytesToMebibyte(size) + + +def GetFilesystemStats(path): + """Returns the total and free space on a filesystem. + + @type path: string + @param path: Path on filesystem to be examined + @rtype: int + @return: tuple of (Total space, Free space) in mebibytes + + """ + st = os.statvfs(path) + + fsize = BytesToMebibyte(st.f_bavail * st.f_frsize) + tsize = BytesToMebibyte(st.f_blocks * st.f_frsize) + return (tsize, fsize) + + +def RunInSeparateProcess(fn): + """Runs a function in a separate process. + + Note: Only boolean return values are supported. + + @type fn: callable + @param fn: Function to be called + @rtype: tuple of (int/None, int/None) + @return: Exit code and signal number + + """ + pid = os.fork() + if pid == 0: + # Child process + try: + # In case the function uses temporary files + ResetTempfileModule() + + # Call function + result = int(bool(fn())) + assert result in (0, 1) + except: # pylint: disable-msg=W0702 + logging.exception("Error while calling function in separate process") + # 0 and 1 are reserved for the return value + result = 33 + + os._exit(result) # pylint: disable-msg=W0212 + + # Parent process + + # Avoid zombies and check exit code + (_, status) = os.waitpid(pid, 0) + + if os.WIFSIGNALED(status): + exitcode = None + signum = os.WTERMSIG(status) + else: + exitcode = os.WEXITSTATUS(status) + signum = None + + if not (exitcode in (0, 1) and signum is None): + raise errors.GenericError("Child program failed (code=%s, signal=%s)" % + (exitcode, signum)) + + return bool(exitcode) + + def LockedMethod(fn): """Synchronized object access decorator. @@ -1751,6 +2157,7 @@ def LockedMethod(fn): logging.debug(*args, **kwargs) def wrapper(self, *args, **kwargs): + # pylint: disable-msg=W0212 assert hasattr(self, '_lock') lock = self._lock _LockDebug("Waiting for %s", lock) @@ -1781,6 +2188,202 @@ def LockFile(fd): raise +def FormatTime(val): + """Formats a time value. + + @type val: float or None + @param val: the timestamp as returned by time.time() + @return: a string value or N/A if we don't have a valid timestamp + + """ + if val is None or not isinstance(val, (int, float)): + return "N/A" + # these two codes works on Linux, but they are not guaranteed on all + # platforms + return time.strftime("%F %T", time.localtime(val)) + + +def ReadWatcherPauseFile(filename, now=None, remove_after=3600): + """Reads the watcher pause file. 
+ + @type filename: string + @param filename: Path to watcher pause file + @type now: None, float or int + @param now: Current time as Unix timestamp + @type remove_after: int + @param remove_after: Remove watcher pause file after specified amount of + seconds past the pause end time + + """ + if now is None: + now = time.time() + + try: + value = ReadFile(filename) + except IOError, err: + if err.errno != errno.ENOENT: + raise + value = None + + if value is not None: + try: + value = int(value) + except ValueError: + logging.warning(("Watcher pause file (%s) contains invalid value," + " removing it"), filename) + RemoveFile(filename) + value = None + + if value is not None: + # Remove file if it's outdated + if now > (value + remove_after): + RemoveFile(filename) + value = None + + elif now > value: + value = None + + return value + + +class RetryTimeout(Exception): + """Retry loop timed out. + + """ + + +class RetryAgain(Exception): + """Retry again. + + """ + + +class _RetryDelayCalculator(object): + """Calculator for increasing delays. + + """ + __slots__ = [ + "_factor", + "_limit", + "_next", + "_start", + ] + + def __init__(self, start, factor, limit): + """Initializes this class. + + @type start: float + @param start: Initial delay + @type factor: float + @param factor: Factor for delay increase + @type limit: float or None + @param limit: Upper limit for delay or None for no limit + + """ + assert start > 0.0 + assert factor >= 1.0 + assert limit is None or limit >= 0.0 + + self._start = start + self._factor = factor + self._limit = limit + + self._next = start + + def __call__(self): + """Returns current delay and calculates the next one. + + """ + current = self._next + + # Update for next run + if self._limit is None or self._next < self._limit: + self._next = min(self._limit, self._next * self._factor) + + return current + + +#: Special delay to specify whole remaining timeout +RETRY_REMAINING_TIME = object() + + +def Retry(fn, delay, timeout, args=None, wait_fn=time.sleep, + _time_fn=time.time): + """Call a function repeatedly until it succeeds. + + The function C{fn} is called repeatedly until it doesn't throw L{RetryAgain} + anymore. Between calls a delay, specified by C{delay}, is inserted. After a + total of C{timeout} seconds, this function throws L{RetryTimeout}. 
+ + C{delay} can be one of the following: + - callable returning the delay length as a float + - Tuple of (start, factor, limit) + - L{RETRY_REMAINING_TIME} to sleep until the timeout expires (this is + useful when overriding L{wait_fn} to wait for an external event) + - A static delay as a number (int or float) + + @type fn: callable + @param fn: Function to be called + @param delay: Either a callable (returning the delay), a tuple of (start, + factor, limit) (see L{_RetryDelayCalculator}), + L{RETRY_REMAINING_TIME} or a number (int or float) + @type timeout: float + @param timeout: Total timeout + @type wait_fn: callable + @param wait_fn: Waiting function + @return: Return value of function + + """ + assert callable(fn) + assert callable(wait_fn) + assert callable(_time_fn) + + if args is None: + args = [] + + end_time = _time_fn() + timeout + + if callable(delay): + # External function to calculate delay + calc_delay = delay + + elif isinstance(delay, (tuple, list)): + # Increasing delay with optional upper boundary + (start, factor, limit) = delay + calc_delay = _RetryDelayCalculator(start, factor, limit) + + elif delay is RETRY_REMAINING_TIME: + # Always use the remaining time + calc_delay = None + + else: + # Static delay + calc_delay = lambda: delay + + assert calc_delay is None or callable(calc_delay) + + while True: + try: + # pylint: disable-msg=W0142 + return fn(*args) + except RetryAgain: + pass + + remaining_time = end_time - _time_fn() + + if remaining_time < 0.0: + raise RetryTimeout() + + assert remaining_time >= 0.0 + + if calc_delay is None: + wait_fn(remaining_time) + else: + current_delay = calc_delay() + if current_delay > 0.0: + wait_fn(current_delay) + + class FileLock(object): """Utility class for file locks. @@ -1804,7 +2407,7 @@ class FileLock(object): """Close the file and release the lock. """ - if self.fd: + if hasattr(self, "fd") and self.fd: self.fd.close() self.fd = None @@ -1835,6 +2438,8 @@ class FileLock(object): flag |= fcntl.LOCK_NB timeout_end = None + # TODO: Convert to utils.Retry + retry = True while retry: try: @@ -1900,6 +2505,43 @@ class FileLock(object): "Failed to unlock %s" % self.filename) +def SignalHandled(signums): + """Signal Handled decoration. + + This special decorator installs a signal handler and then calls the target + function. The function must accept a 'signal_handlers' keyword argument, + which will contain a dict indexed by signal number, with SignalHandler + objects as values. + + The decorator can be safely stacked with iself, to handle multiple signals + with different handlers. + + @type signums: list + @param signums: signals to intercept + + """ + def wrap(fn): + def sig_function(*args, **kwargs): + assert 'signal_handlers' not in kwargs or \ + kwargs['signal_handlers'] is None or \ + isinstance(kwargs['signal_handlers'], dict), \ + "Wrong signal_handlers parameter in original function call" + if 'signal_handlers' in kwargs and kwargs['signal_handlers'] is not None: + signal_handlers = kwargs['signal_handlers'] + else: + signal_handlers = {} + kwargs['signal_handlers'] = signal_handlers + sighandler = SignalHandler(signums) + try: + for sig in signums: + signal_handlers[sig] = sighandler + return fn(*args, **kwargs) + finally: + sighandler.Reset() + return sig_function + return wrap + + class SignalHandler(object): """Generic signal handler class. 
@@ -1921,11 +2563,7 @@ class SignalHandler(object): @param signum: Single signal number or set of signal numbers """ - if isinstance(signum, (int, long)): - self.signum = set([signum]) - else: - self.signum = set(signum) - + self.signum = set(signum) self.called = False self._previous = {} @@ -1968,7 +2606,8 @@ class SignalHandler(object): """ self.called = False - def _HandleSignal(self, signum, frame): + # we don't care about arguments, but we leave them named for the future + def _HandleSignal(self, signum, frame): # pylint: disable-msg=W0613 """Actual signal handling function. """ @@ -2000,12 +2639,12 @@ class FieldSet(object): @type field: str @param field: the string to match - @return: either False or a regular expression match object + @return: either None or a regular expression match object """ for m in itertools.ifilter(None, (val.match(field) for val in self.items)): return m - return False + return None def NonMatching(self, items): """Returns the list of fields not matching the current set
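
Usage sketch (illustrative, not part of the patch above): the new Retry/RetryAgain/RetryTimeout helpers can be driven with the (start, factor, limit) delay tuple documented in Retry, the same form KillProcess now uses. This assumes the patched module is importable as ganeti.utils; the file path below is a made-up example.

  import os
  from ganeti import utils

  def _CheckFile(path):
    # Raising RetryAgain tells Retry to sleep and call us again later
    if not os.path.exists(path):
      raise utils.RetryAgain()

  try:
    # delay starts at 0.1s, grows by 1.5x per attempt, is capped at 1.0s;
    # Retry raises RetryTimeout once 30 seconds have passed in total
    utils.Retry(_CheckFile, (0.1, 1.5, 1.0), 30.0,
                args=["/var/run/example.sock"])
  except utils.RetryTimeout:
    pass  # the file never appeared within the timeout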
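
A second sketch, for the new RunParts helper (again illustrative and not from the patch): the directory name is invented, and the RunResult attributes used (failed, fail_reason) are the ones the patch itself relies on in EnsureDaemon.

  from ganeti import constants
  from ganeti import utils

  # Each entry is (name, status, payload): payload is None for skipped files,
  # an error string for RUNPARTS_ERR and a RunResult for RUNPARTS_RUN.
  for (name, status, payload) in utils.RunParts("/etc/example.d"):
    if status == constants.RUNPARTS_ERR:
      print "%s could not be started: %s" % (name, payload)
    elif status == constants.RUNPARTS_RUN and payload.failed:
      print "%s failed: %s" % (name, payload.fail_reason)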