import os.path
import copy
import time
+import logging
from cStringIO import StringIO
from ganeti import utils
-from ganeti import logger
from ganeti import errors
from ganeti import constants
from ganeti import opcodes
from ganeti import luxi
from ganeti import ssconf
+from ganeti import rpc
-from optparse import (OptionParser, make_option, TitledHelpFormatter,
+from optparse import (OptionParser, TitledHelpFormatter,
Option, OptionValueError)
-__all__ = ["DEBUG_OPT", "NOHDR_OPT", "SEP_OPT", "GenericMain",
- "SubmitOpCode", "GetClient",
- "cli_option", "GenerateTable", "AskUser",
- "ARGS_NONE", "ARGS_FIXED", "ARGS_ATLEAST", "ARGS_ANY", "ARGS_ONE",
- "USEUNITS_OPT", "FIELDS_OPT", "FORCE_OPT", "SUBMIT_OPT",
- "ListTags", "AddTags", "RemoveTags", "TAG_SRC_OPT",
- "FormatError", "SplitNodeOption", "SubmitOrSend",
- "JobSubmittedException",
- ]
+
+__all__ = [
+ # Command line options
+ "BACKEND_OPT",
+ "CONFIRM_OPT",
+ "DEBUG_OPT",
+ "DEBUG_SIMERR_OPT",
+ "DISK_TEMPLATE_OPT",
+ "FIELDS_OPT",
+ "FILESTORE_DIR_OPT",
+ "FILESTORE_DRIVER_OPT",
+ "HVLIST_OPT",
+ "HVOPTS_OPT",
+ "HYPERVISOR_OPT",
+ "IALLOCATOR_OPT",
+ "FORCE_OPT",
+ "NOHDR_OPT",
+ "NOIPCHECK_OPT",
+ "NONICS_OPT",
+ "NWSYNC_OPT",
+ "OS_OPT",
+ "SEP_OPT",
+ "SUBMIT_OPT",
+ "SYNC_OPT",
+ "TAG_SRC_OPT",
+ "USEUNITS_OPT",
+ "VERBOSE_OPT",
+ # Generic functions for CLI programs
+ "GenericMain",
+ "GetClient",
+ "GetOnlineNodes",
+ "JobExecutor",
+ "JobSubmittedException",
+ "ParseTimespec",
+ "SubmitOpCode",
+ "SubmitOrSend",
+ "UsesRPC",
+ # Formatting functions
+ "ToStderr", "ToStdout",
+ "FormatError",
+ "GenerateTable",
+ "AskUser",
+ "FormatTimestamp",
+ # Tags functions
+ "ListTags",
+ "AddTags",
+ "RemoveTags",
+ # command line options support infrastructure
+ "ARGS_MANY_INSTANCES",
+ "ARGS_MANY_NODES",
+ "ARGS_NONE",
+ "ARGS_ONE_INSTANCE",
+ "ARGS_ONE_NODE",
+ "ArgChoice",
+ "ArgCommand",
+ "ArgFile",
+ "ArgHost",
+ "ArgInstance",
+ "ArgJobId",
+ "ArgNode",
+ "ArgSuggest",
+ "ArgUnknown",
+ "OPT_COMPL_INST_ADD_NODES",
+ "OPT_COMPL_MANY_NODES",
+ "OPT_COMPL_ONE_IALLOCATOR",
+ "OPT_COMPL_ONE_INSTANCE",
+ "OPT_COMPL_ONE_NODE",
+ "OPT_COMPL_ONE_OS",
+ "cli_option",
+ "SplitNodeOption",
+ ]
+
+NO_PREFIX = "no_"
+UN_PREFIX = "-"
+
+
+class _Argument:
+  """Base class describing one positional command line argument.
+
+  The min/max attributes bound how many values the argument accepts;
+  max=None means an unlimited number of values.
+
+  """
+  def __init__(self, min=0, max=None):
+    # 'min'/'max' intentionally shadow the builtins so the keyword
+    # names read naturally at call sites, e.g. ArgNode(min=1, max=1)
+    self.min = min
+    self.max = max
+
+  def __repr__(self):
+    return ("<%s min=%s max=%s>" %
+            (self.__class__.__name__, self.min, self.max))
+
+
+class ArgSuggest(_Argument):
+  """Suggesting argument.
+
+  Value can be any of the ones passed to the constructor; the choices
+  are only suggestions (presumably for shell completion -- TODO
+  confirm against callers), not enforced at parse time.
+
+  """
+  def __init__(self, min=0, max=None, choices=None):
+    _Argument.__init__(self, min=min, max=max)
+    # list of suggested values, or None for no suggestions
+    self.choices = choices
+
+  def __repr__(self):
+    return ("<%s min=%s max=%s choices=%r>" %
+            (self.__class__.__name__, self.min, self.max, self.choices))
+
+
+class ArgChoice(ArgSuggest):
+ """Choice argument.
+
+ Value can be any of the ones passed to the constructor. Like L{ArgSuggest},
+ but value must be one of the choices.
+
+ """
+
+
+class ArgUnknown(_Argument):
+ """Unknown argument to program (e.g. determined at runtime).
+
+ """
+
+
+class ArgInstance(_Argument):
+ """Instances argument.
+
+ """
+
+
+class ArgNode(_Argument):
+  """Node argument.
+
+  """
+
+
+class ArgJobId(_Argument):
+  """Job ID argument.
+
+  """
+
+
+class ArgFile(_Argument):
+ """File path argument.
+
+ """
+
+
+class ArgCommand(_Argument):
+ """Command argument.
+
+ """
+
+
+class ArgHost(_Argument):
+ """Host argument.
+
+ """
+
+
+# Commonly used argument definitions: no arguments, any number of
+# instances/nodes, or exactly one instance/node
+ARGS_NONE = []
+ARGS_MANY_INSTANCES = [ArgInstance()]
+ARGS_MANY_NODES = [ArgNode()]
+ARGS_ONE_INSTANCE = [ArgInstance(min=1, max=1)]
+ARGS_ONE_NODE = [ArgNode(min=1, max=1)]
+
def _ExtractTagsObject(opts, args):
result = list(result)
result.sort()
for tag in result:
- print tag
+ ToStdout(tag)
def AddTags(opts, args):
SubmitOpCode(op)
-DEBUG_OPT = make_option("-d", "--debug", default=False,
- action="store_true",
- help="Turn debugging on")
+def check_unit(option, opt, value):
+ """OptParsers custom converter for units.
-NOHDR_OPT = make_option("--no-headers", default=False,
- action="store_true", dest="no_headers",
- help="Don't display column headers")
+ """
+ try:
+ return utils.ParseUnit(value)
+ except errors.UnitParseError, err:
+ raise OptionValueError("option %s: %s" % (opt, err))
-SEP_OPT = make_option("--separator", default=None,
- action="store", dest="separator",
- help="Separator between output fields"
- " (defaults to one space)")
-USEUNITS_OPT = make_option("--human-readable", default=False,
- action="store_true", dest="human_readable",
- help="Print sizes in human readable format")
+def _SplitKeyVal(opt, data):
+ """Convert a KeyVal string into a dict.
-FIELDS_OPT = make_option("-o", "--output", dest="output", action="store",
- type="string", help="Comma separated list of"
- " output fields",
- metavar="FIELDS")
+ This function will convert a key=val[,...] string into a dict. Empty
+ values will be converted specially: keys which have the prefix 'no_'
+ will have the value=False and the prefix stripped, the others will
+ have value=True.
-FORCE_OPT = make_option("-f", "--force", dest="force", action="store_true",
- default=False, help="Force the operation")
+ @type opt: string
+ @param opt: a string holding the option name for which we process the
+ data, used in building error messages
+ @type data: string
+ @param data: a string of the format key=val,key=val,...
+ @rtype: dict
+ @return: {key=val, key=val}
+ @raises errors.ParameterError: if there are duplicate keys
-TAG_SRC_OPT = make_option("--from", dest="tags_source",
- default=None, help="File with tag names")
+ """
+ kv_dict = {}
+ if data:
+ for elem in data.split(","):
+ if "=" in elem:
+ key, val = elem.split("=", 1)
+ else:
+ if elem.startswith(NO_PREFIX):
+ key, val = elem[len(NO_PREFIX):], False
+ elif elem.startswith(UN_PREFIX):
+ key, val = elem[len(UN_PREFIX):], None
+ else:
+ key, val = elem, True
+ if key in kv_dict:
+ raise errors.ParameterError("Duplicate key '%s' in option %s" %
+ (key, opt))
+ kv_dict[key] = val
+ return kv_dict
-SUBMIT_OPT = make_option("--submit", dest="submit_only",
- default=False, action="store_true",
- help="Submit the job and return the job ID, but"
- " don't wait for the job to finish")
+def check_ident_key_val(option, opt, value):
+ """Custom parser for ident:key=val,key=val options.
-def ARGS_FIXED(val):
- """Macro-like function denoting a fixed number of arguments"""
- return -val
+ This will store the parsed values as a tuple (ident, {key: val}). As such,
+ multiple uses of this option via action=append is possible.
+ """
+ if ":" not in value:
+ ident, rest = value, ''
+ else:
+ ident, rest = value.split(":", 1)
+
+ if ident.startswith(NO_PREFIX):
+ if rest:
+ msg = "Cannot pass options when removing parameter groups: %s" % value
+ raise errors.ParameterError(msg)
+ retval = (ident[len(NO_PREFIX):], False)
+ elif ident.startswith(UN_PREFIX):
+ if rest:
+ msg = "Cannot pass options when removing parameter groups: %s" % value
+ raise errors.ParameterError(msg)
+ retval = (ident[len(UN_PREFIX):], None)
+ else:
+ kv_dict = _SplitKeyVal(opt, rest)
+ retval = (ident, kv_dict)
+ return retval
-def ARGS_ATLEAST(val):
- """Macro-like function denoting a minimum number of arguments"""
- return val
+def check_key_val(option, opt, value):
+ """Custom parser class for key=val,key=val options.
-ARGS_NONE = None
-ARGS_ONE = ARGS_FIXED(1)
-ARGS_ANY = ARGS_ATLEAST(0)
+ This will store the parsed values as a dict {key: val}.
+ """
+ return _SplitKeyVal(opt, value)
-def check_unit(option, opt, value):
- """OptParsers custom converter for units.
- """
- try:
- return utils.ParseUnit(value)
- except errors.UnitParseError, err:
- raise OptionValueError("option %s: %s" % (opt, err))
+# completion_suggestion is normally a list. Using numeric values not evaluating
+# to False for dynamic completion.
+(OPT_COMPL_MANY_NODES,
+ OPT_COMPL_ONE_NODE,
+ OPT_COMPL_ONE_INSTANCE,
+ OPT_COMPL_ONE_OS,
+ OPT_COMPL_ONE_IALLOCATOR,
+ OPT_COMPL_INST_ADD_NODES) = range(100, 106)
+
+# all known dynamic-completion markers, presumably for validating a
+# completion_suggest value -- TODO confirm against callers
+OPT_COMPL_ALL = frozenset([
+  OPT_COMPL_MANY_NODES,
+  OPT_COMPL_ONE_NODE,
+  OPT_COMPL_ONE_INSTANCE,
+  OPT_COMPL_ONE_OS,
+  OPT_COMPL_ONE_IALLOCATOR,
+  OPT_COMPL_INST_ADD_NODES,
+  ])
class CliOption(Option):
"""Custom option class for optparse.
"""
- TYPES = Option.TYPES + ("unit",)
- TYPE_CHECKER = copy.copy(Option.TYPE_CHECKER)
+ ATTRS = Option.ATTRS + [
+ "completion_suggest",
+ ]
+ TYPES = Option.TYPES + (
+ "identkeyval",
+ "keyval",
+ "unit",
+ )
+ TYPE_CHECKER = Option.TYPE_CHECKER.copy()
+ TYPE_CHECKER["identkeyval"] = check_ident_key_val
+ TYPE_CHECKER["keyval"] = check_key_val
TYPE_CHECKER["unit"] = check_unit
cli_option = CliOption
+DEBUG_OPT = cli_option("-d", "--debug", default=False,
+ action="store_true",
+ help="Turn debugging on")
+
+NOHDR_OPT = cli_option("--no-headers", default=False,
+ action="store_true", dest="no_headers",
+ help="Don't display column headers")
+
+SEP_OPT = cli_option("--separator", default=None,
+ action="store", dest="separator",
+ help=("Separator between output fields"
+ " (defaults to one space)"))
+
+USEUNITS_OPT = cli_option("--units", default=None,
+ dest="units", choices=('h', 'm', 'g', 't'),
+ help="Specify units for output (one of hmgt)")
+
+FIELDS_OPT = cli_option("-o", "--output", dest="output", action="store",
+ type="string", metavar="FIELDS",
+ help="Comma separated list of output fields")
+
+FORCE_OPT = cli_option("-f", "--force", dest="force", action="store_true",
+ default=False, help="Force the operation")
+
+CONFIRM_OPT = cli_option("--yes", dest="confirm", action="store_true",
+ default=False, help="Do not require confirmation")
+
+TAG_SRC_OPT = cli_option("--from", dest="tags_source",
+ default=None, help="File with tag names")
+
+SUBMIT_OPT = cli_option("--submit", dest="submit_only",
+ default=False, action="store_true",
+ help=("Submit the job and return the job ID, but"
+ " don't wait for the job to finish"))
+
+SYNC_OPT = cli_option("--sync", dest="do_locking",
+ default=False, action="store_true",
+ help=("Grab locks while doing the queries"
+ " in order to ensure more consistent results"))
+
+# Not exported via __all__; appended to every command's option list in
+# _ParseArgs.  Help text fixed: "verify it it" -> "verify if it".
+_DRY_RUN_OPT = cli_option("--dry-run", default=False,
+                          action="store_true",
+                          help=("Do not execute the operation, just run the"
+                                " check steps and verify if it could be"
+                                " executed"))
+
+VERBOSE_OPT = cli_option("-v", "--verbose", default=False,
+ action="store_true",
+ help="Increase the verbosity of the operation")
+
+DEBUG_SIMERR_OPT = cli_option("--debug-simulate-errors", default=False,
+ action="store_true", dest="simulate_errors",
+ help="Debugging option that makes the operation"
+ " treat most runtime checks as failed")
+
+NWSYNC_OPT = cli_option("--no-wait-for-sync", dest="wait_for_sync",
+ default=True, action="store_false",
+ help="Don't wait for sync (DANGEROUS!)")
+
+DISK_TEMPLATE_OPT = cli_option("-t", "--disk-template", dest="disk_template",
+ help="Custom disk setup (diskless, file,"
+ " plain or drbd)",
+ default=None, metavar="TEMPL",
+ choices=list(constants.DISK_TEMPLATES))
+
+NONICS_OPT = cli_option("--no-nics", default=False, action="store_true",
+ help="Do not create any network cards for"
+ " the instance")
+
+FILESTORE_DIR_OPT = cli_option("--file-storage-dir", dest="file_storage_dir",
+ help="Relative path under default cluster-wide"
+ " file storage dir to store file-based disks",
+ default=None, metavar="<DIR>")
+
+FILESTORE_DRIVER_OPT = cli_option("--file-driver", dest="file_driver",
+ help="Driver to use for image files",
+ default="loop", metavar="<DRIVER>",
+ choices=list(constants.FILE_DRIVER))
+
+IALLOCATOR_OPT = cli_option("-I", "--iallocator", metavar="<NAME>",
+ help="Select nodes for the instance automatically"
+ " using the <NAME> iallocator plugin",
+ default=None, type="string",
+ completion_suggest=OPT_COMPL_ONE_IALLOCATOR)
+
+OS_OPT = cli_option("-o", "--os-type", dest="os", help="What OS to run",
+ metavar="<os>",
+ completion_suggest=OPT_COMPL_ONE_OS)
+
+BACKEND_OPT = cli_option("-B", "--backend-parameters", dest="beparams",
+ type="keyval", default={},
+ help="Backend parameters")
+
+HVOPTS_OPT = cli_option("-H", "--hypervisor-parameters", type="keyval",
+ default={}, dest="hvparams",
+ help="Hypervisor parameters")
+
+HYPERVISOR_OPT = cli_option("-H", "--hypervisor-parameters", dest="hypervisor",
+ help="Hypervisor and hypervisor options, in the"
+ " format hypervisor:option=value,option=value,...",
+ default=None, type="identkeyval")
+
+HVLIST_OPT = cli_option("-H", "--hypervisor-parameters", dest="hvparams",
+ help="Hypervisor and hypervisor options, in the"
+ " format hypervisor:option=value,option=value,...",
+ default=[], action="append", type="identkeyval")
+
+NOIPCHECK_OPT = cli_option("--no-ip-check", dest="ip_check", default=True,
+ action="store_false",
+ help="Don't check that the instance's IP"
+ " is alive")
+
+
+
def _ParseArgs(argv, commands, aliases):
- """Parses the command line and return the function which must be
- executed together with its arguments
+ """Parser for the command line arguments.
- Arguments:
- argv: the command line
+ This function parses the arguments and returns the function which
+ must be executed together with its (modified) arguments.
- commands: dictionary with special contents, see the design doc for
- cmdline handling
- aliases: dictionary with command aliases {'alias': 'target, ...}
+ @param argv: the command line
+ @param commands: dictionary with special contents, see the design
+ doc for cmdline handling
+ @param aliases: dictionary with command aliases {'alias': 'target, ...}
"""
if len(argv) == 0:
binary = argv[0].split("/")[-1]
if len(argv) > 1 and argv[1] == "--version":
- print "%s (ganeti) %s" % (binary, constants.RELEASE_VERSION)
+ ToStdout("%s (ganeti) %s", binary, constants.RELEASE_VERSION)
# Quit right away. That way we don't have to care about this special
# argument. optparse.py does it the same.
sys.exit(0)
# let's do a nice thing
sortedcmds = commands.keys()
sortedcmds.sort()
- print ("Usage: %(bin)s {command} [options...] [argument...]"
- "\n%(bin)s <command> --help to see details, or"
- " man %(bin)s\n" % {"bin": binary})
+
+ ToStdout("Usage: %s {command} [options...] [argument...]", binary)
+ ToStdout("%s <command> --help to see details, or man %s", binary, binary)
+ ToStdout("")
+
# compute the max line length for cmd + usage
mlen = max([len(" %s" % cmd) for cmd in commands])
mlen = min(60, mlen) # should not get here...
+
# and format a nice command list
- print "Commands:"
+ ToStdout("Commands:")
for cmd in sortedcmds:
cmdstr = " %s" % (cmd,)
help_text = commands[cmd][4]
- help_lines = textwrap.wrap(help_text, 79-3-mlen)
- print "%-*s - %s" % (mlen, cmdstr, help_lines.pop(0))
+ help_lines = textwrap.wrap(help_text, 79 - 3 - mlen)
+ ToStdout("%-*s - %s", mlen, cmdstr, help_lines.pop(0))
for line in help_lines:
- print "%-*s %s" % (mlen, "", line)
- print
+ ToStdout("%-*s %s", mlen, "", line)
+
+ ToStdout("")
+
return None, None, None
# get command, unalias it, and look it up in commands
cmd = aliases[cmd]
- func, nargs, parser_opts, usage, description = commands[cmd]
- parser = OptionParser(option_list=parser_opts,
+ func, args_def, parser_opts, usage, description = commands[cmd]
+ parser = OptionParser(option_list=parser_opts + [_DRY_RUN_OPT],
description=description,
formatter=TitledHelpFormatter(),
usage="%%prog %s %s" % (cmd, usage))
parser.disable_interspersed_args()
options, args = parser.parse_args()
- if nargs is None:
- if len(args) != 0:
- print >> sys.stderr, ("Error: Command %s expects no arguments" % cmd)
- return None, None, None
- elif nargs < 0 and len(args) != -nargs:
- print >> sys.stderr, ("Error: Command %s expects %d argument(s)" %
- (cmd, -nargs))
- return None, None, None
- elif nargs >= 0 and len(args) < nargs:
- print >> sys.stderr, ("Error: Command %s expects at least %d argument(s)" %
- (cmd, nargs))
+
+ if not _CheckArguments(cmd, args_def, args):
return None, None, None
return func, options, args
+def _CheckArguments(cmd, args_def, args):
+  """Verifies the arguments using the argument definition.
+
+  Algorithm:
+
+    1. Abort with error if values specified by user but none expected.
+
+    1. For each argument in definition
+
+      1. Keep running count of minimum number of values (min_count)
+      1. Keep running count of maximum number of values (max_count)
+      1. If it has an unlimited number of values
+
+        1. Abort with error if it's not the last argument in the definition
+
+    1. If last argument has limited number of values
+
+      1. Abort with error if number of values doesn't match or is too large
+
+    1. Abort with error if user didn't pass enough values (min_count)
+
+  @param cmd: the command name, used only for error messages
+  @param args_def: list of argument definitions (L{_Argument} instances)
+  @param args: the actual argument values passed on the command line
+  @rtype: boolean
+  @return: True if the arguments are valid, False otherwise (an error
+      message has already been printed to stderr)
+
+  """
+  if args and not args_def:
+    ToStderr("Error: Command %s expects no arguments", cmd)
+    return False
+
+  # running totals over the argument definitions; None means "not yet
+  # seen any bound"
+  min_count = None
+  max_count = None
+  check_max = None
+
+  last_idx = len(args_def) - 1
+
+  for idx, arg in enumerate(args_def):
+    if min_count is None:
+      min_count = arg.min
+    elif arg.min is not None:
+      min_count += arg.min
+
+    if max_count is None:
+      max_count = arg.max
+    elif arg.max is not None:
+      max_count += arg.max
+
+    if idx == last_idx:
+      # only check upper bounds if the last argument is itself bounded
+      check_max = (arg.max is not None)
+
+    elif arg.max is None:
+      raise errors.ProgrammerError("Only the last argument can have max=None")
+
+  if check_max:
+    # Command with exact number of arguments
+    if (min_count is not None and max_count is not None and
+        min_count == max_count and len(args) != min_count):
+      ToStderr("Error: Command %s expects %d argument(s)", cmd, min_count)
+      return False
+
+    # Command with limited number of arguments
+    if max_count is not None and len(args) > max_count:
+      ToStderr("Error: Command %s expects only %d argument(s)",
+               cmd, max_count)
+      return False
+
+  # Command with some required arguments
+  if min_count is not None and len(args) < min_count:
+    ToStderr("Error: Command %s expects at least %d argument(s)",
+             cmd, min_count)
+    return False
+
+  return True
+
+
def SplitNodeOption(value):
"""Splits the value of a --node option.
return (value, None)
+def UsesRPC(fn):
+  """Decorator that brackets a function call with RPC initialization.
+
+  rpc.Init() is called before invoking fn and rpc.Shutdown() is always
+  called afterwards, even if fn raises.
+
+  NOTE(review): the wrapper does not copy fn's name/docstring
+  (functools.wraps is not applied) -- consider adding it.
+
+  """
+  def wrapper(*args, **kwargs):
+    rpc.Init()
+    try:
+      return fn(*args, **kwargs)
+    finally:
+      rpc.Shutdown()
+  return wrapper
+
+
def AskUser(text, choices=None):
"""Ask the user a question.
- Args:
- text - the question to ask.
+ @param text: the question to ask
- choices - list with elements tuples (input_char, return_value,
- description); if not given, it will default to: [('y', True,
- 'Perform the operation'), ('n', False, 'Do no do the operation')];
- note that the '?' char is reserved for help
+ @param choices: list with elements tuples (input_char, return_value,
+ description); if not given, it will default to: [('y', True,
+ 'Perform the operation'), ('n', False, 'Do no do the operation')];
+ note that the '?' char is reserved for help
- Returns: one of the return values from the choices list; if input is
- not possible (i.e. not running with a tty, we return the last entry
- from the list
+ @return: one of the return values from the choices list; if input is
+ not possible (i.e. not running with a tty, we return the last
+ entry from the list
"""
if choices is None:
choices = [('y', True, 'Perform the operation'),
('n', False, 'Do not perform the operation')]
if not choices or not isinstance(choices, list):
- raise errors.ProgrammerError("Invalid choiches argument to AskUser")
+ raise errors.ProgrammerError("Invalid choices argument to AskUser")
for entry in choices:
if not isinstance(entry, tuple) or len(entry) < 3 or entry[0] == '?':
- raise errors.ProgrammerError("Invalid choiches element to AskUser")
+ raise errors.ProgrammerError("Invalid choices element to AskUser")
answer = choices[-1][1]
new_text = []
if callable(feedback_fn):
feedback_fn(log_entry[1:])
else:
- print "%s %s" % (time.ctime(utils.MergeTime(timestamp)), message)
+ encoded = utils.SafeEncode(message)
+ ToStdout("%s %s", time.ctime(utils.MergeTime(timestamp)), encoded)
prev_logmsg_serial = max(prev_logmsg_serial, serial)
# TODO: Handle canceled and archived jobs
- elif status in (constants.JOB_STATUS_SUCCESS, constants.JOB_STATUS_ERROR):
+ elif status in (constants.JOB_STATUS_SUCCESS,
+ constants.JOB_STATUS_ERROR,
+ constants.JOB_STATUS_CANCELING,
+ constants.JOB_STATUS_CANCELED):
break
prev_job_info = job_info
- jobs = cl.QueryJobs([job_id], ["status", "opresult"])
+ jobs = cl.QueryJobs([job_id], ["status", "opstatus", "opresult"])
if not jobs:
raise errors.JobLost("Job with id %s lost" % job_id)
- status, result = jobs[0]
+ status, opstatus, result = jobs[0]
if status == constants.JOB_STATUS_SUCCESS:
- return result[0]
+ return result
+ elif status in (constants.JOB_STATUS_CANCELING,
+ constants.JOB_STATUS_CANCELED):
+ raise errors.OpExecError("Job was canceled")
else:
+ has_ok = False
+ for idx, (status, msg) in enumerate(zip(opstatus, result)):
+ if status == constants.OP_STATUS_SUCCESS:
+ has_ok = True
+ elif status == constants.OP_STATUS_ERROR:
+ errors.MaybeRaise(msg)
+ if has_ok:
+ raise errors.OpExecError("partial failure (opcode %d): %s" %
+ (idx, msg))
+ else:
+ raise errors.OpExecError(str(msg))
+ # default failure mode
raise errors.OpExecError(result)
job_id = SendJob([op], cl)
- return PollJob(job_id, cl=cl, feedback_fn=feedback_fn)
+ op_results = PollJob(job_id, cl=cl, feedback_fn=feedback_fn)
+
+ return op_results[0]
def SubmitOrSend(op, opts, cl=None, feedback_fn=None):
whether to just send the job and print its identifier. It is used in
order to simplify the implementation of the '--submit' option.
+ It will also add the dry-run parameter from the options passed, if true.
+
"""
+ if opts and opts.dry_run:
+ op.dry_run = opts.dry_run
if opts and opts.submit_only:
job_id = SendJob([op], cl=cl)
raise JobSubmittedException(job_id)
msg = str(err)
if isinstance(err, errors.ConfigurationError):
txt = "Corrupt configuration file: %s" % msg
- logger.Error(txt)
+ logging.error(txt)
obuf.write(txt + "\n")
obuf.write("Aborting.")
retcode = 2
obuf.write("Failure: command execution error:\n%s" % msg)
elif isinstance(err, errors.TagError):
obuf.write("Failure: invalid tag(s) given:\n%s" % msg)
+ elif isinstance(err, errors.JobQueueDrainError):
+ obuf.write("Failure: the job queue is marked for drain and doesn't"
+ " accept new requests\n")
+ elif isinstance(err, errors.JobQueueFull):
+ obuf.write("Failure: the job queue is full and doesn't accept new"
+ " job submissions until old jobs are archived\n")
+ elif isinstance(err, errors.TypeEnforcementError):
+ obuf.write("Parameter Error: %s" % msg)
+ elif isinstance(err, errors.ParameterError):
+ obuf.write("Failure: unknown/wrong parameter name '%s'" % msg)
elif isinstance(err, errors.GenericError):
obuf.write("Unhandled Ganeti error: %s" % msg)
elif isinstance(err, luxi.NoMasterError):
if aliases is None:
aliases = {}
- func, options, args = _ParseArgs(sys.argv, commands, aliases)
+ try:
+ func, options, args = _ParseArgs(sys.argv, commands, aliases)
+ except errors.ParameterError, err:
+ result, err_msg = FormatError(err)
+ ToStderr(err_msg)
+ return 1
+
if func is None: # parse error
return 1
for key, val in override.iteritems():
setattr(options, key, val)
- logger.SetupLogging(constants.LOG_COMMANDS, debug=options.debug,
- stderr_logging=True, program=binary)
-
- utils.debug = options.debug
+ utils.SetupLogging(constants.LOG_COMMANDS, debug=options.debug,
+ stderr_logging=True, program=binary)
if old_cmdline:
- logger.Info("run with arguments '%s'" % old_cmdline)
+ logging.info("run with arguments '%s'", old_cmdline)
else:
- logger.Info("run with no arguments")
+ logging.info("run with no arguments")
try:
result = func(options, args)
- except (errors.GenericError, luxi.ProtocolError), err:
+ except (errors.GenericError, luxi.ProtocolError,
+ JobSubmittedException), err:
result, err_msg = FormatError(err)
- logger.ToStderr(err_msg)
+ logging.exception("Error during command processing")
+ ToStderr(err_msg)
return result
def GenerateTable(headers, fields, separator, data,
- numfields=None, unitfields=None):
+ numfields=None, unitfields=None,
+ units=None):
"""Prints a table with headers and different fields.
- Args:
- headers: Dict of header titles or None if no headers should be shown
- fields: List of fields to show
- separator: String used to separate fields or None for spaces
- data: Data to be printed
- numfields: List of fields to be aligned to right
- unitfields: List of fields to be formatted as units
+ @type headers: dict
+ @param headers: dictionary mapping field names to headers for
+ the table
+ @type fields: list
+ @param fields: the field names corresponding to each row in
+ the data field
+ @param separator: the separator to be used; if this is None,
+ the default 'smart' algorithm is used which computes optimal
+ field width, otherwise just the separator is used between
+ each field
+ @type data: list
+ @param data: a list of lists, each sublist being one row to be output
+ @type numfields: list
+ @param numfields: a list with the fields that hold numeric
+ values and thus should be right-aligned
+ @type unitfields: list
+ @param unitfields: a list with the fields that hold numeric
+ values that should be formatted with the units field
+ @type units: string or None
+ @param units: the units we should use for formatting, or None for
+ automatic choice (human-readable for non-separator usage, otherwise
+ megabytes); this is a one-letter string
"""
+ if units is None:
+ if separator:
+ units = "m"
+ else:
+ units = "h"
+
if numfields is None:
numfields = []
if unitfields is None:
unitfields = []
+ numfields = utils.FieldSet(*numfields)
+ unitfields = utils.FieldSet(*unitfields)
+
format_fields = []
for field in fields:
if headers and field not in headers:
- raise errors.ProgrammerError("Missing header description for field '%s'"
- % field)
+ # TODO: handle better unknown fields (either revert to old
+ # style of raising exception, or deal more intelligently with
+ # variable fields)
+ headers[field] = field
if separator is not None:
format_fields.append("%s")
- elif field in numfields:
+ elif numfields.Matches(field):
format_fields.append("%*s")
else:
format_fields.append("%-*s")
format = separator.replace("%", "%%").join(format_fields)
for row in data:
+ if row is None:
+ continue
for idx, val in enumerate(row):
- if fields[idx] in unitfields:
+ if unitfields.Matches(fields[idx]):
try:
val = int(val)
except ValueError:
pass
else:
- val = row[idx] = utils.FormatUnit(val)
+ val = row[idx] = utils.FormatUnit(val, units)
val = row[idx] = str(val)
if separator is None:
mlens[idx] = max(mlens[idx], len(val))
for line in data:
args = []
+ if line is None:
+ line = ['-' for _ in fields]
for idx in xrange(len(fields)):
if separator is None:
args.append(mlens[idx])
result.append(format % tuple(args))
return result
+
+
+def FormatTimestamp(ts):
+  """Formats a given timestamp.
+
+  @type ts: timestamp
+  @param ts: a timeval-type timestamp, a tuple of seconds and microseconds
+
+  @rtype: string
+  @return: a string with the formatted timestamp, or '?' if the input
+      is not a two-element tuple/list
+
+  """
+  # PEP 8: no space between 'isinstance' and its argument list
+  if not isinstance(ts, (tuple, list)) or len(ts) != 2:
+    return '?'
+  sec, usec = ts
+  # %F/%T are shorthands for %Y-%m-%d and %H:%M:%S (glibc strftime
+  # extensions -- presumably fine on the supported platforms)
+  return time.strftime("%F %T", time.localtime(sec)) + ".%06d" % usec
+
+
+def ParseTimespec(value):
+  """Parse a time specification.
+
+  The following suffixed will be recognized:
+
+    - s: seconds
+    - m: minutes
+    - h: hours
+    - d: day
+    - w: weeks
+
+  Without any suffix, the value will be taken to be in seconds.
+
+  @param value: the time specification (string or anything str() can
+      convert)
+  @rtype: int
+  @return: the number of seconds
+  @raises errors.OpPrereqError: if the specification is empty or not
+      parseable as an integer with an optional suffix
+
+  """
+  value = str(value)
+  if not value:
+    raise errors.OpPrereqError("Empty time specification passed")
+  # multipliers to convert each suffix to seconds
+  suffix_map = {
+    's': 1,
+    'm': 60,
+    'h': 3600,
+    'd': 86400,
+    'w': 604800,
+    }
+  if value[-1] not in suffix_map:
+    # no recognized suffix: plain number of seconds
+    try:
+      value = int(value)
+    except ValueError:
+      raise errors.OpPrereqError("Invalid time specification '%s'" % value)
+  else:
+    multiplier = suffix_map[value[-1]]
+    value = value[:-1]
+    if not value: # no data left after stripping the suffix
+      raise errors.OpPrereqError("Invalid time specification (only"
+                                 " suffix passed)")
+    try:
+      value = int(value) * multiplier
+    except ValueError:
+      raise errors.OpPrereqError("Invalid time specification '%s'" % value)
+  return value
+
+
+def GetOnlineNodes(nodes, cl=None, nowarn=False):
+  """Returns the names of online nodes.
+
+  This function will also print a note on stderr with the names of the
+  offline nodes that were skipped, unless nowarn is True.
+
+  @param nodes: if not empty, use only this subset of nodes (minus the
+    offline ones)
+  @param cl: if not None, luxi client to use
+  @type nowarn: boolean
+  @param nowarn: by default, this function will output a note with the
+    offline nodes that are skipped; if this parameter is True the
+    note is not displayed
+
+  """
+  if cl is None:
+    cl = GetClient()
+
+  result = cl.QueryNodes(names=nodes, fields=["name", "offline"],
+                         use_locking=False)
+  offline = [row[0] for row in result if row[1]]
+  if offline and not nowarn:
+    # pass the formatting argument separately, like all other
+    # ToStderr/ToStdout callers (also avoids re-interpreting any '%'
+    # in node names)
+    ToStderr("Note: skipping offline node(s): %s", ", ".join(offline))
+  return [row[0] for row in result if not row[1]]
+
+
+def _ToStream(stream, txt, *args):
+  """Write a message to a stream, bypassing the logging system
+
+  @type stream: file object
+  @param stream: the file to which we should write
+  @type txt: str
+  @param txt: the message (a %-format string when args are given)
+
+  """
+  if args:
+    # 'args' is already a tuple (collected via *args), so it can be
+    # used for %-formatting directly; the former 'args = tuple(args)'
+    # was a no-op
+    stream.write(txt % args)
+  else:
+    # no arguments: write verbatim, so literal '%' chars are safe
+    stream.write(txt)
+  stream.write('\n')
+  stream.flush()
+
+
+def ToStdout(txt, *args):
+  """Write a message to stdout only, bypassing the logging system
+
+  This is just a wrapper over _ToStream.
+
+  @type txt: str
+  @param txt: the message, optionally a %-format string if args
+      are passed
+
+  """
+  _ToStream(sys.stdout, txt, *args)
+
+
+def ToStderr(txt, *args):
+  """Write a message to stderr only, bypassing the logging system
+
+  This is just a wrapper over _ToStream.
+
+  @type txt: str
+  @param txt: the message, optionally a %-format string if args
+      are passed
+
+  """
+  _ToStream(sys.stderr, txt, *args)
+
+
+class JobExecutor(object):
+  """Class which manages the submission and execution of multiple jobs.
+
+  Note that instances of this class should not be reused between
+  GetResults() calls.
+
+  """
+  def __init__(self, cl=None, verbose=True):
+    # jobs recorded via QueueJob but not yet submitted, as
+    # (name, ops) tuples
+    self.queue = []
+    if cl is None:
+      cl = GetClient()
+    self.cl = cl
+    self.verbose = verbose
+    # submitted jobs, as (submit_status, job_id_or_error, name) tuples
+    self.jobs = []
+
+  def QueueJob(self, name, *ops):
+    """Record a job for later submit.
+
+    @type name: string
+    @param name: a description of the job, will be used in WaitJobSet
+
+    """
+    self.queue.append((name, ops))
+
+  def SubmitPending(self):
+    """Submit all pending jobs.
+
+    """
+    # SubmitManyJobs returns one (status, data) pair per submitted job,
+    # in the same order as the queue
+    results = self.cl.SubmitManyJobs([row[1] for row in self.queue])
+    for ((status, data), (name, _)) in zip(results, self.queue):
+      self.jobs.append((status, data, name))
+
+  def GetResults(self):
+    """Wait for and return the results of all jobs.
+
+    @rtype: list
+    @return: list of tuples (success, job results), in the same order
+        as the submitted jobs; if a job has failed, instead of the result
+        there will be the error message
+
+    """
+    if not self.jobs:
+      # caller never called SubmitPending explicitly; do it now
+      self.SubmitPending()
+    results = []
+    if self.verbose:
+      ok_jobs = [row[1] for row in self.jobs if row[0]]
+      if ok_jobs:
+        ToStdout("Submitted jobs %s", ", ".join(ok_jobs))
+    for submit_status, jid, name in self.jobs:
+      if not submit_status:
+        # submission itself failed; 'jid' holds the error message here
+        ToStderr("Failed to submit job for %s: %s", name, jid)
+        results.append((False, jid))
+        continue
+      if self.verbose:
+        ToStdout("Waiting for job %s for %s...", jid, name)
+      try:
+        job_result = PollJob(jid, cl=self.cl)
+        success = True
+      except (errors.GenericError, luxi.ProtocolError), err:
+        _, job_result = FormatError(err)
+        success = False
+        # the error message will always be shown, verbose or not
+        ToStderr("Job %s for %s has failed: %s", jid, name, job_result)
+
+      results.append((success, job_result))
+    return results
+
+  def WaitOrShow(self, wait):
+    """Wait for job results or only print the job IDs.
+
+    @type wait: boolean
+    @param wait: whether to wait or not
+
+    """
+    if wait:
+      return self.GetResults()
+    else:
+      if not self.jobs:
+        self.SubmitPending()
+      for status, result, name in self.jobs:
+        # here 'result' is the job id on success, the error otherwise
+        if status:
+          ToStdout("%s: %s", result, name)
+        else:
+          ToStderr("Failure for %s: %s", name, result)