#!/usr/bin/python
#
-# Copyright (C) 2007, 2008 Google Inc.
+# Copyright (C) 2007, 2008, 2009, 2010, 2011, 2012 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
"""Tool to upgrade the configuration file.
-This code handles only the types supported by simplejson. As an example, "set"
-is a "list". Old Pickle based configurations files are converted to JSON during
-the process.
+This code handles only the types supported by simplejson. As an
+example, 'set' is a 'list'.
"""
import os
import os.path
import sys
-import re
import optparse
-import tempfile
-import simplejson
+import logging
+from ganeti import constants
+from ganeti import serializer
from ganeti import utils
from ganeti import cli
+from ganeti import bootstrap
+from ganeti import config
+from ganeti import netutils
options = None
args = None
+#: Target major version we will upgrade to
+TARGET_MAJOR = 2
+#: Target minor version we will upgrade to
+TARGET_MINOR = 6
+
+
class Error(Exception):
  """Generic exception raised by the configuration upgrade tool."""
def SetupLogging():
  """Configures the logging module.

  Installs a single stderr handler on the root logger; the threshold is
  taken from the global C{options} (--debug wins over --verbose, which
  wins over the default of warnings only).

  """
  # Decide the verbosity threshold first, then wire up the handler
  if options.debug:
    level = logging.NOTSET
  elif options.verbose:
    level = logging.INFO
  else:
    level = logging.WARNING

  handler = logging.StreamHandler()
  handler.setFormatter(logging.Formatter("%(asctime)s: %(message)s"))
  handler.setLevel(level)

  root = logging.getLogger("")
  root.setLevel(logging.NOTSET)
  root.addHandler(handler)
def CheckHostname(path):
  """Ensures hostname matches ssconf value.

  @param path: Path to ssconf file

  """
  ssconf_master_node = utils.ReadOneLineFile(path)
  hostname = netutils.GetHostname().name

  # This tool is only safe to run on the master node itself
  if ssconf_master_node != hostname:
    logging.warning("Warning: ssconf says master node is '%s', but this"
                    " machine's name is '%s'; this tool must be run on"
                    " the master node", ssconf_master_node, hostname)
    return False

  return True
def UpgradeInstances(config_data):
  """Removes obsolete hotplug data from all instances.

  Deletes the "pci_pool" entry from each instance's "hotplug_info"
  dictionary, when such a dictionary is present and non-empty.

  @param config_data: deserialized configuration dictionary; modified
      in place

  """
  for instance in config_data["instances"].values():
    hotplug_info = instance.get("hotplug_info", None)
    if hotplug_info:
      # pop() with a default instead of the previous bare "except: pass",
      # which would also have hidden unrelated errors (e.g. TypeError)
      hotplug_info.pop("pci_pool", None)
def main():
  """Main program.

  Parses the command-line options, upgrades the configuration found in
  the data directory to version 2.6, migrates the pre-2.4 RAPI users
  file, removes the old watcher state file, rewrites the configuration
  and regenerates any missing cluster crypto, and finally (unless
  --no-verify or --dry-run) re-loads the result as a sanity check.

  """
  global options, args  # pylint: disable=W0603

  # Option parsing
  parser = optparse.OptionParser(usage="%prog [--debug|--verbose] [--force]")
  parser.add_option("--dry-run", dest="dry_run",
                    action="store_true",
                    help="Try to do the conversion, but don't write"
                         " output file")
  parser.add_option(cli.FORCE_OPT)
  parser.add_option(cli.DEBUG_OPT)
  parser.add_option(cli.VERBOSE_OPT)
  parser.add_option("--ignore-hostname", dest="ignore_hostname",
                    action="store_true", default=False,
                    help="Don't abort if hostname doesn't match")
  parser.add_option("--path", help="Convert configuration in this"
                    " directory instead of '%s'" % constants.DATA_DIR,
                    default=constants.DATA_DIR, dest="data_dir")
  parser.add_option("--no-verify",
                    help="Do not verify configuration after upgrade",
                    action="store_true", dest="no_verify", default=False)
  (options, args) = parser.parse_args()

  # We need to keep filenames locally because they might be renamed between
  # versions.
  options.data_dir = os.path.abspath(options.data_dir)
  options.CONFIG_DATA_PATH = options.data_dir + "/config.data"
  options.SERVER_PEM_PATH = options.data_dir + "/server.pem"
  options.KNOWN_HOSTS_PATH = options.data_dir + "/known_hosts"
  options.RAPI_CERT_FILE = options.data_dir + "/rapi.pem"
  options.SPICE_CERT_FILE = options.data_dir + "/spice.pem"
  options.SPICE_CACERT_FILE = options.data_dir + "/spice-ca.pem"
  options.RAPI_USERS_FILE = options.data_dir + "/rapi/users"
  options.RAPI_USERS_FILE_PRE24 = options.data_dir + "/rapi_users"
  options.CONFD_HMAC_KEY = options.data_dir + "/hmac.key"
  options.CDS_FILE = options.data_dir + "/cluster-domain-secret"
  options.SSCONF_MASTER_NODE = options.data_dir + "/ssconf_master_node"
  options.WATCHER_STATEFILE = options.data_dir + "/watcher.data"

  SetupLogging()

  # Option checking
  if args:
    raise Error("No arguments expected")

  # Check master name
  if not (CheckHostname(options.SSCONF_MASTER_NODE) or options.ignore_hostname):
    logging.error("Aborting due to hostname mismatch")
    sys.exit(constants.EXIT_FAILURE)

  if not options.force:
    usertext = ("Please make sure you have read the upgrade notes for"
                " Ganeti %s (available in the UPGRADE file and included"
                " in other documentation formats). Continue with upgrading"
                " configuration?" % constants.RELEASE_VERSION)
    if not cli.AskUser(usertext):
      sys.exit(constants.EXIT_FAILURE)

  # Check whether it's a Ganeti configuration directory
  if not (os.path.isfile(options.CONFIG_DATA_PATH) and
          os.path.isfile(options.SERVER_PEM_PATH) and
          os.path.isfile(options.KNOWN_HOSTS_PATH)):
    raise Error(("%s does not seem to be a Ganeti configuration"
                 " directory") % options.data_dir)

  config_data = serializer.LoadJson(utils.ReadFile(options.CONFIG_DATA_PATH))

  try:
    config_version = config_data["version"]
  except KeyError:
    raise Error("Unable to determine configuration version")

  (config_major, config_minor, config_revision) = \
    constants.SplitVersion(config_version)

  logging.info("Found configuration version %s (%d.%d.%d)",
               config_version, config_major, config_minor, config_revision)

  # Since 2.0 the version lives in the top-level "version" field only
  if "config_version" in config_data["cluster"]:
    raise Error("Inconsistent configuration: found config_version in"
                " configuration file")

  # Upgrade from 2.0/2.1/2.2/2.3/2.4/2.5 to 2.6
  if config_major == 2 and config_minor in (0, 1, 2, 3, 4, 5):
    if config_revision != 0:
      logging.warning("Config revision is %s, not 0", config_revision)

    config_data["version"] = constants.BuildVersion(TARGET_MAJOR,
                                                    TARGET_MINOR, 0)

    if "instances" not in config_data:
      raise Error("Can't find the 'instances' key in the configuration!")
    for instance, iobj in config_data["instances"].items():
      if "disks" not in iobj:
        raise Error("Instance '%s' doesn't have a disks entry?!" % instance)
      disks = iobj["disks"]
      for idx, dobj in enumerate(disks):
        # Disk names must follow the "disk/<index>" convention
        expected = "disk/%s" % idx
        current = dobj.get("iv_name", "")
        if current != expected:
          logging.warning("Updating iv_name for instance %s/disk %s"
                          " from '%s' to '%s'",
                          instance, idx, current, expected)
          dobj["iv_name"] = expected

  elif config_major == TARGET_MAJOR and config_minor == TARGET_MINOR:
    logging.info("No changes necessary")

  else:
    raise Error("Configuration version %d.%d.%d not supported by this tool" %
                (config_major, config_minor, config_revision))

  # Starting with 2.4 the RAPI users file lives in a subdirectory; move a
  # real (non-symlink) pre-2.4 file to the new location
  if (os.path.isfile(options.RAPI_USERS_FILE_PRE24) and
      not os.path.islink(options.RAPI_USERS_FILE_PRE24)):
    if os.path.exists(options.RAPI_USERS_FILE):
      raise Error("Found pre-2.4 RAPI users file at %s, but another file"
                  " already exists at %s" %
                  (options.RAPI_USERS_FILE_PRE24, options.RAPI_USERS_FILE))
    logging.info("Found pre-2.4 RAPI users file at %s, renaming to %s",
                 options.RAPI_USERS_FILE_PRE24, options.RAPI_USERS_FILE)
    if not options.dry_run:
      utils.RenameFile(options.RAPI_USERS_FILE_PRE24, options.RAPI_USERS_FILE,
                       mkdir=True, mkdir_mode=0o750)

  # Create a symlink for RAPI users file
  if (not (os.path.islink(options.RAPI_USERS_FILE_PRE24) or
           os.path.isfile(options.RAPI_USERS_FILE_PRE24)) and
      os.path.isfile(options.RAPI_USERS_FILE)):
    logging.info("Creating symlink from %s to %s",
                 options.RAPI_USERS_FILE_PRE24, options.RAPI_USERS_FILE)
    if not options.dry_run:
      os.symlink(options.RAPI_USERS_FILE, options.RAPI_USERS_FILE_PRE24)

  # Remove old watcher state file if it exists
  if os.path.exists(options.WATCHER_STATEFILE):
    logging.info("Removing watcher state file %s", options.WATCHER_STATEFILE)
    if not options.dry_run:
      utils.RemoveFile(options.WATCHER_STATEFILE)

  UpgradeInstances(config_data)

  try:
    logging.info("Writing configuration file to %s", options.CONFIG_DATA_PATH)
    utils.WriteFile(file_name=options.CONFIG_DATA_PATH,
                    data=serializer.DumpJson(config_data),
                    mode=0o600,
                    dry_run=options.dry_run,
                    backup=True)

    if not options.dry_run:
      # Only generate crypto files that are missing (all "force" flags False)
      bootstrap.GenerateClusterCrypto(False, False, False, False, False,
                                      nodecert_file=options.SERVER_PEM_PATH,
                                      rapicert_file=options.RAPI_CERT_FILE,
                                      spicecert_file=options.SPICE_CERT_FILE,
                                      spicecacert_file=options.SPICE_CACERT_FILE,
                                      hmackey_file=options.CONFD_HMAC_KEY,
                                      cds_file=options.CDS_FILE)

  except Exception:
    logging.critical("Writing configuration failed. It is probably in an"
                     " inconsistent state and needs manual intervention.")
    raise

  # Test loading the config file
  if not (options.dry_run or options.no_verify):
    logging.info("Testing the new config file...")
    cfg = config.ConfigWriter(cfg_file=options.CONFIG_DATA_PATH,
                              accept_foreign=options.ignore_hostname,
                              offline=True)
    # if we reached this, it's all fine
    vrfy = cfg.VerifyConfig()
    if vrfy:
      logging.error("Errors after conversion:")
      for item in vrfy:
        logging.error(" - %s", item)
    del cfg
    logging.info("File loaded successfully")

  cli.ToStderr("Configuration successfully upgraded for version %s.",
               constants.RELEASE_VERSION)


if __name__ == "__main__":
  main()