root / tools / cfgupgrade @ fdb85e3d
History | View | Annotate | Download (12.7 kB)
1 |
#!/usr/bin/python |
---|---|
2 |
# |
3 |
|
4 |
# Copyright (C) 2007, 2008, 2009, 2010, 2011, 2012, 2013 Google Inc. |
5 |
# |
6 |
# This program is free software; you can redistribute it and/or modify |
7 |
# it under the terms of the GNU General Public License as published by |
8 |
# the Free Software Foundation; either version 2 of the License, or |
9 |
# (at your option) any later version. |
10 |
# |
11 |
# This program is distributed in the hope that it will be useful, but |
12 |
# WITHOUT ANY WARRANTY; without even the implied warranty of |
13 |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
14 |
# General Public License for more details. |
15 |
# |
16 |
# You should have received a copy of the GNU General Public License |
17 |
# along with this program; if not, write to the Free Software |
18 |
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA |
19 |
# 02110-1301, USA. |
20 |
|
21 |
|
22 |
"""Tool to upgrade the configuration file. |
23 |
|
24 |
This code handles only the types supported by simplejson. As an |
25 |
example, 'set' is a 'list'. |
26 |
|
27 |
""" |
28 |
|
29 |
|
30 |
import os |
31 |
import os.path |
32 |
import sys |
33 |
import optparse |
34 |
import logging |
35 |
import time |
36 |
from cStringIO import StringIO |
37 |
|
38 |
from ganeti import constants |
39 |
from ganeti import serializer |
40 |
from ganeti import utils |
41 |
from ganeti import cli |
42 |
from ganeti import bootstrap |
43 |
from ganeti import config |
44 |
from ganeti import netutils |
45 |
from ganeti import pathutils |
46 |
|
47 |
|
48 |
# Parsed command-line options and positional arguments; both are populated
# by main() (note the "global" statement there) so that helpers such as
# SetupLogging() can read them without explicit parameters.
options = None
args = None


#: Target major version we will upgrade to
TARGET_MAJOR = 2
#: Target minor version we will upgrade to
TARGET_MINOR = 7
56 |
|
57 |
|
58 |
class Error(Exception):
  """Generic exception raised by this tool on fatal conditions."""
61 |
|
62 |
|
63 |
def SetupLogging():
  """Configures the logging module.

  Attaches a stderr handler to the root logger; the handler's threshold
  is derived from the (module-level) parsed options.

  """
  handler = logging.StreamHandler()
  handler.setFormatter(logging.Formatter("%(asctime)s: %(message)s"))

  # Pick the verbosity: --debug wins over --verbose, default is warnings only.
  if options.debug:
    level = logging.NOTSET
  elif options.verbose:
    level = logging.INFO
  else:
    level = logging.WARNING
  handler.setLevel(level)

  root = logging.getLogger("")
  root.setLevel(logging.NOTSET)
  root.addHandler(handler)
81 |
|
82 |
|
83 |
def CheckHostname(path):
  """Ensures hostname matches ssconf value.

  @param path: Path to ssconf file

  """
  master_name = utils.ReadOneLineFile(path)
  local_name = netutils.GetHostname().name

  # This tool must only run on the master node; warn and refuse otherwise.
  if master_name != local_name:
    logging.warning("Warning: ssconf says master node is '%s', but this"
                    " machine's name is '%s'; this tool must be run on"
                    " the master node", master_name, local_name)
    return False

  return True
99 |
|
100 |
|
101 |
def UpgradeNetworks(config_data):
  """Makes sure the top-level 'networks' mapping exists and is a dict.

  A missing, empty or None value is replaced by an empty dict.

  """
  if not config_data.get("networks", None):
    config_data["networks"] = {}
105 |
|
106 |
|
107 |
def UpgradeGroups(config_data):
  """Gives every node group a 'networks' dict.

  A missing, empty or None value is replaced by an empty dict; populated
  values are left untouched.

  """
  for grp in config_data["nodegroups"].values():
    if not grp.get("networks", None):
      grp["networks"] = {}
112 |
|
113 |
|
114 |
def UpgradeInstances(config_data):
  """Rewrites instance NICs to reference networks by UUID instead of name.

  NICs whose 'network' value matches a known network name are switched to
  that network's UUID; unknown names and NICs without a network are left
  as they are.

  """
  # Build the name -> uuid lookup table once, up front.
  name_to_uuid = {}
  for net in config_data["networks"].values():
    name_to_uuid[net["name"]] = net["uuid"]

  for instance in config_data["instances"].values():
    for nic in instance["nics"]:
      net_name = nic.get("network", None)
      if not net_name:
        continue
      net_uuid = name_to_uuid.get(net_name, None)
      if net_uuid:
        print("NIC with network name %s found."
              " Substituting with uuid %s." % (net_name, net_uuid))
        nic["network"] = net_uuid
126 |
|
127 |
|
128 |
def main():
  """Main program.

  Parses the command line, locates the cluster configuration under
  --path/--confdir, upgrades it from 2.{0..6} to 2.7, migrates the
  pre-2.4 RAPI users file, writes the file-storage whitelist, rewrites
  config.data, regenerates cluster crypto and finally re-verifies the
  result (unless --dry-run / --no-verify).

  @raise Error: on unexpected arguments, an unrecognizable configuration
      directory or an unsupported configuration version

  """
  global options, args  # pylint: disable=W0603

  # Option parsing
  parser = optparse.OptionParser(usage="%prog [--debug|--verbose] [--force]")
  parser.add_option("--dry-run", dest="dry_run",
                    action="store_true",
                    help="Try to do the conversion, but don't write"
                         " output file")
  parser.add_option(cli.FORCE_OPT)
  parser.add_option(cli.DEBUG_OPT)
  parser.add_option(cli.VERBOSE_OPT)
  parser.add_option("--ignore-hostname", dest="ignore_hostname",
                    action="store_true", default=False,
                    help="Don't abort if hostname doesn't match")
  parser.add_option("--path", help="Convert configuration in this"
                    " directory instead of '%s'" % pathutils.DATA_DIR,
                    default=pathutils.DATA_DIR, dest="data_dir")
  parser.add_option("--confdir",
                    help=("Use this directory instead of '%s'" %
                          pathutils.CONF_DIR),
                    default=pathutils.CONF_DIR, dest="conf_dir")
  parser.add_option("--no-verify",
                    help="Do not verify configuration after upgrade",
                    action="store_true", dest="no_verify", default=False)
  (options, args) = parser.parse_args()

  # We need to keep filenames locally because they might be renamed between
  # versions.
  options.data_dir = os.path.abspath(options.data_dir)
  options.CONFIG_DATA_PATH = options.data_dir + "/config.data"
  options.SERVER_PEM_PATH = options.data_dir + "/server.pem"
  options.KNOWN_HOSTS_PATH = options.data_dir + "/known_hosts"
  options.RAPI_CERT_FILE = options.data_dir + "/rapi.pem"
  options.SPICE_CERT_FILE = options.data_dir + "/spice.pem"
  options.SPICE_CACERT_FILE = options.data_dir + "/spice-ca.pem"
  options.RAPI_USERS_FILE = options.data_dir + "/rapi/users"
  options.RAPI_USERS_FILE_PRE24 = options.data_dir + "/rapi_users"
  options.CONFD_HMAC_KEY = options.data_dir + "/hmac.key"
  options.CDS_FILE = options.data_dir + "/cluster-domain-secret"
  options.SSCONF_MASTER_NODE = options.data_dir + "/ssconf_master_node"
  options.WATCHER_STATEFILE = options.data_dir + "/watcher.data"
  options.FILE_STORAGE_PATHS_FILE = options.conf_dir + "/file-storage-paths"

  # Logging can only be set up once the options are parsed, since the
  # verbosity flags decide the handler level.
  SetupLogging()

  # Option checking
  if args:
    raise Error("No arguments expected")

  # Check master name; --ignore-hostname skips this safety check.
  if not (CheckHostname(options.SSCONF_MASTER_NODE) or options.ignore_hostname):
    logging.error("Aborting due to hostname mismatch")
    sys.exit(constants.EXIT_FAILURE)

  # Interactive confirmation unless --force was given.
  if not options.force:
    usertext = ("Please make sure you have read the upgrade notes for"
                " Ganeti %s (available in the UPGRADE file and included"
                " in other documentation formats). Continue with upgrading"
                " configuration?" % constants.RELEASE_VERSION)
    if not cli.AskUser(usertext):
      sys.exit(constants.EXIT_FAILURE)

  # Check whether it's a Ganeti configuration directory
  if not (os.path.isfile(options.CONFIG_DATA_PATH) and
          os.path.isfile(options.SERVER_PEM_PATH) and
          os.path.isfile(options.KNOWN_HOSTS_PATH)):
    raise Error(("%s does not seem to be a Ganeti configuration"
                 " directory") % options.data_dir)

  if not os.path.isdir(options.conf_dir):
    raise Error("Not a directory: %s" % options.conf_dir)

  config_data = serializer.LoadJson(utils.ReadFile(options.CONFIG_DATA_PATH))

  try:
    config_version = config_data["version"]
  except KeyError:
    raise Error("Unable to determine configuration version")

  (config_major, config_minor, config_revision) = \
    constants.SplitVersion(config_version)

  logging.info("Found configuration version %s (%d.%d.%d)",
               config_version, config_major, config_minor, config_revision)

  # "config_version" inside the cluster object was dropped long ago; its
  # presence means a broken/inconsistent configuration.
  if "config_version" in config_data["cluster"]:
    raise Error("Inconsistent configuration: found config_version in"
                " configuration file")

  # Upgrade from 2.{0..6} to 2.7
  if config_major == 2 and config_minor in (0, 1, 2, 3, 4, 5, 6):
    if config_revision != 0:
      logging.warning("Config revision is %s, not 0", config_revision)

    config_data["version"] = constants.BuildVersion(TARGET_MAJOR,
                                                    TARGET_MINOR, 0)

    if "instances" not in config_data:
      raise Error("Can't find the 'instances' key in the configuration!")
    for instance, iobj in config_data["instances"].items():
      if "disks" not in iobj:
        raise Error("Instance '%s' doesn't have a disks entry?!" % instance)
      disks = iobj["disks"]
      # Normalize each disk's iv_name to the canonical "disk/<index>" form.
      for idx, dobj in enumerate(disks):
        expected = "disk/%s" % idx
        current = dobj.get("iv_name", "")
        if current != expected:
          logging.warning("Updating iv_name for instance %s/disk %s"
                          " from '%s' to '%s'",
                          instance, idx, current, expected)
          dobj["iv_name"] = expected

  elif config_major == TARGET_MAJOR and config_minor == TARGET_MINOR:
    logging.info("No changes necessary")

  else:
    raise Error("Configuration version %d.%d.%d not supported by this tool" %
                (config_major, config_minor, config_revision))

  # Migrate a pre-2.4 RAPI users file (a real file, not the compat symlink
  # created below) to the new location, refusing to clobber an existing one.
  if (os.path.isfile(options.RAPI_USERS_FILE_PRE24) and
      not os.path.islink(options.RAPI_USERS_FILE_PRE24)):
    if os.path.exists(options.RAPI_USERS_FILE):
      raise Error("Found pre-2.4 RAPI users file at %s, but another file"
                  " already exists at %s" %
                  (options.RAPI_USERS_FILE_PRE24, options.RAPI_USERS_FILE))
    logging.info("Found pre-2.4 RAPI users file at %s, renaming to %s",
                 options.RAPI_USERS_FILE_PRE24, options.RAPI_USERS_FILE)
    if not options.dry_run:
      utils.RenameFile(options.RAPI_USERS_FILE_PRE24, options.RAPI_USERS_FILE,
                       mkdir=True, mkdir_mode=0750)

  # Create a symlink for RAPI users file
  if (not (os.path.islink(options.RAPI_USERS_FILE_PRE24) or
           os.path.isfile(options.RAPI_USERS_FILE_PRE24)) and
      os.path.isfile(options.RAPI_USERS_FILE)):
    logging.info("Creating symlink from %s to %s",
                 options.RAPI_USERS_FILE_PRE24, options.RAPI_USERS_FILE)
    if not options.dry_run:
      os.symlink(options.RAPI_USERS_FILE, options.RAPI_USERS_FILE_PRE24)

  # Remove old watcher state file if it exists
  if os.path.exists(options.WATCHER_STATEFILE):
    logging.info("Removing watcher state file %s", options.WATCHER_STATEFILE)
    if not options.dry_run:
      utils.RemoveFile(options.WATCHER_STATEFILE)

  # Write file storage paths
  if not os.path.exists(options.FILE_STORAGE_PATHS_FILE):
    cluster = config_data["cluster"]
    file_storage_dir = cluster.get("file_storage_dir")
    shared_file_storage_dir = cluster.get("shared_file_storage_dir")
    del cluster

    logging.info("Ganeti 2.7 and later only allow whitelisted directories"
                 " for file storage; writing existing configuration values"
                 " into '%s'",
                 options.FILE_STORAGE_PATHS_FILE)

    if file_storage_dir:
      logging.info("File storage directory: %s", file_storage_dir)
    if shared_file_storage_dir:
      logging.info("Shared file storage directory: %s",
                   shared_file_storage_dir)

    buf = StringIO()
    buf.write("# List automatically generated from configuration by\n")
    buf.write("# cfgupgrade at %s\n" % time.asctime())
    if file_storage_dir:
      buf.write("%s\n" % file_storage_dir)
    if shared_file_storage_dir:
      buf.write("%s\n" % shared_file_storage_dir)
    utils.WriteFile(file_name=options.FILE_STORAGE_PATHS_FILE,
                    data=buf.getvalue(),
                    mode=0600,
                    dry_run=options.dry_run,
                    backup=True)

  UpgradeNetworks(config_data)
  UpgradeGroups(config_data)
  UpgradeInstances(config_data)

  try:
    logging.info("Writing configuration file to %s", options.CONFIG_DATA_PATH)
    utils.WriteFile(file_name=options.CONFIG_DATA_PATH,
                    data=serializer.DumpJson(config_data),
                    mode=0600,
                    dry_run=options.dry_run,
                    backup=True)

    # Regenerate only the missing cluster crypto artifacts (all "new_*"
    # flags are False, so existing certificates/keys are kept).
    if not options.dry_run:
      bootstrap.GenerateClusterCrypto(
        False, False, False, False, False,
        nodecert_file=options.SERVER_PEM_PATH,
        rapicert_file=options.RAPI_CERT_FILE,
        spicecert_file=options.SPICE_CERT_FILE,
        spicecacert_file=options.SPICE_CACERT_FILE,
        hmackey_file=options.CONFD_HMAC_KEY,
        cds_file=options.CDS_FILE)

  except Exception:
    logging.critical("Writing configuration failed. It is probably in an"
                     " inconsistent state and needs manual intervention.")
    raise

  # test loading the config file
  all_ok = True
  if not (options.dry_run or options.no_verify):
    logging.info("Testing the new config file...")
    cfg = config.ConfigWriter(cfg_file=options.CONFIG_DATA_PATH,
                              accept_foreign=options.ignore_hostname,
                              offline=True)
    # if we reached this, it's all fine
    vrfy = cfg.VerifyConfig()
    if vrfy:
      logging.error("Errors after conversion:")
      for item in vrfy:
        logging.error(" - %s", item)
      all_ok = False
    else:
      logging.info("File loaded successfully after upgrading")
    del cfg

  if all_ok:
    cli.ToStderr("Configuration successfully upgraded to version %s.",
                 constants.RELEASE_VERSION)
  else:
    cli.ToStderr("Configuration upgraded to version %s, but there are errors."
                 "\nPlease review the file.", constants.RELEASE_VERSION)
360 |
|
361 |
|
362 |
# Script entry point: run the upgrade only when executed directly.
if __name__ == "__main__":
  main()