import unittest
import re
+import itertools
+import operator
from ganeti import _autoconf
from ganeti import utils
from ganeti import cmdlib
from ganeti import build
+from ganeti import compat
+from ganeti import mcpu
+from ganeti import opcodes
+from ganeti import constants
+from ganeti.rapi import baserlib
+from ganeti.rapi import rlib2
from ganeti.rapi import connector
import testutils
-class TestDocs(unittest.TestCase):
- """Documentation tests"""
+VALID_URI_RE = re.compile(r"^[-/a-z0-9]*$")
+
+RAPI_OPCODE_EXCLUDE = frozenset([
+ # Not yet implemented
+ opcodes.OpBackupQuery,
+ opcodes.OpBackupRemove,
+ opcodes.OpClusterConfigQuery,
+ opcodes.OpClusterRepairDiskSizes,
+ opcodes.OpClusterVerify,
+ opcodes.OpClusterVerifyDisks,
+ opcodes.OpInstanceChangeGroup,
+ opcodes.OpInstanceMove,
+ opcodes.OpNodeQueryvols,
+ opcodes.OpOobCommand,
+ opcodes.OpTagsSearch,
+ opcodes.OpClusterActivateMasterIp,
+ opcodes.OpClusterDeactivateMasterIp,
+
+ # Difficult if not impossible
+ opcodes.OpClusterDestroy,
+ opcodes.OpClusterPostInit,
+ opcodes.OpClusterRename,
+ opcodes.OpNodeAdd,
+ opcodes.OpNodeRemove,
+
+ # Very sensitive in nature
+ opcodes.OpRestrictedCommand,
+
+ # Helper opcodes (e.g. submitted by LUs)
+ opcodes.OpClusterVerifyConfig,
+ opcodes.OpClusterVerifyGroup,
+ opcodes.OpGroupEvacuate,
+ opcodes.OpGroupVerifyDisks,
+
+ # Test opcodes
+ opcodes.OpTestAllocator,
+ opcodes.OpTestDelay,
+ opcodes.OpTestDummy,
+ opcodes.OpTestJqueue,
+ ])
+
+
+def _ReadDocFile(filename):
+ return utils.ReadFile("%s/doc/%s" %
+ (testutils.GetSourceDir(), filename))
+
+
+class TestHooksDocs(unittest.TestCase):
+ HOOK_PATH_OK = frozenset([
+ "master-ip-turnup",
+ "master-ip-turndown",
+ ])
- @staticmethod
- def _ReadDocFile(filename):
- return utils.ReadFile("%s/doc/%s" %
- (testutils.GetSourceDir(), filename))
-
- def testHookDocs(self):
+ def test(self):
"""Check whether all hooks are documented.
"""
- hooksdoc = self._ReadDocFile("hooks.rst")
+ hooksdoc = _ReadDocFile("hooks.rst")
+
+ # Reverse mapping from LU to opcode
+ lu2opcode = dict((lu, op)
+ for (op, lu) in mcpu.Processor.DISPATCH_TABLE.items())
+ assert len(lu2opcode) == len(mcpu.Processor.DISPATCH_TABLE), \
+ "Found duplicate entries"
+
+    hooks_paths = frozenset(re.findall(r"^:directory:\s*(.+)\s*$", hooksdoc,
+                                       re.M))
+ self.assertTrue(self.HOOK_PATH_OK.issubset(hooks_paths),
+ msg="Whitelisted path not found in documentation")
+
+ raw_hooks_ops = re.findall("^OP_(?!CODE$).+$", hooksdoc, re.M)
+ hooks_ops = set()
+ duplicate_ops = set()
+ for op in raw_hooks_ops:
+ if op in hooks_ops:
+ duplicate_ops.add(op)
+ else:
+ hooks_ops.add(op)
+
+ self.assertFalse(duplicate_ops,
+ msg="Found duplicate opcode documentation: %s" %
+ utils.CommaJoin(duplicate_ops))
+
+    seen_paths = set()
+    seen_ops = set()
+
for name in dir(cmdlib):
- obj = getattr(cmdlib, name)
+ lucls = getattr(cmdlib, name)
- if (isinstance(obj, type) and
- issubclass(obj, cmdlib.LogicalUnit) and
- hasattr(obj, "HPATH")):
- self._CheckHook(name, obj, hooksdoc)
+ if (isinstance(lucls, type) and
+ issubclass(lucls, cmdlib.LogicalUnit) and
+ hasattr(lucls, "HPATH")):
+ if lucls.HTYPE is None:
+ continue
- def _CheckHook(self, name, lucls, hooksdoc):
- if lucls.HTYPE is None:
- return
+ opcls = lu2opcode.get(lucls, None)
- # TODO: Improve this test (e.g. find hooks documented but no longer
- # existing)
+ if opcls:
+ seen_ops.add(opcls.OP_ID)
+ self.assertTrue(opcls.OP_ID in hooks_ops,
+ msg="Missing hook documentation for %s" %
+ opcls.OP_ID)
+ self.assertTrue(lucls.HPATH in hooks_paths,
+ msg="Missing documentation for hook %s/%s" %
+ (lucls.HTYPE, lucls.HPATH))
+ seen_paths.add(lucls.HPATH)
- pattern = r"^:directory:\s*%s\s*$" % re.escape(lucls.HPATH)
+ missed_ops = hooks_ops - seen_ops
+ missed_paths = hooks_paths - seen_paths - self.HOOK_PATH_OK
- self.assert_(re.findall(pattern, hooksdoc, re.M),
- msg=("Missing documentation for hook %s/%s" %
- (lucls.HTYPE, lucls.HPATH)))
+ self.assertFalse(missed_ops,
+ msg="Op documents hook not existing anymore: %s" %
+ utils.CommaJoin(missed_ops))
+ self.assertFalse(missed_paths,
+ msg="Hook path does not exist in opcode: %s" %
+ utils.CommaJoin(missed_paths))
- def testRapiDocs(self):
- """Check whether all RAPI resources are documented.
- """
- rapidoc = self._ReadDocFile("rapi.rst")
+class TestRapiDocs(unittest.TestCase):
+ def _CheckRapiResource(self, uri, fixup, handler):
+ docline = "%s resource." % uri
+ self.assertEqual(handler.__doc__.splitlines()[0].strip(), docline,
+ msg=("First line of %r's docstring is not %r" %
+ (handler, docline)))
- node_name = "[node_name]"
- instance_name = "[instance_name]"
- group_name = "[group_name]"
- job_id = "[job_id]"
- disk_index = "[disk_index]"
+ # Apply fixes before testing
+ for (rx, value) in fixup.items():
+ uri = rx.sub(value, uri)
- resources = connector.GetHandlers(re.escape(node_name),
- re.escape(instance_name),
- re.escape(group_name),
- re.escape(job_id),
- re.escape(disk_index))
+ self.assertTrue(VALID_URI_RE.match(uri), msg="Invalid URI %r" % uri)
+
+ def test(self):
+ """Check whether all RAPI resources are documented.
+
+ """
+ rapidoc = _ReadDocFile("rapi.rst")
+
+ node_name = re.escape("[node_name]")
+ instance_name = re.escape("[instance_name]")
+ group_name = re.escape("[group_name]")
+ network_name = re.escape("[network_name]")
+ job_id = re.escape("[job_id]")
+ disk_index = re.escape("[disk_index]")
+ query_res = re.escape("[resource]")
+
+ resources = connector.GetHandlers(node_name, instance_name,
+ group_name, network_name,
+ job_id, disk_index, query_res)
+
+ handler_dups = utils.FindDuplicates(resources.values())
+ self.assertFalse(handler_dups,
+ msg=("Resource handlers used more than once: %r" %
+ handler_dups))
+
+ uri_check_fixup = {
+ re.compile(node_name): "node1examplecom",
+ re.compile(instance_name): "inst1examplecom",
+ re.compile(group_name): "group4440",
+ re.compile(network_name): "network5550",
+ re.compile(job_id): "9409",
+ re.compile(disk_index): "123",
+ re.compile(query_res): "lock",
+ }
+
+ assert compat.all(VALID_URI_RE.match(value)
+ for value in uri_check_fixup.values()), \
+ "Fixup values must be valid URIs, too"
titles = []
prefix_exception = frozenset(["/", "/version", "/2"])
undocumented = []
+ used_uris = []
for key, handler in resources.iteritems():
# Regex objects
if hasattr(key, "match"):
self.assert_(key.pattern.startswith("^/2/"),
msg="Pattern %r does not start with '^/2/'" % key.pattern)
+ self.assertEqual(key.pattern[-1], "$")
found = False
for title in titles:
- if (title.startswith("``") and
- title.endswith("``") and
- key.match(title[2:-2])):
- found = True
- break
+ if title.startswith("``") and title.endswith("``"):
+ uri = title[2:-2]
+ if key.match(uri):
+ self._CheckRapiResource(uri, uri_check_fixup, handler)
+ used_uris.append(uri)
+ found = True
+ break
if not found:
# TODO: Find better way of identifying resource
self.assert_(key.startswith("/2/") or key in prefix_exception,
msg="Path %r does not start with '/2/'" % key)
- if ("``%s``" % key) not in titles:
+ if ("``%s``" % key) in titles:
+ self._CheckRapiResource(key, {}, handler)
+ used_uris.append(key)
+ else:
undocumented.append(key)
self.failIf(undocumented,
msg=("Missing RAPI resource documentation for %s" %
utils.CommaJoin(undocumented)))
+ uri_dups = utils.FindDuplicates(used_uris)
+ self.failIf(uri_dups,
+ msg=("URIs matched by more than one resource: %s" %
+ utils.CommaJoin(uri_dups)))
+
+ self._FindRapiMissing(resources.values())
+ self._CheckTagHandlers(resources.values())
+
+ def _FindRapiMissing(self, handlers):
+ used = frozenset(itertools.chain(*map(baserlib.GetResourceOpcodes,
+ handlers)))
+
+ unexpected = used & RAPI_OPCODE_EXCLUDE
+ self.assertFalse(unexpected,
+ msg=("Found RAPI resources for excluded opcodes: %s" %
+ utils.CommaJoin(_GetOpIds(unexpected))))
+
+ missing = (frozenset(opcodes.OP_MAPPING.values()) - used -
+ RAPI_OPCODE_EXCLUDE)
+ self.assertFalse(missing,
+ msg=("Missing RAPI resources for opcodes: %s" %
+ utils.CommaJoin(_GetOpIds(missing))))
+
+ def _CheckTagHandlers(self, handlers):
+ tag_handlers = filter(lambda x: issubclass(x, rlib2._R_Tags), handlers)
+ self.assertEqual(frozenset(map(operator.attrgetter("TAG_LEVEL"),
+ tag_handlers)),
+ constants.VALID_TAG_TYPES)
+
+
+def _GetOpIds(ops):
+ """Returns C{OP_ID} for all opcodes in passed sequence.
+
+ """
+ return sorted(opcls.OP_ID for opcls in ops)
+
class TestManpages(unittest.TestCase):
"""Manpage tests"""