X-Git-Url: https://code.grnet.gr/git/ganeti-local/blobdiff_plain/6915bc28fe053e92aa16cf2d974d205f1140219c..09bf5d24a29ee617d1eab566fa9b07d60cf2ed1a:/test/docs_unittest.py

diff --git a/test/docs_unittest.py b/test/docs_unittest.py
index 6e66748..d91976c 100755
--- a/test/docs_unittest.py
+++ b/test/docs_unittest.py
@@ -23,65 +23,190 @@
 import unittest
 import re
+import itertools
+import operator
 
 from ganeti import _autoconf
 from ganeti import utils
 from ganeti import cmdlib
 from ganeti import build
+from ganeti import compat
+from ganeti import mcpu
+from ganeti import opcodes
+from ganeti import constants
+from ganeti.rapi import baserlib
+from ganeti.rapi import rlib2
 from ganeti.rapi import connector
 
 import testutils
 
 
-class TestDocs(unittest.TestCase):
-  """Documentation tests"""
+VALID_URI_RE = re.compile(r"^[-/a-z0-9]*$")
+
+RAPI_OPCODE_EXCLUDE = frozenset([
+  # Not yet implemented
+  opcodes.OpBackupQuery,
+  opcodes.OpBackupRemove,
+  opcodes.OpClusterConfigQuery,
+  opcodes.OpClusterRepairDiskSizes,
+  opcodes.OpClusterVerify,
+  opcodes.OpClusterVerifyDisks,
+  opcodes.OpInstanceChangeGroup,
+  opcodes.OpInstanceMove,
+  opcodes.OpNodeQueryvols,
+  opcodes.OpOobCommand,
+  opcodes.OpTagsSearch,
+  opcodes.OpClusterActivateMasterIp,
+  opcodes.OpClusterDeactivateMasterIp,
+
+  # Difficult if not impossible
+  opcodes.OpClusterDestroy,
+  opcodes.OpClusterPostInit,
+  opcodes.OpClusterRename,
+  opcodes.OpNodeAdd,
+  opcodes.OpNodeRemove,
+
+  # Helper opcodes (e.g. submitted by LUs)
+  opcodes.OpClusterVerifyConfig,
+  opcodes.OpClusterVerifyGroup,
+  opcodes.OpGroupEvacuate,
+  opcodes.OpGroupVerifyDisks,
+
+  # Test opcodes
+  opcodes.OpTestAllocator,
+  opcodes.OpTestDelay,
+  opcodes.OpTestDummy,
+  opcodes.OpTestJqueue,
+  ])
+
+
+def _ReadDocFile(filename):
+  return utils.ReadFile("%s/doc/%s" %
+                        (testutils.GetSourceDir(), filename))
+
+
+class TestHooksDocs(unittest.TestCase):
+  HOOK_PATH_OK = frozenset([
+    "master-ip-turnup",
+    "master-ip-turndown",
+    ])
 
-  @staticmethod
-  def _ReadDocFile(filename):
-    return utils.ReadFile("%s/doc/%s" %
-                          (testutils.GetSourceDir(), filename))
-
-  def testHookDocs(self):
+  def test(self):
     """Check whether all hooks are documented.
 
     """
-    hooksdoc = self._ReadDocFile("hooks.rst")
+    hooksdoc = _ReadDocFile("hooks.rst")
+
+    # Reverse mapping from LU to opcode
+    lu2opcode = dict((lu, op)
+                     for (op, lu) in mcpu.Processor.DISPATCH_TABLE.items())
+    assert len(lu2opcode) == len(mcpu.Processor.DISPATCH_TABLE), \
+      "Found duplicate entries"
+
+    hooks_paths = frozenset(re.findall("^:directory:\s*(.+)\s*$", hooksdoc,
+                                       re.M))
+    self.assertTrue(self.HOOK_PATH_OK.issubset(hooks_paths),
+                    msg="Whitelisted path not found in documentation")
+
+    raw_hooks_ops = re.findall("^OP_(?!CODE$).+$", hooksdoc, re.M)
+    hooks_ops = set()
+    duplicate_ops = set()
+    for op in raw_hooks_ops:
+      if op in hooks_ops:
+        duplicate_ops.add(op)
+      else:
+        hooks_ops.add(op)
+
+    self.assertFalse(duplicate_ops,
+                     msg="Found duplicate opcode documentation: %s" %
+                     utils.CommaJoin(duplicate_ops))
+
+    seen_paths = set()
+    seen_ops = set()
+
+    self.assertFalse(duplicate_ops,
+                     msg="Found duplicated hook documentation: %s" %
+                     utils.CommaJoin(duplicate_ops))
 
     for name in dir(cmdlib):
-      obj = getattr(cmdlib, name)
+      lucls = getattr(cmdlib, name)
 
-      if (isinstance(obj, type) and
-          issubclass(obj, cmdlib.LogicalUnit) and
-          hasattr(obj, "HPATH")):
-        self._CheckHook(name, obj, hooksdoc)
+      if (isinstance(lucls, type) and
+          issubclass(lucls, cmdlib.LogicalUnit) and
+          hasattr(lucls, "HPATH")):
+        if lucls.HTYPE is None:
+          continue
 
-  def _CheckHook(self, name, lucls, hooksdoc):
-    if lucls.HTYPE is None:
-      return
+        opcls = lu2opcode.get(lucls, None)
 
-    # TODO: Improve this test (e.g. find hooks documented but no longer
-    # existing)
+        if opcls:
+          seen_ops.add(opcls.OP_ID)
+          self.assertTrue(opcls.OP_ID in hooks_ops,
+                          msg="Missing hook documentation for %s" %
+                          opcls.OP_ID)
+        self.assertTrue(lucls.HPATH in hooks_paths,
+                        msg="Missing documentation for hook %s/%s" %
+                        (lucls.HTYPE, lucls.HPATH))
+        seen_paths.add(lucls.HPATH)
 
-    pattern = r"^:directory:\s*%s\s*$" % re.escape(lucls.HPATH)
+    missed_ops = hooks_ops - seen_ops
+    missed_paths = hooks_paths - seen_paths - self.HOOK_PATH_OK
 
-    self.assert_(re.findall(pattern, hooksdoc, re.M),
-                 msg=("Missing documentation for hook %s/%s" %
-                      (lucls.HTYPE, lucls.HPATH)))
+    self.assertFalse(missed_ops,
+                     msg="Op documents hook not existing anymore: %s" %
+                     utils.CommaJoin(missed_ops))
+
+    self.assertFalse(missed_paths,
+                     msg="Hook path does not exist in opcode: %s" %
+                     utils.CommaJoin(missed_paths))
 
-  def testRapiDocs(self):
-    """Check whether all RAPI resources are documented.
-
-    """
-    rapidoc = self._ReadDocFile("rapi.rst")
+
+class TestRapiDocs(unittest.TestCase):
+  def _CheckRapiResource(self, uri, fixup, handler):
+    docline = "%s resource." % uri
+    self.assertEqual(handler.__doc__.splitlines()[0].strip(), docline,
+                     msg=("First line of %r's docstring is not %r" %
+                          (handler, docline)))
 
-    node_name = "[node_name]"
-    instance_name = "[instance_name]"
-    job_id = "[job_id]"
+    # Apply fixes before testing
+    for (rx, value) in fixup.items():
+      uri = rx.sub(value, uri)
 
-    resources = connector.GetHandlers(re.escape(node_name),
-                                      re.escape(instance_name),
-                                      re.escape(job_id))
+    self.assertTrue(VALID_URI_RE.match(uri), msg="Invalid URI %r" % uri)
+
+  def test(self):
+    """Check whether all RAPI resources are documented.
+
+    """
+    rapidoc = _ReadDocFile("rapi.rst")
+
+    node_name = re.escape("[node_name]")
+    instance_name = re.escape("[instance_name]")
+    group_name = re.escape("[group_name]")
+    job_id = re.escape("[job_id]")
+    disk_index = re.escape("[disk_index]")
+    query_res = re.escape("[resource]")
+
+    resources = connector.GetHandlers(node_name, instance_name, group_name,
+                                      job_id, disk_index, query_res)
+
+    handler_dups = utils.FindDuplicates(resources.values())
+    self.assertFalse(handler_dups,
+                     msg=("Resource handlers used more than once: %r" %
+                          handler_dups))
+
+    uri_check_fixup = {
+      re.compile(node_name): "node1examplecom",
+      re.compile(instance_name): "inst1examplecom",
+      re.compile(group_name): "group4440",
+      re.compile(job_id): "9409",
+      re.compile(disk_index): "123",
+      re.compile(query_res): "lock",
+      }
+
+    assert compat.all(VALID_URI_RE.match(value)
+                      for value in uri_check_fixup.values()), \
+      "Fixup values must be valid URIs, too"
 
     titles = []
 
@@ -92,29 +217,81 @@ class TestDocs(unittest.TestCase):
 
       prevline = line
 
+    prefix_exception = frozenset(["/", "/version", "/2"])
+
     undocumented = []
+    used_uris = []
 
     for key, handler in resources.iteritems():
       # Regex objects
       if hasattr(key, "match"):
+        self.assert_(key.pattern.startswith("^/2/"),
+                     msg="Pattern %r does not start with '^/2/'" % key.pattern)
+        self.assertEqual(key.pattern[-1], "$")
+
         found = False
         for title in titles:
-          if (title.startswith("``") and
-              title.endswith("``") and
-              key.match(title[2:-2])):
-            found = True
-            break
+          if title.startswith("``") and title.endswith("``"):
+            uri = title[2:-2]
+            if key.match(uri):
+              self._CheckRapiResource(uri, uri_check_fixup, handler)
+              used_uris.append(uri)
+              found = True
+              break
 
         if not found:
           # TODO: Find better way of identifying resource
-          undocumented.append(str(handler))
+          undocumented.append(key.pattern)
+
+      else:
+        self.assert_(key.startswith("/2/") or key in prefix_exception,
+                     msg="Path %r does not start with '/2/'" % key)
 
-      elif ("``%s``" % key) not in titles:
-        undocumented.append(key)
+        if ("``%s``" % key) in titles:
+          self._CheckRapiResource(key, {}, handler)
+          used_uris.append(key)
+        else:
+          undocumented.append(key)
 
     self.failIf(undocumented,
                 msg=("Missing RAPI resource documentation for %s" %
-                     " ,".join(undocumented)))
+                     utils.CommaJoin(undocumented)))
+
+    uri_dups = utils.FindDuplicates(used_uris)
+    self.failIf(uri_dups,
+                msg=("URIs matched by more than one resource: %s" %
+                     utils.CommaJoin(uri_dups)))
+
+    self._FindRapiMissing(resources.values())
+    self._CheckTagHandlers(resources.values())
+
+  def _FindRapiMissing(self, handlers):
+    used = frozenset(itertools.chain(*map(baserlib.GetResourceOpcodes,
+                                          handlers)))
+
+    unexpected = used & RAPI_OPCODE_EXCLUDE
+    self.assertFalse(unexpected,
+                     msg=("Found RAPI resources for excluded opcodes: %s" %
+                          utils.CommaJoin(_GetOpIds(unexpected))))
+
+    missing = (frozenset(opcodes.OP_MAPPING.values()) - used -
+               RAPI_OPCODE_EXCLUDE)
+    self.assertFalse(missing,
+                     msg=("Missing RAPI resources for opcodes: %s" %
+                          utils.CommaJoin(_GetOpIds(missing))))
+
+  def _CheckTagHandlers(self, handlers):
+    tag_handlers = filter(lambda x: issubclass(x, rlib2._R_Tags), handlers)
+    self.assertEqual(frozenset(map(operator.attrgetter("TAG_LEVEL"),
+                                   tag_handlers)),
+                     constants.VALID_TAG_TYPES)
+
+
+def _GetOpIds(ops):
+  """Returns C{OP_ID} for all opcodes in passed sequence.
+
+  """
+  return sorted(opcls.OP_ID for opcls in ops)
 
 
 class TestManpages(unittest.TestCase):
@@ -122,7 +299,7 @@ class TestManpages(unittest.TestCase):
 
   @staticmethod
   def _ReadManFile(name):
-    return utils.ReadFile("%s/man/%s.sgml" %
+    return utils.ReadFile("%s/man/%s.rst" %
                           (testutils.GetSourceDir(), name))
 
   @staticmethod
@@ -139,14 +316,14 @@ class TestManpages(unittest.TestCase):
     missing = []
 
     for cmd in commands:
-      pattern = "\s*%s" % re.escape(cmd)
-      if not re.findall(pattern, mantext, re.S):
+      pattern = r"^(\| )?\*\*%s\*\*" % re.escape(cmd)
+      if not re.findall(pattern, mantext, re.DOTALL | re.MULTILINE):
        missing.append(cmd)
 
     self.failIf(missing,
                 msg=("Manpage for '%s' missing documentation for %s" %
-                     (script, " ,".join(missing))))
+                     (script, utils.CommaJoin(missing))))
 
 
 if __name__ == "__main__":
-  unittest.main()
+  testutils.GanetiTestProgram()
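Note (not part of the diff): the manpage check above now expects commands to appear as reStructuredText strong text in the .rst manpages instead of being matched loosely in the old SGML sources. A minimal sketch of how the new pattern behaves, using a hypothetical command name and manpage excerpt; only the pattern and the regex flags come from the diff itself:

    import re

    # Hypothetical command name and .rst manpage excerpt, for illustration only.
    cmd = "list-fields"
    mantext = "| **list-fields** [field...]\n\nLists available fields.\n"

    # Same pattern and flags as the updated TestManpages check above; the
    # optional "| " prefix accounts for reST line blocks in synopsis sections.
    pattern = r"^(\| )?\*\*%s\*\*" % re.escape(cmd)
    assert re.findall(pattern, mantext, re.DOTALL | re.MULTILINE)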