root / test / docs_unittest.py @ 42d4d8b9
History | View | Annotate | Download (8.7 kB)
1 |
#!/usr/bin/python
|
---|---|
2 |
#
|
3 |
|
4 |
# Copyright (C) 2009 Google Inc.
|
5 |
#
|
6 |
# This program is free software; you can redistribute it and/or modify
|
7 |
# it under the terms of the GNU General Public License as published by
|
8 |
# the Free Software Foundation; either version 2 of the License, or
|
9 |
# (at your option) any later version.
|
10 |
#
|
11 |
# This program is distributed in the hope that it will be useful, but
|
12 |
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
13 |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
14 |
# General Public License for more details.
|
15 |
#
|
16 |
# You should have received a copy of the GNU General Public License
|
17 |
# along with this program; if not, write to the Free Software
|
18 |
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
19 |
# 02110-1301, USA.
|
20 |
|
21 |
|
22 |
"""Script for unittesting documentation"""
|
23 |
|
24 |
import unittest |
25 |
import re |
26 |
import itertools |
27 |
import operator |
28 |
|
29 |
from ganeti import _autoconf |
30 |
from ganeti import utils |
31 |
from ganeti import cmdlib |
32 |
from ganeti import build |
33 |
from ganeti import compat |
34 |
from ganeti import mcpu |
35 |
from ganeti import opcodes |
36 |
from ganeti import constants |
37 |
from ganeti.rapi import baserlib |
38 |
from ganeti.rapi import rlib2 |
39 |
from ganeti.rapi import connector |
40 |
|
41 |
import testutils |
42 |
|
43 |
|
44 |
VALID_URI_RE = re.compile(r"^[-/a-z0-9]*$")
|
45 |
|
46 |
# Opcodes deliberately not exposed through a RAPI resource; the RAPI
# documentation test skips these when checking opcode coverage
RAPI_OPCODE_EXCLUDE = frozenset([
  # Not yet implemented
  opcodes.OpBackupQuery,
  opcodes.OpBackupRemove,
  opcodes.OpClusterConfigQuery,
  opcodes.OpClusterRepairDiskSizes,
  opcodes.OpClusterVerify,
  opcodes.OpClusterVerifyDisks,
  opcodes.OpInstanceChangeGroup,
  opcodes.OpInstanceMove,
  opcodes.OpNodeQueryvols,
  opcodes.OpOobCommand,
  opcodes.OpTagsSearch,

  # Difficult if not impossible
  opcodes.OpClusterDestroy,
  opcodes.OpClusterPostInit,
  opcodes.OpClusterRename,
  opcodes.OpNodeAdd,
  opcodes.OpNodeRemove,

  # Helper opcodes (e.g. submitted by LUs)
  opcodes.OpClusterVerifyConfig,
  opcodes.OpClusterVerifyGroup,
  opcodes.OpGroupEvacuate,
  opcodes.OpGroupVerifyDisks,

  # Test opcodes
  opcodes.OpTestAllocator,
  opcodes.OpTestDelay,
  opcodes.OpTestDummy,
  opcodes.OpTestJqueue,
  ])
79 |
|
80 |
|
81 |
def _ReadDocFile(filename):
  """Reads a file from the C{doc} directory of the source tree.

  @param filename: file name relative to the doc directory
  @return: the file's contents as a string

  """
  path = "%s/doc/%s" % (testutils.GetSourceDir(), filename)
  return utils.ReadFile(path)
84 |
|
85 |
|
86 |
class TestHooksDocs(unittest.TestCase):
  def test(self):
    """Check whether all hooks are documented.

    Walks all logical units in L{cmdlib} that declare a hooks path
    (C{HPATH}) and verifies each is mentioned in C{doc/hooks.rst}.

    """
    hooksdoc = _ReadDocFile("hooks.rst")

    # Reverse mapping from LU to opcode
    lu2opcode = dict((lu, op)
                     for (op, lu) in mcpu.Processor.DISPATCH_TABLE.items())
    assert len(lu2opcode) == len(mcpu.Processor.DISPATCH_TABLE), \
      "Found duplicate entries"

    for name in dir(cmdlib):
      obj = getattr(cmdlib, name)

      # Only concrete LU classes declaring a hooks path are checked
      if (isinstance(obj, type) and
          issubclass(obj, cmdlib.LogicalUnit) and
          hasattr(obj, "HPATH")):
        self._CheckHook(name, obj, hooksdoc, lu2opcode)

  def _CheckHook(self, name, lucls, hooksdoc, lu2opcode):
    """Checks documentation for one logical unit's hooks.

    @param name: name of the LU class (currently unused here)
    @param lucls: LU class object
    @param hooksdoc: contents of C{doc/hooks.rst}
    @param lu2opcode: mapping from LU class to opcode class

    """
    opcls = lu2opcode.get(lucls, None)

    # LUs without a hook type don't run hooks, nothing to document
    if lucls.HTYPE is None:
      return

    # TODO: Improve this test (e.g. find hooks documented but no longer
    # existing)

    if opcls:
      # The opcode ID must appear on a line of its own in the document
      self.assertTrue(re.findall("^%s$" % re.escape(opcls.OP_ID),
                                 hooksdoc, re.M),
                      msg=("Missing hook documentation for %s" %
                           (opcls.OP_ID)))

    pattern = r"^:directory:\s*%s\s*$" % re.escape(lucls.HPATH)

    # Note: "assert_" is a deprecated unittest alias, hence assertTrue
    self.assertTrue(re.findall(pattern, hooksdoc, re.M),
                    msg=("Missing documentation for hook %s/%s" %
                         (lucls.HTYPE, lucls.HPATH)))
127 |
|
128 |
|
129 |
class TestRapiDocs(unittest.TestCase):
  def _CheckRapiResource(self, uri, fixup, handler):
    """Checks a single RAPI resource against its handler's docstring.

    @param uri: documented URI (may contain placeholders, e.g. job IDs)
    @param fixup: mapping of compiled regex to replacement value, applied
        to the URI before validity checking
    @param handler: resource handler class

    """
    docline = "%s resource." % uri
    self.assertEqual(handler.__doc__.splitlines()[0].strip(), docline,
                     msg=("First line of %r's docstring is not %r" %
                          (handler, docline)))

    # Apply fixes before testing
    for (rx, value) in fixup.items():
      uri = rx.sub(value, uri)

    self.assertTrue(VALID_URI_RE.match(uri), msg="Invalid URI %r" % uri)

  def test(self):
    """Check whether all RAPI resources are documented.

    """
    rapidoc = _ReadDocFile("rapi.rst")

    # Placeholder values as they appear in the documentation
    node_name = re.escape("[node_name]")
    instance_name = re.escape("[instance_name]")
    group_name = re.escape("[group_name]")
    job_id = re.escape("[job_id]")
    disk_index = re.escape("[disk_index]")
    query_res = re.escape("[resource]")

    resources = connector.GetHandlers(node_name, instance_name, group_name,
                                      job_id, disk_index, query_res)

    handler_dups = utils.FindDuplicates(resources.values())
    self.assertFalse(handler_dups,
                     msg=("Resource handlers used more than once: %r" %
                          handler_dups))

    # Replacements turning placeholder URIs into concrete, valid ones
    uri_check_fixup = {
      re.compile(node_name): "node1examplecom",
      re.compile(instance_name): "inst1examplecom",
      re.compile(group_name): "group4440",
      re.compile(job_id): "9409",
      re.compile(disk_index): "123",
      re.compile(query_res): "lock",
      }

    assert compat.all(VALID_URI_RE.match(value)
                      for value in uri_check_fixup.values()), \
      "Fixup values must be valid URIs, too"

    # Collect section titles: a title is the line preceding a row of "+"
    # characters in the reST document
    titles = []

    prevline = None
    for line in rapidoc.splitlines():
      if re.match(r"^\++$", line):
        titles.append(prevline)

      prevline = line

    # Top-level paths allowed to not start with "/2/"
    prefix_exception = frozenset(["/", "/version", "/2"])

    undocumented = []
    used_uris = []

    # Note: items() instead of the Python2-only iteritems(); both are
    # equivalent for iteration here
    for key, handler in resources.items():
      # Regex objects
      if hasattr(key, "match"):
        self.assertTrue(key.pattern.startswith("^/2/"),
                        msg=("Pattern %r does not start with '^/2/'" %
                             key.pattern))
        self.assertEqual(key.pattern[-1], "$")

        found = False
        for title in titles:
          if title.startswith("``") and title.endswith("``"):
            uri = title[2:-2]
            if key.match(uri):
              self._CheckRapiResource(uri, uri_check_fixup, handler)
              used_uris.append(uri)
              found = True
              break

        if not found:
          # TODO: Find better way of identifying resource
          undocumented.append(key.pattern)

      else:
        self.assertTrue(key.startswith("/2/") or key in prefix_exception,
                        msg="Path %r does not start with '/2/'" % key)

        if ("``%s``" % key) in titles:
          self._CheckRapiResource(key, {}, handler)
          used_uris.append(key)
        else:
          undocumented.append(key)

    # Note: failIf is a deprecated unittest alias, hence assertFalse
    self.assertFalse(undocumented,
                     msg=("Missing RAPI resource documentation for %s" %
                          utils.CommaJoin(undocumented)))

    uri_dups = utils.FindDuplicates(used_uris)
    self.assertFalse(uri_dups,
                     msg=("URIs matched by more than one resource: %s" %
                          utils.CommaJoin(uri_dups)))

    self._FindRapiMissing(resources.values())
    self._CheckTagHandlers(resources.values())

  def _FindRapiMissing(self, handlers):
    """Checks opcode coverage of the RAPI resources.

    Every opcode must either be used by some handler or be listed in
    L{RAPI_OPCODE_EXCLUDE} -- and never both.

    @param handlers: all resource handler classes

    """
    used = frozenset(itertools.chain(*map(baserlib.GetResourceOpcodes,
                                          handlers)))

    unexpected = used & RAPI_OPCODE_EXCLUDE
    self.assertFalse(unexpected,
                     msg=("Found RAPI resources for excluded opcodes: %s" %
                          utils.CommaJoin(_GetOpIds(unexpected))))

    missing = (frozenset(opcodes.OP_MAPPING.values()) - used -
               RAPI_OPCODE_EXCLUDE)
    self.assertFalse(missing,
                     msg=("Missing RAPI resources for opcodes: %s" %
                          utils.CommaJoin(_GetOpIds(missing))))

  def _CheckTagHandlers(self, handlers):
    """Checks that every valid tag type has a tag resource handler.

    @param handlers: all resource handler classes

    """
    tag_handlers = filter(lambda x: issubclass(x, rlib2._R_Tags), handlers)
    self.assertEqual(frozenset(map(operator.attrgetter("TAG_LEVEL"),
                                   tag_handlers)),
                     constants.VALID_TAG_TYPES)
253 |
|
254 |
|
255 |
def _GetOpIds(ops): |
256 |
"""Returns C{OP_ID} for all opcodes in passed sequence.
|
257 |
|
258 |
"""
|
259 |
return sorted(opcls.OP_ID for opcls in ops) |
260 |
|
261 |
|
262 |
class TestManpages(unittest.TestCase):
  """Manpage tests"""

  @staticmethod
  def _ReadManFile(name):
    # Reads man/<name>.rst from the source tree
    return utils.ReadFile("%s/man/%s.rst" %
                          (testutils.GetSourceDir(), name))

  @staticmethod
  def _LoadScript(name):
    # Loads the gnt-* script as a module to get at its command table
    return build.LoadModule("scripts/%s" % name)

  def test(self):
    """Checks that every gnt-* script's commands are in its manpage.

    """
    for script in _autoconf.GNT_SCRIPTS:
      self._CheckManpage(script,
                         self._ReadManFile(script),
                         self._LoadScript(script).commands.keys())

  def _CheckManpage(self, script, mantext, commands):
    """Verifies all commands appear bold-faced in the manpage text.

    @param script: script name (for the error message)
    @param mantext: contents of the manpage reST source
    @param commands: iterable of command names to look for

    """
    missing = []

    for cmd in commands:
      # Commands are documented as "**cmd**", optionally inside a
      # line-block ("| ") in reST
      pattern = r"^(\| )?\*\*%s\*\*" % re.escape(cmd)
      if not re.findall(pattern, mantext, re.DOTALL | re.MULTILINE):
        missing.append(cmd)

    # Note: failIf is a deprecated unittest alias, hence assertFalse
    self.assertFalse(missing,
                     msg=("Manpage for '%s' missing documentation for %s" %
                          (script, utils.CommaJoin(missing))))
291 |
|
292 |
|
293 |
if __name__ == "__main__":
  # Run the tests through the project's test program wrapper
  testutils.GanetiTestProgram()