root / ci / utils.py @ 62f3f54f
History | View | Annotate | Download (21.5 kB)
1 |
#!/usr/bin/env python
|
---|---|
2 |
|
3 |
"""
|
4 |
Synnefo ci utils module
|
5 |
"""
|
6 |
|
7 |
import os
import sys
import time
import logging
import functools
import subprocess
import tempfile
from ConfigParser import ConfigParser, DuplicateSectionError

import fabric.api as fabric

from kamaki.cli import config as kamaki_config
from kamaki.clients.astakos import AstakosClient
from kamaki.clients.cyclades import CycladesClient
from kamaki.clients.image import ImageClient
20 |
|
21 |
# Default config file name; resolved relative to the ci/ directory
# when not given as an absolute path (see SynnefoCI.__init__).
DEFAULT_CONFIG_FILE = "new_config"
# UUID of owner of system images
DEFAULT_SYSTEM_IMAGES_UUID = [
    "25ecced9-bf53-4145-91ee-cf47377e9fb2",  # production (okeanos.grnet.gr)
    "04cbe33f-29b7-4ef1-94fb-015929e5fc06",  # testing (okeanos.io)
]
27 |
|
28 |
|
29 |
def _run(cmd, verbose):
    """Execute `cmd` on the remote host through fabric.

    With verbose=False both fabric's "running" banner and the command's
    stdout are hidden; with verbose=True only the banner is suppressed.
    """
    hidden = ('running',) if verbose else ('running', 'stdout')
    with fabric.hide(*hidden):  # Used * or ** magic. pylint: disable-msg=W0142
        return fabric.run(cmd)
|
37 |
|
38 |
|
39 |
def _red(msg): |
40 |
"""Red color"""
|
41 |
#return "\x1b[31m" + str(msg) + "\x1b[0m"
|
42 |
return str(msg) |
43 |
|
44 |
|
45 |
def _yellow(msg): |
46 |
"""Yellow color"""
|
47 |
#return "\x1b[33m" + str(msg) + "\x1b[0m"
|
48 |
return str(msg) |
49 |
|
50 |
|
51 |
def _green(msg): |
52 |
"""Green color"""
|
53 |
#return "\x1b[32m" + str(msg) + "\x1b[0m"
|
54 |
return str(msg) |
55 |
|
56 |
|
57 |
def _check_fabric(fun): |
58 |
"""Check if fabric env has been set"""
|
59 |
def wrapper(self, *args, **kwargs): |
60 |
"""wrapper function"""
|
61 |
if not self.fabric_installed: |
62 |
self.setup_fabric()
|
63 |
return fun(self, *args, **kwargs) |
64 |
return wrapper
|
65 |
|
66 |
|
67 |
def _check_kamaki(fun): |
68 |
"""Check if kamaki has been initialized"""
|
69 |
def wrapper(self, *args, **kwargs): |
70 |
"""wrapper function"""
|
71 |
if not self.kamaki_installed: |
72 |
self.setup_kamaki()
|
73 |
return fun(self, *args, **kwargs) |
74 |
return wrapper
|
75 |
|
76 |
|
77 |
class _MyFormatter(logging.Formatter): |
78 |
"""Logging Formatter"""
|
79 |
def format(self, record): |
80 |
format_orig = self._fmt
|
81 |
if record.levelno == logging.DEBUG:
|
82 |
self._fmt = " %(msg)s" |
83 |
elif record.levelno == logging.INFO:
|
84 |
self._fmt = "%(msg)s" |
85 |
elif record.levelno == logging.WARNING:
|
86 |
self._fmt = _yellow("[W] %(msg)s") |
87 |
elif record.levelno == logging.ERROR:
|
88 |
self._fmt = _red("[E] %(msg)s") |
89 |
result = logging.Formatter.format(self, record)
|
90 |
self._fmt = format_orig
|
91 |
return result
|
92 |
|
93 |
|
94 |
class SynnefoCI(object): |
95 |
"""SynnefoCI python class"""
|
96 |
|
97 |
def __init__(self, config_file=None, cleanup_config=False, cloud=None): |
98 |
""" Initialize SynnefoCI python class
|
99 |
|
100 |
Setup logger, local_dir, config and kamaki
|
101 |
"""
|
102 |
# Setup logger
|
103 |
self.logger = logging.getLogger('synnefo-ci') |
104 |
self.logger.setLevel(logging.DEBUG)
|
105 |
handler = logging.StreamHandler() |
106 |
handler.setFormatter(_MyFormatter()) |
107 |
self.logger.addHandler(handler)
|
108 |
|
109 |
# Get our local dir
|
110 |
self.ci_dir = os.path.dirname(os.path.abspath(__file__))
|
111 |
self.repo_dir = os.path.dirname(self.ci_dir) |
112 |
|
113 |
# Read config file
|
114 |
if config_file is None: |
115 |
config_file = DEFAULT_CONFIG_FILE |
116 |
if not os.path.isabs(config_file): |
117 |
config_file = os.path.join(self.ci_dir, config_file)
|
118 |
|
119 |
self.config = ConfigParser()
|
120 |
self.config.optionxform = str |
121 |
self.config.read(config_file)
|
122 |
temp_config = self.config.get('Global', 'temporary_config') |
123 |
if cleanup_config:
|
124 |
try:
|
125 |
os.remove(temp_config) |
126 |
except OSError: |
127 |
pass
|
128 |
else:
|
129 |
self.config.read(self.config.get('Global', 'temporary_config')) |
130 |
|
131 |
# Set kamaki cloud
|
132 |
if cloud is not None: |
133 |
self.kamaki_cloud = cloud
|
134 |
elif self.config.has_option("Deployment", "kamaki_cloud"): |
135 |
kamaki_cloud = self.config.get("Deployment", "kamaki_cloud") |
136 |
if kamaki_cloud == "": |
137 |
self.kamaki_cloud = None |
138 |
else:
|
139 |
self.kamaki_cloud = None |
140 |
|
141 |
# Initialize variables
|
142 |
self.fabric_installed = False |
143 |
self.kamaki_installed = False |
144 |
self.cyclades_client = None |
145 |
self.image_client = None |
146 |
|
147 |
def setup_kamaki(self): |
148 |
"""Initialize kamaki
|
149 |
|
150 |
Setup cyclades_client and image_client
|
151 |
"""
|
152 |
|
153 |
config = kamaki_config.Config() |
154 |
if self.kamaki_cloud is None: |
155 |
self.kamaki_cloud = config.get_global("default_cloud") |
156 |
|
157 |
self.logger.info("Setup kamaki client, using cloud '%s'.." % |
158 |
self.kamaki_cloud)
|
159 |
auth_url = config.get_cloud(self.kamaki_cloud, "url") |
160 |
self.logger.debug("Authentication URL is %s" % _green(auth_url)) |
161 |
token = config.get_cloud(self.kamaki_cloud, "token") |
162 |
#self.logger.debug("Token is %s" % _green(token))
|
163 |
|
164 |
astakos_client = AstakosClient(auth_url, token) |
165 |
|
166 |
cyclades_url = \ |
167 |
astakos_client.get_service_endpoints('compute')['publicURL'] |
168 |
self.logger.debug("Cyclades API url is %s" % _green(cyclades_url)) |
169 |
self.cyclades_client = CycladesClient(cyclades_url, token)
|
170 |
self.cyclades_client.CONNECTION_RETRY_LIMIT = 2 |
171 |
|
172 |
image_url = \ |
173 |
astakos_client.get_service_endpoints('image')['publicURL'] |
174 |
self.logger.debug("Images API url is %s" % _green(image_url)) |
175 |
self.image_client = ImageClient(cyclades_url, token)
|
176 |
self.image_client.CONNECTION_RETRY_LIMIT = 2 |
177 |
|
178 |
def _wait_transition(self, server_id, current_status, new_status): |
179 |
"""Wait for server to go from current_status to new_status"""
|
180 |
self.logger.debug("Waiting for server to become %s" % new_status) |
181 |
timeout = self.config.getint('Global', 'build_timeout') |
182 |
sleep_time = 5
|
183 |
while True: |
184 |
server = self.cyclades_client.get_server_details(server_id)
|
185 |
if server['status'] == new_status: |
186 |
return server
|
187 |
elif timeout < 0: |
188 |
self.logger.error(
|
189 |
"Waiting for server to become %s timed out" % new_status)
|
190 |
self.destroy_server(False) |
191 |
sys.exit(-1)
|
192 |
elif server['status'] == current_status: |
193 |
# Sleep for #n secs and continue
|
194 |
timeout = timeout - sleep_time |
195 |
time.sleep(sleep_time) |
196 |
else:
|
197 |
self.logger.error(
|
198 |
"Server failed with status %s" % server['status']) |
199 |
self.destroy_server(False) |
200 |
sys.exit(-1)
|
201 |
|
202 |
@_check_kamaki
|
203 |
def destroy_server(self, wait=True): |
204 |
"""Destroy slave server"""
|
205 |
server_id = self.config.getint('Temporary Options', 'server_id') |
206 |
self.logger.info("Destoying server with id %s " % server_id) |
207 |
self.cyclades_client.delete_server(server_id)
|
208 |
if wait:
|
209 |
self._wait_transition(server_id, "ACTIVE", "DELETED") |
210 |
|
211 |
    @_check_kamaki
    def create_server(self, image_id=None, flavor_id=None):
        """Create slave server

        Boot a new server, record its connection details in the
        temporary config file, upload ssh keys and restrict ssh access.

        :param image_id: image to boot from; when None a suitable image
            is chosen via _find_image()
        :param flavor_id: flavor to use; when None the
            "Deployment/flavor_id" config option is used
        """
        self.logger.info("Create a new server..")
        if image_id is None:
            image = self._find_image()
            self.logger.debug("Will use image \"%s\"" % _green(image['name']))
            image_id = image["id"]
        self.logger.debug("Image has id %s" % _green(image_id))
        if flavor_id is None:
            flavor_id = self.config.getint("Deployment", "flavor_id")
        server = self.cyclades_client.create_server(
            self.config.get('Deployment', 'server_name'),
            flavor_id,
            image_id)
        server_id = server['id']
        # Persist server details so later invocations / cleanup can
        # find and reconnect to this server.
        self.write_config('server_id', server_id)
        self.logger.debug("Server got id %s" % _green(server_id))
        server_user = server['metadata']['users']
        self.write_config('server_user', server_user)
        self.logger.debug("Server's admin user is %s" % _green(server_user))
        server_passwd = server['adminPass']
        self.write_config('server_passwd', server_passwd)

        # Block until the server is ACTIVE, then record how to reach it
        server = self._wait_transition(server_id, "BUILD", "ACTIVE")
        self._get_server_ip_and_port(server)
        self._copy_ssh_keys()

        self.setup_fabric()
        self.logger.info("Setup firewall")
        accept_ssh_from = self.config.get('Global', 'filter_access_network')
        if accept_ssh_from != "":
            # Drop incoming ssh except from the configured network and
            # from the machine itself.
            self.logger.debug("Block ssh except from %s" % accept_ssh_from)
            cmd = """
            local_ip=$(/sbin/ifconfig eth0 | grep 'inet addr:' | \
                cut -d':' -f2 | cut -d' ' -f1)
            iptables -A INPUT -s localhost -j ACCEPT
            iptables -A INPUT -s $local_ip -j ACCEPT
            iptables -A INPUT -s {0} -p tcp --dport 22 -j ACCEPT
            iptables -A INPUT -p tcp --dport 22 -j DROP
            """.format(accept_ssh_from)
            _run(cmd, False)
|
253 |
|
254 |
def _find_image(self): |
255 |
"""Find a suitable image to use
|
256 |
|
257 |
It has to belong to one of the `DEFAULT_SYSTEM_IMAGES_UUID'
|
258 |
users and contain the word given by `image_name' option.
|
259 |
"""
|
260 |
image_name = self.config.get('Deployment', 'image_name').lower() |
261 |
images = self.image_client.list_public(detail=True)['images'] |
262 |
# Select images by `system_uuid' user
|
263 |
images = [x for x in images |
264 |
if x['user_id'] in DEFAULT_SYSTEM_IMAGES_UUID] |
265 |
# Select images with `image_name' in their names
|
266 |
images = [x for x in images |
267 |
if x['name'].lower().find(image_name) != -1] |
268 |
# Let's select the first one
|
269 |
return images[0] |
270 |
|
271 |
def _get_server_ip_and_port(self, server): |
272 |
"""Compute server's IPv4 and ssh port number"""
|
273 |
self.logger.info("Get server connection details..") |
274 |
server_ip = server['attachments'][0]['ipv4'] |
275 |
if ".okeanos.io" in self.cyclades_client.base_url: |
276 |
tmp1 = int(server_ip.split(".")[2]) |
277 |
tmp2 = int(server_ip.split(".")[3]) |
278 |
server_ip = "gate.okeanos.io"
|
279 |
server_port = 10000 + tmp1 * 256 + tmp2 |
280 |
else:
|
281 |
server_port = 22
|
282 |
self.write_config('server_ip', server_ip) |
283 |
self.logger.debug("Server's IPv4 is %s" % _green(server_ip)) |
284 |
self.write_config('server_port', server_port) |
285 |
self.logger.debug("Server's ssh port is %s" % _green(server_port)) |
286 |
|
287 |
@_check_fabric
|
288 |
def _copy_ssh_keys(self): |
289 |
"""Upload/Install ssh keys to server"""
|
290 |
if not self.config.has_option("Deployment", "ssh_keys"): |
291 |
return
|
292 |
authorized_keys = self.config.get("Deployment", |
293 |
"ssh_keys")
|
294 |
if authorized_keys != "" and os.path.exists(authorized_keys): |
295 |
keyfile = '/tmp/%s.pub' % fabric.env.user
|
296 |
_run('mkdir -p ~/.ssh && chmod 700 ~/.ssh', False) |
297 |
fabric.put(authorized_keys, keyfile) |
298 |
_run('cat %s >> ~/.ssh/authorized_keys' % keyfile, False) |
299 |
_run('rm %s' % keyfile, False) |
300 |
self.logger.debug("Uploaded ssh authorized keys") |
301 |
else:
|
302 |
self.logger.debug("No ssh keys found") |
303 |
|
304 |
def write_config(self, option, value, section="Temporary Options"): |
305 |
"""Write changes back to config file"""
|
306 |
try:
|
307 |
self.config.add_section(section)
|
308 |
except DuplicateSectionError:
|
309 |
pass
|
310 |
self.config.set(section, option, str(value)) |
311 |
temp_conf_file = self.config.get('Global', 'temporary_config') |
312 |
with open(temp_conf_file, 'wb') as tcf: |
313 |
self.config.write(tcf)
|
314 |
|
315 |
def setup_fabric(self): |
316 |
"""Setup fabric environment"""
|
317 |
self.logger.info("Setup fabric parameters..") |
318 |
fabric.env.user = self.config.get('Temporary Options', 'server_user') |
319 |
fabric.env.host_string = \ |
320 |
self.config.get('Temporary Options', 'server_ip') |
321 |
fabric.env.port = self.config.getint('Temporary Options', |
322 |
'server_port')
|
323 |
fabric.env.password = self.config.get('Temporary Options', |
324 |
'server_passwd')
|
325 |
fabric.env.connection_attempts = 10
|
326 |
fabric.env.shell = "/bin/bash -c"
|
327 |
fabric.env.disable_known_hosts = True
|
328 |
fabric.env.output_prefix = None
|
329 |
|
330 |
def _check_hash_sum(self, localfile, remotefile): |
331 |
"""Check hash sums of two files"""
|
332 |
self.logger.debug("Check hash sum for local file %s" % localfile) |
333 |
hash1 = os.popen("sha256sum %s" % localfile).read().split(' ')[0] |
334 |
self.logger.debug("Local file has sha256 hash %s" % hash1) |
335 |
self.logger.debug("Check hash sum for remote file %s" % remotefile) |
336 |
hash2 = _run("sha256sum %s" % remotefile, False) |
337 |
hash2 = hash2.split(' ')[0] |
338 |
self.logger.debug("Remote file has sha256 hash %s" % hash2) |
339 |
if hash1 != hash2:
|
340 |
self.logger.error("Hashes differ.. aborting") |
341 |
sys.exit(-1)
|
342 |
|
343 |
    @_check_fabric
    def clone_repo(self):
        """Clone Synnefo repo from slave server

        Configure apt on the slave, clone the synnefo repository
        (retrying a few times), check out the wanted branch and also
        clone snf-deploy.
        """
        self.logger.info("Configure repositories on remote server..")
        self.logger.debug("Setup apt, install curl and git")
        cmd = """
        echo 'APT::Install-Suggests "false";' >> /etc/apt/apt.conf
        apt-get update
        apt-get install curl git --yes
        echo -e "\n\ndeb {0}" >> /etc/apt/sources.list
        curl https://dev.grnet.gr/files/apt-grnetdev.pub | apt-key add -
        apt-get update
        git config --global user.name {1}
        git config --global user.email {2}
        """.format(self.config.get('Global', 'apt_repo'),
                   self.config.get('Global', 'git_config_name'),
                   self.config.get('Global', 'git_config_mail'))
        _run(cmd, False)

        synnefo_repo = self.config.get('Global', 'synnefo_repo')
        synnefo_branch = self.config.get("Global", "synnefo_branch")
        if synnefo_branch == "":
            # No branch configured: mirror the local checkout's branch;
            # fall back to the short commit hash when HEAD is detached.
            synnefo_branch = \
                subprocess.Popen(
                    ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                    stdout=subprocess.PIPE).communicate()[0].strip()
            if synnefo_branch == "HEAD":
                synnefo_branch = \
                    subprocess.Popen(
                        ["git", "rev-parse", "--short", "HEAD"],
                        stdout=subprocess.PIPE).communicate()[0].strip()
        self.logger.info("Will use branch %s" % synnefo_branch)
        # Currently clonning synnefo can fail unexpectedly
        cloned = False
        for i in range(10):
            self.logger.debug("Clone synnefo from %s" % synnefo_repo)
            try:
                _run("git clone %s synnefo" % synnefo_repo, False)
                cloned = True
                break
            # NOTE(review): BaseException (not Exception) presumably
            # because fabric aborts failed commands with SystemExit --
            # confirm before narrowing this catch.
            except BaseException:
                self.logger.warning("Clonning synnefo failed.. retrying %s"
                                    % i)
        # Track all remote branches locally, then switch to the wanted
        # one.
        cmd = """
        cd synnefo
        for branch in `git branch -a | grep remotes | \
                grep -v HEAD | grep -v master`; do
            git branch --track ${branch##*/} $branch
        done
        git checkout %s
        """ % (synnefo_branch)
        _run(cmd, False)

        # NOTE(review): this check runs after the checkout command
        # above; if all 10 clone attempts failed, the checkout also
        # fails before we reach this exit -- verify intended ordering.
        if not cloned:
            self.logger.error("Can not clone Synnefo repo.")
            sys.exit(-1)

        deploy_repo = self.config.get('Global', 'deploy_repo')
        self.logger.debug("Clone snf-deploy from %s" % deploy_repo)
        _run("git clone --depth 1 %s" % deploy_repo, False)
403 |
|
404 |
    @_check_fabric
    def build_synnefo(self):
        """Build Synnefo packages

        Install build dependencies on the slave, build and install the
        snf-deploy package, build the synnefo debian packages and copy
        them into snf-deploy's packages directory.
        """
        self.logger.info("Build Synnefo packages..")
        self.logger.debug("Install development packages")
        cmd = """
        apt-get update
        apt-get install zlib1g-dev dpkg-dev debhelper git-buildpackage \
                python-dev python-all python-pip --yes
        pip install devflow
        """
        _run(cmd, False)

        # Optionally patch debpython's package-name regex so names
        # containing dashes are accepted.
        if self.config.get('Global', 'patch_pydist') == "True":
            self.logger.debug("Patch pydist.py module")
            cmd = r"""
            sed -r -i 's/(\(\?P<name>\[A-Za-z\]\[A-Za-z0-9_\.)/\1\\\-/' \
                /usr/share/python/debpython/pydist.py
            """
            _run(cmd, False)

        self.logger.debug("Build snf-deploy package")
        cmd = """
        git checkout -t origin/debian
        git-buildpackage --git-upstream-branch=master \
                --git-debian-branch=debian \
                --git-export-dir=../snf-deploy_build-area \
                -uc -us
        """
        with fabric.cd("snf-deploy"):
            _run(cmd, True)

        self.logger.debug("Install snf-deploy package")
        cmd = """
        dpkg -i snf-deploy*.deb
        apt-get -f install --yes
        """
        # warn_only: dpkg -i may fail on missing deps, which the
        # subsequent apt-get -f install resolves.
        with fabric.cd("snf-deploy_build-area"):
            with fabric.settings(warn_only=True):
                _run(cmd, True)

        self.logger.debug("Build synnefo packages")
        cmd = """
        devflow-autopkg snapshot -b ~/synnefo_build-area --no-sign
        """
        with fabric.cd("synnefo"):
            _run(cmd, True)

        self.logger.debug("Copy synnefo debs to snf-deploy packages dir")
        cmd = """
        cp ~/synnefo_build-area/*.deb /var/lib/snf-deploy/packages/
        """
        _run(cmd, False)
|
457 |
|
458 |
    @_check_fabric
    def build_documentation(self):
        """Build Synnefo documentation

        Install/upgrade Sphinx on the slave and run the repo's
        ci/make_docs.sh script inside the synnefo checkout; output goes
        to the remote "synnefo_documentation" directory.
        """
        self.logger.info("Build Synnefo documentation..")
        _run("pip install -U Sphinx", False)
        with fabric.cd("synnefo"):
            _run("devflow-update-version; "
                 "./ci/make_docs.sh synnefo_documentation", False)
|
466 |
|
467 |
def fetch_documentation(self, dest=None):
|
468 |
"""Fetch Synnefo documentation""" |
469 |
self.logger.info("Fetch Synnefo documentation..")
|
470 |
if dest is None:
|
471 |
dest = "synnefo_documentation"
|
472 |
dest = os.path.abspath(dest)
|
473 |
if not os.path.exists(dest):
|
474 |
os.makedirs(dest)
|
475 |
self.fetch_compressed("synnefo/synnefo_documentation", dest)
|
476 |
self.logger.info("Downloaded documentation to %s" %
|
477 |
_green(dest))
|
478 |
|
479 |
    @_check_fabric
    def deploy_synnefo(self, schema=None):
        """Deploy Synnefo using snf-deploy

        :param schema: name of a deployment schema directory under
            ci/schemas/; when None the "Global/schema" option is used
        :raises ValueError: if the schema directory does not exist
        """
        self.logger.info("Deploy Synnefo..")
        if schema is None:
            schema = self.config.get('Global', 'schema')
        self.logger.debug("Will use %s schema" % schema)

        schema_dir = os.path.join(self.ci_dir, "schemas/%s" % schema)
        if not (os.path.exists(schema_dir) and os.path.isdir(schema_dir)):
            raise ValueError("Unknown schema: %s" % schema)

        self.logger.debug("Upload schema files to server")
        with fabric.quiet():
            fabric.put(os.path.join(schema_dir, "*"), "/etc/snf-deploy/")

        # snf-deploy must know the server's root password to reach the
        # deployment nodes.
        self.logger.debug("Change password in nodes.conf file")
        cmd = """
        sed -i 's/^password =.*/password = {0}/' /etc/snf-deploy/nodes.conf
        """.format(fabric.env.password)
        _run(cmd, False)

        self.logger.debug("Run snf-deploy")
        cmd = """
        snf-deploy all --autoconf
        """
        _run(cmd, True)
|
506 |
|
507 |
    @_check_fabric
    def unit_test(self):
        """Run Synnefo unit test suite

        Install the python test dependencies on the slave, upload the
        ci/tests.sh helper script and run it for the component named by
        the "Unit Tests/component" config option.
        """
        self.logger.info("Run Synnefo unit test suite")
        component = self.config.get('Unit Tests', 'component')

        self.logger.debug("Install needed packages")
        cmd = """
        pip install mock
        pip install factory_boy
        """
        _run(cmd, False)

        self.logger.debug("Upload tests.sh file")
        unit_tests_file = os.path.join(self.ci_dir, "tests.sh")
        with fabric.quiet():
            fabric.put(unit_tests_file, ".")

        self.logger.debug("Run unit tests")
        cmd = """
        bash tests.sh {0}
        """.format(component)
        _run(cmd, True)
|
530 |
|
531 |
    @_check_fabric
    def run_burnin(self):
        """Run burnin functional test suite

        Credentials are read from the slave's .kamakirc; extra options
        come from the "Burnin/cmd_options" config option.  After the
        run, the details files from the newest burnin log folder are
        dumped to stdout.
        """
        self.logger.info("Run Burnin functional test suite")
        cmd = """
        auth_url=$(grep -e '^url =' .kamakirc | cut -d' ' -f3)
        token=$(grep -e '^token =' .kamakirc | cut -d' ' -f3)
        images_user=$(kamaki image list -l | grep owner | \
            cut -d':' -f2 | tr -d ' ')
        snf-burnin --auth-url=$auth_url --token=$token \
            --force-flavor=2 --image-id=all \
            --system-images-user=$images_user \
            {0}
        log_folder=$(ls -1d /var/log/burnin/* | tail -n1)
        for i in $(ls $log_folder/*/details*); do
            echo -e "\\n\\n"
            echo -e "***** $i\\n"
            cat $i
        done
        """.format(self.config.get('Burnin', 'cmd_options'))
        _run(cmd, True)
|
552 |
|
553 |
@_check_fabric
|
554 |
def fetch_compressed(self, src, dest=None):
|
555 |
"""Create a tarball and fetch it locally""" |
556 |
self.logger.debug("Creating tarball of %s" % src)
|
557 |
basename = os.path.basename(src)
|
558 |
tar_file = basename + ".tgz"
|
559 |
cmd = "tar czf %s %s" % (tar_file, src)
|
560 |
_run(cmd, False)
|
561 |
if not os.path.exists(dest):
|
562 |
os.makedirs(dest)
|
563 |
|
564 |
tmp_dir = tempfile.mkdtemp()
|
565 |
fabric.get(tar_file, tmp_dir)
|
566 |
|
567 |
dest_file = os.path.join(tmp_dir, tar_file)
|
568 |
self._check_hash_sum(dest_file, tar_file)
|
569 |
self.logger.debug("Untar packages file %s" % dest_file)
|
570 |
cmd = """
|
571 |
cd %s |
572 |
tar xzf %s |
573 |
cp -r %s/* %s |
574 |
rm -r %s |
575 |
""" % (tmp_dir, tar_file, src, dest, tmp_dir)
|
576 |
os.system(cmd)
|
577 |
self.logger.info("Downloaded %s to %s" %
|
578 |
(src, _green(dest)))
|
579 |
|
580 |
@_check_fabric
|
581 |
def fetch_packages(self, dest=None):
|
582 |
"""Fetch Synnefo packages""" |
583 |
if dest is None:
|
584 |
dest = self.config.get('Global', 'pkgs_dir')
|
585 |
dest = os.path.abspath(dest)
|
586 |
if not os.path.exists(dest):
|
587 |
os.makedirs(dest)
|
588 |
self.fetch_compressed("synnefo_build-area", dest)
|
589 |
self.logger.info("Downloaded debian packages to %s" %
|
590 |
_green(dest))
|
591 |
|