root / ci / utils.py @ 358750d8
History | View | Annotate | Download (20.9 kB)
1 |
#!/usr/bin/env python
|
---|---|
2 |
|
3 |
"""
|
4 |
Synnefo ci utils module
|
5 |
"""
|
6 |
|
7 |
import os |
8 |
import sys |
9 |
import time |
10 |
import logging |
11 |
import fabric.api as fabric |
12 |
import subprocess |
13 |
import tempfile |
14 |
from ConfigParser import ConfigParser, DuplicateSectionError |
15 |
|
16 |
from kamaki.cli import config as kamaki_config |
17 |
from kamaki.clients.astakos import AstakosClient |
18 |
from kamaki.clients.cyclades import CycladesClient |
19 |
from kamaki.clients.image import ImageClient |
20 |
|
21 |
DEFAULT_CONFIG_FILE = "new_config"
|
22 |
|
23 |
|
24 |
def _run(cmd, verbose):
    """Execute `cmd' on the remote host through fabric.

    When `verbose' is set only fabric's "running" banner is hidden;
    otherwise the command's stdout is suppressed as well.
    """
    hidden = ('running',) if verbose else ('running', 'stdout')
    with fabric.hide(*hidden):
        return fabric.run(cmd)
|
32 |
|
33 |
|
34 |
def _red(msg): |
35 |
"""Red color"""
|
36 |
#return "\x1b[31m" + str(msg) + "\x1b[0m"
|
37 |
return str(msg) |
38 |
|
39 |
|
40 |
def _yellow(msg): |
41 |
"""Yellow color"""
|
42 |
#return "\x1b[33m" + str(msg) + "\x1b[0m"
|
43 |
return str(msg) |
44 |
|
45 |
|
46 |
def _green(msg): |
47 |
"""Green color"""
|
48 |
#return "\x1b[32m" + str(msg) + "\x1b[0m"
|
49 |
return str(msg) |
50 |
|
51 |
|
52 |
def _check_fabric(fun): |
53 |
"""Check if fabric env has been set"""
|
54 |
def wrapper(self, *args, **kwargs): |
55 |
"""wrapper function"""
|
56 |
if not self.fabric_installed: |
57 |
self.setup_fabric()
|
58 |
return fun(self, *args, **kwargs) |
59 |
return wrapper
|
60 |
|
61 |
|
62 |
def _check_kamaki(fun): |
63 |
"""Check if kamaki has been initialized"""
|
64 |
def wrapper(self, *args, **kwargs): |
65 |
"""wrapper function"""
|
66 |
if not self.kamaki_installed: |
67 |
self.setup_kamaki()
|
68 |
return fun(self, *args, **kwargs) |
69 |
return wrapper
|
70 |
|
71 |
|
72 |
class _MyFormatter(logging.Formatter): |
73 |
"""Logging Formatter"""
|
74 |
def format(self, record): |
75 |
format_orig = self._fmt
|
76 |
if record.levelno == logging.DEBUG:
|
77 |
self._fmt = " %(msg)s" |
78 |
elif record.levelno == logging.INFO:
|
79 |
self._fmt = "%(msg)s" |
80 |
elif record.levelno == logging.WARNING:
|
81 |
self._fmt = _yellow("[W] %(msg)s") |
82 |
elif record.levelno == logging.ERROR:
|
83 |
self._fmt = _red("[E] %(msg)s") |
84 |
result = logging.Formatter.format(self, record)
|
85 |
self._fmt = format_orig
|
86 |
return result
|
87 |
|
88 |
|
89 |
class SynnefoCI(object):
    """Synnefo Continuous Integration driver.

    Drives a full CI cycle: create a slave server on a Synnefo cloud
    (via kamaki), clone and build the Synnefo packages on it (via
    fabric), deploy them with snf-deploy and run the test suites.
    State that must survive between invocations (server id, user,
    password, ip, port) is persisted in the `temporary_config' file.
    """

    def __init__(self, config_file=None, cleanup_config=False, cloud=None):
        """ Initialize SynnefoCI python class

        Setup logger, local_dir, config and kamaki

        config_file -- CI config file to read; relative paths are
            resolved against the ci directory (default:
            DEFAULT_CONFIG_FILE)
        cleanup_config -- if True delete the temporary config file,
            otherwise load it on top of the main config
        cloud -- name of the kamaki cloud to use (overrides the
            config file setting)
        """
        # Setup logger
        self.logger = logging.getLogger('synnefo-ci')
        self.logger.setLevel(logging.DEBUG)
        handler = logging.StreamHandler()
        handler.setFormatter(_MyFormatter())
        self.logger.addHandler(handler)

        # Get our local dir
        self.ci_dir = os.path.dirname(os.path.abspath(__file__))
        self.repo_dir = os.path.dirname(self.ci_dir)

        # Read config file
        if config_file is None:
            config_file = DEFAULT_CONFIG_FILE
        if not os.path.isabs(config_file):
            config_file = os.path.join(self.ci_dir, config_file)

        self.config = ConfigParser()
        # Keep option names case-sensitive
        self.config.optionxform = str
        self.config.read(config_file)
        temp_config = self.config.get('Global', 'temporary_config')
        if cleanup_config:
            try:
                os.remove(temp_config)
            except OSError:
                # Temporary config may not exist yet -- nothing to clean
                pass
        else:
            self.config.read(temp_config)

        # Set kamaki cloud
        if cloud is not None:
            self.kamaki_cloud = cloud
        elif self.config.has_option("Deployment", "kamaki_cloud"):
            kamaki_cloud = self.config.get("Deployment", "kamaki_cloud")
            # An empty option means "use kamaki's default cloud".
            # BUG FIX: a non-empty value used to be read but never
            # assigned, leaving `self.kamaki_cloud' unset.
            self.kamaki_cloud = kamaki_cloud if kamaki_cloud != "" else None
        else:
            self.kamaki_cloud = None

        # Initialize variables
        self.fabric_installed = False
        self.kamaki_installed = False
        self.cyclades_client = None
        self.image_client = None

    def setup_kamaki(self):
        """Initialize kamaki

        Setup cyclades_client and image_client
        """
        config = kamaki_config.Config()
        if self.kamaki_cloud is None:
            self.kamaki_cloud = config.get_global("default_cloud")

        self.logger.info("Setup kamaki client, using cloud '%s'.." %
                         self.kamaki_cloud)
        auth_url = config.get_cloud(self.kamaki_cloud, "url")
        self.logger.debug("Authentication URL is %s" % _green(auth_url))
        token = config.get_cloud(self.kamaki_cloud, "token")
        # Token is a credential -- deliberately not logged
        #self.logger.debug("Token is %s" % _green(token))

        astakos_client = AstakosClient(auth_url, token)

        cyclades_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Cyclades API url is %s" % _green(cyclades_url))
        self.cyclades_client = CycladesClient(cyclades_url, token)
        self.cyclades_client.CONNECTION_RETRY_LIMIT = 2

        image_url = \
            astakos_client.get_service_endpoints('image')['publicURL']
        self.logger.debug("Images API url is %s" % _green(image_url))
        # BUG FIX: the image client must use the image endpoint; it was
        # previously constructed with `cyclades_url'.
        self.image_client = ImageClient(image_url, token)
        self.image_client.CONNECTION_RETRY_LIMIT = 2

    def _wait_transition(self, server_id, current_status, new_status):
        """Wait for server to go from current_status to new_status

        Poll cyclades until the server reaches `new_status' and return
        its details.  Abort (sys.exit) if the server enters any other
        state or if `build_timeout' seconds elapse.
        """
        self.logger.debug("Waiting for server to become %s" % new_status)
        timeout = self.config.getint('Global', 'build_timeout')
        sleep_time = 5
        while True:
            server = self.cyclades_client.get_server_details(server_id)
            if server['status'] == new_status:
                return server
            elif timeout < 0:
                self.logger.error(
                    "Waiting for server to become %s timed out" % new_status)
                self.destroy_server(False)
                sys.exit(-1)
            elif server['status'] == current_status:
                # Still in transit: charge the sleep against the timeout
                timeout = timeout - sleep_time
                time.sleep(sleep_time)
            else:
                self.logger.error(
                    "Server failed with status %s" % server['status'])
                self.destroy_server(False)
                sys.exit(-1)

    @_check_kamaki
    def destroy_server(self, wait=True):
        """Destroy slave server

        wait -- if True, block until the server reaches "DELETED"
        """
        server_id = self.config.getint('Temporary Options', 'server_id')
        # Typo fix in log message ("Destoying")
        self.logger.info("Destroying server with id %s " % server_id)
        self.cyclades_client.delete_server(server_id)
        if wait:
            self._wait_transition(server_id, "ACTIVE", "DELETED")

    @_check_kamaki
    def create_server(self, image_id=None, flavor_id=None):
        """Create slave server

        Create the server, persist its connection details in the
        temporary config, wait for it to boot, upload our ssh keys and
        optionally restrict ssh access with iptables.

        image_id -- image to boot from (default: auto-detected via
            _find_image)
        flavor_id -- flavor to use (default: `flavor_id' from config)
        """
        self.logger.info("Create a new server..")
        if image_id is None:
            image = self._find_image()
            self.logger.debug("Will use image \"%s\"" % _green(image['name']))
            image_id = image["id"]
        self.logger.debug("Image has id %s" % _green(image_id))
        if flavor_id is None:
            flavor_id = self.config.getint("Deployment", "flavor_id")
        server = self.cyclades_client.create_server(
            self.config.get('Deployment', 'server_name'),
            flavor_id,
            image_id)
        server_id = server['id']
        self.write_config('server_id', server_id)
        self.logger.debug("Server got id %s" % _green(server_id))
        server_user = server['metadata']['users']
        self.write_config('server_user', server_user)
        self.logger.debug("Server's admin user is %s" % _green(server_user))
        server_passwd = server['adminPass']
        self.write_config('server_passwd', server_passwd)

        server = self._wait_transition(server_id, "BUILD", "ACTIVE")
        self._get_server_ip_and_port(server)
        self._copy_ssh_keys()

        self.setup_fabric()
        self.logger.info("Setup firewall")
        accept_ssh_from = self.config.get('Global', 'filter_access_network')
        if accept_ssh_from != "":
            self.logger.debug("Block ssh except from %s" % accept_ssh_from)
            cmd = """
            local_ip=$(/sbin/ifconfig eth0 | grep 'inet addr:' | \
                cut -d':' -f2 | cut -d' ' -f1)
            iptables -A INPUT -s localhost -j ACCEPT
            iptables -A INPUT -s $local_ip -j ACCEPT
            iptables -A INPUT -s {0} -p tcp --dport 22 -j ACCEPT
            iptables -A INPUT -p tcp --dport 22 -j DROP
            """.format(accept_ssh_from)
            _run(cmd, False)

    def _find_image(self):
        """Find a suitable image to use

        It has to belong to the `system_uuid' user and
        contain the word `image_name'
        """
        system_uuid = self.config.get('Deployment', 'system_uuid')
        image_name = self.config.get('Deployment', 'image_name').lower()
        images = self.image_client.list_public(detail=True)['images']
        # Select images by `system_uuid' user
        images = [x for x in images if x['user_id'] == system_uuid]
        # Select images with `image_name' in their names
        images = \
            [x for x in images if x['name'].lower().find(image_name) != -1]
        # Robustness fix: fail with a clear message instead of a bare
        # IndexError when nothing matches
        if not images:
            self.logger.error("Can not find a suitable image to use")
            sys.exit(-1)
        # Let's select the first one
        return images[0]

    def _get_server_ip_and_port(self, server):
        """Compute server's IPv4 and ssh port number"""
        self.logger.info("Get server connection details..")
        server_ip = server['attachments'][0]['ipv4']
        if ".okeanos.io" in self.cyclades_client.base_url:
            # okeanos.io servers sit behind a gateway; the ssh port is
            # derived from the last two octets of the private address
            tmp1 = int(server_ip.split(".")[2])
            tmp2 = int(server_ip.split(".")[3])
            server_ip = "gate.okeanos.io"
            server_port = 10000 + tmp1 * 256 + tmp2
        else:
            server_port = 22
        self.write_config('server_ip', server_ip)
        self.logger.debug("Server's IPv4 is %s" % _green(server_ip))
        self.write_config('server_port', server_port)
        self.logger.debug("Server's ssh port is %s" % _green(server_port))

    @_check_fabric
    def _copy_ssh_keys(self):
        """Append our public ssh keys to the server's authorized_keys"""
        if not self.config.has_option("Deployment", "ssh_keys"):
            return
        authorized_keys = self.config.get("Deployment",
                                          "ssh_keys")
        if authorized_keys != "" and os.path.exists(authorized_keys):
            keyfile = '/tmp/%s.pub' % fabric.env.user
            _run('mkdir -p ~/.ssh && chmod 700 ~/.ssh', False)
            fabric.put(authorized_keys, keyfile)
            _run('cat %s >> ~/.ssh/authorized_keys' % keyfile, False)
            _run('rm %s' % keyfile, False)
            self.logger.debug("Uploaded ssh authorized keys")
        else:
            self.logger.debug("No ssh keys found")

    def write_config(self, option, value, section="Temporary Options"):
        """Write changes back to config file

        Set `option' = `value' under `section' and persist the whole
        config to the `temporary_config' file.
        """
        try:
            self.config.add_section(section)
        except DuplicateSectionError:
            # Section already there -- fine
            pass
        self.config.set(section, option, str(value))
        temp_conf_file = self.config.get('Global', 'temporary_config')
        with open(temp_conf_file, 'wb') as tcf:
            self.config.write(tcf)

    def setup_fabric(self):
        """Setup fabric environment from the temporary config"""
        self.logger.info("Setup fabric parameters..")
        fabric.env.user = self.config.get('Temporary Options', 'server_user')
        fabric.env.host_string = \
            self.config.get('Temporary Options', 'server_ip')
        fabric.env.port = self.config.getint('Temporary Options',
                                             'server_port')
        fabric.env.password = self.config.get('Temporary Options',
                                              'server_passwd')
        fabric.env.connection_attempts = 10
        fabric.env.shell = "/bin/bash -c"
        fabric.env.disable_known_hosts = True
        fabric.env.output_prefix = None

    def _check_hash_sum(self, localfile, remotefile):
        """Check hash sums of two files

        Abort if the sha256 of local `localfile' differs from the
        sha256 of remote `remotefile'.
        """
        self.logger.debug("Check hash sum for local file %s" % localfile)
        hash1 = os.popen("sha256sum %s" % localfile).read().split(' ')[0]
        self.logger.debug("Local file has sha256 hash %s" % hash1)
        self.logger.debug("Check hash sum for remote file %s" % remotefile)
        hash2 = _run("sha256sum %s" % remotefile, False)
        hash2 = hash2.split(' ')[0]
        self.logger.debug("Remote file has sha256 hash %s" % hash2)
        if hash1 != hash2:
            self.logger.error("Hashes differ.. aborting")
            sys.exit(-1)

    @_check_fabric
    def clone_repo(self):
        """Clone Synnefo repo from slave server"""
        self.logger.info("Configure repositories on remote server..")
        self.logger.debug("Setup apt, install curl and git")
        cmd = """
        echo 'APT::Install-Suggests "false";' >> /etc/apt/apt.conf
        apt-get update
        apt-get install curl git --yes
        echo -e "\n\ndeb {0}" >> /etc/apt/sources.list
        curl https://dev.grnet.gr/files/apt-grnetdev.pub | apt-key add -
        apt-get update
        git config --global user.name {1}
        git config --global user.mail {2}
        """.format(self.config.get('Global', 'apt_repo'),
                   self.config.get('Global', 'git_config_name'),
                   self.config.get('Global', 'git_config_mail'))
        _run(cmd, False)

        synnefo_repo = self.config.get('Global', 'synnefo_repo')
        synnefo_branch = self.config.get("Global", "synnefo_branch")
        if synnefo_branch == "":
            # No branch configured: use the local checkout's branch,
            # falling back to the short commit hash on detached HEAD
            synnefo_branch = \
                subprocess.Popen(["git", "rev-parse", "--abbrev-ref", "HEAD"],
                                 stdout=subprocess.PIPE).communicate()[0].strip()
            if synnefo_branch == "HEAD":
                synnefo_branch = \
                    subprocess.Popen(["git", "rev-parse", "--short", "HEAD"],
                                     stdout=subprocess.PIPE).communicate()[0].strip()
        self.logger.info("Will use branch %s" % synnefo_branch)
        # Currently cloning synnefo can fail unexpectedly -- retry
        cloned = False
        for i in range(10):
            self.logger.debug("Clone synnefo from %s" % synnefo_repo)
            try:
                _run("git clone %s synnefo" % synnefo_repo, False)
                cloned = True
                break
            except:  # bare on purpose: fabric aborts raise SystemExit
                self.logger.warning("Cloning synnefo failed.. retrying %s"
                                    % i)

        # BUG FIX: bail out before attempting a checkout inside a
        # repository that was never cloned (this check used to run
        # after the checkout below)
        if not cloned:
            self.logger.error("Can not clone Synnefo repo.")
            sys.exit(-1)

        cmd = """
        cd synnefo
        for branch in `git branch -a | grep remotes | grep -v HEAD | grep -v master`; do
            git branch --track ${branch##*/} $branch
        done
        git checkout %s
        """ % (synnefo_branch)
        _run(cmd, False)

        deploy_repo = self.config.get('Global', 'deploy_repo')
        self.logger.debug("Clone snf-deploy from %s" % deploy_repo)
        _run("git clone --depth 1 %s" % deploy_repo, False)

    @_check_fabric
    def build_synnefo(self):
        """Build Synnefo packages"""
        self.logger.info("Build Synnefo packages..")
        self.logger.debug("Install development packages")
        cmd = """
        apt-get update
        apt-get install zlib1g-dev dpkg-dev debhelper git-buildpackage \
                python-dev python-all python-pip --yes
        pip install devflow
        """
        _run(cmd, False)

        if self.config.get('Global', 'patch_pydist') == "True":
            self.logger.debug("Patch pydist.py module")
            cmd = r"""
            sed -r -i 's/(\(\?P<name>\[A-Za-z\]\[A-Za-z0-9_\.)/\1\\\-/' \
                /usr/share/python/debpython/pydist.py
            """
            _run(cmd, False)

        self.logger.debug("Build snf-deploy package")
        cmd = """
        git checkout -t origin/debian
        git-buildpackage --git-upstream-branch=master \
                --git-debian-branch=debian \
                --git-export-dir=../snf-deploy_build-area \
                -uc -us
        """
        with fabric.cd("snf-deploy"):
            _run(cmd, True)

        self.logger.debug("Install snf-deploy package")
        cmd = """
        dpkg -i snf-deploy*.deb
        apt-get -f install --yes
        """
        with fabric.cd("snf-deploy_build-area"):
            # dpkg -i may fail on missing deps; apt-get -f fixes them
            with fabric.settings(warn_only=True):
                _run(cmd, True)

        self.logger.debug("Build synnefo packages")
        cmd = """
        devflow-autopkg snapshot -b ~/synnefo_build-area --no-sign
        """
        with fabric.cd("synnefo"):
            _run(cmd, True)

        self.logger.debug("Copy synnefo debs to snf-deploy packages dir")
        cmd = """
        cp ~/synnefo_build-area/*.deb /var/lib/snf-deploy/packages/
        """
        _run(cmd, False)

    @_check_fabric
    def build_documentation(self):
        """Build Synnefo documentation on the slave server"""
        self.logger.info("Build Synnefo documentation..")
        _run("pip install -U Sphinx", False)
        with fabric.cd("synnefo"):
            _run("devflow-update-version; "
                 "./ci/make_docs.sh synnefo_documentation", False)

    def fetch_documentation(self, dest=None):
        """Download the built documentation into local directory `dest'"""
        if dest is None:
            dest = "synnefo_documentation"
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo/synnefo_documentation", dest)
        self.logger.info("Downloaded documentation to %s" %
                         _green(dest))

    @_check_fabric
    def deploy_synnefo(self, schema=None):
        """Deploy Synnefo using snf-deploy

        schema -- name of a directory under ci/schemas/ holding the
            snf-deploy configuration to use (default: `schema' from
            config)

        Raises ValueError if the schema directory does not exist.
        """
        self.logger.info("Deploy Synnefo..")
        if schema is None:
            schema = self.config.get('Global', 'schema')
        self.logger.debug("Will use %s schema" % schema)

        schema_dir = os.path.join(self.ci_dir, "schemas/%s" % schema)
        if not (os.path.exists(schema_dir) and os.path.isdir(schema_dir)):
            raise ValueError("Unknown schema: %s" % schema)

        self.logger.debug("Upload schema files to server")
        with fabric.quiet():
            fabric.put(os.path.join(schema_dir, "*"), "/etc/snf-deploy/")

        self.logger.debug("Change password in nodes.conf file")
        cmd = """
        sed -i 's/^password =.*/password = {0}/' /etc/snf-deploy/nodes.conf
        """.format(fabric.env.password)
        _run(cmd, False)

        self.logger.debug("Run snf-deploy")
        cmd = """
        snf-deploy all --autoconf
        """
        _run(cmd, True)

    @_check_fabric
    def unit_test(self):
        """Run Synnefo unit test suite"""
        self.logger.info("Run Synnefo unit test suite")
        component = self.config.get('Unit Tests', 'component')

        self.logger.debug("Install needed packages")
        cmd = """
        pip install mock
        pip install factory_boy
        """
        _run(cmd, False)

        self.logger.debug("Upload tests.sh file")
        unit_tests_file = os.path.join(self.ci_dir, "tests.sh")
        with fabric.quiet():
            fabric.put(unit_tests_file, ".")

        self.logger.debug("Run unit tests")
        cmd = """
        bash tests.sh {0}
        """.format(component)
        _run(cmd, True)

    @_check_fabric
    def run_burnin(self):
        """Run burnin functional test suite"""
        self.logger.info("Run Burnin functional test suite")
        cmd = """
        auth_url=$(grep -e '^url =' .kamakirc | cut -d' ' -f3)
        token=$(grep -e '^token =' .kamakirc | cut -d' ' -f3)
        images_user=$(kamaki image list -l | grep owner | \
            cut -d':' -f2 | tr -d ' ')
        snf-burnin --auth-url=$auth_url --token=$token \
            --force-flavor=2 --image-id=all \
            --system-images-user=$images_user \
            {0}
        log_folder=$(ls -1d /var/log/burnin/* | tail -n1)
        for i in $(ls $log_folder/*/details*); do
            echo -e "\\n\\n"
            echo -e "***** $i\\n"
            cat $i
        done
        """.format(self.config.get('Burnin', 'cmd_options'))
        _run(cmd, True)

    @_check_fabric
    def fetch_compressed(self, src, dest=None):
        """Tar remote directory `src', fetch it and extract into `dest'

        The tarball's sha256 is verified after transfer.
        """
        self.logger.debug("Creating tarball of %s" % src)
        basename = os.path.basename(src)
        tar_file = basename + ".tgz"
        cmd = "tar czf %s %s" % (tar_file, src)
        _run(cmd, False)
        # NOTE(review): all current callers pass `dest'; a None value
        # would raise TypeError at os.path.exists -- confirm intent
        if not os.path.exists(dest):
            os.makedirs(dest)

        tmp_dir = tempfile.mkdtemp()
        fabric.get(tar_file, tmp_dir)

        dest_file = os.path.join(tmp_dir, tar_file)
        self._check_hash_sum(dest_file, tar_file)
        self.logger.debug("Untar packages file %s" % dest_file)
        cmd = """
        cd %s
        tar xzf %s
        cp -r %s/* %s
        rm -r %s
        """ % (tmp_dir, tar_file, src, dest, tmp_dir)
        os.system(cmd)
        self.logger.info("Downloaded %s to %s" %
                         (src, _green(dest)))

    @_check_fabric
    def fetch_packages(self, dest=None):
        """Download the built debian packages into local directory `dest'"""
        if dest is None:
            dest = self.config.get('Global', 'pkgs_dir')
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo_build-area", dest)
        self.logger.info("Downloaded debian packages to %s" %
                         _green(dest))
|
577 |
|