Statistics
| Branch: | Tag: | Revision:

root / ci / utils.py @ c314fcd2

History | View | Annotate | Download (24.2 kB)

1
#!/usr/bin/env python
2

    
3
"""
4
Synnefo ci utils module
5
"""
6

    
7
import os
8
import re
9
import sys
10
import time
11
import logging
12
import fabric.api as fabric
13
import subprocess
14
import tempfile
15
from ConfigParser import ConfigParser, DuplicateSectionError
16

    
17
from kamaki.cli import config as kamaki_config
18
from kamaki.clients.astakos import AstakosClient
19
from kamaki.clients.cyclades import CycladesClient
20
from kamaki.clients.image import ImageClient
21
from kamaki.clients.compute import ComputeClient
22

    
23
DEFAULT_CONFIG_FILE = "new_config"
24
# UUID of owner of system images
25
DEFAULT_SYSTEM_IMAGES_UUID = [
26
    "25ecced9-bf53-4145-91ee-cf47377e9fb2",  # production (okeanos.grnet.gr)
27
    "04cbe33f-29b7-4ef1-94fb-015929e5fc06",  # testing (okeanos.io)
28
    ]
29

    
30

    
31
def _run(cmd, verbose):
32
    """Run fabric with verbose level"""
33
    if verbose:
34
        args = ('running',)
35
    else:
36
        args = ('running', 'stdout',)
37
    with fabric.hide(*args):  # Used * or ** magic. pylint: disable-msg=W0142
38
        return fabric.run(cmd)
39

    
40

    
41
def _put(local, remote):
42
    """Run fabric put command without output"""
43
    with fabric.quiet():
44
        fabric.put(local, remote)
45

    
46

    
47
def _red(msg):
48
    """Red color"""
49
    #return "\x1b[31m" + str(msg) + "\x1b[0m"
50
    return str(msg)
51

    
52

    
53
def _yellow(msg):
54
    """Yellow color"""
55
    #return "\x1b[33m" + str(msg) + "\x1b[0m"
56
    return str(msg)
57

    
58

    
59
def _green(msg):
60
    """Green color"""
61
    #return "\x1b[32m" + str(msg) + "\x1b[0m"
62
    return str(msg)
63

    
64

    
65
def _check_fabric(fun):
66
    """Check if fabric env has been set"""
67
    def wrapper(self, *args, **kwargs):
68
        """wrapper function"""
69
        if not self.fabric_installed:
70
            self.setup_fabric()
71
        return fun(self, *args, **kwargs)
72
    return wrapper
73

    
74

    
75
def _check_kamaki(fun):
76
    """Check if kamaki has been initialized"""
77
    def wrapper(self, *args, **kwargs):
78
        """wrapper function"""
79
        if not self.kamaki_installed:
80
            self.setup_kamaki()
81
        return fun(self, *args, **kwargs)
82
    return wrapper
83

    
84

    
85
class _MyFormatter(logging.Formatter):
86
    """Logging Formatter"""
87
    def format(self, record):
88
        format_orig = self._fmt
89
        if record.levelno == logging.DEBUG:
90
            self._fmt = "  %(msg)s"
91
        elif record.levelno == logging.INFO:
92
            self._fmt = "%(msg)s"
93
        elif record.levelno == logging.WARNING:
94
            self._fmt = _yellow("[W] %(msg)s")
95
        elif record.levelno == logging.ERROR:
96
            self._fmt = _red("[E] %(msg)s")
97
        result = logging.Formatter.format(self, record)
98
        self._fmt = format_orig
99
        return result
100

    
101

    
102
# Too few public methods. pylint: disable-msg=R0903
103
class _InfoFilter(logging.Filter):
104
    """Logging Filter that allows DEBUG and INFO messages only"""
105
    def filter(self, rec):
106
        """The filter"""
107
        return rec.levelno in (logging.DEBUG, logging.INFO)
108

    
109

    
110
# Too many instance attributes. pylint: disable-msg=R0902
111
class SynnefoCI(object):
112
    """SynnefoCI python class"""
113

    
114
    def __init__(self, config_file=None, cleanup_config=False, cloud=None):
115
        """ Initialize SynnefoCI python class
116

117
        Setup logger, local_dir, config and kamaki
118
        """
119
        # Setup logger
120
        self.logger = logging.getLogger('synnefo-ci')
121
        self.logger.setLevel(logging.DEBUG)
122

    
123
        handler1 = logging.StreamHandler(sys.stdout)
124
        handler1.setLevel(logging.DEBUG)
125
        handler1.addFilter(_InfoFilter())
126
        handler1.setFormatter(_MyFormatter())
127
        handler2 = logging.StreamHandler(sys.stderr)
128
        handler2.setLevel(logging.WARNING)
129
        handler2.setFormatter(_MyFormatter())
130

    
131
        self.logger.addHandler(handler1)
132
        self.logger.addHandler(handler2)
133

    
134
        # Get our local dir
135
        self.ci_dir = os.path.dirname(os.path.abspath(__file__))
136
        self.repo_dir = os.path.dirname(self.ci_dir)
137

    
138
        # Read config file
139
        if config_file is None:
140
            config_file = DEFAULT_CONFIG_FILE
141
        if not os.path.isabs(config_file):
142
            config_file = os.path.join(self.ci_dir, config_file)
143

    
144
        self.config = ConfigParser()
145
        self.config.optionxform = str
146
        self.config.read(config_file)
147
        temp_config = self.config.get('Global', 'temporary_config')
148
        if cleanup_config:
149
            try:
150
                os.remove(temp_config)
151
            except OSError:
152
                pass
153
        else:
154
            self.config.read(self.config.get('Global', 'temporary_config'))
155

    
156
        # Set kamaki cloud
157
        if cloud is not None:
158
            self.kamaki_cloud = cloud
159
        elif self.config.has_option("Deployment", "kamaki_cloud"):
160
            kamaki_cloud = self.config.get("Deployment", "kamaki_cloud")
161
            if kamaki_cloud == "":
162
                self.kamaki_cloud = None
163
        else:
164
            self.kamaki_cloud = None
165

    
166
        # Initialize variables
167
        self.fabric_installed = False
168
        self.kamaki_installed = False
169
        self.cyclades_client = None
170
        self.compute_client = None
171
        self.image_client = None
172

    
173
    def setup_kamaki(self):
174
        """Initialize kamaki
175

176
        Setup cyclades_client, image_client and compute_client
177
        """
178

    
179
        config = kamaki_config.Config()
180
        if self.kamaki_cloud is None:
181
            self.kamaki_cloud = config.get_global("default_cloud")
182

    
183
        self.logger.info("Setup kamaki client, using cloud '%s'.." %
184
                         self.kamaki_cloud)
185
        auth_url = config.get_cloud(self.kamaki_cloud, "url")
186
        self.logger.debug("Authentication URL is %s" % _green(auth_url))
187
        token = config.get_cloud(self.kamaki_cloud, "token")
188
        #self.logger.debug("Token is %s" % _green(token))
189

    
190
        astakos_client = AstakosClient(auth_url, token)
191

    
192
        cyclades_url = \
193
            astakos_client.get_service_endpoints('compute')['publicURL']
194
        self.logger.debug("Cyclades API url is %s" % _green(cyclades_url))
195
        self.cyclades_client = CycladesClient(cyclades_url, token)
196
        self.cyclades_client.CONNECTION_RETRY_LIMIT = 2
197

    
198
        image_url = \
199
            astakos_client.get_service_endpoints('image')['publicURL']
200
        self.logger.debug("Images API url is %s" % _green(image_url))
201
        self.image_client = ImageClient(cyclades_url, token)
202
        self.image_client.CONNECTION_RETRY_LIMIT = 2
203

    
204
        compute_url = \
205
            astakos_client.get_service_endpoints('compute')['publicURL']
206
        self.logger.debug("Compute API url is %s" % _green(compute_url))
207
        self.compute_client = ComputeClient(compute_url, token)
208
        self.compute_client.CONNECTION_RETRY_LIMIT = 2
209

    
210
    def _wait_transition(self, server_id, current_status, new_status):
211
        """Wait for server to go from current_status to new_status"""
212
        self.logger.debug("Waiting for server to become %s" % new_status)
213
        timeout = self.config.getint('Global', 'build_timeout')
214
        sleep_time = 5
215
        while True:
216
            server = self.cyclades_client.get_server_details(server_id)
217
            if server['status'] == new_status:
218
                return server
219
            elif timeout < 0:
220
                self.logger.error(
221
                    "Waiting for server to become %s timed out" % new_status)
222
                self.destroy_server(False)
223
                sys.exit(-1)
224
            elif server['status'] == current_status:
225
                # Sleep for #n secs and continue
226
                timeout = timeout - sleep_time
227
                time.sleep(sleep_time)
228
            else:
229
                self.logger.error(
230
                    "Server failed with status %s" % server['status'])
231
                self.destroy_server(False)
232
                sys.exit(-1)
233

    
234
    @_check_kamaki
235
    def destroy_server(self, wait=True):
236
        """Destroy slave server"""
237
        server_id = self.config.getint('Temporary Options', 'server_id')
238
        self.logger.info("Destoying server with id %s " % server_id)
239
        self.cyclades_client.delete_server(server_id)
240
        if wait:
241
            self._wait_transition(server_id, "ACTIVE", "DELETED")
242

    
243
    @_check_kamaki
244
    def create_server(self, image_id=None, flavor_name=None, ssh_keys=None):
245
        """Create slave server"""
246
        self.logger.info("Create a new server..")
247
        if image_id is None:
248
            image = self._find_image()
249
            self.logger.debug("Will use image \"%s\"" % _green(image['name']))
250
            image_id = image["id"]
251
        self.logger.debug("Image has id %s" % _green(image_id))
252
        flavor_id = self._find_flavor(flavor_name)
253
        server = self.cyclades_client.create_server(
254
            self.config.get('Deployment', 'server_name'),
255
            flavor_id,
256
            image_id)
257
        server_id = server['id']
258
        self.write_config('server_id', server_id)
259
        self.logger.debug("Server got id %s" % _green(server_id))
260
        server_user = server['metadata']['users']
261
        self.write_config('server_user', server_user)
262
        self.logger.debug("Server's admin user is %s" % _green(server_user))
263
        server_passwd = server['adminPass']
264
        self.write_config('server_passwd', server_passwd)
265

    
266
        server = self._wait_transition(server_id, "BUILD", "ACTIVE")
267
        self._get_server_ip_and_port(server)
268
        self._copy_ssh_keys(ssh_keys)
269

    
270
        self.setup_fabric()
271
        self.logger.info("Setup firewall")
272
        accept_ssh_from = self.config.get('Global', 'accept_ssh_from')
273
        if accept_ssh_from != "":
274
            self.logger.debug("Block ssh except from %s" % accept_ssh_from)
275
            cmd = """
276
            local_ip=$(/sbin/ifconfig eth0 | grep 'inet addr:' | \
277
                cut -d':' -f2 | cut -d' ' -f1)
278
            iptables -A INPUT -s localhost -j ACCEPT
279
            iptables -A INPUT -s $local_ip -j ACCEPT
280
            iptables -A INPUT -s {0} -p tcp --dport 22 -j ACCEPT
281
            iptables -A INPUT -p tcp --dport 22 -j DROP
282
            """.format(accept_ssh_from)
283
            _run(cmd, False)
284

    
285
    def _find_flavor(self, flavor_name):
286
        """Given a flavor_name (reg expression) find a flavor id to use"""
287
        # Get a list of flavor names from config file
288
        flavor_names = self.config.get('Deployment', 'flavor_name').split(",")
289
        if flavor_name is not None:
290
            # If we have a flavor_name to use, add it to our list
291
            flavor_names.insert(0, flavor_name)
292

    
293
        flavors = self.compute_client.list_flavors()
294
        for flname in flavor_names:
295
            sflname = flname.strip()
296
            self.logger.debug("Try to find a flavor with name \"%s\"" % sflname)
297
            fls = [f for f in flavors
298
                   if re.search(sflname, f['name']) is not None]
299
            if fls:
300
                self.logger.debug("Will use %s with id %s"
301
                                  % (fls[0]['name'], fls[0]['id']))
302
                return fls[0]['id']
303

    
304
        self.logger.error("No matching flavor found.. aborting")
305
        sys.exit(1)
306

    
307
    def _find_image(self):
308
        """Find a suitable image to use
309

310
        It has to belong to one of the `DEFAULT_SYSTEM_IMAGES_UUID'
311
        users and contain the word given by `image_name' option.
312
        """
313
        image_name = self.config.get('Deployment', 'image_name').lower()
314
        images = self.image_client.list_public(detail=True)['images']
315
        # Select images by `system_uuid' user
316
        images = [x for x in images
317
                  if x['user_id'] in DEFAULT_SYSTEM_IMAGES_UUID]
318
        # Select images with `image_name' in their names
319
        images = [x for x in images
320
                  if x['name'].lower().find(image_name) != -1]
321
        # Let's select the first one
322
        return images[0]
323

    
324
    def _get_server_ip_and_port(self, server):
325
        """Compute server's IPv4 and ssh port number"""
326
        self.logger.info("Get server connection details..")
327
        server_ip = server['attachments'][0]['ipv4']
328
        if ".okeanos.io" in self.cyclades_client.base_url:
329
            tmp1 = int(server_ip.split(".")[2])
330
            tmp2 = int(server_ip.split(".")[3])
331
            server_ip = "gate.okeanos.io"
332
            server_port = 10000 + tmp1 * 256 + tmp2
333
        else:
334
            server_port = 22
335
        self.write_config('server_ip', server_ip)
336
        self.logger.debug("Server's IPv4 is %s" % _green(server_ip))
337
        self.write_config('server_port', server_port)
338
        self.logger.debug("Server's ssh port is %s" % _green(server_port))
339
        self.logger.debug("Access server using \"ssh -p %s %s@%s\"" %
340
                          (server_port, server['metadata']['users'], server_ip))
341

    
342
    @_check_fabric
343
    def _copy_ssh_keys(self, ssh_keys):
344
        """Upload/Install ssh keys to server"""
345
        self.logger.debug("Check for authentication keys to use")
346
        if ssh_keys is None:
347
            ssh_keys = self.config.get("Deployment", "ssh_keys")
348

    
349
        if ssh_keys != "":
350
            self.logger.debug("Will use %s authentication keys file" % ssh_keys)
351
            keyfile = '/tmp/%s.pub' % fabric.env.user
352
            _run('mkdir -p ~/.ssh && chmod 700 ~/.ssh', False)
353
            if ssh_keys.startswith("http://") or \
354
                    ssh_keys.startswith("https://") or \
355
                    ssh_keys.startswith("ftp://"):
356
                cmd = """
357
                apt-get update
358
                apt-get install wget --yes
359
                wget {0} -O {1} --no-check-certificate
360
                """.format(ssh_keys, keyfile)
361
                _run(cmd, False)
362
            elif os.path.exists(ssh_keys):
363
                _put(ssh_keys, keyfile)
364
            else:
365
                self.logger.debug("No ssh keys found")
366
            _run('cat %s >> ~/.ssh/authorized_keys' % keyfile, False)
367
            _run('rm %s' % keyfile, False)
368
            self.logger.debug("Uploaded ssh authorized keys")
369
        else:
370
            self.logger.debug("No ssh keys found")
371

    
372
    def write_config(self, option, value, section="Temporary Options"):
373
        """Write changes back to config file"""
374
        try:
375
            self.config.add_section(section)
376
        except DuplicateSectionError:
377
            pass
378
        self.config.set(section, option, str(value))
379
        temp_conf_file = self.config.get('Global', 'temporary_config')
380
        with open(temp_conf_file, 'wb') as tcf:
381
            self.config.write(tcf)
382

    
383
    def setup_fabric(self):
384
        """Setup fabric environment"""
385
        self.logger.info("Setup fabric parameters..")
386
        fabric.env.user = self.config.get('Temporary Options', 'server_user')
387
        fabric.env.host_string = \
388
            self.config.get('Temporary Options', 'server_ip')
389
        fabric.env.port = self.config.getint('Temporary Options',
390
                                             'server_port')
391
        fabric.env.password = self.config.get('Temporary Options',
392
                                              'server_passwd')
393
        fabric.env.connection_attempts = 10
394
        fabric.env.shell = "/bin/bash -c"
395
        fabric.env.disable_known_hosts = True
396
        fabric.env.output_prefix = None
397

    
398
    def _check_hash_sum(self, localfile, remotefile):
399
        """Check hash sums of two files"""
400
        self.logger.debug("Check hash sum for local file %s" % localfile)
401
        hash1 = os.popen("sha256sum %s" % localfile).read().split(' ')[0]
402
        self.logger.debug("Local file has sha256 hash %s" % hash1)
403
        self.logger.debug("Check hash sum for remote file %s" % remotefile)
404
        hash2 = _run("sha256sum %s" % remotefile, False)
405
        hash2 = hash2.split(' ')[0]
406
        self.logger.debug("Remote file has sha256 hash %s" % hash2)
407
        if hash1 != hash2:
408
            self.logger.error("Hashes differ.. aborting")
409
            sys.exit(-1)
410

    
411
    @_check_fabric
412
    def clone_repo(self):
413
        """Clone Synnefo repo from slave server"""
414
        self.logger.info("Configure repositories on remote server..")
415
        self.logger.debug("Setup apt, install curl and git")
416
        cmd = """
417
        echo 'APT::Install-Suggests "false";' >> /etc/apt/apt.conf
418
        apt-get update
419
        apt-get install curl git --yes
420
        echo -e "\n\ndeb {0}" >> /etc/apt/sources.list
421
        curl https://dev.grnet.gr/files/apt-grnetdev.pub | apt-key add -
422
        apt-get update
423
        git config --global user.name {1}
424
        git config --global user.email {2}
425
        """.format(self.config.get('Global', 'apt_repo'),
426
                   self.config.get('Global', 'git_config_name'),
427
                   self.config.get('Global', 'git_config_mail'))
428
        _run(cmd, False)
429

    
430
        synnefo_repo = self.config.get('Global', 'synnefo_repo')
431
        synnefo_branch = self.config.get("Global", "synnefo_branch")
432
        if synnefo_branch == "":
433
            synnefo_branch = \
434
                subprocess.Popen(
435
                    ["git", "rev-parse", "--abbrev-ref", "HEAD"],
436
                    stdout=subprocess.PIPE).communicate()[0].strip()
437
            if synnefo_branch == "HEAD":
438
                synnefo_branch = \
439
                    subprocess.Popen(
440
                        ["git", "rev-parse", "--short", "HEAD"],
441
                        stdout=subprocess.PIPE).communicate()[0].strip()
442
        self.logger.info("Will use branch %s" % synnefo_branch)
443
        # Currently clonning synnefo can fail unexpectedly
444
        cloned = False
445
        for i in range(10):
446
            self.logger.debug("Clone synnefo from %s" % synnefo_repo)
447
            try:
448
                _run("git clone %s synnefo" % synnefo_repo, False)
449
                cloned = True
450
                break
451
            except BaseException:
452
                self.logger.warning("Clonning synnefo failed.. retrying %s"
453
                                    % i)
454
        cmd = """
455
        cd synnefo
456
        for branch in `git branch -a | grep remotes | \
457
                       grep -v HEAD | grep -v master`; do
458
            git branch --track ${branch##*/} $branch
459
        done
460
        git checkout %s
461
        """ % (synnefo_branch)
462
        _run(cmd, False)
463

    
464
        if not cloned:
465
            self.logger.error("Can not clone Synnefo repo.")
466
            sys.exit(-1)
467

    
468
        deploy_repo = self.config.get('Global', 'deploy_repo')
469
        self.logger.debug("Clone snf-deploy from %s" % deploy_repo)
470
        _run("git clone --depth 1 %s" % deploy_repo, False)
471

    
472
    @_check_fabric
473
    def build_synnefo(self):
474
        """Build Synnefo packages"""
475
        self.logger.info("Build Synnefo packages..")
476
        self.logger.debug("Install development packages")
477
        cmd = """
478
        apt-get update
479
        apt-get install zlib1g-dev dpkg-dev debhelper git-buildpackage \
480
                python-dev python-all python-pip --yes
481
        pip install devflow
482
        """
483
        _run(cmd, False)
484

    
485
        if self.config.get('Global', 'patch_pydist') == "True":
486
            self.logger.debug("Patch pydist.py module")
487
            cmd = r"""
488
            sed -r -i 's/(\(\?P<name>\[A-Za-z\]\[A-Za-z0-9_\.)/\1\\\-/' \
489
                /usr/share/python/debpython/pydist.py
490
            """
491
            _run(cmd, False)
492

493
        self.logger.debug("Build snf-deploy package")
494
        cmd = """
495
        git checkout -t origin/debian
496
        git-buildpackage --git-upstream-branch=master \
497
                --git-debian-branch=debian \
498
                --git-export-dir=../snf-deploy_build-area \
499
                -uc -us
500
        """
501
        with fabric.cd("snf-deploy"):
502
            _run(cmd, True)
503

504
        self.logger.debug("Install snf-deploy package")
505
        cmd = """
506
        dpkg -i snf-deploy*.deb
507
        apt-get -f install --yes
508
        """
509
        with fabric.cd("snf-deploy_build-area"):
510
            with fabric.settings(warn_only=True):
511
                _run(cmd, True)
512

513
        self.logger.debug("Build synnefo packages")
514
        cmd = """
515
        devflow-autopkg snapshot -b ~/synnefo_build-area --no-sign
516
        """
517
        with fabric.cd("synnefo"):
518
            _run(cmd, True)
519

520
        self.logger.debug("Copy synnefo debs to snf-deploy packages dir")
521
        cmd = """
522
        cp ~/synnefo_build-area/*.deb /var/lib/snf-deploy/packages/
523
        """
524
        _run(cmd, False)
525

526
    @_check_fabric
527
    def build_documentation(self):
528
        """Build Synnefo documentation"""
529
        self.logger.info("Build Synnefo documentation..")
530
        _run("pip install -U Sphinx", False)
531
        with fabric.cd("synnefo"):
532
            _run("devflow-update-version; "
533
                 "./ci/make_docs.sh synnefo_documentation", False)
534

535
    def fetch_documentation(self, dest=None):
536
        """Fetch Synnefo documentation"""
537
        self.logger.info("Fetch Synnefo documentation..")
538
        if dest is None:
539
            dest = "synnefo_documentation"
540
        dest = os.path.abspath(dest)
541
        if not os.path.exists(dest):
542
            os.makedirs(dest)
543
        self.fetch_compressed("synnefo/synnefo_documentation", dest)
544
        self.logger.info("Downloaded documentation to %s" %
545
                         _green(dest))
546

547
    @_check_fabric
548
    def deploy_synnefo(self, schema=None):
549
        """Deploy Synnefo using snf-deploy"""
550
        self.logger.info("Deploy Synnefo..")
551
        if schema is None:
552
            schema = self.config.get('Global', 'schema')
553
        self.logger.debug("Will use %s schema" % schema)
554

555
        schema_dir = os.path.join(self.ci_dir, "schemas/%s" % schema)
556
        if not (os.path.exists(schema_dir) and os.path.isdir(schema_dir)):
557
            raise ValueError("Unknown schema: %s" % schema)
558

559
        self.logger.debug("Upload schema files to server")
560
        _put(os.path.join(schema_dir, "*"), "/etc/snf-deploy/")
561

562
        self.logger.debug("Change password in nodes.conf file")
563
        cmd = """
564
        sed -i 's/^password =.*/password = {0}/' /etc/snf-deploy/nodes.conf
565
        """.format(fabric.env.password)
566
        _run(cmd, False)
567

568
        self.logger.debug("Run snf-deploy")
569
        cmd = """
570
        snf-deploy --disable-colors --autoconf all
571
        """
572
        _run(cmd, True)
573

574
    @_check_fabric
575
    def unit_test(self):
576
        """Run Synnefo unit test suite"""
577
        self.logger.info("Run Synnefo unit test suite")
578
        component = self.config.get('Unit Tests', 'component')
579

580
        self.logger.debug("Install needed packages")
581
        cmd = """
582
        pip install mock
583
        pip install factory_boy
584
        """
585
        _run(cmd, False)
586

587
        self.logger.debug("Upload tests.sh file")
588
        unit_tests_file = os.path.join(self.ci_dir, "tests.sh")
589
        _put(unit_tests_file, ".")
590

591
        self.logger.debug("Run unit tests")
592
        cmd = """
593
        bash tests.sh {0}
594
        """.format(component)
595
        _run(cmd, True)
596

597
    @_check_fabric
598
    def run_burnin(self):
599
        """Run burnin functional test suite"""
600
        self.logger.info("Run Burnin functional test suite")
601
        cmd = """
602
        auth_url=$(grep -e '^url =' .kamakirc | cut -d' ' -f3)
603
        token=$(grep -e '^token =' .kamakirc | cut -d' ' -f3)
604
        images_user=$(kamaki image list -l | grep owner | \
605
                      cut -d':' -f2 | tr -d ' ')
606
        snf-burnin --auth-url=$auth_url --token=$token \
607
            --force-flavor=2 --image-id=all \
608
            --system-images-user=$images_user \
609
            {0}
610
        log_folder=$(ls -1d /var/log/burnin/* | tail -n1)
611
        for i in $(ls $log_folder/*/details*); do
612
            echo -e "\\n\\n"
613
            echo -e "***** $i\\n"
614
            cat $i
615
        done
616
        """.format(self.config.get('Burnin', 'cmd_options'))
617
        _run(cmd, True)
618

619
    @_check_fabric
620
    def fetch_compressed(self, src, dest=None):
621
        """Create a tarball and fetch it locally"""
622
        self.logger.debug("Creating tarball of %s" % src)
623
        basename = os.path.basename(src)
624
        tar_file = basename + ".tgz"
625
        cmd = "tar czf %s %s" % (tar_file, src)
626
        _run(cmd, False)
627
        if not os.path.exists(dest):
628
            os.makedirs(dest)
629

630
        tmp_dir = tempfile.mkdtemp()
631
        fabric.get(tar_file, tmp_dir)
632

633
        dest_file = os.path.join(tmp_dir, tar_file)
634
        self._check_hash_sum(dest_file, tar_file)
635
        self.logger.debug("Untar packages file %s" % dest_file)
636
        cmd = """
637
        cd %s
638
        tar xzf %s
639
        cp -r %s/* %s
640
        rm -r %s
641
        """ % (tmp_dir, tar_file, src, dest, tmp_dir)
642
        os.system(cmd)
643
        self.logger.info("Downloaded %s to %s" %
644
                         (src, _green(dest)))
645

646
    @_check_fabric
647
    def fetch_packages(self, dest=None):
648
        """Fetch Synnefo packages"""
649
        if dest is None:
650
            dest = self.config.get('Global', 'pkgs_dir')
651
        dest = os.path.abspath(dest)
652
        if not os.path.exists(dest):
653
            os.makedirs(dest)
654
        self.fetch_compressed("synnefo_build-area", dest)
655
        self.logger.info("Downloaded debian packages to %s" %
656
                         _green(dest))
657