Statistics
| Branch: | Tag: | Revision:

root / ci / utils.py @ 79144a72

History | View | Annotate | Download (24.5 kB)

1
#!/usr/bin/env python
2

    
3
"""
4
Synnefo ci utils module
5
"""
6

    
7
import os
import re
import sys
import time
import hashlib
import logging
import subprocess
import tempfile
from ConfigParser import ConfigParser, DuplicateSectionError

import fabric.api as fabric

from kamaki.cli import config as kamaki_config
from kamaki.clients.astakos import AstakosClient
from kamaki.clients.cyclades import CycladesClient
from kamaki.clients.image import ImageClient
from kamaki.clients.compute import ComputeClient
22

    
23
# Name of the CI configuration file (resolved relative to the ci/ dir
# when not absolute -- see SynnefoCI.__init__)
DEFAULT_CONFIG_FILE = "new_config"
# UUID of owner of system images
DEFAULT_SYSTEM_IMAGES_UUID = [
    "25ecced9-bf53-4145-91ee-cf47377e9fb2",  # production (okeanos.grnet.gr)
    "04cbe33f-29b7-4ef1-94fb-015929e5fc06",  # testing (okeanos.io)
    ]
29

    
30

    
31
def _run(cmd, verbose):
    """Run `cmd` through fabric, hiding output according to `verbose`.

    When verbose, only fabric's "running" banner is hidden; otherwise
    the command's stdout is suppressed as well.
    """
    hidden = ['running'] if verbose else ['running', 'stdout']
    # Used * or ** magic. pylint: disable-msg=W0142
    with fabric.hide(*hidden):
        return fabric.run(cmd)
39

    
40

    
41
def _put(local, remote):
    """Upload `local` to `remote` via fabric, suppressing all output"""
    with fabric.quiet():
        fabric.put(local, remote)
45

    
46

    
47
def _red(msg):
48
    """Red color"""
49
    #return "\x1b[31m" + str(msg) + "\x1b[0m"
50
    return str(msg)
51

    
52

    
53
def _yellow(msg):
54
    """Yellow color"""
55
    #return "\x1b[33m" + str(msg) + "\x1b[0m"
56
    return str(msg)
57

    
58

    
59
def _green(msg):
60
    """Green color"""
61
    #return "\x1b[32m" + str(msg) + "\x1b[0m"
62
    return str(msg)
63

    
64

    
65
def _check_fabric(fun):
66
    """Check if fabric env has been set"""
67
    def wrapper(self, *args, **kwargs):
68
        """wrapper function"""
69
        if not self.fabric_installed:
70
            self.setup_fabric()
71
        return fun(self, *args, **kwargs)
72
    return wrapper
73

    
74

    
75
def _check_kamaki(fun):
76
    """Check if kamaki has been initialized"""
77
    def wrapper(self, *args, **kwargs):
78
        """wrapper function"""
79
        if not self.kamaki_installed:
80
            self.setup_kamaki()
81
        return fun(self, *args, **kwargs)
82
    return wrapper
83

    
84

    
85
class _MyFormatter(logging.Formatter):
    """Logging Formatter that picks a format string per log level"""
    def format(self, record):
        """Format `record` with a level-specific layout.

        self._fmt is swapped in temporarily because the base
        Formatter.format() reads it; the configured value is restored
        before returning.
        """
        saved_fmt = self._fmt
        level_formats = {
            logging.DEBUG: "  %(msg)s",
            logging.INFO: "%(msg)s",
            logging.WARNING: _yellow("[W] %(msg)s"),
            logging.ERROR: _red("[E] %(msg)s"),
        }
        if record.levelno in level_formats:
            self._fmt = level_formats[record.levelno]
        result = logging.Formatter.format(self, record)
        self._fmt = saved_fmt
        return result
100

    
101

    
102
# Too few public methods. pylint: disable-msg=R0903
103
class _InfoFilter(logging.Filter):
104
    """Logging Filter that allows DEBUG and INFO messages only"""
105
    def filter(self, rec):
106
        """The filter"""
107
        return rec.levelno in (logging.DEBUG, logging.INFO)
108

    
109

    
110
# Too many instance attributes. pylint: disable-msg=R0902
111
class SynnefoCI(object):
112
    """SynnefoCI python class"""
113

    
114
    def __init__(self, config_file=None, build_id=None, cloud=None):
        """Initialize the SynnefoCI helper.

        Sets up logging, locates the local directories, reads the config
        files, picks a build id and chooses the kamaki cloud to use.

        config_file -- path to the CI config file; defaults to
                       DEFAULT_CONFIG_FILE, relative paths are resolved
                       against the ci/ directory
        build_id    -- reuse an existing build id; if None a new unique
                       one is allocated from the temporary config
        cloud       -- kamaki cloud name (overrides the config file)
        """
        # Setup logger: DEBUG/INFO go to stdout (via _InfoFilter),
        # WARNING/ERROR go to stderr
        self.logger = logging.getLogger('synnefo-ci')
        self.logger.setLevel(logging.DEBUG)

        handler1 = logging.StreamHandler(sys.stdout)
        handler1.setLevel(logging.DEBUG)
        handler1.addFilter(_InfoFilter())
        handler1.setFormatter(_MyFormatter())
        handler2 = logging.StreamHandler(sys.stderr)
        handler2.setLevel(logging.WARNING)
        handler2.setFormatter(_MyFormatter())

        self.logger.addHandler(handler1)
        self.logger.addHandler(handler2)

        # Get our local dir
        self.ci_dir = os.path.dirname(os.path.abspath(__file__))
        self.repo_dir = os.path.dirname(self.ci_dir)

        # Read config file
        if config_file is None:
            config_file = DEFAULT_CONFIG_FILE
        if not os.path.isabs(config_file):
            config_file = os.path.join(self.ci_dir, config_file)
        self.config = ConfigParser()
        self.config.optionxform = str  # keep option names case-sensitive
        self.config.read(config_file)

        # Read temporary_config file
        temp_config = self.config.get('Global', 'temporary_config')
        self.temp_config = ConfigParser()
        self.temp_config.optionxform = str
        self.temp_config.read(temp_config)
        if build_id is not None:
            self.build_id = build_id
        else:
            # Find a unique build_id to use: one past the largest
            # existing section number, or 1 for a fresh file
            ids = self.temp_config.sections()
            if ids:
                max_id = int(max(ids, key=int))
                self.build_id = max_id + 1
            else:
                self.build_id = 1
        self.logger.info("Will use %s as build id" % _green(self.build_id))
        # If build_id doesn't exist create a new one
        try:
            self.temp_config.add_section(str(self.build_id))
            creation_time = time.strftime("%a, %d %b %Y %X", time.localtime())
            self.write_config("created", creation_time)
        except DuplicateSectionError:
            pass

        # Set kamaki cloud
        if cloud is not None:
            self.kamaki_cloud = cloud
        elif self.config.has_option("Deployment", "kamaki_cloud"):
            kamaki_cloud = self.config.get("Deployment", "kamaki_cloud")
            # BUGFIX: a non-empty config value was previously ignored,
            # leaving self.kamaki_cloud unset and breaking setup_kamaki()
            self.kamaki_cloud = kamaki_cloud if kamaki_cloud != "" else None
        else:
            self.kamaki_cloud = None

        # Initialize variables; the clients are created lazily by
        # setup_kamaki() / setup_fabric() via the _check_* decorators
        self.fabric_installed = False
        self.kamaki_installed = False
        self.cyclades_client = None
        self.compute_client = None
        self.image_client = None
187

    
188
    def setup_kamaki(self):
        """Initialize the kamaki clients.

        Resolves the cloud's auth URL and token from the kamaki config,
        then creates cyclades_client, image_client and compute_client
        from the endpoints advertised by Astakos.
        """
        config = kamaki_config.Config()
        if self.kamaki_cloud is None:
            self.kamaki_cloud = config.get_global("default_cloud")

        self.logger.info("Setup kamaki client, using cloud '%s'.." %
                         self.kamaki_cloud)
        auth_url = config.get_cloud(self.kamaki_cloud, "url")
        self.logger.debug("Authentication URL is %s" % _green(auth_url))
        token = config.get_cloud(self.kamaki_cloud, "token")
        #self.logger.debug("Token is %s" % _green(token))

        astakos_client = AstakosClient(auth_url, token)

        cyclades_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Cyclades API url is %s" % _green(cyclades_url))
        self.cyclades_client = CycladesClient(cyclades_url, token)
        self.cyclades_client.CONNECTION_RETRY_LIMIT = 2

        image_url = \
            astakos_client.get_service_endpoints('image')['publicURL']
        self.logger.debug("Images API url is %s" % _green(image_url))
        # BUGFIX: the image client was mistakenly constructed with the
        # cyclades endpoint instead of the image endpoint computed above
        self.image_client = ImageClient(image_url, token)
        self.image_client.CONNECTION_RETRY_LIMIT = 2

        compute_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Compute API url is %s" % _green(compute_url))
        self.compute_client = ComputeClient(compute_url, token)
        self.compute_client.CONNECTION_RETRY_LIMIT = 2

        # Mark kamaki as initialized so @_check_kamaki does not redo
        # this setup on every decorated call
        self.kamaki_installed = True
224

    
225
    def _wait_transition(self, server_id, current_status, new_status):
        """Poll the server until it moves from `current_status` to `new_status`.

        Returns the server details dict once `new_status` is reached.
        On timeout (Global/build_timeout seconds) or on any unexpected
        status the server is destroyed and the process exits.
        """
        self.logger.debug("Waiting for server to become %s" % new_status)
        timeout = self.config.getint('Global', 'build_timeout')
        sleep_time = 5
        while True:
            server = self.cyclades_client.get_server_details(server_id)
            status = server['status']
            if status == new_status:
                return server
            if timeout < 0:
                self.logger.error(
                    "Waiting for server to become %s timed out" % new_status)
                self.destroy_server(False)
                sys.exit(-1)
            if status != current_status:
                # Server jumped to some unexpected state (e.g. ERROR)
                self.logger.error(
                    "Server failed with status %s" % status)
                self.destroy_server(False)
                sys.exit(-1)
            # Still in transition: spend some of the timeout budget
            timeout -= sleep_time
            time.sleep(sleep_time)
248

    
249
    @_check_kamaki
    def destroy_server(self, wait=True):
        """Destroy the slave server recorded for this build.

        wait -- if True, block until the server reaches DELETED
        """
        server_id = self.temp_config.getint(str(self.build_id), 'server_id')
        # Typo fixed in the log message ("Destoying" -> "Destroying")
        self.logger.info("Destroying server with id %s " % server_id)
        self.cyclades_client.delete_server(server_id)
        if wait:
            self._wait_transition(server_id, "ACTIVE", "DELETED")
257

    
258
    @_check_kamaki
    def create_server(self, image_id=None, flavor_name=None, ssh_keys=None):
        """Create the slave server and prepare it for use.

        image_id    -- cyclades image id; if None an image is chosen
                       via _find_image()
        flavor_name -- flavor name pattern passed to _find_flavor()
        ssh_keys    -- keys file/URL forwarded to _copy_ssh_keys()

        Records server_id/user/password in the temporary config, waits
        for the server to boot, installs ssh keys and optionally locks
        down ssh access with iptables.
        """
        self.logger.info("Create a new server..")
        # Pick an image if the caller didn't supply one
        if image_id is None:
            image = self._find_image()
            self.logger.debug("Will use image \"%s\"" % _green(image['name']))
            image_id = image["id"]
        self.logger.debug("Image has id %s" % _green(image_id))
        flavor_id = self._find_flavor(flavor_name)
        server = self.cyclades_client.create_server(
            self.config.get('Deployment', 'server_name'),
            flavor_id,
            image_id)
        # Persist connection credentials for later runs/commands
        server_id = server['id']
        self.write_config('server_id', server_id)
        self.logger.debug("Server got id %s" % _green(server_id))
        server_user = server['metadata']['users']
        self.write_config('server_user', server_user)
        self.logger.debug("Server's admin user is %s" % _green(server_user))
        server_passwd = server['adminPass']
        self.write_config('server_passwd', server_passwd)

        # Wait for boot, then record IP/port and install ssh keys
        server = self._wait_transition(server_id, "BUILD", "ACTIVE")
        self._get_server_ip_and_port(server)
        self._copy_ssh_keys(ssh_keys)

        self.setup_fabric()
        self.logger.info("Setup firewall")
        accept_ssh_from = self.config.get('Global', 'accept_ssh_from')
        if accept_ssh_from != "":
            # Allow ssh only from localhost, the server itself and the
            # configured network; drop everything else on port 22
            self.logger.debug("Block ssh except from %s" % accept_ssh_from)
            cmd = """
            local_ip=$(/sbin/ifconfig eth0 | grep 'inet addr:' | \
                cut -d':' -f2 | cut -d' ' -f1)
            iptables -A INPUT -s localhost -j ACCEPT
            iptables -A INPUT -s $local_ip -j ACCEPT
            iptables -A INPUT -s {0} -p tcp --dport 22 -j ACCEPT
            iptables -A INPUT -p tcp --dport 22 -j DROP
            """.format(accept_ssh_from)
            _run(cmd, False)
299

    
300
    def _find_flavor(self, flavor_name):
        """Resolve a flavor id from a list of name patterns.

        `flavor_name' (a regular expression), when given, is tried
        before the comma-separated patterns from the config file's
        Deployment/flavor_name option.  The first flavor whose name
        matches wins; if none does the run is aborted.
        """
        # Candidate patterns, most specific first
        flavor_names = self.config.get('Deployment', 'flavor_name').split(",")
        if flavor_name is not None:
            flavor_names.insert(0, flavor_name)

        flavors = self.compute_client.list_flavors()
        for pattern in [name.strip() for name in flavor_names]:
            self.logger.debug("Try to find a flavor with name \"%s\"" % pattern)
            matches = [flv for flv in flavors
                       if re.search(pattern, flv['name']) is not None]
            if matches:
                chosen = matches[0]
                self.logger.debug("Will use %s with id %s"
                                  % (chosen['name'], chosen['id']))
                return chosen['id']

        self.logger.error("No matching flavor found.. aborting")
        sys.exit(1)
321

    
322
    def _find_image(self):
        """Find a suitable image to use.

        It has to belong to one of the `DEFAULT_SYSTEM_IMAGES_UUID'
        users and contain the word given by `image_name' option.
        Aborts with a logged error (instead of a bare IndexError)
        when no image matches.
        """
        image_name = self.config.get('Deployment', 'image_name').lower()
        images = self.image_client.list_public(detail=True)['images']
        # Select images by `system_uuid' user
        images = [x for x in images
                  if x['user_id'] in DEFAULT_SYSTEM_IMAGES_UUID]
        # Select images with `image_name' in their names
        images = [x for x in images
                  if x['name'].lower().find(image_name) != -1]
        # ROBUSTNESS: fail gracefully when nothing matched, mirroring
        # the behaviour of _find_flavor
        if not images:
            self.logger.error("No matching image found.. aborting")
            sys.exit(1)
        # Let's select the first one
        return images[0]
338

    
339
    def _get_server_ip_and_port(self, server):
        """Compute and record the server's IPv4 address and ssh port.

        For okeanos.io deployments ssh goes through gate.okeanos.io on
        a port derived from the last two octets of the attached address;
        everywhere else plain port 22 on that address is used.
        """
        self.logger.info("Get server connection details..")
        server_ip = server['attachments'][0]['ipv4']
        if ".okeanos.io" in self.cyclades_client.base_url:
            # Port scheme used by the okeanos.io ssh gateway
            octets = server_ip.split(".")
            server_port = 10000 + int(octets[2]) * 256 + int(octets[3])
            server_ip = "gate.okeanos.io"
        else:
            server_port = 22
        self.write_config('server_ip', server_ip)
        self.logger.debug("Server's IPv4 is %s" % _green(server_ip))
        self.write_config('server_port', server_port)
        self.logger.debug("Server's ssh port is %s" % _green(server_port))
        self.logger.debug("Access server using \"ssh -p %s %s@%s\"" %
                          (server_port, server['metadata']['users'], server_ip))
356

    
357
    @_check_fabric
    def _copy_ssh_keys(self, ssh_keys):
        """Upload/install ssh public keys on the server.

        ssh_keys -- local file path or http/https/ftp URL of the keys;
                    if None, the Deployment/ssh_keys config option is
                    used.  Empty value means no keys are installed.
        """
        self.logger.debug("Check for authentication keys to use")
        if ssh_keys is None:
            ssh_keys = self.config.get("Deployment", "ssh_keys")

        if ssh_keys != "":
            self.logger.debug("Will use %s authentication keys file" % ssh_keys)
            keyfile = '/tmp/%s.pub' % fabric.env.user
            _run('mkdir -p ~/.ssh && chmod 700 ~/.ssh', False)
            # Track whether the keyfile actually reached the server
            uploaded = True
            if ssh_keys.startswith("http://") or \
                    ssh_keys.startswith("https://") or \
                    ssh_keys.startswith("ftp://"):
                cmd = """
                apt-get update
                apt-get install wget --yes
                wget {0} -O {1} --no-check-certificate
                """.format(ssh_keys, keyfile)
                _run(cmd, False)
            elif os.path.exists(ssh_keys):
                _put(ssh_keys, keyfile)
            else:
                self.logger.debug("No ssh keys found")
                uploaded = False
            # BUGFIX: previously the keyfile was cat'ed and removed even
            # when it was never uploaded, making the remote commands fail
            if uploaded:
                _run('cat %s >> ~/.ssh/authorized_keys' % keyfile, False)
                _run('rm %s' % keyfile, False)
                self.logger.debug("Uploaded ssh authorized keys")
        else:
            self.logger.debug("No ssh keys found")
386

    
387
    def write_config(self, option, value):
        """Persist `option' = `value' for the current build.

        The value is stored under the build's section of the temporary
        config file together with a fresh "modified" timestamp, and the
        file is rewritten on disk.
        """
        section = str(self.build_id)
        self.temp_config.set(section, option, str(value))
        now = time.strftime("%a, %d %b %Y %X", time.localtime())
        self.temp_config.set(section, "modified", now)
        temp_conf_file = self.config.get('Global', 'temporary_config')
        with open(temp_conf_file, 'wb') as tcf:
            self.temp_config.write(tcf)
395

    
396
    def setup_fabric(self):
        """Setup the fabric environment.

        Points fabric at the slave server recorded in the temporary
        config (user, host, port, password) and marks fabric as
        initialized so @_check_fabric does not repeat this setup.
        """
        self.logger.info("Setup fabric parameters..")
        fabric.env.user = self.temp_config.get(str(self.build_id),
                                               'server_user')
        fabric.env.host_string = \
            self.temp_config.get(str(self.build_id), 'server_ip')
        fabric.env.port = self.temp_config.getint(str(self.build_id),
                                                  'server_port')
        fabric.env.password = self.temp_config.get(str(self.build_id),
                                                   'server_passwd')
        fabric.env.connection_attempts = 10
        fabric.env.shell = "/bin/bash -c"
        fabric.env.disable_known_hosts = True
        fabric.env.output_prefix = None
        # BUGFIX: the flag checked by @_check_fabric was never set, so
        # fabric was re-initialized on every decorated call
        self.fabric_installed = True
411

    
412
    def _check_hash_sum(self, localfile, remotefile):
        """Verify that a local and a remote file have the same sha256.

        The local hash is computed with hashlib (no dependency on a
        local `sha256sum' binary, and safe for odd filenames); the
        remote hash still uses sha256sum over fabric.  Aborts the run
        when the hashes differ.
        """
        self.logger.debug("Check hash sum for local file %s" % localfile)
        # Stream the file in chunks to keep memory usage flat
        sha = hashlib.sha256()
        with open(localfile, 'rb') as lfl:
            for chunk in iter(lambda: lfl.read(65536), b''):
                sha.update(chunk)
        hash1 = sha.hexdigest()
        self.logger.debug("Local file has sha256 hash %s" % hash1)
        self.logger.debug("Check hash sum for remote file %s" % remotefile)
        hash2 = _run("sha256sum %s" % remotefile, False)
        hash2 = hash2.split(' ')[0]
        self.logger.debug("Remote file has sha256 hash %s" % hash2)
        if hash1 != hash2:
            self.logger.error("Hashes differ.. aborting")
            sys.exit(-1)
424

    
425
    @_check_fabric
    def clone_repo(self):
        """Clone the Synnefo repo on the slave server.

        Configures apt and git on the server, determines the branch to
        use (config option, current local branch, or short commit hash
        on a detached HEAD), clones with retries and finally checks out
        the chosen branch.
        """
        self.logger.info("Configure repositories on remote server..")
        self.logger.debug("Setup apt, install curl and git")
        cmd = """
        echo 'APT::Install-Suggests "false";' >> /etc/apt/apt.conf
        apt-get update
        apt-get install curl git --yes
        echo -e "\n\ndeb {0}" >> /etc/apt/sources.list
        curl https://dev.grnet.gr/files/apt-grnetdev.pub | apt-key add -
        apt-get update
        git config --global user.name {1}
        git config --global user.email {2}
        """.format(self.config.get('Global', 'apt_repo'),
                   self.config.get('Global', 'git_config_name'),
                   self.config.get('Global', 'git_config_mail'))
        _run(cmd, False)

        synnefo_repo = self.config.get('Global', 'synnefo_repo')
        synnefo_branch = self.config.get("Global", "synnefo_branch")
        if synnefo_branch == "":
            # Use the current local branch; fall back to the short
            # commit hash when on a detached HEAD
            synnefo_branch = \
                subprocess.Popen(
                    ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                    stdout=subprocess.PIPE).communicate()[0].strip()
            if synnefo_branch == "HEAD":
                synnefo_branch = \
                    subprocess.Popen(
                        ["git", "rev-parse", "--short", "HEAD"],
                        stdout=subprocess.PIPE).communicate()[0].strip()
        self.logger.info("Will use branch %s" % synnefo_branch)
        # Currently clonning synnefo can fail unexpectedly
        cloned = False
        for i in range(10):
            self.logger.debug("Clone synnefo from %s" % synnefo_repo)
            try:
                _run("git clone %s synnefo" % synnefo_repo, False)
                cloned = True
                break
            except BaseException:
                self.logger.warning("Clonning synnefo failed.. retrying %s"
                                    % i)
        # BUGFIX: bail out *before* using the clone; the branch setup
        # below used to run (and fail) even when all attempts failed
        if not cloned:
            self.logger.error("Can not clone Synnefo repo.")
            sys.exit(-1)

        cmd = """
        cd synnefo
        for branch in `git branch -a | grep remotes | \
                       grep -v HEAD | grep -v master`; do
            git branch --track ${branch##*/} $branch
        done
        git checkout %s
        """ % (synnefo_branch)
        _run(cmd, False)
481

    
482
    @_check_fabric
    def build_synnefo(self):
        """Build Synnefo debian packages on the remote server.

        Installs the build toolchain, optionally patches debpython's
        pydist.py, builds the packages with devflow-autopkg, installs
        snf-deploy and copies the produced debs into snf-deploy's
        packages directory.
        """
        self.logger.info("Build Synnefo packages..")
        self.logger.debug("Install development packages")
        cmd = """
        apt-get update
        apt-get install zlib1g-dev dpkg-dev debhelper git-buildpackage \
                python-dev python-all python-pip --yes
        pip install devflow
        """
        _run(cmd, False)

        # Optional workaround controlled by the Global/patch_pydist
        # config option (string comparison against "True")
        if self.config.get('Global', 'patch_pydist') == "True":
            self.logger.debug("Patch pydist.py module")
            cmd = r"""
            sed -r -i 's/(\(\?P<name>\[A-Za-z\]\[A-Za-z0-9_\.)/\1\\\-/' \
                /usr/share/python/debpython/pydist.py
            """
            _run(cmd, False)

        # Build synnefo packages
        self.logger.debug("Build synnefo packages")
        cmd = """
        devflow-autopkg snapshot -b ~/synnefo_build-area --no-sign
        """
        with fabric.cd("synnefo"):
            _run(cmd, True)

        # Install snf-deploy package; warn_only because apt-get -f
        # install is expected to fix up the dpkg -i dependency errors
        self.logger.debug("Install snf-deploy package")
        cmd = """
        dpkg -i snf-deploy*.deb
        apt-get -f install --yes
        """
        with fabric.cd("synnefo_build-area"):
            with fabric.settings(warn_only=True):
                _run(cmd, True)

        # Setup synnefo packages for snf-deploy
        self.logger.debug("Copy synnefo debs to snf-deploy packages dir")
        cmd = """
        cp ~/synnefo_build-area/*.deb /var/lib/snf-deploy/packages/
        """
        _run(cmd, False)
527

528
    @_check_fabric
    def build_documentation(self):
        """Build the Synnefo documentation on the remote server.

        Upgrades Sphinx, refreshes the devflow version and runs the
        repo's make_docs.sh into the synnefo_documentation directory.
        """
        self.logger.info("Build Synnefo documentation..")
        _run("pip install -U Sphinx", False)
        with fabric.cd("synnefo"):
            _run("devflow-update-version; "
                 "./ci/make_docs.sh synnefo_documentation", False)
536

537
    def fetch_documentation(self, dest=None):
        """Fetch the built Synnefo documentation to a local directory.

        dest -- local target directory; defaults to
                "synnefo_documentation" (created if missing)
        """
        self.logger.info("Fetch Synnefo documentation..")
        target = "synnefo_documentation" if dest is None else dest
        target = os.path.abspath(target)
        if not os.path.exists(target):
            os.makedirs(target)
        self.fetch_compressed("synnefo/synnefo_documentation", target)
        self.logger.info("Downloaded documentation to %s" %
                         _green(target))
548

549
    @_check_fabric
    def deploy_synnefo(self, schema=None):
        """Deploy Synnefo on the server using snf-deploy.

        schema -- deployment schema name; must match a directory under
                  ci/schemas/ (default: the Global/schema config value).
                  Raises ValueError for an unknown schema.
        """
        self.logger.info("Deploy Synnefo..")
        if schema is None:
            schema = self.config.get('Global', 'schema')
        self.logger.debug("Will use %s schema" % schema)

        schema_dir = os.path.join(self.ci_dir, "schemas/%s" % schema)
        # os.path.isdir is False for missing paths as well
        if not os.path.isdir(schema_dir):
            raise ValueError("Unknown schema: %s" % schema)

        self.logger.debug("Upload schema files to server")
        _put(os.path.join(schema_dir, "*"), "/etc/snf-deploy/")

        self.logger.debug("Change password in nodes.conf file")
        cmd = """
        sed -i 's/^password =.*/password = {0}/' /etc/snf-deploy/nodes.conf
        """.format(fabric.env.password)
        _run(cmd, False)

        self.logger.debug("Run snf-deploy")
        cmd = """
        snf-deploy --disable-colors --autoconf all
        """
        _run(cmd, True)
575

576
    @_check_fabric
    def unit_test(self):
        """Run the Synnefo unit test suite on the remote server.

        Installs the test dependencies, uploads the ci/tests.sh helper
        and runs it for the component named in the "Unit Tests" config
        section.
        """
        self.logger.info("Run Synnefo unit test suite")
        component = self.config.get('Unit Tests', 'component')

        self.logger.debug("Install needed packages")
        _run("""
        pip install mock
        pip install factory_boy
        """, False)

        self.logger.debug("Upload tests.sh file")
        _put(os.path.join(self.ci_dir, "tests.sh"), ".")

        self.logger.debug("Run unit tests")
        _run("""
        bash tests.sh {0}
        """.format(component), True)
598

599
    @_check_fabric
    def run_burnin(self):
        """Run the burnin functional test suite against the deployment.

        Builds a remote shell script that extracts the auth URL and
        token from the server's .kamakirc, discovers the system images
        user via kamaki, runs snf-burnin with the extra options from the
        Burnin/cmd_options config value, and finally dumps the detail
        logs of the latest burnin run.
        """
        self.logger.info("Run Burnin functional test suite")
        cmd = """
        auth_url=$(grep -e '^url =' .kamakirc | cut -d' ' -f3)
        token=$(grep -e '^token =' .kamakirc | cut -d' ' -f3)
        images_user=$(kamaki image list -l | grep owner | \
                      cut -d':' -f2 | tr -d ' ')
        snf-burnin --auth-url=$auth_url --token=$token \
            --force-flavor=2 --image-id=all \
            --system-images-user=$images_user \
            {0}
        log_folder=$(ls -1d /var/log/burnin/* | tail -n1)
        for i in $(ls $log_folder/*/details*); do
            echo -e "\\n\\n"
            echo -e "***** $i\\n"
            cat $i
        done
        """.format(self.config.get('Burnin', 'cmd_options'))
        _run(cmd, True)
620

621
    @_check_fabric
    def fetch_compressed(self, src, dest=None):
        """Fetch a remote directory as a verified tarball and unpack it.

        src  -- remote directory to fetch
        dest -- local directory to unpack into; if None, a directory
                named after `src's basename under the cwd is used

        The tarball's sha256 is verified via _check_hash_sum before
        unpacking.
        """
        self.logger.debug("Creating tarball of %s" % src)
        basename = os.path.basename(src)
        tar_file = basename + ".tgz"
        cmd = "tar czf %s %s" % (tar_file, src)
        _run(cmd, False)
        # BUGFIX: dest=None used to crash in os.path.exists(None);
        # default to a directory named after the source
        if dest is None:
            dest = os.path.abspath(basename)
        if not os.path.exists(dest):
            os.makedirs(dest)

        tmp_dir = tempfile.mkdtemp()
        fabric.get(tar_file, tmp_dir)

        dest_file = os.path.join(tmp_dir, tar_file)
        self._check_hash_sum(dest_file, tar_file)
        self.logger.debug("Untar packages file %s" % dest_file)
        cmd = """
        cd %s
        tar xzf %s
        cp -r %s/* %s
        rm -r %s
        """ % (tmp_dir, tar_file, src, dest, tmp_dir)
        os.system(cmd)
        self.logger.info("Downloaded %s to %s" %
                         (src, _green(dest)))
647

648
    @_check_fabric
    def fetch_packages(self, dest=None):
        """Fetch the built Synnefo debian packages locally.

        dest -- local target directory; defaults to the Global/pkgs_dir
                config value (created if missing)
        """
        target = self.config.get('Global', 'pkgs_dir') if dest is None else dest
        target = os.path.abspath(target)
        if not os.path.exists(target):
            os.makedirs(target)
        self.fetch_compressed("synnefo_build-area", target)
        self.logger.info("Downloaded debian packages to %s" %
                         _green(target))
659