
root / ci / utils.py @ ef695867


#!/usr/bin/env python

"""
Synnefo ci utils module
"""
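
# Illustrative usage sketch (not part of the module; assumes a valid CI
# config file and an initialized kamaki cloud -- "my_cloud" is a placeholder):
#
#     ci = SynnefoCI(cloud="my_cloud")
#     ci.create_server()
#     ci.clone_repo()
#     ci.build_synnefo()
#     ci.deploy_synnefo()
#     ci.unit_test()
#     ci.run_burnin()
#     ci.fetch_packages()
#     ci.destroy_server()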
import os
import re
import sys
import time
import logging
import fabric.api as fabric
import subprocess
import tempfile
from ConfigParser import ConfigParser, DuplicateSectionError

from kamaki.cli import config as kamaki_config
from kamaki.clients.astakos import AstakosClient
from kamaki.clients.cyclades import CycladesClient
from kamaki.clients.image import ImageClient
from kamaki.clients.compute import ComputeClient

DEFAULT_CONFIG_FILE = "new_config"
# UUIDs of the owners of system images
DEFAULT_SYSTEM_IMAGES_UUID = [
    "25ecced9-bf53-4145-91ee-cf47377e9fb2",  # production (okeanos.grnet.gr)
    "04cbe33f-29b7-4ef1-94fb-015929e5fc06",  # testing (okeanos.io)
    ]
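
# Configuration options referenced by this module (illustrative summary,
# derived from the self.config.get() calls below):
#
#   [Global]     temporary_config, build_timeout, accept_ssh_from, apt_repo,
#                git_config_name, git_config_mail, synnefo_repo,
#                synnefo_branch, patch_pydist, schema, pkgs_dir
#   [Deployment] server_name, kamaki_cloud, image_name, flavor_name, ssh_keys
#   [Unit Tests] component
#   [Burnin]     cmd_options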


def _run(cmd, verbose):
    """Run fabric with verbose level"""
    if verbose:
        args = ('running',)
    else:
        args = ('running', 'stdout',)
    with fabric.hide(*args):  # Used * or ** magic. pylint: disable-msg=W0142
        return fabric.run(cmd)


def _put(local, remote):
    """Run fabric put command without output"""
    with fabric.quiet():
        fabric.put(local, remote)


def _red(msg):
    """Red color"""
    #return "\x1b[31m" + str(msg) + "\x1b[0m"
    return str(msg)


def _yellow(msg):
    """Yellow color"""
    #return "\x1b[33m" + str(msg) + "\x1b[0m"
    return str(msg)


def _green(msg):
    """Green color"""
    #return "\x1b[32m" + str(msg) + "\x1b[0m"
    return str(msg)


def _check_fabric(fun):
    """Check if fabric env has been set"""
    def wrapper(self, *args, **kwargs):
        """wrapper function"""
        if not self.fabric_installed:
            self.setup_fabric()
        return fun(self, *args, **kwargs)
    return wrapper


def _check_kamaki(fun):
    """Check if kamaki has been initialized"""
    def wrapper(self, *args, **kwargs):
        """wrapper function"""
        if not self.kamaki_installed:
            self.setup_kamaki()
        return fun(self, *args, **kwargs)
    return wrapper


class _MyFormatter(logging.Formatter):
    """Logging Formatter"""
    def format(self, record):
        format_orig = self._fmt
        if record.levelno == logging.DEBUG:
            self._fmt = "  %(msg)s"
        elif record.levelno == logging.INFO:
            self._fmt = "%(msg)s"
        elif record.levelno == logging.WARNING:
            self._fmt = _yellow("[W] %(msg)s")
        elif record.levelno == logging.ERROR:
            self._fmt = _red("[E] %(msg)s")
        result = logging.Formatter.format(self, record)
        self._fmt = format_orig
        return result


# Too few public methods. pylint: disable-msg=R0903
class _InfoFilter(logging.Filter):
    """Logging Filter that allows DEBUG and INFO messages only"""
    def filter(self, rec):
        """The filter"""
        return rec.levelno in (logging.DEBUG, logging.INFO)


# Too many instance attributes. pylint: disable-msg=R0902
class SynnefoCI(object):
    """SynnefoCI python class"""

    def __init__(self, config_file=None, build_id=None, cloud=None):
        """Initialize the SynnefoCI class

        Setup logger, local_dir, config and kamaki
        """
        # Setup logger
        self.logger = logging.getLogger('synnefo-ci')
        self.logger.setLevel(logging.DEBUG)

        handler1 = logging.StreamHandler(sys.stdout)
        handler1.setLevel(logging.DEBUG)
        handler1.addFilter(_InfoFilter())
        handler1.setFormatter(_MyFormatter())
        handler2 = logging.StreamHandler(sys.stderr)
        handler2.setLevel(logging.WARNING)
        handler2.setFormatter(_MyFormatter())

        self.logger.addHandler(handler1)
        self.logger.addHandler(handler2)

        # Get our local dir
        self.ci_dir = os.path.dirname(os.path.abspath(__file__))
        self.repo_dir = os.path.dirname(self.ci_dir)

        # Read config file
        if config_file is None:
            config_file = DEFAULT_CONFIG_FILE
        if not os.path.isabs(config_file):
            config_file = os.path.join(self.ci_dir, config_file)
        self.config = ConfigParser()
        self.config.optionxform = str
        self.config.read(config_file)

        # Read temporary_config file
        temp_config = self.config.get('Global', 'temporary_config')
        self.temp_config = ConfigParser()
        self.temp_config.optionxform = str
        self.temp_config.read(temp_config)
        self.build_id = build_id
        self.logger.info("Will use \"%s\" as build id" % _green(self.build_id))

        # Set kamaki cloud
        if cloud is not None:
            self.kamaki_cloud = cloud
        elif self.config.has_option("Deployment", "kamaki_cloud"):
            kamaki_cloud = self.config.get("Deployment", "kamaki_cloud")
            if kamaki_cloud == "":
                self.kamaki_cloud = None
            else:
                self.kamaki_cloud = kamaki_cloud
        else:
            self.kamaki_cloud = None

        # Initialize variables
        self.fabric_installed = False
        self.kamaki_installed = False
        self.cyclades_client = None
        self.compute_client = None
        self.image_client = None

    def setup_kamaki(self):
        """Initialize kamaki

        Setup cyclades_client, image_client and compute_client
        """

        config = kamaki_config.Config()
        if self.kamaki_cloud is None:
            self.kamaki_cloud = config.get_global("default_cloud")

        self.logger.info("Setup kamaki client, using cloud '%s'.." %
                         self.kamaki_cloud)
        auth_url = config.get_cloud(self.kamaki_cloud, "url")
        self.logger.debug("Authentication URL is %s" % _green(auth_url))
        token = config.get_cloud(self.kamaki_cloud, "token")
        #self.logger.debug("Token is %s" % _green(token))

        astakos_client = AstakosClient(auth_url, token)

        cyclades_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Cyclades API url is %s" % _green(cyclades_url))
        self.cyclades_client = CycladesClient(cyclades_url, token)
        self.cyclades_client.CONNECTION_RETRY_LIMIT = 2

        image_url = \
            astakos_client.get_service_endpoints('image')['publicURL']
        self.logger.debug("Images API url is %s" % _green(image_url))
        self.image_client = ImageClient(image_url, token)
        self.image_client.CONNECTION_RETRY_LIMIT = 2

        compute_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Compute API url is %s" % _green(compute_url))
        self.compute_client = ComputeClient(compute_url, token)
        self.compute_client.CONNECTION_RETRY_LIMIT = 2

    def _wait_transition(self, server_id, current_status, new_status):
        """Wait for server to go from current_status to new_status"""
        self.logger.debug("Waiting for server to become %s" % new_status)
        timeout = self.config.getint('Global', 'build_timeout')
        sleep_time = 5
        while True:
            server = self.cyclades_client.get_server_details(server_id)
            if server['status'] == new_status:
                return server
            elif timeout < 0:
                self.logger.error(
                    "Waiting for server to become %s timed out" % new_status)
                self.destroy_server(False)
                sys.exit(-1)
            elif server['status'] == current_status:
                # Sleep for sleep_time secs and continue polling
                timeout = timeout - sleep_time
                time.sleep(sleep_time)
            else:
                self.logger.error(
                    "Server failed with status %s" % server['status'])
                self.destroy_server(False)
                sys.exit(-1)

    @_check_kamaki
    def destroy_server(self, wait=True):
        """Destroy slave server"""
        server_id = int(self.read_temp_config('server_id'))
        self.logger.info("Destroying server with id %s " % server_id)
        self.cyclades_client.delete_server(server_id)
        if wait:
            self._wait_transition(server_id, "ACTIVE", "DELETED")

    @_check_kamaki
    def create_server(self, image_id=None, flavor_name=None, ssh_keys=None):
        """Create slave server"""
        self.logger.info("Create a new server..")

        # Find a build_id to use
        if self.build_id is None:
            # If no build_id was given, find a unique one to use
            ids = self.temp_config.sections()
            if ids:
                max_id = int(max(self.temp_config.sections(), key=int))
                self.build_id = max_id + 1
            else:
                self.build_id = 1
        self.logger.debug("New build id \"%s\" was created"
                          % _green(self.build_id))

        # Find an image to use
        if image_id is None:
            image = self._find_image()
            self.logger.debug("Will use image \"%s\"" % _green(image['name']))
            image_id = image["id"]
        self.logger.debug("Image has id %s" % _green(image_id))
        # Find a flavor to use
        flavor_id = self._find_flavor(flavor_name)
        server = self.cyclades_client.create_server(
            self.config.get('Deployment', 'server_name'),
            flavor_id,
            image_id)
        server_id = server['id']
        self.write_temp_config('server_id', server_id)
        self.logger.debug("Server got id %s" % _green(server_id))
        server_user = server['metadata']['users']
        self.write_temp_config('server_user', server_user)
        self.logger.debug("Server's admin user is %s" % _green(server_user))
        server_passwd = server['adminPass']
        self.write_temp_config('server_passwd', server_passwd)

        server = self._wait_transition(server_id, "BUILD", "ACTIVE")
        self._get_server_ip_and_port(server)
        self._copy_ssh_keys(ssh_keys)

        self.setup_fabric()
        self.logger.info("Setup firewall")
        accept_ssh_from = self.config.get('Global', 'accept_ssh_from')
        if accept_ssh_from != "":
            self.logger.debug("Block ssh except from %s" % accept_ssh_from)
            cmd = """
            local_ip=$(/sbin/ifconfig eth0 | grep 'inet addr:' | \
                cut -d':' -f2 | cut -d' ' -f1)
            iptables -A INPUT -s localhost -j ACCEPT
            iptables -A INPUT -s $local_ip -j ACCEPT
            iptables -A INPUT -s {0} -p tcp --dport 22 -j ACCEPT
            iptables -A INPUT -p tcp --dport 22 -j DROP
            """.format(accept_ssh_from)
            _run(cmd, False)

    def _find_flavor(self, flavor_name):
        """Given a flavor_name (regex), find a flavor id to use"""
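        # Flavor names are matched with re.search(), so a pattern such as
        # "C2R2048" or simply "2048" (illustrative values) matches any flavor
        # whose name contains that string.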
        # Get a list of flavor names from config file
        flavor_names = self.config.get('Deployment', 'flavor_name').split(",")
        if flavor_name is not None:
            # If we have a flavor_name to use, add it to our list
            flavor_names.insert(0, flavor_name)

        flavors = self.compute_client.list_flavors()
        for flname in flavor_names:
            sflname = flname.strip()
            self.logger.debug("Try to find a flavor with name \"%s\"" % sflname)
            fls = [f for f in flavors
                   if re.search(sflname, f['name']) is not None]
            if fls:
                self.logger.debug("Will use %s with id %s"
                                  % (fls[0]['name'], fls[0]['id']))
                return fls[0]['id']

        self.logger.error("No matching flavor found.. aborting")
        sys.exit(1)

    def _find_image(self):
        """Find a suitable image to use

        It has to belong to one of the `DEFAULT_SYSTEM_IMAGES_UUID'
        users and contain the word given by `image_name' option.
        """
        image_name = self.config.get('Deployment', 'image_name').lower()
        images = self.image_client.list_public(detail=True)['images']
        # Select images by `system_uuid' user
        images = [x for x in images
                  if x['user_id'] in DEFAULT_SYSTEM_IMAGES_UUID]
        # Select images with `image_name' in their names
        images = [x for x in images
                  if x['name'].lower().find(image_name) != -1]
        # Let's select the first one
        return images[0]

    def _get_server_ip_and_port(self, server):
        """Compute server's IPv4 and ssh port number"""
        self.logger.info("Get server connection details..")
        server_ip = server['attachments'][0]['ipv4']
        if ".okeanos.io" in self.cyclades_client.base_url:
            tmp1 = int(server_ip.split(".")[2])
            tmp2 = int(server_ip.split(".")[3])
            server_ip = "gate.okeanos.io"
            server_port = 10000 + tmp1 * 256 + tmp2
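            # Worked example (illustrative): for an IPv4 of 10.0.5.7 the
            # forwarded ssh port would be 10000 + 5 * 256 + 7 = 11287.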
        else:
            server_port = 22
        self.write_temp_config('server_ip', server_ip)
        self.logger.debug("Server's IPv4 is %s" % _green(server_ip))
        self.write_temp_config('server_port', server_port)
        self.logger.debug("Server's ssh port is %s" % _green(server_port))
        self.logger.debug("Access server using \"ssh -p %s %s@%s\"" %
                          (server_port, server['metadata']['users'], server_ip))

    @_check_fabric
    def _copy_ssh_keys(self, ssh_keys):
        """Upload/Install ssh keys to server"""
        self.logger.debug("Check for authentication keys to use")
        if ssh_keys is None:
            ssh_keys = self.config.get("Deployment", "ssh_keys")

        if ssh_keys != "":
            self.logger.debug("Will use %s authentication keys file" % ssh_keys)
            keyfile = '/tmp/%s.pub' % fabric.env.user
            _run('mkdir -p ~/.ssh && chmod 700 ~/.ssh', False)
            if ssh_keys.startswith("http://") or \
                    ssh_keys.startswith("https://") or \
                    ssh_keys.startswith("ftp://"):
                cmd = """
                apt-get update
                apt-get install wget --yes
                wget {0} -O {1} --no-check-certificate
                """.format(ssh_keys, keyfile)
                _run(cmd, False)
            elif os.path.exists(ssh_keys):
                _put(ssh_keys, keyfile)
            else:
                self.logger.debug("No ssh keys found")
            _run('cat %s >> ~/.ssh/authorized_keys' % keyfile, False)
            _run('rm %s' % keyfile, False)
            self.logger.debug("Uploaded ssh authorized keys")
        else:
            self.logger.debug("No ssh keys found")

    def write_temp_config(self, option, value):
        """Write changes back to config file"""
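        # Illustrative layout of the temporary config after a build (option
        # names taken from the write_temp_config() calls in this module):
        #
        #     [<build_id>]
        #     created = <timestamp>
        #     modified = <timestamp>
        #     server_id = ...
        #     server_user = ...
        #     server_passwd = ...
        #     server_ip = ...
        #     server_port = ...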
        # If build_id section doesn't exist create a new one
        try:
            self.temp_config.add_section(str(self.build_id))
            creation_time = time.strftime("%a, %d %b %Y %X", time.localtime())
            self.write_temp_config("created", creation_time)
        except DuplicateSectionError:
            pass
        self.temp_config.set(str(self.build_id), option, str(value))
        curr_time = time.strftime("%a, %d %b %Y %X", time.localtime())
        self.temp_config.set(str(self.build_id), "modified", curr_time)
        temp_conf_file = self.config.get('Global', 'temporary_config')
        with open(temp_conf_file, 'wb') as tcf:
            self.temp_config.write(tcf)

    def read_temp_config(self, option):
        """Read from temporary_config file"""
        # If build_id is None use the latest one
        if self.build_id is None:
            ids = self.temp_config.sections()
            if ids:
                self.build_id = int(ids[-1])
            else:
                self.logger.error("No sections in temporary config file")
                sys.exit(1)
            self.logger.debug("Will use \"%s\" as build id"
                              % _green(self.build_id))
        # Read specified option
        return self.temp_config.get(str(self.build_id), option)

    def setup_fabric(self):
        """Setup fabric environment"""
        self.logger.info("Setup fabric parameters..")
        fabric.env.user = self.read_temp_config('server_user')
        fabric.env.host_string = self.read_temp_config('server_ip')
        fabric.env.port = int(self.read_temp_config('server_port'))
        fabric.env.password = self.read_temp_config('server_passwd')
        fabric.env.connection_attempts = 10
        fabric.env.shell = "/bin/bash -c"
        fabric.env.disable_known_hosts = True
        fabric.env.output_prefix = None

    def _check_hash_sum(self, localfile, remotefile):
        """Check hash sums of two files"""
        self.logger.debug("Check hash sum for local file %s" % localfile)
        hash1 = os.popen("sha256sum %s" % localfile).read().split(' ')[0]
        self.logger.debug("Local file has sha256 hash %s" % hash1)
        self.logger.debug("Check hash sum for remote file %s" % remotefile)
        hash2 = _run("sha256sum %s" % remotefile, False)
        hash2 = hash2.split(' ')[0]
        self.logger.debug("Remote file has sha256 hash %s" % hash2)
        if hash1 != hash2:
            self.logger.error("Hashes differ.. aborting")
            sys.exit(-1)

    @_check_fabric
    def clone_repo(self):
        """Clone Synnefo repo from slave server"""
        self.logger.info("Configure repositories on remote server..")
        self.logger.debug("Setup apt, install curl and git")
        cmd = """
        echo 'APT::Install-Suggests "false";' >> /etc/apt/apt.conf
        apt-get update
        apt-get install curl git --yes
        echo -e "\n\ndeb {0}" >> /etc/apt/sources.list
        curl https://dev.grnet.gr/files/apt-grnetdev.pub | apt-key add -
        apt-get update
        git config --global user.name {1}
        git config --global user.email {2}
        """.format(self.config.get('Global', 'apt_repo'),
                   self.config.get('Global', 'git_config_name'),
                   self.config.get('Global', 'git_config_mail'))
        _run(cmd, False)

        synnefo_repo = self.config.get('Global', 'synnefo_repo')
        synnefo_branch = self.config.get("Global", "synnefo_branch")
        if synnefo_branch == "":
            synnefo_branch = \
                subprocess.Popen(
                    ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                    stdout=subprocess.PIPE).communicate()[0].strip()
            if synnefo_branch == "HEAD":
                synnefo_branch = \
                    subprocess.Popen(
                        ["git", "rev-parse", "--short", "HEAD"],
                        stdout=subprocess.PIPE).communicate()[0].strip()
        self.logger.info("Will use branch %s" % synnefo_branch)
        # Currently cloning synnefo can fail unexpectedly
        cloned = False
        for i in range(10):
            self.logger.debug("Clone synnefo from %s" % synnefo_repo)
            try:
                _run("git clone %s synnefo" % synnefo_repo, False)
                cloned = True
                break
            except BaseException:
                self.logger.warning("Cloning synnefo failed.. retrying %s"
                                    % i)
        cmd = """
        cd synnefo
        for branch in `git branch -a | grep remotes | \
                       grep -v HEAD | grep -v master`; do
            git branch --track ${branch##*/} $branch
        done
        git checkout %s
        """ % (synnefo_branch)
        _run(cmd, False)

        if not cloned:
            self.logger.error("Cannot clone Synnefo repo.")
            sys.exit(-1)

    @_check_fabric
    def build_synnefo(self):
        """Build Synnefo packages"""
        self.logger.info("Build Synnefo packages..")
        self.logger.debug("Install development packages")
        cmd = """
        apt-get update
        apt-get install zlib1g-dev dpkg-dev debhelper git-buildpackage \
                python-dev python-all python-pip --yes
        pip install devflow
        """
        _run(cmd, False)

        if self.config.get('Global', 'patch_pydist') == "True":
            self.logger.debug("Patch pydist.py module")
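            # The sed below widens the package-name pattern in debpython's
            # pydist.py so that names containing a dash are also accepted.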
            cmd = r"""
            sed -r -i 's/(\(\?P<name>\[A-Za-z\]\[A-Za-z0-9_\.)/\1\\\-/' \
                /usr/share/python/debpython/pydist.py
            """
            _run(cmd, False)

        # Build synnefo packages
        self.logger.debug("Build synnefo packages")
        cmd = """
        devflow-autopkg snapshot -b ~/synnefo_build-area --no-sign
        """
        with fabric.cd("synnefo"):
            _run(cmd, True)

        # Install snf-deploy package
        self.logger.debug("Install snf-deploy package")
        cmd = """
        dpkg -i snf-deploy*.deb
        apt-get -f install --yes
        """
        with fabric.cd("synnefo_build-area"):
            with fabric.settings(warn_only=True):
                _run(cmd, True)

        # Setup synnefo packages for snf-deploy
        self.logger.debug("Copy synnefo debs to snf-deploy packages dir")
        cmd = """
        cp ~/synnefo_build-area/*.deb /var/lib/snf-deploy/packages/
        """
        _run(cmd, False)

    @_check_fabric
    def build_documentation(self):
        """Build Synnefo documentation"""
        self.logger.info("Build Synnefo documentation..")
        _run("pip install -U Sphinx", False)
        with fabric.cd("synnefo"):
            _run("devflow-update-version; "
                 "./ci/make_docs.sh synnefo_documentation", False)

    def fetch_documentation(self, dest=None):
        """Fetch Synnefo documentation"""
        self.logger.info("Fetch Synnefo documentation..")
        if dest is None:
            dest = "synnefo_documentation"
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo/synnefo_documentation", dest)
        self.logger.info("Downloaded documentation to %s" %
                         _green(dest))

    @_check_fabric
    def deploy_synnefo(self, schema=None):
        """Deploy Synnefo using snf-deploy"""
        self.logger.info("Deploy Synnefo..")
        if schema is None:
            schema = self.config.get('Global', 'schema')
        self.logger.debug("Will use %s schema" % schema)

        schema_dir = os.path.join(self.ci_dir, "schemas/%s" % schema)
        if not (os.path.exists(schema_dir) and os.path.isdir(schema_dir)):
            raise ValueError("Unknown schema: %s" % schema)

        self.logger.debug("Upload schema files to server")
        _put(os.path.join(schema_dir, "*"), "/etc/snf-deploy/")

        self.logger.debug("Change password in nodes.conf file")
        cmd = """
        sed -i 's/^password =.*/password = {0}/' /etc/snf-deploy/nodes.conf
        """.format(fabric.env.password)
        _run(cmd, False)

        self.logger.debug("Run snf-deploy")
        cmd = """
        snf-deploy --disable-colors --autoconf all
        """
        _run(cmd, True)

    @_check_fabric
    def unit_test(self):
        """Run Synnefo unit test suite"""
        self.logger.info("Run Synnefo unit test suite")
        component = self.config.get('Unit Tests', 'component')

        self.logger.debug("Install needed packages")
        cmd = """
        pip install mock
        pip install factory_boy
        """
        _run(cmd, False)

        self.logger.debug("Upload tests.sh file")
        unit_tests_file = os.path.join(self.ci_dir, "tests.sh")
        _put(unit_tests_file, ".")

        self.logger.debug("Run unit tests")
        cmd = """
        bash tests.sh {0}
        """.format(component)
        _run(cmd, True)

    @_check_fabric
    def run_burnin(self):
        """Run burnin functional test suite"""
        self.logger.info("Run Burnin functional test suite")
        cmd = """
        auth_url=$(grep -e '^url =' .kamakirc | cut -d' ' -f3)
        token=$(grep -e '^token =' .kamakirc | cut -d' ' -f3)
        images_user=$(kamaki image list -l | grep owner | \
                      cut -d':' -f2 | tr -d ' ')
        snf-burnin --auth-url=$auth_url --token=$token \
            --force-flavor=2 --image-id=all \
            --system-images-user=$images_user \
            {0}
        log_folder=$(ls -1d /var/log/burnin/* | tail -n1)
        for i in $(ls $log_folder/*/details*); do
            echo -e "\\n\\n"
            echo -e "***** $i\\n"
            cat $i
        done
        """.format(self.config.get('Burnin', 'cmd_options'))
        _run(cmd, True)

    @_check_fabric
    def fetch_compressed(self, src, dest=None):
        """Create a tarball and fetch it locally"""
        self.logger.debug("Creating tarball of %s" % src)
        basename = os.path.basename(src)
        tar_file = basename + ".tgz"
        cmd = "tar czf %s %s" % (tar_file, src)
        _run(cmd, False)
        if not os.path.exists(dest):
            os.makedirs(dest)

        tmp_dir = tempfile.mkdtemp()
        fabric.get(tar_file, tmp_dir)

        dest_file = os.path.join(tmp_dir, tar_file)
        self._check_hash_sum(dest_file, tar_file)
        self.logger.debug("Untar packages file %s" % dest_file)
        cmd = """
        cd %s
        tar xzf %s
        cp -r %s/* %s
        rm -r %s
        """ % (tmp_dir, tar_file, src, dest, tmp_dir)
        os.system(cmd)
        self.logger.info("Downloaded %s to %s" %
                         (src, _green(dest)))

    @_check_fabric
    def fetch_packages(self, dest=None):
        """Fetch Synnefo packages"""
        if dest is None:
            dest = self.config.get('Global', 'pkgs_dir')
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo_build-area", dest)
        self.logger.info("Downloaded debian packages to %s" %
                         _green(dest))