#!/usr/bin/env python

"""
Synnefo ci utils module
"""

import os
import re
import sys
import time
import logging
import fabric.api as fabric
import subprocess
import tempfile
from ConfigParser import ConfigParser, DuplicateSectionError

from kamaki.cli import config as kamaki_config
from kamaki.clients.astakos import AstakosClient
from kamaki.clients.cyclades import CycladesClient
from kamaki.clients.image import ImageClient
from kamaki.clients.compute import ComputeClient

DEFAULT_CONFIG_FILE = "new_config"
# UUID of owner of system images
DEFAULT_SYSTEM_IMAGES_UUID = [
    "25ecced9-bf53-4145-91ee-cf47377e9fb2",  # production (okeanos.grnet.gr)
    "04cbe33f-29b7-4ef1-94fb-015929e5fc06",  # testing (okeanos.io)
    ]
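
# Rough usage sketch (illustrative only): the SynnefoCI methods below are
# meant to be driven by an external script, roughly in this order; the
# argument values here are assumptions, not requirements.
#
#   ci = SynnefoCI(config_file=None, build_id=None, cloud="my_cloud")
#   ci.create_server()
#   ci.clone_repo()
#   ci.build_synnefo()
#   ci.deploy_synnefo()
#   ci.unit_test()
#   ci.run_burnin()
#   ci.fetch_packages()
#   ci.destroy_server()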


def _run(cmd, verbose):
    """Run fabric with verbose level"""
    if verbose:
        args = ('running',)
    else:
        args = ('running', 'stdout',)
    with fabric.hide(*args):  # Used * or ** magic. pylint: disable-msg=W0142
        return fabric.run(cmd)


def _put(local, remote):
    """Run fabric put command without output"""
    with fabric.quiet():
        fabric.put(local, remote)


def _red(msg):
    """Red color"""
    #return "\x1b[31m" + str(msg) + "\x1b[0m"
    return str(msg)


def _yellow(msg):
    """Yellow color"""
    #return "\x1b[33m" + str(msg) + "\x1b[0m"
    return str(msg)


def _green(msg):
    """Green color"""
    #return "\x1b[32m" + str(msg) + "\x1b[0m"
    return str(msg)


def _check_fabric(fun):
    """Check if fabric env has been set"""
    def wrapper(self, *args, **kwargs):
        """wrapper function"""
        if not self.fabric_installed:
            self.setup_fabric()
            self.fabric_installed = True
        return fun(self, *args, **kwargs)
    return wrapper


def _check_kamaki(fun):
    """Check if kamaki has been initialized"""
    def wrapper(self, *args, **kwargs):
        """wrapper function"""
        if not self.kamaki_installed:
            self.setup_kamaki()
            self.kamaki_installed = True
        return fun(self, *args, **kwargs)
    return wrapper


class _MyFormatter(logging.Formatter):
    """Logging Formatter"""
    def format(self, record):
        format_orig = self._fmt
        if record.levelno == logging.DEBUG:
            self._fmt = "  %(msg)s"
        elif record.levelno == logging.INFO:
            self._fmt = "%(msg)s"
        elif record.levelno == logging.WARNING:
            self._fmt = _yellow("[W] %(msg)s")
        elif record.levelno == logging.ERROR:
            self._fmt = _red("[E] %(msg)s")
        result = logging.Formatter.format(self, record)
        self._fmt = format_orig
        return result


# Too few public methods. pylint: disable-msg=R0903
class _InfoFilter(logging.Filter):
    """Logging Filter that allows DEBUG and INFO messages only"""
    def filter(self, rec):
        """The filter"""
        return rec.levelno in (logging.DEBUG, logging.INFO)


# Too many instance attributes. pylint: disable-msg=R0902
class SynnefoCI(object):
    """SynnefoCI python class"""

    def __init__(self, config_file=None, build_id=None, cloud=None):
        """ Initialize SynnefoCI python class

        Setup logger, local_dir, config and kamaki
        """
        # Setup logger
        self.logger = logging.getLogger('synnefo-ci')
        self.logger.setLevel(logging.DEBUG)

        handler1 = logging.StreamHandler(sys.stdout)
        handler1.setLevel(logging.DEBUG)
        handler1.addFilter(_InfoFilter())
        handler1.setFormatter(_MyFormatter())
        handler2 = logging.StreamHandler(sys.stderr)
        handler2.setLevel(logging.WARNING)
        handler2.setFormatter(_MyFormatter())

        self.logger.addHandler(handler1)
        self.logger.addHandler(handler2)

        # Get our local dir
        self.ci_dir = os.path.dirname(os.path.abspath(__file__))
        self.repo_dir = os.path.dirname(self.ci_dir)

        # Read config file
        if config_file is None:
            config_file = DEFAULT_CONFIG_FILE
        if not os.path.isabs(config_file):
            config_file = os.path.join(self.ci_dir, config_file)
        self.config = ConfigParser()
        self.config.optionxform = str
        self.config.read(config_file)

        # Read temporary_config file
        temp_config = self.config.get('Global', 'temporary_config')
        self.temp_config = ConfigParser()
        self.temp_config.optionxform = str
        self.temp_config.read(os.path.expanduser(temp_config))
        self.build_id = build_id
        self.logger.info("Will use \"%s\" as build id" % _green(self.build_id))

        # Set kamaki cloud
        if cloud is not None:
            self.kamaki_cloud = cloud
        elif self.config.has_option("Deployment", "kamaki_cloud"):
            kamaki_cloud = self.config.get("Deployment", "kamaki_cloud")
            if kamaki_cloud == "":
                self.kamaki_cloud = None
            else:
                self.kamaki_cloud = kamaki_cloud
        else:
            self.kamaki_cloud = None

        # Initialize variables
        self.fabric_installed = False
        self.kamaki_installed = False
        self.cyclades_client = None
        self.compute_client = None
        self.image_client = None

    def setup_kamaki(self):
        """Initialize kamaki

        Setup cyclades_client, image_client and compute_client
        """

        config = kamaki_config.Config()
        if self.kamaki_cloud is None:
            self.kamaki_cloud = config.get_global("default_cloud")

        self.logger.info("Setup kamaki client, using cloud '%s'.." %
                         self.kamaki_cloud)
        auth_url = config.get_cloud(self.kamaki_cloud, "url")
        self.logger.debug("Authentication URL is %s" % _green(auth_url))
        token = config.get_cloud(self.kamaki_cloud, "token")
        #self.logger.debug("Token is %s" % _green(token))

        astakos_client = AstakosClient(auth_url, token)

        cyclades_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Cyclades API url is %s" % _green(cyclades_url))
        self.cyclades_client = CycladesClient(cyclades_url, token)
        self.cyclades_client.CONNECTION_RETRY_LIMIT = 2

        image_url = \
            astakos_client.get_service_endpoints('image')['publicURL']
        self.logger.debug("Images API url is %s" % _green(image_url))
        self.image_client = ImageClient(image_url, token)
        self.image_client.CONNECTION_RETRY_LIMIT = 2

        compute_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Compute API url is %s" % _green(compute_url))
        self.compute_client = ComputeClient(compute_url, token)
        self.compute_client.CONNECTION_RETRY_LIMIT = 2

    def _wait_transition(self, server_id, current_status, new_status):
        """Wait for server to go from current_status to new_status"""
        self.logger.debug("Waiting for server to become %s" % new_status)
        timeout = self.config.getint('Global', 'build_timeout')
        sleep_time = 5
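        # Poll the server status every `sleep_time' seconds until it reaches
        # new_status, the build_timeout expires, or it enters an unexpected
        # state (in which case the slave server is destroyed and we exit)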
        while True:
            server = self.cyclades_client.get_server_details(server_id)
            if server['status'] == new_status:
                return server
            elif timeout < 0:
                self.logger.error(
                    "Waiting for server to become %s timed out" % new_status)
                self.destroy_server(False)
                sys.exit(-1)
            elif server['status'] == current_status:
                # Sleep for #n secs and continue
                timeout = timeout - sleep_time
                time.sleep(sleep_time)
            else:
                self.logger.error(
                    "Server failed with status %s" % server['status'])
                self.destroy_server(False)
                sys.exit(-1)

    @_check_kamaki
    def destroy_server(self, wait=True):
        """Destroy slave server"""
        server_id = int(self.read_temp_config('server_id'))
        self.logger.info("Destoying server with id %s " % server_id)
        self.cyclades_client.delete_server(server_id)
        if wait:
            self._wait_transition(server_id, "ACTIVE", "DELETED")

    @_check_kamaki
    def create_server(self, image=None, flavor=None, ssh_keys=None):
        """Create slave server"""
        self.logger.info("Create a new server..")

        # Find a build_id to use
        if self.build_id is None:
252
            # Find a uniq build_id to use
253
            ids = self.temp_config.sections()
254
            if ids:
255
                max_id = int(max(self.temp_config.sections(), key=int))
256
                self.build_id = max_id + 1
257
            else:
258
                self.build_id = 1
259
        self.logger.debug("New build id \"%s\" was created"
260
                          % _green(self.build_id))
261

    
262
        # Find an image to use
263
        image_id = self._find_image(image)
264
        # Find a flavor to use
265
        flavor_id = self._find_flavor(flavor)
266

    
267
        # Create Server
268
        server_name = self.config.get("Deployment", "server_name")
269
        server = self.cyclades_client.create_server(
270
            "%s(BID: %s)" % (server_name, self.build_id),
271
            flavor_id,
272
            image_id)
273
        server_id = server['id']
274
        self.write_temp_config('server_id', server_id)
275
        self.logger.debug("Server got id %s" % _green(server_id))
276
        server_user = server['metadata']['users']
277
        self.write_temp_config('server_user', server_user)
278
        self.logger.debug("Server's admin user is %s" % _green(server_user))
279
        server_passwd = server['adminPass']
280
        self.write_temp_config('server_passwd', server_passwd)
281

    
282
        server = self._wait_transition(server_id, "BUILD", "ACTIVE")
283
        self._get_server_ip_and_port(server)
284
        self._copy_ssh_keys(ssh_keys)
285

    
286
        # Setup Firewall
287
        self.setup_fabric()
288
        self.logger.info("Setup firewall")
289
        accept_ssh_from = self.config.get('Global', 'accept_ssh_from')
290
        if accept_ssh_from != "":
291
            self.logger.debug("Block ssh except from %s" % accept_ssh_from)
            cmd = """
            local_ip=$(/sbin/ifconfig eth0 | grep 'inet addr:' | \
                cut -d':' -f2 | cut -d' ' -f1)
            iptables -A INPUT -s localhost -j ACCEPT
            iptables -A INPUT -s $local_ip -j ACCEPT
            iptables -A INPUT -s {0} -p tcp --dport 22 -j ACCEPT
            iptables -A INPUT -p tcp --dport 22 -j DROP
            """.format(accept_ssh_from)
            _run(cmd, False)

        # Setup apt, download packages
        self.logger.debug("Setup apt. Install x2goserver and firefox")
        cmd = """
        echo 'APT::Install-Suggests "false";' >> /etc/apt/apt.conf
        apt-get update
        apt-get install curl --yes
        echo -e "\n\n{0}" >> /etc/apt/sources.list
        # Synnefo repo's key
        curl https://dev.grnet.gr/files/apt-grnetdev.pub | apt-key add -
        # X2GO Key
        apt-key adv --recv-keys --keyserver keys.gnupg.net E1F958385BFE2B6E
        apt-get install x2go-keyring --yes
        apt-get update
        apt-get install x2goserver x2goserver-xsession iceweasel --yes
        """.format(self.config.get('Global', 'apt_repo'))
        _run(cmd, False)

    def _find_flavor(self, flavor=None):
        """Find a suitable flavor to use

        Search by name (reg expression) or by id
        """
        # Get a list of flavors from config file
        flavors = self.config.get('Deployment', 'flavors').split(",")
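        # Each entry is expected to be of the form "name:<regex>" or
        # "id:<flavor id>"; see the parsing below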
        if flavor is not None:
            # If we have a flavor_name to use, add it to our list
            flavors.insert(0, flavor)

        list_flavors = self.compute_client.list_flavors()
        for flv in flavors:
            [flv_type, flv_value] = flv.strip().split(':')
            if flv_type == "name":
                # Filter flavors by name
                self.logger.debug(
                    "Trying to find a flavor with name \"%s\"" % flv_value)
                list_flvs = \
                    [f for f in list_flavors
                     if re.search(flv_value, f['name'], flags=re.I) is not None]
            elif flv_type == "id":
                # Filter flavors by id
                self.logger.debug(
                    "Trying to find a flavor with id \"%s\"" % flv_value)
                list_flvs = \
                    [f for f in list_flavors
                     if f['id'].lower() == flv_value.lower()]
            else:
                self.logger.error("Unrecognized flavor type %s" % flv_type)
                sys.exit(1)

            # Check if we found one
            if list_flvs:
                self.logger.debug("Will use \"%s\" with id \"%s\""
                                  % (list_flvs[0]['name'], list_flvs[0]['id']))
                return list_flvs[0]['id']

        self.logger.error("No matching flavor found.. aborting")
        sys.exit(1)

    def _find_image(self, image=None):
        """Find a suitable image to use

        In case of search by name, the image has to belong to one
        of the `DEFAULT_SYSTEM_IMAGES_UUID' users.
        In case of search by id it only has to exist.
        """
        # Get a list of images from config file
        images = self.config.get('Deployment', 'images').split(",")
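        # Same "name:<regex>" / "id:<image id>" format as the flavors above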
        if image is not None:
            # If we have an image from command line, add it to our list
            images.insert(0, image)

        list_images = self.image_client.list_public(detail=True)['images']
        for img in images:
            [img_type, img_value] = img.strip().split(':')
            if img_type == "name":
                # Filter images by name
                self.logger.debug(
                    "Trying to find an image with name \"%s\"" % img_value)
                list_imgs = \
                    [i for i in list_images
                     if i['user_id'] in DEFAULT_SYSTEM_IMAGES_UUID and
                        re.search(img_value, i['name'], flags=re.I) is not None]
            elif img_type == "id":
                # Filter images by id
                self.logger.debug(
                    "Trying to find an image with id \"%s\"" % img_value)
                list_imgs = \
                    [i for i in list_images
                     if i['id'].lower() == img_value.lower()]
            else:
                self.logger.error("Unrecognized image type %s" % img_type)
                sys.exit(1)

            # Check if we found one
            if list_imgs:
                self.logger.debug("Will use \"%s\" with id \"%s\""
                                  % (list_imgs[0]['name'], list_imgs[0]['id']))
                return list_imgs[0]['id']

        # We didn't find one
        self.logger.error("No matching image found.. aborting")
        sys.exit(1)

    def _get_server_ip_and_port(self, server):
        """Compute server's IPv4 and ssh port number"""
        self.logger.info("Get server connection details..")
        server_ip = server['attachments'][0]['ipv4']
        if ".okeanos.io" in self.cyclades_client.base_url:
            tmp1 = int(server_ip.split(".")[2])
            tmp2 = int(server_ip.split(".")[3])
            server_ip = "gate.okeanos.io"
            server_port = 10000 + tmp1 * 256 + tmp2
        else:
            server_port = 22
        self.write_temp_config('server_ip', server_ip)
        self.logger.debug("Server's IPv4 is %s" % _green(server_ip))
        self.write_temp_config('server_port', server_port)
        self.logger.debug("Server's ssh port is %s" % _green(server_port))
        self.logger.debug("Access server using \"ssh -X -p %s %s@%s\"" %
                          (server_port, server['metadata']['users'], server_ip))

    @_check_fabric
    def _copy_ssh_keys(self, ssh_keys):
        """Upload/Install ssh keys to server"""
        self.logger.debug("Check for authentication keys to use")
        if ssh_keys is None:
            ssh_keys = self.config.get("Deployment", "ssh_keys")

        if ssh_keys != "":
            ssh_keys = os.path.expanduser(ssh_keys)
            self.logger.debug("Will use %s authentication keys file" % ssh_keys)
            keyfile = '/tmp/%s.pub' % fabric.env.user
            _run('mkdir -p ~/.ssh && chmod 700 ~/.ssh', False)
            if ssh_keys.startswith("http://") or \
                    ssh_keys.startswith("https://") or \
                    ssh_keys.startswith("ftp://"):
                cmd = """
                apt-get update
                apt-get install wget --yes
                wget {0} -O {1} --no-check-certificate
                """.format(ssh_keys, keyfile)
                _run(cmd, False)
            elif os.path.exists(ssh_keys):
                _put(ssh_keys, keyfile)
            else:
                self.logger.debug("No ssh keys found")
                return
            _run('cat %s >> ~/.ssh/authorized_keys' % keyfile, False)
            _run('rm %s' % keyfile, False)
            self.logger.debug("Uploaded ssh authorized keys")
        else:
            self.logger.debug("No ssh keys found")

    def write_temp_config(self, option, value):
        """Write changes back to config file"""
        # If build_id section doesn't exist create a new one
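        # add_section() raises DuplicateSectionError if the section exists
        # already; on first creation we also record a "created" timestamp
        # (the recursive call then hits the except branch and just sets the
        # option)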
        try:
            self.temp_config.add_section(str(self.build_id))
            creation_time = time.strftime("%a, %d %b %Y %X", time.localtime())
            self.write_temp_config("created", creation_time)
        except DuplicateSectionError:
            pass
        self.temp_config.set(str(self.build_id), option, str(value))
        curr_time = time.strftime("%a, %d %b %Y %X", time.localtime())
        self.temp_config.set(str(self.build_id), "modified", curr_time)
        temp_conf_file = self.config.get('Global', 'temporary_config')
        with open(temp_conf_file, 'wb') as tcf:
            self.temp_config.write(tcf)

    def read_temp_config(self, option):
        """Read from temporary_config file"""
        # If build_id is None use the latest one
        if self.build_id is None:
            ids = self.temp_config.sections()
            if ids:
                self.build_id = int(ids[-1])
            else:
                self.logger.error("No sections in temporary config file")
                sys.exit(1)
            self.logger.debug("Will use \"%s\" as build id"
                              % _green(self.build_id))
        # Read specified option
        return self.temp_config.get(str(self.build_id), option)

    def setup_fabric(self):
        """Setup fabric environment"""
        self.logger.info("Setup fabric parameters..")
        fabric.env.user = self.read_temp_config('server_user')
        fabric.env.host_string = self.read_temp_config('server_ip')
        fabric.env.port = int(self.read_temp_config('server_port'))
        fabric.env.password = self.read_temp_config('server_passwd')
        fabric.env.connection_attempts = 10
        fabric.env.shell = "/bin/bash -c"
        fabric.env.disable_known_hosts = True
        fabric.env.output_prefix = None

    def _check_hash_sum(self, localfile, remotefile):
        """Check hash sums of two files"""
        self.logger.debug("Check hash sum for local file %s" % localfile)
        hash1 = os.popen("sha256sum %s" % localfile).read().split(' ')[0]
        self.logger.debug("Local file has sha256 hash %s" % hash1)
        self.logger.debug("Check hash sum for remote file %s" % remotefile)
        hash2 = _run("sha256sum %s" % remotefile, False)
        hash2 = hash2.split(' ')[0]
        self.logger.debug("Remote file has sha256 hash %s" % hash2)
        if hash1 != hash2:
            self.logger.error("Hashes differ.. aborting")
            sys.exit(-1)

    @_check_fabric
    def clone_repo(self, local_repo=False):
        """Clone Synnefo repo from slave server"""
        self.logger.info("Configure repositories on remote server..")
        self.logger.debug("Install/Setup git")
        cmd = """
        apt-get install git --yes
        git config --global user.name {0}
        git config --global user.email {1}
        """.format(self.config.get('Global', 'git_config_name'),
                   self.config.get('Global', 'git_config_mail'))
        _run(cmd, False)

        # Find synnefo_repo and synnefo_branch to use
        synnefo_repo = self.config.get('Global', 'synnefo_repo')
        synnefo_branch = self.config.get("Global", "synnefo_branch")
        if synnefo_branch == "":
            synnefo_branch = \
                subprocess.Popen(
                    ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                    stdout=subprocess.PIPE).communicate()[0].strip()
            if synnefo_branch == "HEAD":
                synnefo_branch = \
                    subprocess.Popen(
                        ["git", "rev-parse", "--short", "HEAD"],
                        stdout=subprocess.PIPE).communicate()[0].strip()
        self.logger.info("Will use branch %s" % synnefo_branch)

        if local_repo or synnefo_branch == "":
            # Use local_repo
            self.logger.debug("Push local repo to server")
            # Firstly create the remote repo
            _run("git init synnefo", False)
            # Then push our local repo over ssh
            # We have to pass some arguments to ssh command
            # namely to disable host checking.
            (temp_ssh_file_handle, temp_ssh_file) = tempfile.mkstemp()
            os.close(temp_ssh_file_handle)
            cmd = """
            echo 'exec ssh -o "StrictHostKeyChecking no" \
                           -o "UserKnownHostsFile /dev/null" \
                           -q "$@"' > {4}
            chmod u+x {4}
            export GIT_SSH="{4}"
            echo "{0}" | git push --mirror ssh://{1}@{2}:{3}/~/synnefo
            rm -f {4}
            """.format(fabric.env.password,
                       fabric.env.user,
                       fabric.env.host_string,
                       fabric.env.port,
                       temp_ssh_file)
            os.system(cmd)
        else:
            # Clone Synnefo from remote repo
            # Currently cloning synnefo can fail unexpectedly
            cloned = False
            for i in range(10):
                self.logger.debug("Clone synnefo from %s" % synnefo_repo)
                try:
                    _run("git clone %s synnefo" % synnefo_repo, False)
                    cloned = True
                    break
                except BaseException:
                    self.logger.warning(
                        "Cloning synnefo failed.. retrying %s" % i)
            if not cloned:
                self.logger.error("Cannot clone Synnefo repo.")
                sys.exit(-1)

        # Checkout the desired synnefo_branch
        self.logger.debug("Checkout \"%s\" branch/commit" % synnefo_branch)
        cmd = """
        cd synnefo
        for branch in `git branch -a | grep remotes | \
                       grep -v HEAD | grep -v master`; do
            git branch --track ${branch##*/} $branch
        done
        git checkout %s
        """ % (synnefo_branch)
        _run(cmd, False)

    @_check_fabric
    def build_synnefo(self):
        """Build Synnefo packages"""
        self.logger.info("Build Synnefo packages..")
        self.logger.debug("Install development packages")
        cmd = """
        apt-get update
        apt-get install zlib1g-dev dpkg-dev debhelper git-buildpackage \
                python-dev python-all python-pip --yes
        pip install devflow
        """
        _run(cmd, False)

        if self.config.get('Global', 'patch_pydist') == "True":
            self.logger.debug("Patch pydist.py module")
            cmd = r"""
            sed -r -i 's/(\(\?P<name>\[A-Za-z\]\[A-Za-z0-9_\.)/\1\\\-/' \
                /usr/share/python/debpython/pydist.py
            """
            _run(cmd, False)

        # Build synnefo packages
        self.logger.debug("Build synnefo packages")
        cmd = """
        devflow-autopkg snapshot -b ~/synnefo_build-area --no-sign
        """
        with fabric.cd("synnefo"):
            _run(cmd, True)

        # Install snf-deploy package
        self.logger.debug("Install snf-deploy package")
        cmd = """
        dpkg -i snf-deploy*.deb
        apt-get -f install --yes
        """
        with fabric.cd("synnefo_build-area"):
            with fabric.settings(warn_only=True):
                _run(cmd, True)

        # Setup synnefo packages for snf-deploy
        self.logger.debug("Copy synnefo debs to snf-deploy packages dir")
        cmd = """
        cp ~/synnefo_build-area/*.deb /var/lib/snf-deploy/packages/
        """
        _run(cmd, False)

    @_check_fabric
    def build_documentation(self):
        """Build Synnefo documentation"""
        self.logger.info("Build Synnefo documentation..")
        _run("pip install -U Sphinx", False)
        with fabric.cd("synnefo"):
            _run("devflow-update-version; "
                 "./ci/make_docs.sh synnefo_documentation", False)

    def fetch_documentation(self, dest=None):
        """Fetch Synnefo documentation"""
        self.logger.info("Fetch Synnefo documentation..")
        if dest is None:
            dest = "synnefo_documentation"
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo/synnefo_documentation", dest)
        self.logger.info("Downloaded documentation to %s" %
                         _green(dest))

    @_check_fabric
    def deploy_synnefo(self, schema=None):
        """Deploy Synnefo using snf-deploy"""
        self.logger.info("Deploy Synnefo..")
        if schema is None:
            schema = self.config.get('Global', 'schema')
        self.logger.debug("Will use \"%s\" schema" % schema)

        schema_dir = os.path.join(self.ci_dir, "schemas/%s" % schema)
        if not (os.path.exists(schema_dir) and os.path.isdir(schema_dir)):
            raise ValueError("Unknown schema: %s" % schema)

        self.logger.debug("Upload schema files to server")
        _put(os.path.join(schema_dir, "*"), "/etc/snf-deploy/")

        self.logger.debug("Change password in nodes.conf file")
        cmd = """
        sed -i 's/^password =.*/password = {0}/' /etc/snf-deploy/nodes.conf
        """.format(fabric.env.password)
        _run(cmd, False)

        self.logger.debug("Run snf-deploy")
        cmd = """
        snf-deploy --disable-colors --autoconf all
        """
        _run(cmd, True)

    @_check_fabric
    def unit_test(self):
        """Run Synnefo unit test suite"""
        self.logger.info("Run Synnefo unit test suite")
        component = self.config.get('Unit Tests', 'component')

        self.logger.debug("Install needed packages")
        cmd = """
        pip install mock
        pip install factory_boy
        """
        _run(cmd, False)

        self.logger.debug("Upload tests.sh file")
        unit_tests_file = os.path.join(self.ci_dir, "tests.sh")
        _put(unit_tests_file, ".")

        self.logger.debug("Run unit tests")
        cmd = """
        bash tests.sh {0}
        """.format(component)
        _run(cmd, True)

    @_check_fabric
    def run_burnin(self):
        """Run burnin functional test suite"""
        self.logger.info("Run Burnin functional test suite")
        cmd = """
        auth_url=$(grep -e '^url =' .kamakirc | cut -d' ' -f3)
        token=$(grep -e '^token =' .kamakirc | cut -d' ' -f3)
        images_user=$(kamaki image list -l | grep owner | \
                      cut -d':' -f2 | tr -d ' ')
        snf-burnin --auth-url=$auth_url --token=$token \
            --force-flavor=2 --image-id=all \
            --system-images-user=$images_user \
            {0}
        log_folder=$(ls -1d /var/log/burnin/* | tail -n1)
        for i in $(ls $log_folder/*/details*); do
            echo -e "\\n\\n"
            echo -e "***** $i\\n"
            cat $i
        done
        """.format(self.config.get('Burnin', 'cmd_options'))
        _run(cmd, True)

    @_check_fabric
    def fetch_compressed(self, src, dest=None):
        """Create a tarball and fetch it locally"""
        self.logger.debug("Creating tarball of %s" % src)
        basename = os.path.basename(src)
        tar_file = basename + ".tgz"
        cmd = "tar czf %s %s" % (tar_file, src)
        _run(cmd, False)
        if not os.path.exists(dest):
            os.makedirs(dest)

        tmp_dir = tempfile.mkdtemp()
        fabric.get(tar_file, tmp_dir)

        dest_file = os.path.join(tmp_dir, tar_file)
        self._check_hash_sum(dest_file, tar_file)
        self.logger.debug("Untar packages file %s" % dest_file)
        cmd = """
        cd %s
        tar xzf %s
        cp -r %s/* %s
        rm -r %s
        """ % (tmp_dir, tar_file, src, dest, tmp_dir)
        os.system(cmd)
        self.logger.info("Downloaded %s to %s" %
                         (src, _green(dest)))

    @_check_fabric
    def fetch_packages(self, dest=None):
        """Fetch Synnefo packages"""
        if dest is None:
            dest = self.config.get('Global', 'pkgs_dir')
        dest = os.path.abspath(os.path.expanduser(dest))
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo_build-area", dest)
        self.logger.info("Downloaded debian packages to %s" %
                         _green(dest))