root / ci / utils.py @ 358a19bc

#!/usr/bin/env python

"""
Synnefo ci utils module
"""

import os
import re
import sys
import time
import logging
import fabric.api as fabric
import subprocess
import tempfile
from ConfigParser import ConfigParser, DuplicateSectionError

from kamaki.cli import config as kamaki_config
from kamaki.clients.astakos import AstakosClient
from kamaki.clients.cyclades import CycladesClient
from kamaki.clients.image import ImageClient
from kamaki.clients.compute import ComputeClient

DEFAULT_CONFIG_FILE = "new_config"
# UUID of owner of system images
DEFAULT_SYSTEM_IMAGES_UUID = [
    "25ecced9-bf53-4145-91ee-cf47377e9fb2",  # production (okeanos.grnet.gr)
    "04cbe33f-29b7-4ef1-94fb-015929e5fc06",  # testing (okeanos.io)
    ]


def _run(cmd, verbose):
    """Run fabric with verbose level"""
    if verbose:
        args = ('running',)
    else:
        args = ('running', 'stdout',)
    with fabric.hide(*args):  # Used * or ** magic. pylint: disable-msg=W0142
        return fabric.run(cmd)


def _put(local, remote):
    """Run fabric put command without output"""
    with fabric.quiet():
        fabric.put(local, remote)


def _red(msg):
    """Red color"""
    #return "\x1b[31m" + str(msg) + "\x1b[0m"
    return str(msg)


def _yellow(msg):
    """Yellow color"""
    #return "\x1b[33m" + str(msg) + "\x1b[0m"
    return str(msg)


def _green(msg):
    """Green color"""
    #return "\x1b[32m" + str(msg) + "\x1b[0m"
    return str(msg)


def _check_fabric(fun):
    """Check if fabric env has been set"""
    def wrapper(self, *args, **kwargs):
        """wrapper function"""
        if not self.fabric_installed:
            self.setup_fabric()
            self.fabric_installed = True
        return fun(self, *args, **kwargs)
    return wrapper


def _check_kamaki(fun):
    """Check if kamaki has been initialized"""
    def wrapper(self, *args, **kwargs):
        """wrapper function"""
        if not self.kamaki_installed:
            self.setup_kamaki()
            self.kamaki_installed = True
        return fun(self, *args, **kwargs)
    return wrapper


class _MyFormatter(logging.Formatter):
    """Logging Formatter"""
    def format(self, record):
        format_orig = self._fmt
        if record.levelno == logging.DEBUG:
            self._fmt = "  %(msg)s"
        elif record.levelno == logging.INFO:
            self._fmt = "%(msg)s"
        elif record.levelno == logging.WARNING:
            self._fmt = _yellow("[W] %(msg)s")
        elif record.levelno == logging.ERROR:
            self._fmt = _red("[E] %(msg)s")
        result = logging.Formatter.format(self, record)
        self._fmt = format_orig
        return result


# Too few public methods. pylint: disable-msg=R0903
class _InfoFilter(logging.Filter):
    """Logging Filter that allows DEBUG and INFO messages only"""
    def filter(self, rec):
        """The filter"""
        return rec.levelno in (logging.DEBUG, logging.INFO)


# Too many instance attributes. pylint: disable-msg=R0902
class SynnefoCI(object):
    """SynnefoCI python class"""

    def __init__(self, config_file=None, build_id=None, cloud=None):
        """ Initialize SynnefoCI python class

        Setup logger, local_dir, config and kamaki
        """
        # Setup logger
        self.logger = logging.getLogger('synnefo-ci')
        self.logger.setLevel(logging.DEBUG)

        handler1 = logging.StreamHandler(sys.stdout)
        handler1.setLevel(logging.DEBUG)
        handler1.addFilter(_InfoFilter())
        handler1.setFormatter(_MyFormatter())
        handler2 = logging.StreamHandler(sys.stderr)
        handler2.setLevel(logging.WARNING)
        handler2.setFormatter(_MyFormatter())

        self.logger.addHandler(handler1)
        self.logger.addHandler(handler2)

        # Get our local dir
        self.ci_dir = os.path.dirname(os.path.abspath(__file__))
        self.repo_dir = os.path.dirname(self.ci_dir)

        # Read config file
        if config_file is None:
            config_file = DEFAULT_CONFIG_FILE
        if not os.path.isabs(config_file):
            config_file = os.path.join(self.ci_dir, config_file)
        self.config = ConfigParser()
        self.config.optionxform = str
        self.config.read(config_file)

        # Read temporary_config file
        temp_config = self.config.get('Global', 'temporary_config')
        self.temp_config = ConfigParser()
        self.temp_config.optionxform = str
        self.temp_config.read(temp_config)
        self.build_id = build_id
        self.logger.info("Will use \"%s\" as build id" % _green(self.build_id))

        # Set kamaki cloud
        if cloud is not None:
            self.kamaki_cloud = cloud
        elif self.config.has_option("Deployment", "kamaki_cloud"):
            kamaki_cloud = self.config.get("Deployment", "kamaki_cloud")
            if kamaki_cloud == "":
                self.kamaki_cloud = None
            else:
                self.kamaki_cloud = kamaki_cloud
        else:
            self.kamaki_cloud = None

        # Initialize variables
        self.fabric_installed = False
        self.kamaki_installed = False
        self.cyclades_client = None
        self.compute_client = None
        self.image_client = None

    def setup_kamaki(self):
        """Initialize kamaki

        Setup cyclades_client, image_client and compute_client
        """

        config = kamaki_config.Config()
        if self.kamaki_cloud is None:
            self.kamaki_cloud = config.get_global("default_cloud")

        self.logger.info("Setup kamaki client, using cloud '%s'.." %
                         self.kamaki_cloud)
        auth_url = config.get_cloud(self.kamaki_cloud, "url")
        self.logger.debug("Authentication URL is %s" % _green(auth_url))
        token = config.get_cloud(self.kamaki_cloud, "token")
        #self.logger.debug("Token is %s" % _green(token))

        astakos_client = AstakosClient(auth_url, token)

        cyclades_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Cyclades API url is %s" % _green(cyclades_url))
        self.cyclades_client = CycladesClient(cyclades_url, token)
        self.cyclades_client.CONNECTION_RETRY_LIMIT = 2

        image_url = \
            astakos_client.get_service_endpoints('image')['publicURL']
        self.logger.debug("Images API url is %s" % _green(image_url))
        self.image_client = ImageClient(image_url, token)
        self.image_client.CONNECTION_RETRY_LIMIT = 2

        compute_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Compute API url is %s" % _green(compute_url))
        self.compute_client = ComputeClient(compute_url, token)
        self.compute_client.CONNECTION_RETRY_LIMIT = 2
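        # Note: the cloud's "url" and "token" above are read from the user's
        # kamaki configuration (typically ~/.kamakirc). A matching entry would
        # look roughly like the following sketch (cloud name and values are
        # illustrative, not taken from this repository):
        #
        #   [cloud "default"]
        #   url = https://accounts.example-cloud.gr/identity/v2.0
        #   token = <your-API-token>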

    def _wait_transition(self, server_id, current_status, new_status):
        """Wait for server to go from current_status to new_status"""
        self.logger.debug("Waiting for server to become %s" % new_status)
        timeout = self.config.getint('Global', 'build_timeout')
        sleep_time = 5
        while True:
            server = self.cyclades_client.get_server_details(server_id)
            if server['status'] == new_status:
                return server
            elif timeout < 0:
                self.logger.error(
                    "Waiting for server to become %s timed out" % new_status)
                self.destroy_server(False)
                sys.exit(-1)
            elif server['status'] == current_status:
                # Sleep for sleep_time secs and continue
                timeout = timeout - sleep_time
                time.sleep(sleep_time)
            else:
                self.logger.error(
                    "Server failed with status %s" % server['status'])
                self.destroy_server(False)
                sys.exit(-1)

    @_check_kamaki
    def destroy_server(self, wait=True):
        """Destroy slave server"""
        server_id = int(self.read_temp_config('server_id'))
        self.logger.info("Destroying server with id %s" % server_id)
        self.cyclades_client.delete_server(server_id)
        if wait:
            self._wait_transition(server_id, "ACTIVE", "DELETED")

    @_check_kamaki
    def create_server(self, image=None, flavor=None, ssh_keys=None):
        """Create slave server"""
        self.logger.info("Create a new server..")

        # Find a build_id to use
        if self.build_id is None:
            # No build_id was given, so find a unique one to use
            ids = self.temp_config.sections()
            if ids:
                max_id = int(max(self.temp_config.sections(), key=int))
                self.build_id = max_id + 1
            else:
                self.build_id = 1
        self.logger.debug("New build id \"%s\" was created"
                          % _green(self.build_id))

        # Find an image to use
        image_id = self._find_image(image)
        # Find a flavor to use
        flavor_id = self._find_flavor(flavor)

        # Create Server
        server = self.cyclades_client.create_server(
            self.config.get('Deployment', 'server_name'),
            flavor_id,
            image_id)
        server_id = server['id']
        self.write_temp_config('server_id', server_id)
        self.logger.debug("Server got id %s" % _green(server_id))
        server_user = server['metadata']['users']
        self.write_temp_config('server_user', server_user)
        self.logger.debug("Server's admin user is %s" % _green(server_user))
        server_passwd = server['adminPass']
        self.write_temp_config('server_passwd', server_passwd)

        server = self._wait_transition(server_id, "BUILD", "ACTIVE")
        self._get_server_ip_and_port(server)
        self._copy_ssh_keys(ssh_keys)

        # Setup Firewall
        self.setup_fabric()
        self.logger.info("Setup firewall")
        accept_ssh_from = self.config.get('Global', 'accept_ssh_from')
        if accept_ssh_from != "":
            self.logger.debug("Block ssh except from %s" % accept_ssh_from)
            cmd = """
            local_ip=$(/sbin/ifconfig eth0 | grep 'inet addr:' | \
                cut -d':' -f2 | cut -d' ' -f1)
            iptables -A INPUT -s localhost -j ACCEPT
            iptables -A INPUT -s $local_ip -j ACCEPT
            iptables -A INPUT -s {0} -p tcp --dport 22 -j ACCEPT
            iptables -A INPUT -p tcp --dport 22 -j DROP
            """.format(accept_ssh_from)
            _run(cmd, False)

        # Setup apt, download packages
        self.logger.debug("Setup apt. Install x2goserver and firefox")
        cmd = """
        echo 'APT::Install-Suggests "false";' >> /etc/apt/apt.conf
        apt-get update
        apt-get install curl --yes
        echo -e "\n\n{0}" >> /etc/apt/sources.list
        # Synnefo repo's key
        curl https://dev.grnet.gr/files/apt-grnetdev.pub | apt-key add -
        # X2GO Key
        apt-key adv --recv-keys --keyserver keys.gnupg.net E1F958385BFE2B6E
        apt-get install x2go-keyring --yes
        apt-get update
        apt-get install x2goserver x2goserver-xsession iceweasel --yes
        """.format(self.config.get('Global', 'apt_repo'))
        _run(cmd, False)

    def _find_flavor(self, flavor=None):
        """Find a suitable flavor to use

        Search by name (regular expression) or by id
        """
        # Get a list of flavors from config file
        flavors = self.config.get('Deployment', 'flavors').split(",")
        if flavor is not None:
            # If we have a flavor_name to use, add it to our list
            flavors.insert(0, flavor)

        list_flavors = self.compute_client.list_flavors()
        for flv in flavors:
            [flv_type, flv_value] = flv.strip().split(':')
            if flv_type == "name":
                # Filter flavors by name
                self.logger.debug(
                    "Trying to find a flavor with name \"%s\"" % flv_value)
                list_flvs = \
                    [f for f in list_flavors
                     if re.search(flv_value, f['name'], flags=re.I) is not None]
            elif flv_type == "id":
                # Filter flavors by id
                self.logger.debug(
                    "Trying to find a flavor with id \"%s\"" % flv_value)
                list_flvs = \
                    [f for f in list_flavors
                     if f['id'].lower() == flv_value.lower()]
            else:
                self.logger.error("Unrecognized flavor type %s" % flv_type)
                sys.exit(1)

            # Check if we found one
            if list_flvs:
                self.logger.debug("Will use \"%s\" with id \"%s\""
                                  % (list_flvs[0]['name'], list_flvs[0]['id']))
                return list_flvs[0]['id']

        self.logger.error("No matching flavor found.. aborting")
        sys.exit(1)
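
    # The 'flavors' option parsed above is a comma-separated list of
    # "type:value" entries, where type is either "name" (a regular expression
    # matched against flavor names) or "id" (an exact flavor id). A
    # hypothetical example (the actual values depend on the target cloud):
    #
    #   [Deployment]
    #   flavors = name:C8R8048D20.*, id:1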

    def _find_image(self, image=None):
        """Find a suitable image to use

        In case of search by name, the image has to belong to one
        of the `DEFAULT_SYSTEM_IMAGES_UUID' users.
        In case of search by id it only has to exist.
        """
        # Get a list of images from config file
        images = self.config.get('Deployment', 'images').split(",")
        if image is not None:
            # If we have an image from command line, add it to our list
            images.insert(0, image)

        list_images = self.image_client.list_public(detail=True)['images']
        for img in images:
            [img_type, img_value] = img.strip().split(':')
            if img_type == "name":
                # Filter images by name
                self.logger.debug(
                    "Trying to find an image with name \"%s\"" % img_value)
                list_imgs = \
                    [i for i in list_images
                     if i['user_id'] in DEFAULT_SYSTEM_IMAGES_UUID and
                        re.search(img_value, i['name'], flags=re.I) is not None]
            elif img_type == "id":
                # Filter images by id
                self.logger.debug(
                    "Trying to find an image with id \"%s\"" % img_value)
                list_imgs = \
                    [i for i in list_images
                     if i['id'].lower() == img_value.lower()]
            else:
                self.logger.error("Unrecognized image type %s" % img_type)
                sys.exit(1)

            # Check if we found one
            if list_imgs:
                self.logger.debug("Will use \"%s\" with id \"%s\""
                                  % (list_imgs[0]['name'], list_imgs[0]['id']))
                return list_imgs[0]['id']

        # We didn't find one
        self.logger.error("No matching image found.. aborting")
        sys.exit(1)
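
    # Similarly, the 'images' option is a comma-separated list of "type:value"
    # entries. With "name" the regular expression is matched only against
    # public images owned by one of the DEFAULT_SYSTEM_IMAGES_UUID accounts;
    # with "id" any existing image id is accepted. A hypothetical example
    # (the id below is a placeholder):
    #
    #   [Deployment]
    #   images = name:Debian Base, id:12345678-1234-1234-1234-123456789abc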

    def _get_server_ip_and_port(self, server):
        """Compute server's IPv4 and ssh port number"""
        self.logger.info("Get server connection details..")
        server_ip = server['attachments'][0]['ipv4']
        if ".okeanos.io" in self.cyclades_client.base_url:
            tmp1 = int(server_ip.split(".")[2])
            tmp2 = int(server_ip.split(".")[3])
            server_ip = "gate.okeanos.io"
            server_port = 10000 + tmp1 * 256 + tmp2
        else:
            server_port = 22
        self.write_temp_config('server_ip', server_ip)
        self.logger.debug("Server's IPv4 is %s" % _green(server_ip))
        self.write_temp_config('server_port', server_port)
        self.logger.debug("Server's ssh port is %s" % _green(server_port))
        self.logger.debug("Access server using \"ssh -X -p %s %s@%s\"" %
                          (server_port, server['metadata']['users'], server_ip))
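        # On the okeanos.io testbed, ssh goes through the gate.okeanos.io
        # forwarder and the port is derived from the last two octets of the
        # server's private IPv4. For example, with a (hypothetical) address of
        # 10.0.3.12 the server would be reached on port
        # 10000 + 3 * 256 + 12 = 10780.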

    @_check_fabric
    def _copy_ssh_keys(self, ssh_keys):
        """Upload/Install ssh keys to server"""
        self.logger.debug("Check for authentication keys to use")
        if ssh_keys is None:
            ssh_keys = self.config.get("Deployment", "ssh_keys")

        if ssh_keys != "":
            self.logger.debug("Will use %s authentication keys file" % ssh_keys)
            keyfile = '/tmp/%s.pub' % fabric.env.user
            _run('mkdir -p ~/.ssh && chmod 700 ~/.ssh', False)
            if ssh_keys.startswith("http://") or \
                    ssh_keys.startswith("https://") or \
                    ssh_keys.startswith("ftp://"):
                cmd = """
                apt-get update
                apt-get install wget --yes
                wget {0} -O {1} --no-check-certificate
                """.format(ssh_keys, keyfile)
                _run(cmd, False)
            elif os.path.exists(ssh_keys):
                _put(ssh_keys, keyfile)
            else:
                self.logger.debug("No ssh keys found")
                return
            _run('cat %s >> ~/.ssh/authorized_keys' % keyfile, False)
            _run('rm %s' % keyfile, False)
            self.logger.debug("Uploaded ssh authorized keys")
        else:
            self.logger.debug("No ssh keys found")

    def write_temp_config(self, option, value):
        """Write changes back to config file"""
        # If build_id section doesn't exist create a new one
        try:
            self.temp_config.add_section(str(self.build_id))
            creation_time = time.strftime("%a, %d %b %Y %X", time.localtime())
            self.write_temp_config("created", creation_time)
        except DuplicateSectionError:
            pass
        self.temp_config.set(str(self.build_id), option, str(value))
        curr_time = time.strftime("%a, %d %b %Y %X", time.localtime())
        self.temp_config.set(str(self.build_id), "modified", curr_time)
        temp_conf_file = self.config.get('Global', 'temporary_config')
        with open(temp_conf_file, 'wb') as tcf:
            self.temp_config.write(tcf)
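        # For reference, the temporary config written above holds one section
        # per build id; the option names come from create_server and
        # _get_server_ip_and_port. A sketch with made-up values:
        #
        #   [1]
        #   created = Mon, 01 Jul 2013 12:00:00
        #   server_id = 4242
        #   server_user = root
        #   server_passwd = s3cr3t
        #   server_ip = gate.okeanos.io
        #   server_port = 10780
        #   modified = Mon, 01 Jul 2013 12:05:00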

    def read_temp_config(self, option):
        """Read from temporary_config file"""
        # If build_id is None use the latest one
        if self.build_id is None:
            ids = self.temp_config.sections()
            if ids:
                self.build_id = int(ids[-1])
            else:
                self.logger.error("No sections in temporary config file")
                sys.exit(1)
            self.logger.debug("Will use \"%s\" as build id"
                              % _green(self.build_id))
        # Read specified option
        return self.temp_config.get(str(self.build_id), option)

    def setup_fabric(self):
        """Setup fabric environment"""
        self.logger.info("Setup fabric parameters..")
        fabric.env.user = self.read_temp_config('server_user')
        fabric.env.host_string = self.read_temp_config('server_ip')
        fabric.env.port = int(self.read_temp_config('server_port'))
        fabric.env.password = self.read_temp_config('server_passwd')
        fabric.env.connection_attempts = 10
        fabric.env.shell = "/bin/bash -c"
        fabric.env.disable_known_hosts = True
        fabric.env.output_prefix = None

    def _check_hash_sum(self, localfile, remotefile):
        """Check hash sums of two files"""
        self.logger.debug("Check hash sum for local file %s" % localfile)
        hash1 = os.popen("sha256sum %s" % localfile).read().split(' ')[0]
        self.logger.debug("Local file has sha256 hash %s" % hash1)
        self.logger.debug("Check hash sum for remote file %s" % remotefile)
        hash2 = _run("sha256sum %s" % remotefile, False)
        hash2 = hash2.split(' ')[0]
        self.logger.debug("Remote file has sha256 hash %s" % hash2)
        if hash1 != hash2:
            self.logger.error("Hashes differ.. aborting")
            sys.exit(-1)

    @_check_fabric
    def clone_repo(self, local_repo=False):
        """Clone the Synnefo repo to the slave server"""
        self.logger.info("Configure repositories on remote server..")
        self.logger.debug("Install/Setup git")
        cmd = """
        apt-get install git --yes
        git config --global user.name {0}
        git config --global user.email {1}
        """.format(self.config.get('Global', 'git_config_name'),
                   self.config.get('Global', 'git_config_mail'))
        _run(cmd, False)

        # Find synnefo_repo and synnefo_branch to use
        synnefo_repo = self.config.get('Global', 'synnefo_repo')
        synnefo_branch = self.config.get("Global", "synnefo_branch")
        if synnefo_branch == "":
            synnefo_branch = \
                subprocess.Popen(
                    ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                    stdout=subprocess.PIPE).communicate()[0].strip()
            if synnefo_branch == "HEAD":
                synnefo_branch = \
                    subprocess.Popen(
                        ["git", "rev-parse", "--short", "HEAD"],
                        stdout=subprocess.PIPE).communicate()[0].strip()
        self.logger.info("Will use branch %s" % synnefo_branch)

        if local_repo or synnefo_repo == "":
            # Use local_repo
            self.logger.debug("Push local repo to server")
            # Firstly create the remote repo
            _run("git init synnefo", False)
            # Then push our local repo over ssh
            # We have to pass some arguments to ssh command
            # namely to disable host checking.
            (temp_ssh_file_handle, temp_ssh_file) = tempfile.mkstemp()
            os.close(temp_ssh_file_handle)
            cmd = """
            echo 'exec ssh -o "StrictHostKeyChecking no" \
                           -o "UserKnownHostsFile /dev/null" \
                           -q "$@"' > {4}
            chmod u+x {4}
            export GIT_SSH="{4}"
            echo "{0}" | git push --mirror ssh://{1}@{2}:{3}/~/synnefo
            rm -f {4}
            """.format(fabric.env.password,
                       fabric.env.user,
                       fabric.env.host_string,
                       fabric.env.port,
                       temp_ssh_file)
            os.system(cmd)
        else:
            # Clone Synnefo from remote repo
            # Currently cloning synnefo can fail unexpectedly
            cloned = False
            for i in range(10):
                self.logger.debug("Clone synnefo from %s" % synnefo_repo)
                try:
                    _run("git clone %s synnefo" % synnefo_repo, False)
                    cloned = True
                    break
                except BaseException:
                    self.logger.warning(
                        "Cloning synnefo failed.. retrying %s" % i)
            if not cloned:
                self.logger.error("Can not clone Synnefo repo.")
                sys.exit(-1)

        # Checkout the desired synnefo_branch
        self.logger.debug("Checkout \"%s\" branch/commit" % synnefo_branch)
        cmd = """
        cd synnefo
        for branch in `git branch -a | grep remotes | \
                       grep -v HEAD | grep -v master`; do
            git branch --track ${branch##*/} $branch
        done
        git checkout %s
        """ % (synnefo_branch)
        _run(cmd, False)

    @_check_fabric
    def build_synnefo(self):
        """Build Synnefo packages"""
        self.logger.info("Build Synnefo packages..")
        self.logger.debug("Install development packages")
        cmd = """
        apt-get update
        apt-get install zlib1g-dev dpkg-dev debhelper git-buildpackage \
                python-dev python-all python-pip --yes
        pip install devflow
        """
        _run(cmd, False)

        if self.config.get('Global', 'patch_pydist') == "True":
            self.logger.debug("Patch pydist.py module")
            cmd = r"""
            sed -r -i 's/(\(\?P<name>\[A-Za-z\]\[A-Za-z0-9_\.)/\1\\\-/' \
                /usr/share/python/debpython/pydist.py
            """
            _run(cmd, False)

        # Build synnefo packages
        self.logger.debug("Build synnefo packages")
        cmd = """
        devflow-autopkg snapshot -b ~/synnefo_build-area --no-sign
        """
        with fabric.cd("synnefo"):
            _run(cmd, True)

        # Install snf-deploy package
        self.logger.debug("Install snf-deploy package")
        cmd = """
        dpkg -i snf-deploy*.deb
        apt-get -f install --yes
        """
        with fabric.cd("synnefo_build-area"):
            with fabric.settings(warn_only=True):
                _run(cmd, True)

        # Setup synnefo packages for snf-deploy
        self.logger.debug("Copy synnefo debs to snf-deploy packages dir")
        cmd = """
        cp ~/synnefo_build-area/*.deb /var/lib/snf-deploy/packages/
        """
        _run(cmd, False)

    @_check_fabric
    def build_documentation(self):
        """Build Synnefo documentation"""
        self.logger.info("Build Synnefo documentation..")
        _run("pip install -U Sphinx", False)
        with fabric.cd("synnefo"):
            _run("devflow-update-version; "
                 "./ci/make_docs.sh synnefo_documentation", False)

    def fetch_documentation(self, dest=None):
        """Fetch Synnefo documentation"""
        self.logger.info("Fetch Synnefo documentation..")
        if dest is None:
            dest = "synnefo_documentation"
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo/synnefo_documentation", dest)
        self.logger.info("Downloaded documentation to %s" %
                         _green(dest))

    @_check_fabric
    def deploy_synnefo(self, schema=None):
        """Deploy Synnefo using snf-deploy"""
        self.logger.info("Deploy Synnefo..")
        if schema is None:
            schema = self.config.get('Global', 'schema')
        self.logger.debug("Will use \"%s\" schema" % schema)

        schema_dir = os.path.join(self.ci_dir, "schemas/%s" % schema)
        if not (os.path.exists(schema_dir) and os.path.isdir(schema_dir)):
            raise ValueError("Unknown schema: %s" % schema)

        self.logger.debug("Upload schema files to server")
        _put(os.path.join(schema_dir, "*"), "/etc/snf-deploy/")

        self.logger.debug("Change password in nodes.conf file")
        cmd = """
        sed -i 's/^password =.*/password = {0}/' /etc/snf-deploy/nodes.conf
        """.format(fabric.env.password)
        _run(cmd, False)

        self.logger.debug("Run snf-deploy")
        cmd = """
        snf-deploy --disable-colors --autoconf all
        """
        _run(cmd, True)

    @_check_fabric
    def unit_test(self):
        """Run Synnefo unit test suite"""
        self.logger.info("Run Synnefo unit test suite")
        component = self.config.get('Unit Tests', 'component')

        self.logger.debug("Install needed packages")
        cmd = """
        pip install mock
        pip install factory_boy
        """
        _run(cmd, False)

        self.logger.debug("Upload tests.sh file")
        unit_tests_file = os.path.join(self.ci_dir, "tests.sh")
        _put(unit_tests_file, ".")

        self.logger.debug("Run unit tests")
        cmd = """
        bash tests.sh {0}
        """.format(component)
        _run(cmd, True)

    @_check_fabric
    def run_burnin(self):
        """Run burnin functional test suite"""
        self.logger.info("Run Burnin functional test suite")
        cmd = """
        auth_url=$(grep -e '^url =' .kamakirc | cut -d' ' -f3)
        token=$(grep -e '^token =' .kamakirc | cut -d' ' -f3)
        images_user=$(kamaki image list -l | grep owner | \
                      cut -d':' -f2 | tr -d ' ')
        snf-burnin --auth-url=$auth_url --token=$token \
            --force-flavor=2 --image-id=all \
            --system-images-user=$images_user \
            {0}
        log_folder=$(ls -1d /var/log/burnin/* | tail -n1)
        for i in $(ls $log_folder/*/details*); do
            echo -e "\\n\\n"
            echo -e "***** $i\\n"
            cat $i
        done
        """.format(self.config.get('Burnin', 'cmd_options'))
        _run(cmd, True)

    @_check_fabric
    def fetch_compressed(self, src, dest=None):
        """Create a tarball and fetch it locally"""
        self.logger.debug("Creating tarball of %s" % src)
        basename = os.path.basename(src)
        tar_file = basename + ".tgz"
        cmd = "tar czf %s %s" % (tar_file, src)
        _run(cmd, False)
        if not os.path.exists(dest):
            os.makedirs(dest)

        tmp_dir = tempfile.mkdtemp()
        fabric.get(tar_file, tmp_dir)

        dest_file = os.path.join(tmp_dir, tar_file)
        self._check_hash_sum(dest_file, tar_file)
        self.logger.debug("Untar packages file %s" % dest_file)
        cmd = """
        cd %s
        tar xzf %s
        cp -r %s/* %s
        rm -r %s
        """ % (tmp_dir, tar_file, src, dest, tmp_dir)
        os.system(cmd)
        self.logger.info("Downloaded %s to %s" %
                         (src, _green(dest)))

    @_check_fabric
    def fetch_packages(self, dest=None):
        """Fetch Synnefo packages"""
        if dest is None:
            dest = self.config.get('Global', 'pkgs_dir')
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo_build-area", dest)
        self.logger.info("Downloaded debian packages to %s" %
                         _green(dest))
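

# A rough usage sketch (not part of the original module): a driver script
# could chain the public methods above roughly like this. The cloud, image
# and flavor values are placeholders, not values taken from this repository.
#
#   from utils import SynnefoCI
#
#   ci = SynnefoCI(cloud="my_cloud")
#   ci.create_server(image="name:Debian Base", flavor="name:C2R2048.*")
#   ci.clone_repo()
#   ci.build_synnefo()
#   ci.deploy_synnefo()
#   ci.unit_test()
#   ci.run_burnin()
#   ci.fetch_packages()
#   ci.destroy_server()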