Statistics
| Branch: | Tag: | Revision:

root / ci / utils.py @ fe402df2

History | View | Annotate | Download (29.7 kB)

1
#!/usr/bin/env python
2

    
3
"""
4
Synnefo ci utils module
5
"""
6

    
7
import functools
import logging
import os
import re
import subprocess
import sys
import tempfile
import time
from ConfigParser import ConfigParser, DuplicateSectionError

import fabric.api as fabric
from kamaki.cli import config as kamaki_config
from kamaki.clients.astakos import AstakosClient
from kamaki.clients.compute import ComputeClient
from kamaki.clients.cyclades import CycladesClient
from kamaki.clients.image import ImageClient

import filelocker
23

    
24
# Default CI config file name; resolved relative to the ci/ directory
# when a relative path is given (see SynnefoCI.__init__).
DEFAULT_CONFIG_FILE = "new_config"
# UUID of owner of system images
DEFAULT_SYSTEM_IMAGES_UUID = [
    "25ecced9-bf53-4145-91ee-cf47377e9fb2",  # production (okeanos.grnet.gr)
    "04cbe33f-29b7-4ef1-94fb-015929e5fc06",  # testing (okeanos.io)
    ]
30

    
31

    
32
def _run(cmd, verbose):
    """Run a remote command through fabric.

    Always hide fabric's "running" banner; additionally hide the
    command's stdout unless `verbose' is set.
    """
    hidden = ('running',) if verbose else ('running', 'stdout')
    with fabric.hide(*hidden):  # Used * or ** magic. pylint: disable-msg=W0142
        return fabric.run(cmd)
40

    
41

    
42
def _put(local, remote):
    """Run fabric put command without output

    Upload `local' to `remote' on the current fabric host, with all
    fabric output suppressed.
    """
    with fabric.quiet():
        fabric.put(local, remote)
46

    
47

    
48
def _red(msg):
    """Red color

    ANSI coloring is currently disabled; the original escape was
    "\\x1b[31m" + str(msg) + "\\x1b[0m".
    """
    return "%s" % (msg,)
52

    
53

    
54
def _yellow(msg):
    """Yellow color

    ANSI coloring is currently disabled; the original escape was
    "\\x1b[33m" + str(msg) + "\\x1b[0m".
    """
    return "%s" % (msg,)
58

    
59

    
60
def _green(msg):
    """Green color

    ANSI coloring is currently disabled; the original escape was
    "\\x1b[32m" + str(msg) + "\\x1b[0m".
    """
    return "%s" % (msg,)
64

    
65

    
66
def _check_fabric(fun):
    """Decorator: lazily set up the fabric environment.

    On the first call of any decorated method, run `setup_fabric' on
    the instance and set the `fabric_installed' flag so the setup is
    performed only once per instance.
    """
    @functools.wraps(fun)  # keep the wrapped method's name/docstring
    def wrapper(self, *args, **kwargs):
        """wrapper function"""
        if not self.fabric_installed:
            self.setup_fabric()
            self.fabric_installed = True
        return fun(self, *args, **kwargs)
    return wrapper
75

    
76

    
77
def _check_kamaki(fun):
    """Decorator: lazily initialize the kamaki clients.

    On the first call of any decorated method, run `setup_kamaki' on
    the instance and set the `kamaki_installed' flag so the setup is
    performed only once per instance.
    """
    @functools.wraps(fun)  # keep the wrapped method's name/docstring
    def wrapper(self, *args, **kwargs):
        """wrapper function"""
        if not self.kamaki_installed:
            self.setup_kamaki()
            self.kamaki_installed = True
        return fun(self, *args, **kwargs)
    return wrapper
86

    
87

    
88
class _MyFormatter(logging.Formatter):
    """Logging Formatter that varies the line layout by record level.

    DEBUG lines are indented, INFO lines are plain, and WARNING/ERROR
    lines carry a colored [W]/[E] prefix.  Any other level falls back
    to the formatter's configured format string.
    """
    def format(self, record):
        saved_fmt = self._fmt
        per_level = {
            logging.DEBUG: "  %(msg)s",
            logging.INFO: "%(msg)s",
            logging.WARNING: _yellow("[W] %(msg)s"),
            logging.ERROR: _red("[E] %(msg)s"),
        }
        if record.levelno in per_level:
            self._fmt = per_level[record.levelno]
        result = logging.Formatter.format(self, record)
        self._fmt = saved_fmt
        return result
103

    
104

    
105
# Too few public methods. pylint: disable-msg=R0903
106
class _InfoFilter(logging.Filter):
    """Logging Filter that allows DEBUG and INFO messages only"""
    def filter(self, rec):
        """Accept DEBUG/INFO records; reject all higher levels."""
        return rec.levelno == logging.DEBUG or rec.levelno == logging.INFO
111

    
112

    
113
# Too many instance attributes. pylint: disable-msg=R0902
114
class SynnefoCI(object):
    """SynnefoCI python class"""

    def __init__(self, config_file=None, build_id=None, cloud=None):
        """Initialize SynnefoCI python class

        Setup logger, local_dir, config and kamaki.

        config_file -- path to the CI config file (relative paths are
                       resolved against the ci/ directory); defaults to
                       DEFAULT_CONFIG_FILE.
        build_id    -- build to operate on; None means "pick/derive one
                       later" (see create_server / read_temp_config).
        cloud       -- kamaki cloud name; overrides the config file.
        """
        # Setup logger: DEBUG/INFO go to stdout (handler1 is filtered),
        # WARNING and above go to stderr.
        self.logger = logging.getLogger('synnefo-ci')
        self.logger.setLevel(logging.DEBUG)

        handler1 = logging.StreamHandler(sys.stdout)
        handler1.setLevel(logging.DEBUG)
        handler1.addFilter(_InfoFilter())
        handler1.setFormatter(_MyFormatter())
        handler2 = logging.StreamHandler(sys.stderr)
        handler2.setLevel(logging.WARNING)
        handler2.setFormatter(_MyFormatter())

        self.logger.addHandler(handler1)
        self.logger.addHandler(handler2)

        # Get our local dir
        self.ci_dir = os.path.dirname(os.path.abspath(__file__))
        self.repo_dir = os.path.dirname(self.ci_dir)

        # Read config file
        if config_file is None:
            config_file = DEFAULT_CONFIG_FILE
        if not os.path.isabs(config_file):
            config_file = os.path.join(self.ci_dir, config_file)
        self.config = ConfigParser()
        self.config.optionxform = str  # keep option names case-sensitive
        self.config.read(config_file)

        # Read temporary_config file (shared state between CI steps)
        self.temp_config_file = \
            os.path.expanduser(self.config.get('Global', 'temporary_config'))
        self.temp_config = ConfigParser()
        self.temp_config.optionxform = str
        self.temp_config.read(self.temp_config_file)
        self.build_id = build_id
        self.logger.info("Will use \"%s\" as build id" % _green(self.build_id))

        # Set kamaki cloud: command line > config file > kamaki default
        if cloud is not None:
            self.kamaki_cloud = cloud
        elif self.config.has_option("Deployment", "kamaki_cloud"):
            kamaki_cloud = self.config.get("Deployment", "kamaki_cloud")
            if kamaki_cloud == "":
                self.kamaki_cloud = None
            else:
                # Bug fix: the configured cloud name was read but never
                # stored, leaving `kamaki_cloud' unset and crashing
                # setup_kamaki with an AttributeError.
                self.kamaki_cloud = kamaki_cloud
        else:
            self.kamaki_cloud = None

        # Initialize variables
        self.fabric_installed = False
        self.kamaki_installed = False
        self.cyclades_client = None
        self.compute_client = None
        self.image_client = None
175

    
176
    def setup_kamaki(self):
        """Initialize kamaki

        Setup cyclades_client, image_client and compute_client using
        the endpoints advertised by astakos for the selected cloud.
        """
        config = kamaki_config.Config()
        if self.kamaki_cloud is None:
            # No cloud chosen: use kamaki's configured default
            self.kamaki_cloud = config.get_global("default_cloud")

        self.logger.info("Setup kamaki client, using cloud '%s'.." %
                         self.kamaki_cloud)
        auth_url = config.get_cloud(self.kamaki_cloud, "url")
        self.logger.debug("Authentication URL is %s" % _green(auth_url))
        token = config.get_cloud(self.kamaki_cloud, "token")
        #self.logger.debug("Token is %s" % _green(token))

        astakos_client = AstakosClient(auth_url, token)

        cyclades_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Cyclades API url is %s" % _green(cyclades_url))
        self.cyclades_client = CycladesClient(cyclades_url, token)
        self.cyclades_client.CONNECTION_RETRY_LIMIT = 2

        image_url = \
            astakos_client.get_service_endpoints('image')['publicURL']
        self.logger.debug("Images API url is %s" % _green(image_url))
        # Bug fix: the image client must talk to the image endpoint,
        # not the cyclades (compute) one it was given before.
        self.image_client = ImageClient(image_url, token)
        self.image_client.CONNECTION_RETRY_LIMIT = 2

        compute_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Compute API url is %s" % _green(compute_url))
        self.compute_client = ComputeClient(compute_url, token)
        self.compute_client.CONNECTION_RETRY_LIMIT = 2
212

    
213
    def _wait_transition(self, server_id, current_status, new_status):
        """Wait for server to go from current_status to new_status

        Poll the Cyclades API every `sleep_time' seconds until the
        server reaches `new_status'.  On timeout (Global/build_timeout
        seconds) or on an unexpected status, destroy the server and
        terminate the process.  Returns the server details dict on
        success.
        """
        self.logger.debug("Waiting for server to become %s" % new_status)
        timeout = self.config.getint('Global', 'build_timeout')
        sleep_time = 5
        while True:
            server = self.cyclades_client.get_server_details(server_id)
            if server['status'] == new_status:
                return server
            elif timeout < 0:
                self.logger.error(
                    "Waiting for server to become %s timed out" % new_status)
                # Best-effort cleanup; don't wait for the deletion
                self.destroy_server(False)
                sys.exit(-1)
            elif server['status'] == current_status:
                # Sleep for #n secs and continue
                timeout = timeout - sleep_time
                time.sleep(sleep_time)
            else:
                # Server entered an unexpected state (e.g. ERROR)
                self.logger.error(
                    "Server failed with status %s" % server['status'])
                self.destroy_server(False)
                sys.exit(-1)
236

    
237
    @_check_kamaki
    def destroy_server(self, wait=True):
        """Destroy slave server

        Delete the server recorded for this build id; if `wait' is
        True, block until the deletion completes.
        """
        server_id = int(self.read_temp_config('server_id'))
        # Typo fix in log message ("Destoying" -> "Destroying")
        self.logger.info("Destroying server with id %s " % server_id)
        self.cyclades_client.delete_server(server_id)
        if wait:
            self._wait_transition(server_id, "ACTIVE", "DELETED")
245

    
246
    @_check_kamaki
    def create_server(self, image=None, flavor=None, ssh_keys=None):
        """Create slave server

        Boot a new server from `image' with `flavor', record its
        connection details in the temporary config, install `ssh_keys',
        then restrict ssh access via iptables and pre-install the
        packages the CI needs (x2go, iceweasel).
        """
        self.logger.info("Create a new server..")

        # Find a build_id to use
        if self.build_id is None:
            # If build_id is given use this, else ..
            # Find a uniq build_id to use (max existing id + 1)
            ids = self.temp_config.sections()
            if ids:
                max_id = int(max(self.temp_config.sections(), key=int))
                self.build_id = max_id + 1
            else:
                self.build_id = 1
        self.logger.debug("New build id \"%s\" was created"
                          % _green(self.build_id))

        # Find an image to use
        image_id = self._find_image(image)
        # Find a flavor to use
        flavor_id = self._find_flavor(flavor)

        # Create Server
        server_name = self.config.get("Deployment", "server_name")
        server = self.cyclades_client.create_server(
            "%s(BID: %s)" % (server_name, self.build_id),
            flavor_id,
            image_id)
        # Persist connection details so later CI steps (and humans)
        # can reach this server.
        server_id = server['id']
        self.write_temp_config('server_id', server_id)
        self.logger.debug("Server got id %s" % _green(server_id))
        server_user = server['metadata']['users']
        self.write_temp_config('server_user', server_user)
        self.logger.debug("Server's admin user is %s" % _green(server_user))
        server_passwd = server['adminPass']
        self.write_temp_config('server_passwd', server_passwd)

        server = self._wait_transition(server_id, "BUILD", "ACTIVE")
        self._get_server_ip_and_port(server)
        self._copy_ssh_keys(ssh_keys)

        # Setup Firewall: only `accept_ssh_from', the server itself and
        # localhost may reach port 22.
        self.setup_fabric()
        self.logger.info("Setup firewall")
        accept_ssh_from = self.config.get('Global', 'accept_ssh_from')
        if accept_ssh_from != "":
            self.logger.debug("Block ssh except from %s" % accept_ssh_from)
            cmd = """
            local_ip=$(/sbin/ifconfig eth0 | grep 'inet addr:' | \
                cut -d':' -f2 | cut -d' ' -f1)
            iptables -A INPUT -s localhost -j ACCEPT
            iptables -A INPUT -s $local_ip -j ACCEPT
            iptables -A INPUT -s {0} -p tcp --dport 22 -j ACCEPT
            iptables -A INPUT -p tcp --dport 22 -j DROP
            """.format(accept_ssh_from)
            _run(cmd, False)

        # Setup apt, download packages
        self.logger.debug("Setup apt. Install x2goserver and firefox")
        cmd = """
        echo 'APT::Install-Suggests "false";' >> /etc/apt/apt.conf
        apt-get update
        apt-get install curl --yes --force-yes
        echo -e "\n\n{0}" >> /etc/apt/sources.list
        # Synnefo repo's key
        curl https://dev.grnet.gr/files/apt-grnetdev.pub | apt-key add -
        # X2GO Key
        apt-key adv --recv-keys --keyserver keys.gnupg.net E1F958385BFE2B6E
        apt-get install x2go-keyring --yes --force-yes
        apt-get update
        apt-get install x2goserver x2goserver-xsession \
                iceweasel --yes --force-yes
        """.format(self.config.get('Global', 'apt_repo'))
        _run(cmd, False)
321

    
322
    def _find_flavor(self, flavor=None):
        """Find a suitable flavor to use

        Search by name (reg expression) or by id.  Candidates come from
        the config file; `flavor', if given, is tried first.  Returns
        the id of the first match, or aborts the run if none matches.
        """
        # Get a list of flavors from config file
        flavors = self.config.get('Deployment', 'flavors').split(",")
        if flavor is not None:
            # If we have a flavor_name to use, add it to our list
            flavors.insert(0, flavor)

        list_flavors = self.compute_client.list_flavors()
        for flv in flavors:
            flv_type, flv_value = parse_typed_option(option="flavor",
                                                     value=flv)
            if flv_type == "name":
                # Filter flavors by name (case-insensitive regexp)
                self.logger.debug(
                    "Trying to find a flavor with name \"%s\"" % flv_value)
                list_flvs = \
                    [f for f in list_flavors
                     if re.search(flv_value, f['name'], flags=re.I)
                     is not None]
            elif flv_type == "id":
                # Filter flavors by id (exact, case-insensitive)
                self.logger.debug(
                    "Trying to find a flavor with id \"%s\"" % flv_value)
                list_flvs = \
                    [f for f in list_flavors
                     if f['id'].lower() == flv_value.lower()]
            else:
                self.logger.error("Unrecognized flavor type %s" % flv_type)
                # Bug fix: abort here instead of falling through with
                # `list_flvs' unbound (NameError) or stale from the
                # previous iteration; matches _find_image's behavior.
                sys.exit(1)

            # Check if we found one
            if list_flvs:
                self.logger.debug("Will use \"%s\" with id \"%s\""
                                  % (list_flvs[0]['name'], list_flvs[0]['id']))
                return list_flvs[0]['id']

        self.logger.error("No matching flavor found.. aborting")
        sys.exit(1)
361

    
362
    def _find_image(self, image=None):
        """Find a suitable image to use

        In case of search by name, the image has to belong to one
        of the `DEFAULT_SYSTEM_IMAGES_UUID' users.
        In case of search by id it only has to exist.
        """
        # Candidates come from the config file; a command-line image,
        # if given, is tried first.
        candidates = self.config.get('Deployment', 'images').split(",")
        if image is not None:
            candidates.insert(0, image)

        available = self.image_client.list_public(detail=True)['images']
        for candidate in candidates:
            search_type, search_value = \
                parse_typed_option(option="image", value=candidate)
            if search_type == "name":
                # Match by case-insensitive regexp among images owned
                # by the well-known system users.
                self.logger.debug(
                    "Trying to find an image with name \"%s\"" % search_value)
                matches = [
                    img for img in available
                    if img['user_id'] in DEFAULT_SYSTEM_IMAGES_UUID and
                    re.search(search_value, img['name'], flags=re.I)
                    is not None]
            elif search_type == "id":
                # Match by exact id (case-insensitive)
                self.logger.debug(
                    "Trying to find an image with id \"%s\"" % search_value)
                matches = [
                    img for img in available
                    if img['id'].lower() == search_value.lower()]
            else:
                self.logger.error("Unrecognized image type %s" % search_type)
                sys.exit(1)

            # First match wins
            if matches:
                chosen = matches[0]
                self.logger.debug("Will use \"%s\" with id \"%s\""
                                  % (chosen['name'], chosen['id']))
                return chosen['id']

        # None of the candidates matched
        self.logger.error("No matching image found.. aborting")
        sys.exit(1)
406

    
407
    def _get_server_ip_and_port(self, server):
        """Compute server's IPv4 and ssh port number"""
        self.logger.info("Get server connection details..")
        server_ip = server['attachments'][0]['ipv4']
        # Test deployments (okeanos.io / demo.synnefo.org) sit behind a
        # NAT gateway: ssh goes through gate.okeanos.io on a port
        # derived from the last two octets of the private IP.
        base_url = self.cyclades_client.base_url
        if ".okeanos.io" in base_url or ".demo.synnefo.org" in base_url:
            octets = server_ip.split(".")
            server_port = 10000 + int(octets[2]) * 256 + int(octets[3])
            server_ip = "gate.okeanos.io"
        else:
            server_port = 22
        # Record the details for later steps and print a ready-to-use
        # ssh command for humans.
        self.write_temp_config('server_ip', server_ip)
        self.logger.debug("Server's IPv4 is %s" % _green(server_ip))
        self.write_temp_config('server_port', server_port)
        self.logger.debug("Server's ssh port is %s" % _green(server_port))
        self.logger.debug("Access server using \"ssh -X -p %s %s@%s\"" %
                          (server_port, server['metadata']['users'], server_ip))
425

    
426
    @_check_fabric
    def _copy_ssh_keys(self, ssh_keys):
        """Upload/Install ssh keys to server

        `ssh_keys' may be a local file path or an http/https/ftp URL;
        when None, the Deployment/ssh_keys config option is used.  An
        empty value means no keys are installed.
        """
        self.logger.debug("Check for authentication keys to use")
        if ssh_keys is None:
            ssh_keys = self.config.get("Deployment", "ssh_keys")

        if ssh_keys != "":
            ssh_keys = os.path.expanduser(ssh_keys)
            self.logger.debug("Will use %s authentication keys file" % ssh_keys)
            keyfile = '/tmp/%s.pub' % fabric.env.user
            _run('mkdir -p ~/.ssh && chmod 700 ~/.ssh', False)
            # URL: let the server download the keys itself ...
            if ssh_keys.startswith("http://") or \
                    ssh_keys.startswith("https://") or \
                    ssh_keys.startswith("ftp://"):
                cmd = """
                apt-get update
                apt-get install wget --yes --force-yes
                wget {0} -O {1} --no-check-certificate
                """.format(ssh_keys, keyfile)
                _run(cmd, False)
            elif os.path.exists(ssh_keys):
                # ... local file: upload it
                _put(ssh_keys, keyfile)
            else:
                self.logger.debug("No ssh keys found")
                return
            # Append the keys and clean up the temporary key file
            _run('cat %s >> ~/.ssh/authorized_keys' % keyfile, False)
            _run('rm %s' % keyfile, False)
            self.logger.debug("Uploaded ssh authorized keys")
        else:
            self.logger.debug("No ssh keys found")
457

    
458
    def write_temp_config(self, option, value):
        """Write changes back to config file

        Store `option' = `value' under this build's section of the
        temporary config, creating the section (with a "created"
        timestamp) on first use and refreshing the "modified"
        timestamp on every write.  Guarded by a file lock so that
        concurrent CI runs don't clobber each other's entries.
        """
        # Acquire the lock to write to temp_config_file
        with filelocker.lock("%s.lock" % self.temp_config_file,
                             filelocker.LOCK_EX):

            # Read temp_config again to get any new entries
            # (another process may have written since we last read)
            self.temp_config.read(self.temp_config_file)

            # If build_id section doesn't exist create a new one
            try:
                self.temp_config.add_section(str(self.build_id))
                creation_time = \
                    time.strftime("%a, %d %b %Y %X", time.localtime())
                self.temp_config.set(str(self.build_id),
                                     "created", str(creation_time))
            except DuplicateSectionError:
                # Section already exists; just update it below
                pass
            self.temp_config.set(str(self.build_id), option, str(value))
            curr_time = time.strftime("%a, %d %b %Y %X", time.localtime())
            self.temp_config.set(str(self.build_id), "modified", curr_time)
            with open(self.temp_config_file, 'wb') as tcf:
                self.temp_config.write(tcf)
481

    
482
    def read_temp_config(self, option):
        """Read from temporary_config file"""
        # Without an explicit build_id, fall back to the most recent
        # section of the temporary config.
        if self.build_id is None:
            sections = self.temp_config.sections()
            if not sections:
                self.logger.error("No sections in temporary config file")
                sys.exit(1)
            self.build_id = int(sections[-1])
            self.logger.debug("Will use \"%s\" as build id"
                              % _green(self.build_id))
        # Read specified option
        return self.temp_config.get(str(self.build_id), option)
496

    
497
    def setup_fabric(self):
        """Setup fabric environment

        Point fabric at the slave server using the connection details
        recorded in the temporary config by create_server.
        """
        self.logger.info("Setup fabric parameters..")
        fabric.env.user = self.read_temp_config('server_user')
        fabric.env.host_string = self.read_temp_config('server_ip')
        fabric.env.port = int(self.read_temp_config('server_port'))
        fabric.env.password = self.read_temp_config('server_passwd')
        # Be patient with freshly booted servers
        fabric.env.connection_attempts = 10
        fabric.env.shell = "/bin/bash -c"
        # Don't prompt about unknown host keys
        fabric.env.disable_known_hosts = True
        fabric.env.output_prefix = None
508

    
509
    def _check_hash_sum(self, localfile, remotefile):
        """Check hash sums of two files

        Compare the sha256 of `localfile' (computed locally) against
        that of `remotefile' (computed on the server); abort the run
        if they differ.
        """
        self.logger.debug("Check hash sum for local file %s" % localfile)
        # `sha256sum' prints "<hash>  <file>"; keep only the hash
        hash1 = os.popen("sha256sum %s" % localfile).read().split(' ')[0]
        self.logger.debug("Local file has sha256 hash %s" % hash1)
        self.logger.debug("Check hash sum for remote file %s" % remotefile)
        hash2 = _run("sha256sum %s" % remotefile, False)
        hash2 = hash2.split(' ')[0]
        self.logger.debug("Remote file has sha256 hash %s" % hash2)
        if hash1 != hash2:
            self.logger.error("Hashes differ.. aborting")
            sys.exit(-1)
521

    
522
    @_check_fabric
    def clone_repo(self, local_repo=False):
        """Clone Synnefo repo from slave server

        Install and configure git on the server, then either push the
        local repository to it (when `local_repo' is set or no branch
        can be determined) or clone from the configured remote repo.
        Finally check out the requested branch/commit.
        """
        self.logger.info("Configure repositories on remote server..")
        self.logger.debug("Install/Setup git")
        cmd = """
        apt-get install git --yes --force-yes
        git config --global user.name {0}
        git config --global user.email {1}
        """.format(self.config.get('Global', 'git_config_name'),
                   self.config.get('Global', 'git_config_mail'))
        _run(cmd, False)

        # Find synnefo_repo and synnefo_branch to use
        synnefo_repo = self.config.get('Global', 'synnefo_repo')
        synnefo_branch = self.config.get("Global", "synnefo_branch")
        if synnefo_branch == "":
            # Default to the branch currently checked out locally ...
            synnefo_branch = \
                subprocess.Popen(
                    ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                    stdout=subprocess.PIPE).communicate()[0].strip()
            if synnefo_branch == "HEAD":
                # ... or the short commit hash if in detached HEAD state
                synnefo_branch = \
                    subprocess.Popen(
                        ["git", "rev-parse", "--short", "HEAD"],
                        stdout=subprocess.PIPE).communicate()[0].strip()
        self.logger.info("Will use branch %s" % synnefo_branch)

        if local_repo or synnefo_branch == "":
            # Use local_repo
            self.logger.debug("Push local repo to server")
            # Firstly create the remote repo
            _run("git init synnefo", False)
            # Then push our local repo over ssh
            # We have to pass some arguments to ssh command
            # namely to disable host checking.
            (temp_ssh_file_handle, temp_ssh_file) = tempfile.mkstemp()
            os.close(temp_ssh_file_handle)
            # XXX: git push doesn't read the password
            cmd = """
            echo 'exec ssh -o "StrictHostKeyChecking no" \
                           -o "UserKnownHostsFile /dev/null" \
                           -q "$@"' > {4}
            chmod u+x {4}
            export GIT_SSH="{4}"
            echo "{0}" | git push --mirror ssh://{1}@{2}:{3}/~/synnefo
            rm -f {4}
            """.format(fabric.env.password,
                       fabric.env.user,
                       fabric.env.host_string,
                       fabric.env.port,
                       temp_ssh_file)
            os.system(cmd)
        else:
            # Clone Synnefo from remote repo
            # Currently clonning synnefo can fail unexpectedly,
            # so retry up to 10 times before giving up.
            cloned = False
            for i in range(10):
                self.logger.debug("Clone synnefo from %s" % synnefo_repo)
                try:
                    _run("git clone %s synnefo" % synnefo_repo, False)
                    cloned = True
                    break
                except BaseException:
                    self.logger.warning(
                        "Clonning synnefo failed.. retrying %s" % i)
            if not cloned:
                self.logger.error("Can not clone Synnefo repo.")
                sys.exit(-1)

        # Checkout the desired synnefo_branch; first create local
        # tracking branches for all remotes so the checkout can find it.
        self.logger.debug("Checkout \"%s\" branch/commit" % synnefo_branch)
        cmd = """
        cd synnefo
        for branch in `git branch -a | grep remotes | \
                       grep -v HEAD | grep -v master`; do
            git branch --track ${branch##*/} $branch
        done
        git checkout %s
        """ % (synnefo_branch)
        _run(cmd, False)
603

    
604
    @_check_fabric
    def build_synnefo(self):
        """Build Synnefo packages

        Install the build toolchain on the server, build the debian
        packages with devflow, install snf-deploy from the result and
        stage all built debs in snf-deploy's package directory.
        """
        self.logger.info("Build Synnefo packages..")
        self.logger.debug("Install development packages")
        cmd = """
        apt-get update
        apt-get install zlib1g-dev dpkg-dev debhelper git-buildpackage \
                python-dev python-all python-pip --yes --force-yes
        pip install devflow
        """
        _run(cmd, False)

        if self.config.get('Global', 'patch_pydist') == "True":
            # Work around a debpython pydist name-regex limitation
            # (allow '-' in package names)
            self.logger.debug("Patch pydist.py module")
            cmd = r"""
            sed -r -i 's/(\(\?P<name>\[A-Za-z\]\[A-Za-z0-9_\.)/\1\\\-/' \
                /usr/share/python/debpython/pydist.py
            """
            _run(cmd, False)

        # Build synnefo packages
        self.logger.debug("Build synnefo packages")
        cmd = """
        devflow-autopkg snapshot -b ~/synnefo_build-area --no-sign
        """
        with fabric.cd("synnefo"):
            _run(cmd, True)

        # Install snf-deploy package; `apt-get -f install' pulls in the
        # dependencies dpkg could not resolve.
        self.logger.debug("Install snf-deploy package")
        cmd = """
        dpkg -i snf-deploy*.deb
        apt-get -f install --yes --force-yes
        """
        with fabric.cd("synnefo_build-area"):
            with fabric.settings(warn_only=True):
                _run(cmd, True)

        # Setup synnefo packages for snf-deploy
        self.logger.debug("Copy synnefo debs to snf-deploy packages dir")
        cmd = """
        cp ~/synnefo_build-area/*.deb /var/lib/snf-deploy/packages/
        """
        _run(cmd, False)
649

650
    @_check_fabric
    def build_documentation(self):
        """Build Synnefo documentation

        Install/upgrade Sphinx on the server and build the docs into
        the synnefo/synnefo_documentation directory (fetched later by
        fetch_documentation).
        """
        self.logger.info("Build Synnefo documentation..")
        _run("pip install -U Sphinx", False)
        with fabric.cd("synnefo"):
            _run("devflow-update-version; "
                 "./ci/make_docs.sh synnefo_documentation", False)
658

659
    def fetch_documentation(self, dest=None):
        """Fetch Synnefo documentation

        Download the docs built by build_documentation into local
        directory `dest' (default: ./synnefo_documentation), creating
        it if needed.
        """
        self.logger.info("Fetch Synnefo documentation..")
        if dest is None:
            dest = "synnefo_documentation"
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo/synnefo_documentation", dest)
        self.logger.info("Downloaded documentation to %s" %
                         _green(dest))
670

671
    @_check_fabric
    def deploy_synnefo(self, schema=None):
        """Deploy Synnefo using snf-deploy

        Upload the deployment schema files (default taken from the
        Global/schema config option), inject the server password into
        nodes.conf and run snf-deploy.

        Raises ValueError if the schema directory does not exist.
        """
        self.logger.info("Deploy Synnefo..")
        if schema is None:
            schema = self.config.get('Global', 'schema')
        self.logger.debug("Will use \"%s\" schema" % schema)

        schema_dir = os.path.join(self.ci_dir, "schemas/%s" % schema)
        if not (os.path.exists(schema_dir) and os.path.isdir(schema_dir)):
            raise ValueError("Unknown schema: %s" % schema)

        self.logger.debug("Upload schema files to server")
        _put(os.path.join(schema_dir, "*"), "/etc/snf-deploy/")

        # snf-deploy must know the root password of the node
        self.logger.debug("Change password in nodes.conf file")
        cmd = """
        sed -i 's/^password =.*/password = {0}/' /etc/snf-deploy/nodes.conf
        """.format(fabric.env.password)
        _run(cmd, False)

        self.logger.debug("Run snf-deploy")
        cmd = """
        snf-deploy --disable-colors --autoconf all
        """
        _run(cmd, True)
697

698
    @_check_fabric
    def unit_test(self):
        """Run Synnefo unit test suite

        Install the test dependencies, upload the ci/tests.sh driver
        script and run it for the component named in the "Unit Tests"
        config section.
        """
        self.logger.info("Run Synnefo unit test suite")
        component = self.config.get('Unit Tests', 'component')

        self.logger.debug("Install needed packages")
        cmd = """
        pip install mock
        pip install factory_boy
        """
        _run(cmd, False)

        self.logger.debug("Upload tests.sh file")
        unit_tests_file = os.path.join(self.ci_dir, "tests.sh")
        _put(unit_tests_file, ".")

        self.logger.debug("Run unit tests")
        cmd = """
        bash tests.sh {0}
        """.format(component)
        _run(cmd, True)
720

721
    @_check_fabric
    def run_burnin(self):
        """Run burnin functional test suite

        Extract credentials from the server's .kamakirc, run snf-burnin
        with the extra options from the Burnin config section, then cat
        the detail logs of the latest burnin run so they appear in the
        CI output.
        """
        self.logger.info("Run Burnin functional test suite")
        cmd = """
        auth_url=$(grep -e '^url =' .kamakirc | cut -d' ' -f3)
        token=$(grep -e '^token =' .kamakirc | cut -d' ' -f3)
        images_user=$(kamaki image list -l | grep owner | \
                      cut -d':' -f2 | tr -d ' ')
        snf-burnin --auth-url=$auth_url --token=$token \
            --force-flavor=2 --image-id=all \
            --system-images-user=$images_user \
            {0}
        log_folder=$(ls -1d /var/log/burnin/* | tail -n1)
        for i in $(ls $log_folder/*/details*); do
            echo -e "\\n\\n"
            echo -e "***** $i\\n"
            cat $i
        done
        """.format(self.config.get('Burnin', 'cmd_options'))
        _run(cmd, True)
742

743
    @_check_fabric
    def fetch_compressed(self, src, dest=None):
        """Create a tarball of remote `src' and extract it under `dest'

        The tarball is built on the server, downloaded to a temporary
        directory, verified via sha256 and extracted into `dest'
        (created if missing).  If `dest' is None the current working
        directory is used.
        """
        self.logger.debug("Creating tarball of %s" % src)
        basename = os.path.basename(src)
        tar_file = basename + ".tgz"
        cmd = "tar czf %s %s" % (tar_file, src)
        _run(cmd, False)
        # Bug fix: `dest' is optional but was dereferenced
        # unconditionally; os.path.exists(None) raises TypeError.
        if dest is None:
            dest = os.getcwd()
        if not os.path.exists(dest):
            os.makedirs(dest)

        tmp_dir = tempfile.mkdtemp()
        fabric.get(tar_file, tmp_dir)

        dest_file = os.path.join(tmp_dir, tar_file)
        # Abort the run if the download was corrupted
        self._check_hash_sum(dest_file, tar_file)
        self.logger.debug("Untar packages file %s" % dest_file)
        cmd = """
        cd %s
        tar xzf %s
        cp -r %s/* %s
        rm -r %s
        """ % (tmp_dir, tar_file, src, dest, tmp_dir)
        os.system(cmd)
        self.logger.info("Downloaded %s to %s" %
                         (src, _green(dest)))
769

770
    @_check_fabric
    def fetch_packages(self, dest=None):
        """Fetch Synnefo packages

        Download the debian packages built by build_synnefo into local
        directory `dest' (default: the Global/pkgs_dir config option),
        creating it if needed.
        """
        if dest is None:
            dest = self.config.get('Global', 'pkgs_dir')
        dest = os.path.abspath(os.path.expanduser(dest))
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo_build-area", dest)
        self.logger.info("Downloaded debian packages to %s" %
                         _green(dest))
781

782

783
def parse_typed_option(option, value):
    """Parse a "type:value" option string.

    `value' must look like "id:<val>" or "name:<val>" (surrounding
    whitespace is ignored).  Returns the (type, val) pair.

    Raises ValueError for any other format; `option' only names the
    offending option in the error message.
    """
    try:
        # Bug fix: split on the first ':' only, so values may contain
        # colons (e.g. regexps) as the "[id|name]:.+" format promises.
        type_, val = value.strip().split(':', 1)
        # ".+" requires a non-empty value
        if type_ not in ["id", "name"] or val == "":
            raise ValueError
        return type_, val
    except ValueError:
        msg = "Invalid %s format. Must be [id|name]:.+" % option
        raise ValueError(msg)
792