Statistics
| Branch: | Tag: | Revision:

root / ci / utils.py @ 0b0d69f5

History | View | Annotate | Download (30.1 kB)

1
#!/usr/bin/env python
2

    
3
"""
4
Synnefo ci utils module
5
"""
6

    
7
import os
8
import re
9
import sys
10
import time
11
import logging
12
import fabric.api as fabric
13
import subprocess
14
import tempfile
15
from ConfigParser import ConfigParser, DuplicateSectionError
16

    
17
from kamaki.cli import config as kamaki_config
18
from kamaki.clients.astakos import AstakosClient
19
from kamaki.clients.cyclades import CycladesClient
20
from kamaki.clients.image import ImageClient
21
from kamaki.clients.compute import ComputeClient
22
import filelocker
23

    
24
DEFAULT_CONFIG_FILE = "new_config"
25
# UUID of owner of system images
26
DEFAULT_SYSTEM_IMAGES_UUID = [
27
    "25ecced9-bf53-4145-91ee-cf47377e9fb2",  # production (okeanos.grnet.gr)
28
    "04cbe33f-29b7-4ef1-94fb-015929e5fc06",  # testing (okeanos.io)
29
]
30

    
31

    
32
def _run(cmd, verbose):
    """Execute `cmd` on the remote host via fabric.

    When `verbose` is True only the "running" banner is hidden;
    otherwise the command's stdout is suppressed as well.
    """
    hidden = ('running',) if verbose else ('running', 'stdout')
    # Used * or ** magic. pylint: disable-msg=W0142
    with fabric.hide(*hidden):
        return fabric.run(cmd)
40

    
41

    
42
def _put(local, remote):
    """Upload the `local` file to the `remote` path, with all fabric
    output suppressed."""
    with fabric.quiet():
        fabric.put(local, remote)
46

    
47

    
48
def _red(msg):
49
    """Red color"""
50
    #return "\x1b[31m" + str(msg) + "\x1b[0m"
51
    return str(msg)
52

    
53

    
54
def _yellow(msg):
55
    """Yellow color"""
56
    #return "\x1b[33m" + str(msg) + "\x1b[0m"
57
    return str(msg)
58

    
59

    
60
def _green(msg):
61
    """Green color"""
62
    #return "\x1b[32m" + str(msg) + "\x1b[0m"
63
    return str(msg)
64

    
65

    
66
def _check_fabric(fun):
67
    """Check if fabric env has been set"""
68
    def wrapper(self, *args, **kwargs):
69
        """wrapper function"""
70
        if not self.fabric_installed:
71
            self.setup_fabric()
72
            self.fabric_installed = True
73
        return fun(self, *args, **kwargs)
74
    return wrapper
75

    
76

    
77
def _check_kamaki(fun):
78
    """Check if kamaki has been initialized"""
79
    def wrapper(self, *args, **kwargs):
80
        """wrapper function"""
81
        if not self.kamaki_installed:
82
            self.setup_kamaki()
83
            self.kamaki_installed = True
84
        return fun(self, *args, **kwargs)
85
    return wrapper
86

    
87

    
88
class _MyFormatter(logging.Formatter):
    """Logging Formatter that picks a per-level message layout."""

    def format(self, record):
        """Format `record`, temporarily swapping the format string
        according to the record's level, then restore it."""
        per_level = {
            logging.DEBUG: "  %(msg)s",
            logging.INFO: "%(msg)s",
            logging.WARNING: _yellow("[W] %(msg)s"),
            logging.ERROR: _red("[E] %(msg)s"),
        }
        saved_fmt = self._fmt
        self._fmt = per_level.get(record.levelno, saved_fmt)
        try:
            return logging.Formatter.format(self, record)
        finally:
            # Always restore the original format string
            self._fmt = saved_fmt
103

    
104

    
105
# Too few public methods. pylint: disable-msg=R0903
106
class _InfoFilter(logging.Filter):
107
    """Logging Filter that allows DEBUG and INFO messages only"""
108
    def filter(self, rec):
109
        """The filter"""
110
        return rec.levelno in (logging.DEBUG, logging.INFO)
111

    
112

    
113
# Too many instance attributes. pylint: disable-msg=R0902
114
class SynnefoCI(object):
115
    """SynnefoCI python class"""
116

    
117
    def __init__(self, config_file=None, build_id=None, cloud=None):
        """ Initialize SynnefoCI python class

        Setup logger, local_dir, config and kamaki
        """
        # Setup logger: DEBUG/INFO go to stdout, WARNING+ to stderr
        self.logger = logging.getLogger('synnefo-ci')
        self.logger.setLevel(logging.DEBUG)

        handler1 = logging.StreamHandler(sys.stdout)
        handler1.setLevel(logging.DEBUG)
        handler1.addFilter(_InfoFilter())
        handler1.setFormatter(_MyFormatter())
        handler2 = logging.StreamHandler(sys.stderr)
        handler2.setLevel(logging.WARNING)
        handler2.setFormatter(_MyFormatter())

        self.logger.addHandler(handler1)
        self.logger.addHandler(handler2)

        # Get our local dir
        self.ci_dir = os.path.dirname(os.path.abspath(__file__))
        self.repo_dir = os.path.dirname(self.ci_dir)

        # Read config file (relative names resolve against ci_dir)
        if config_file is None:
            config_file = DEFAULT_CONFIG_FILE
        if not os.path.isabs(config_file):
            config_file = os.path.join(self.ci_dir, config_file)
        self.config = ConfigParser()
        self.config.optionxform = str  # keep option names case-sensitive
        self.config.read(config_file)

        # Read temporary_config file
        self.temp_config_file = \
            os.path.expanduser(self.config.get('Global', 'temporary_config'))
        self.temp_config = ConfigParser()
        self.temp_config.optionxform = str
        self.temp_config.read(self.temp_config_file)
        self.build_id = build_id
        self.logger.info("Will use \"%s\" as build id" % _green(self.build_id))

        # Set kamaki cloud: explicit arg > config option > None
        # (None means "use kamaki's default_cloud", see setup_kamaki)
        if cloud is not None:
            self.kamaki_cloud = cloud
        elif self.config.has_option("Deployment", "kamaki_cloud"):
            kamaki_cloud = self.config.get("Deployment", "kamaki_cloud")
            # BUG FIX: a non-empty config value used to be read but never
            # assigned, leaving self.kamaki_cloud unset (AttributeError
            # later in setup_kamaki).
            self.kamaki_cloud = kamaki_cloud if kamaki_cloud != "" else None
        else:
            self.kamaki_cloud = None

        # Initialize variables (clients are created lazily by setup_kamaki)
        self.fabric_installed = False
        self.kamaki_installed = False
        self.cyclades_client = None
        self.compute_client = None
        self.image_client = None
175

    
176
    def setup_kamaki(self):
        """Initialize kamaki

        Setup cyclades_client, image_client and compute_client
        """

        config = kamaki_config.Config()
        if self.kamaki_cloud is None:
            self.kamaki_cloud = config.get_global("default_cloud")

        self.logger.info("Setup kamaki client, using cloud '%s'.." %
                         self.kamaki_cloud)
        auth_url = config.get_cloud(self.kamaki_cloud, "url")
        self.logger.debug("Authentication URL is %s" % _green(auth_url))
        token = config.get_cloud(self.kamaki_cloud, "token")
        #self.logger.debug("Token is %s" % _green(token))

        self.astakos_client = AstakosClient(auth_url, token)

        cyclades_url = \
            self.astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Cyclades API url is %s" % _green(cyclades_url))
        self.cyclades_client = CycladesClient(cyclades_url, token)
        self.cyclades_client.CONNECTION_RETRY_LIMIT = 2

        image_url = \
            self.astakos_client.get_service_endpoints('image')['publicURL']
        self.logger.debug("Images API url is %s" % _green(image_url))
        # BUG FIX: the image client must be pointed at the image service
        # endpoint, not the cyclades (compute) one.
        self.image_client = ImageClient(image_url, token)
        self.image_client.CONNECTION_RETRY_LIMIT = 2

        compute_url = \
            self.astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Compute API url is %s" % _green(compute_url))
        self.compute_client = ComputeClient(compute_url, token)
        self.compute_client.CONNECTION_RETRY_LIMIT = 2
212

    
213
    def _wait_transition(self, server_id, current_status, new_status):
214
        """Wait for server to go from current_status to new_status"""
215
        self.logger.debug("Waiting for server to become %s" % new_status)
216
        timeout = self.config.getint('Global', 'build_timeout')
217
        sleep_time = 5
218
        while True:
219
            server = self.cyclades_client.get_server_details(server_id)
220
            if server['status'] == new_status:
221
                return server
222
            elif timeout < 0:
223
                self.logger.error(
224
                    "Waiting for server to become %s timed out" % new_status)
225
                self.destroy_server(False)
226
                sys.exit(-1)
227
            elif server['status'] == current_status:
228
                # Sleep for #n secs and continue
229
                timeout = timeout - sleep_time
230
                time.sleep(sleep_time)
231
            else:
232
                self.logger.error(
233
                    "Server failed with status %s" % server['status'])
234
                self.destroy_server(False)
235
                sys.exit(-1)
236

    
237
    @_check_kamaki
    def destroy_server(self, wait=True):
        """Delete the slave server recorded in the temporary config.

        When `wait` is True, block until the server reaches DELETED.
        """
        sid = int(self.read_temp_config('server_id'))
        self.logger.info("Destoying server with id %s " % sid)
        self.cyclades_client.delete_server(sid)
        if wait:
            self._wait_transition(sid, "ACTIVE", "DELETED")
245

    
246
    @_check_kamaki
    def create_server(self, image=None, flavor=None, ssh_keys=None):
        """Create slave server

        Boot a new server: pick (or allocate) a build id, resolve image
        and flavor, create the VM, wait until it is ACTIVE, persist its
        connection details in the temporary config, install ssh keys,
        apply a basic iptables firewall and pre-install x2go/iceweasel
        packages via apt.
        """
        self.logger.info("Create a new server..")

        # Find a build_id to use
        if self.build_id is None:
            # If build_id is given use this, else ..
            # Find a uniq build_id to use
            ids = self.temp_config.sections()
            if ids:
                # Sections are numeric strings; next id = max + 1
                max_id = int(max(self.temp_config.sections(), key=int))
                self.build_id = max_id + 1
            else:
                self.build_id = 1
        self.logger.debug("New build id \"%s\" was created"
                          % _green(self.build_id))

        # Find an image to use
        image_id = self._find_image(image)
        # Find a flavor to use
        flavor_id = self._find_flavor(flavor)

        # Create Server
        server_name = self.config.get("Deployment", "server_name")
        server = self.cyclades_client.create_server(
            "%s(BID: %s)" % (server_name, self.build_id),
            flavor_id,
            image_id)
        # Persist connection details so later runs/steps can find the VM
        server_id = server['id']
        self.write_temp_config('server_id', server_id)
        self.logger.debug("Server got id %s" % _green(server_id))
        server_user = server['metadata']['users']
        self.write_temp_config('server_user', server_user)
        self.logger.debug("Server's admin user is %s" % _green(server_user))
        server_passwd = server['adminPass']
        self.write_temp_config('server_passwd', server_passwd)

        server = self._wait_transition(server_id, "BUILD", "ACTIVE")
        self._get_server_ip_and_port(server)
        self._copy_ssh_keys(ssh_keys)

        # Setup Firewall
        self.setup_fabric()
        self.logger.info("Setup firewall")
        accept_ssh_from = self.config.get('Global', 'accept_ssh_from')
        if accept_ssh_from != "":
            # Allow ssh only from localhost, the VM itself and the
            # configured network; drop everything else on port 22
            self.logger.debug("Block ssh except from %s" % accept_ssh_from)
            cmd = """
            local_ip=$(/sbin/ifconfig eth0 | grep 'inet addr:' | \
                cut -d':' -f2 | cut -d' ' -f1)
            iptables -A INPUT -s localhost -j ACCEPT
            iptables -A INPUT -s $local_ip -j ACCEPT
            iptables -A INPUT -s {0} -p tcp --dport 22 -j ACCEPT
            iptables -A INPUT -p tcp --dport 22 -j DROP
            """.format(accept_ssh_from)
            _run(cmd, False)

        # Setup apt, download packages
        self.logger.debug("Setup apt. Install x2goserver and firefox")
        cmd = """
        echo 'APT::Install-Suggests "false";' >> /etc/apt/apt.conf
        apt-get update
        apt-get install curl --yes --force-yes
        echo -e "\n\n{0}" >> /etc/apt/sources.list
        # Synnefo repo's key
        curl https://dev.grnet.gr/files/apt-grnetdev.pub | apt-key add -
        # X2GO Key
        apt-key adv --recv-keys --keyserver keys.gnupg.net E1F958385BFE2B6E
        apt-get install x2go-keyring --yes --force-yes
        apt-get update
        apt-get install x2goserver x2goserver-xsession \
                iceweasel --yes --force-yes
        """.format(self.config.get('Global', 'apt_repo'))
        _run(cmd, False)
321

    
322
    def _find_flavor(self, flavor=None):
        """Find a suitable flavor to use

        Search by name (reg expression) or by id.  Candidates come from
        the 'Deployment/flavors' config option, optionally preceded by
        the `flavor` argument.  Returns the first matching flavor id, or
        aborts the process when nothing matches.
        """
        # Get a list of flavors from config file
        flavors = self.config.get('Deployment', 'flavors').split(",")
        if flavor is not None:
            # If we have a flavor_name to use, add it to our list
            flavors.insert(0, flavor)

        list_flavors = self.compute_client.list_flavors()
        for flv in flavors:
            flv_type, flv_value = parse_typed_option(option="flavor",
                                                     value=flv)
            if flv_type == "name":
                # Filter flavors by name (case-insensitive regex)
                self.logger.debug(
                    "Trying to find a flavor with name \"%s\"" % flv_value)
                list_flvs = \
                    [f for f in list_flavors
                     if re.search(flv_value, f['name'], flags=re.I)
                     is not None]
            elif flv_type == "id":
                # Filter flavors by id (exact, case-insensitive)
                self.logger.debug(
                    "Trying to find a flavor with id \"%s\"" % flv_value)
                list_flvs = \
                    [f for f in list_flavors
                     if f['id'].lower() == flv_value.lower()]
            else:
                # BUG FIX: this branch used to only log and fall through,
                # reusing a stale (or unbound) list_flvs from a previous
                # iteration.  Abort, consistently with _find_image().
                self.logger.error("Unrecognized flavor type %s" % flv_type)
                sys.exit(1)

            # Check if we found one
            if list_flvs:
                self.logger.debug("Will use \"%s\" with id \"%s\""
                                  % (list_flvs[0]['name'], list_flvs[0]['id']))
                return list_flvs[0]['id']

        self.logger.error("No matching flavor found.. aborting")
        sys.exit(1)
363

    
364
    def _find_image(self, image=None):
        """Find a suitable image to use

        In case of search by name, the image has to belong to one
        of the `DEFAULT_SYSTEM_IMAGES_UUID' users.
        In case of search by id it only has to exist.
        Returns the id of the first candidate that matches, or aborts
        the process when no candidate matches.
        """
        # Get a list of images from config file
        images = self.config.get('Deployment', 'images').split(",")
        if image is not None:
            # If we have an image from command line, add it to our list
            images.insert(0, image)

        # Our own uuid is also accepted as an image owner (besides the
        # well-known system-image accounts)
        auth = self.astakos_client.authenticate()
        user_uuid = auth["access"]["token"]["tenant"]["id"]
        list_images = self.image_client.list_public(detail=True)['images']
        for img in images:
            img_type, img_value = parse_typed_option(option="image", value=img)
            if img_type == "name":
                # Filter images by name (case-insensitive regex) and owner
                self.logger.debug(
                    "Trying to find an image with name \"%s\"" % img_value)
                accepted_uuids = DEFAULT_SYSTEM_IMAGES_UUID + [user_uuid]
                list_imgs = \
                    [i for i in list_images if i['user_id'] in accepted_uuids
                     and
                     re.search(img_value, i['name'], flags=re.I) is not None]
            elif img_type == "id":
                # Filter images by id (exact, case-insensitive)
                self.logger.debug(
                    "Trying to find an image with id \"%s\"" % img_value)
                list_imgs = \
                    [i for i in list_images
                     if i['id'].lower() == img_value.lower()]
            else:
                self.logger.error("Unrecognized image type %s" % img_type)
                sys.exit(1)

            # Check if we found one
            if list_imgs:
                self.logger.debug("Will use \"%s\" with id \"%s\""
                                  % (list_imgs[0]['name'], list_imgs[0]['id']))
                return list_imgs[0]['id']

        # We didn't found one
        self.logger.error("No matching image found.. aborting")
        sys.exit(1)
411

    
412
    def _get_server_ip_and_port(self, server):
        """Compute server's IPv4 and ssh port number and persist them.

        Deployments on okeanos.io / demo.synnefo.org sit behind a NAT
        gateway; there the ssh port encodes the last two IP octets and
        the host is the gateway itself.
        """
        self.logger.info("Get server connection details..")
        addr = server['attachments'][0]['ipv4']
        base_url = self.cyclades_client.base_url
        if ".okeanos.io" in base_url or ".demo.synnefo.org" in base_url:
            octets = addr.split(".")
            server_port = 10000 + int(octets[2]) * 256 + int(octets[3])
            server_ip = "gate.okeanos.io"
        else:
            server_ip = addr
            server_port = 22
        self.write_temp_config('server_ip', server_ip)
        self.logger.debug("Server's IPv4 is %s" % _green(server_ip))
        self.write_temp_config('server_port', server_port)
        self.logger.debug("Server's ssh port is %s" % _green(server_port))
        self.logger.debug("Access server using \"ssh -X -p %s %s@%s\"" %
                          (server_port, server['metadata']['users'],
                           server_ip))
431

    
432
    @_check_fabric
    def _copy_ssh_keys(self, ssh_keys):
        """Upload/Install ssh keys to server

        `ssh_keys` may be a local file or an http/https/ftp URL; when
        None the 'Deployment/ssh_keys' config option is used.  An empty
        value means "no keys to install".
        """
        self.logger.debug("Check for authentication keys to use")
        if ssh_keys is None:
            ssh_keys = self.config.get("Deployment", "ssh_keys")

        if ssh_keys != "":
            ssh_keys = os.path.expanduser(ssh_keys)
            self.logger.debug("Will use %s authentication keys file" %
                              ssh_keys)
            keyfile = '/tmp/%s.pub' % fabric.env.user
            _run('mkdir -p ~/.ssh && chmod 700 ~/.ssh', False)
            if ssh_keys.startswith("http://") or \
                    ssh_keys.startswith("https://") or \
                    ssh_keys.startswith("ftp://"):
                # URL: let the server itself download the keys
                cmd = """
                apt-get update
                apt-get install wget --yes --force-yes
                wget {0} -O {1} --no-check-certificate
                """.format(ssh_keys, keyfile)
                _run(cmd, False)
            elif os.path.exists(ssh_keys):
                # Local file: upload it to the server
                _put(ssh_keys, keyfile)
            else:
                self.logger.debug("No ssh keys found")
                return
            # Append to authorized_keys and remove the temporary copy
            _run('cat %s >> ~/.ssh/authorized_keys' % keyfile, False)
            _run('rm %s' % keyfile, False)
            self.logger.debug("Uploaded ssh authorized keys")
        else:
            self.logger.debug("No ssh keys found")
464

    
465
    def write_temp_config(self, option, value):
        """Write changes back to config file

        Persist (option, value) under the current build_id section of
        the temporary config file.  An exclusive file lock guards the
        read-modify-write cycle against concurrent builds.
        """
        # Acquire the lock to write to temp_config_file
        with filelocker.lock("%s.lock" % self.temp_config_file,
                             filelocker.LOCK_EX):

            # Read temp_config again to get any new entries
            self.temp_config.read(self.temp_config_file)

            # If build_id section doesn't exist create a new one
            try:
                self.temp_config.add_section(str(self.build_id))
                creation_time = \
                    time.strftime("%a, %d %b %Y %X", time.localtime())
                self.temp_config.set(str(self.build_id),
                                     "created", str(creation_time))
            except DuplicateSectionError:
                # Section already exists; just update it below
                pass
            self.temp_config.set(str(self.build_id), option, str(value))
            # Track the last modification time of this build's section
            curr_time = time.strftime("%a, %d %b %Y %X", time.localtime())
            self.temp_config.set(str(self.build_id), "modified", curr_time)
            with open(self.temp_config_file, 'wb') as tcf:
                self.temp_config.write(tcf)
488

    
489
    def read_temp_config(self, option):
        """Read `option` from the temporary config, for our build id.

        When no build id has been chosen yet, fall back to the last
        section in the temporary config file (the most recent build).
        """
        if self.build_id is None:
            sections = self.temp_config.sections()
            if not sections:
                self.logger.error("No sections in temporary config file")
                sys.exit(1)
            self.build_id = int(sections[-1])
            self.logger.debug("Will use \"%s\" as build id"
                              % _green(self.build_id))
        # Read specified option
        return self.temp_config.get(str(self.build_id), option)
503

    
504
    def setup_fabric(self):
        """Point fabric at the slave server recorded in the temp config."""
        self.logger.info("Setup fabric parameters..")
        env = fabric.env
        env.user = self.read_temp_config('server_user')
        env.host_string = self.read_temp_config('server_ip')
        env.port = int(self.read_temp_config('server_port'))
        env.password = self.read_temp_config('server_passwd')
        env.connection_attempts = 10
        env.shell = "/bin/bash -c"
        env.disable_known_hosts = True
        env.output_prefix = None
515

    
516
    def _check_hash_sum(self, localfile, remotefile):
        """Compare the sha256 sums of a local and a remote file.

        Aborts the process if the two hashes differ.
        """
        self.logger.debug("Check hash sum for local file %s" % localfile)
        hash1 = os.popen("sha256sum %s" % localfile).read().split(' ')[0]
        self.logger.debug("Local file has sha256 hash %s" % hash1)
        self.logger.debug("Check hash sum for remote file %s" % remotefile)
        hash2 = _run("sha256sum %s" % remotefile, False).split(' ')[0]
        self.logger.debug("Remote file has sha256 hash %s" % hash2)
        if hash1 != hash2:
            self.logger.error("Hashes differ.. aborting")
            sys.exit(-1)
528

    
529
    @_check_fabric
    def clone_repo(self, local_repo=False):
        """Clone Synnefo repo from slave server

        Installs and configures git remotely, decides which repo/branch
        to use (config options, falling back to the local checkout's
        branch or commit), either pushes the local repo or clones the
        remote one, and finally checks out the desired branch/commit.
        """
        self.logger.info("Configure repositories on remote server..")
        self.logger.debug("Install/Setup git")
        cmd = """
        apt-get install git --yes --force-yes
        git config --global user.name {0}
        git config --global user.email {1}
        """.format(self.config.get('Global', 'git_config_name'),
                   self.config.get('Global', 'git_config_mail'))
        _run(cmd, False)

        # Find synnefo_repo and synnefo_branch to use
        synnefo_repo = self.config.get('Global', 'synnefo_repo')
        synnefo_branch = self.config.get("Global", "synnefo_branch")
        if synnefo_branch == "":
            # No branch configured: use the local checkout's branch name
            synnefo_branch = \
                subprocess.Popen(
                    ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                    stdout=subprocess.PIPE).communicate()[0].strip()
            if synnefo_branch == "HEAD":
                # Detached HEAD: fall back to the short commit hash
                synnefo_branch = \
                    subprocess.Popen(
                        ["git", "rev-parse", "--short", "HEAD"],
                        stdout=subprocess.PIPE).communicate()[0].strip()
        self.logger.info("Will use branch %s" % synnefo_branch)

        if local_repo or synnefo_branch == "":
            # Use local_repo
            self.logger.debug("Push local repo to server")
            # Firstly create the remote repo
            _run("git init synnefo", False)
            # Then push our local repo over ssh
            # We have to pass some arguments to ssh command
            # namely to disable host checking.
            (temp_ssh_file_handle, temp_ssh_file) = tempfile.mkstemp()
            os.close(temp_ssh_file_handle)
            # XXX: git push doesn't read the password
            cmd = """
            echo 'exec ssh -o "StrictHostKeyChecking no" \
                           -o "UserKnownHostsFile /dev/null" \
                           -q "$@"' > {4}
            chmod u+x {4}
            export GIT_SSH="{4}"
            echo "{0}" | git push --mirror ssh://{1}@{2}:{3}/~/synnefo
            rm -f {4}
            """.format(fabric.env.password,
                       fabric.env.user,
                       fabric.env.host_string,
                       fabric.env.port,
                       temp_ssh_file)
            os.system(cmd)
        else:
            # Clone Synnefo from remote repo
            # Currently clonning synnefo can fail unexpectedly
            cloned = False
            for i in range(10):
                self.logger.debug("Clone synnefo from %s" % synnefo_repo)
                try:
                    _run("git clone %s synnefo" % synnefo_repo, False)
                    cloned = True
                    break
                except BaseException:
                    self.logger.warning(
                        "Clonning synnefo failed.. retrying %s" % i)
            if not cloned:
                self.logger.error("Can not clone Synnefo repo.")
                sys.exit(-1)

        # Checkout the desired synnefo_branch
        # (first create local tracking branches for all remotes so that a
        # plain branch name can be checked out)
        self.logger.debug("Checkout \"%s\" branch/commit" % synnefo_branch)
        cmd = """
        cd synnefo
        for branch in `git branch -a | grep remotes | \
                       grep -v HEAD | grep -v master`; do
            git branch --track ${branch##*/} $branch
        done
        git checkout %s
        """ % (synnefo_branch)
        _run(cmd, False)
610

    
611
    @_check_fabric
    def build_synnefo(self):
        """Build Synnefo packages

        Installs the build dependencies and devflow, optionally patches
        debpython's pydist.py, builds the debs with devflow-autopkg,
        installs the snf-deploy package and copies the produced debs to
        snf-deploy's packages directory.
        """
        self.logger.info("Build Synnefo packages..")
        self.logger.debug("Install development packages")
        cmd = """
        apt-get update
        apt-get install zlib1g-dev dpkg-dev debhelper git-buildpackage \
                python-dev python-all python-pip --yes --force-yes
        pip install devflow
        """
        _run(cmd, False)

        if self.config.get('Global', 'patch_pydist') == "True":
            # Allow '-' in package names recognized by debpython
            self.logger.debug("Patch pydist.py module")
            cmd = r"""
            sed -r -i 's/(\(\?P<name>\[A-Za-z\]\[A-Za-z0-9_\.)/\1\\\-/' \
                /usr/share/python/debpython/pydist.py
            """
            _run(cmd, False)

        # Build synnefo packages
        self.logger.debug("Build synnefo packages")
        cmd = """
        devflow-autopkg snapshot -b ~/synnefo_build-area --no-sign
        """
        with fabric.cd("synnefo"):
            _run(cmd, True)

        # Install snf-deploy package
        self.logger.debug("Install snf-deploy package")
        cmd = """
        dpkg -i snf-deploy*.deb
        apt-get -f install --yes --force-yes
        """
        with fabric.cd("synnefo_build-area"):
            with fabric.settings(warn_only=True):
                _run(cmd, True)

        # Setup synnefo packages for snf-deploy
        self.logger.debug("Copy synnefo debs to snf-deploy packages dir")
        cmd = """
        cp ~/synnefo_build-area/*.deb /var/lib/snf-deploy/packages/
        """
        _run(cmd, False)
656

657
    @_check_fabric
    def build_documentation(self):
        """Build Synnefo documentation

        Remotely installs/upgrades Sphinx and runs the repo's
        ci/make_docs.sh, leaving the output in "synnefo_documentation".
        """
        self.logger.info("Build Synnefo documentation..")
        _run("pip install -U Sphinx", False)
        with fabric.cd("synnefo"):
            _run("devflow-update-version; "
                 "./ci/make_docs.sh synnefo_documentation", False)
665

666
    def fetch_documentation(self, dest=None):
        """Download the built documentation tree to the local `dest` dir.

        `dest` defaults to "synnefo_documentation" in the current dir.
        """
        self.logger.info("Fetch Synnefo documentation..")
        dest = "synnefo_documentation" if dest is None else dest
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo/synnefo_documentation", dest)
        self.logger.info("Downloaded documentation to %s" %
                         _green(dest))
677

678
    @_check_fabric
    def deploy_synnefo(self, schema=None):
        """Deploy Synnefo on the slave server using snf-deploy.

        `schema` names a directory under ci/schemas/; defaults to the
        'Global/schema' config option.  Raises ValueError for an
        unknown schema.
        """
        self.logger.info("Deploy Synnefo..")
        if schema is None:
            schema = self.config.get('Global', 'schema')
        self.logger.debug("Will use \"%s\" schema" % schema)

        # isdir() is also False for non-existent paths
        schema_dir = os.path.join(self.ci_dir, "schemas/%s" % schema)
        if not os.path.isdir(schema_dir):
            raise ValueError("Unknown schema: %s" % schema)

        self.logger.debug("Upload schema files to server")
        _put(os.path.join(schema_dir, "*"), "/etc/snf-deploy/")

        self.logger.debug("Change password in nodes.conf file")
        cmd = """
        sed -i 's/^password =.*/password = {0}/' /etc/snf-deploy/nodes.conf
        """.format(fabric.env.password)
        _run(cmd, False)

        self.logger.debug("Run snf-deploy")
        cmd = """
        snf-deploy --disable-colors --autoconf all
        """
        _run(cmd, True)
704

705
    @_check_fabric
    def unit_test(self):
        """Run Synnefo unit test suite

        Installs the test dependencies (mock, factory_boy) on the slave,
        uploads ci/tests.sh and executes it for the component named by
        the 'Unit Tests/component' config option.
        """
        self.logger.info("Run Synnefo unit test suite")
        component = self.config.get('Unit Tests', 'component')

        self.logger.debug("Install needed packages")
        cmd = """
        pip install mock
        pip install factory_boy
        """
        _run(cmd, False)

        self.logger.debug("Upload tests.sh file")
        unit_tests_file = os.path.join(self.ci_dir, "tests.sh")
        _put(unit_tests_file, ".")

        self.logger.debug("Run unit tests")
        cmd = """
        bash tests.sh {0}
        """.format(component)
        _run(cmd, True)
727

728
    @_check_fabric
    def run_burnin(self):
        """Run burnin functional test suite

        Extracts credentials from the remote ~/.kamakirc, runs
        snf-burnin with the configured 'Burnin/cmd_options', dumps all
        detail logs from the latest burnin log folder and propagates
        burnin's exit status.
        """
        self.logger.info("Run Burnin functional test suite")
        cmd = """
        auth_url=$(grep -e '^url =' .kamakirc | cut -d' ' -f3)
        token=$(grep -e '^token =' .kamakirc | cut -d' ' -f3)
        images_user=$(kamaki image list -l | grep owner | \
                      cut -d':' -f2 | tr -d ' ')
        snf-burnin --auth-url=$auth_url --token=$token \
            --force-flavor=2 --image-id=all \
            --system-images-user=$images_user \
            {0}
        BurninExitStatus=$?
        log_folder=$(ls -1d /var/log/burnin/* | tail -n1)
        for i in $(ls $log_folder/*/details*); do
            echo -e "\\n\\n"
            echo -e "***** $i\\n"
            cat $i
        done
        exit $BurninExitStatus
        """.format(self.config.get('Burnin', 'cmd_options'))
        _run(cmd, True)
751

752
    @_check_fabric
    def fetch_compressed(self, src, dest=None):
        """Create a tarball of remote `src` and extract it into `dest`.

        The tarball is verified with a sha256 comparison between the
        downloaded copy and the remote original before extraction.
        `dest` defaults to the current working directory.
        """
        self.logger.debug("Creating tarball of %s" % src)
        basename = os.path.basename(src)
        tar_file = basename + ".tgz"
        cmd = "tar czf %s %s" % (tar_file, src)
        _run(cmd, False)
        # BUG FIX: dest=None used to crash in os.path.exists(None);
        # treat it as "extract into the current directory".
        if dest is None:
            dest = os.getcwd()
        if not os.path.exists(dest):
            os.makedirs(dest)

        tmp_dir = tempfile.mkdtemp()
        fabric.get(tar_file, tmp_dir)

        # Verify the download before untarring
        dest_file = os.path.join(tmp_dir, tar_file)
        self._check_hash_sum(dest_file, tar_file)
        self.logger.debug("Untar packages file %s" % dest_file)
        cmd = """
        cd %s
        tar xzf %s
        cp -r %s/* %s
        rm -r %s
        """ % (tmp_dir, tar_file, src, dest, tmp_dir)
        os.system(cmd)
        self.logger.info("Downloaded %s to %s" %
                         (src, _green(dest)))
778

779
    @_check_fabric
    def fetch_packages(self, dest=None):
        """Download the built debian packages to the local `dest` dir.

        `dest` defaults to the 'Global/pkgs_dir' config option.
        """
        if dest is None:
            dest = self.config.get('Global', 'pkgs_dir')
        dest = os.path.abspath(os.path.expanduser(dest))
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo_build-area", dest)
        self.logger.info("Downloaded debian packages to %s" %
                         _green(dest))
790

791

792
def parse_typed_option(option, value):
    """Split a "type:value" string into its (type, value) parts.

    The type part must be "id" or "name".  Raises ValueError (with a
    message naming `option`) for a missing separator, extra separators
    or an unknown type.
    """
    try:
        parts = value.strip().split(':')
        if len(parts) != 2 or parts[0] not in ("id", "name"):
            raise ValueError
        return parts[0], parts[1]
    except ValueError:
        msg = "Invalid %s format. Must be [id|name]:.+" % option
        raise ValueError(msg)
801