#!/usr/bin/env python

"""
Synnefo ci utils module
"""

import os
import sys
import time
import logging
import fabric.api as fabric
import subprocess
import tempfile
from ConfigParser import ConfigParser, DuplicateSectionError

from kamaki.cli import config as kamaki_config
from kamaki.clients.astakos import AstakosClient
from kamaki.clients.cyclades import CycladesClient
from kamaki.clients.image import ImageClient

DEFAULT_CONFIG_FILE = "new_config"
# UUID of owner of system images
DEFAULT_SYSTEM_IMAGES_UUID = [
    "25ecced9-bf53-4145-91ee-cf47377e9fb2",  # production (okeanos.grnet.gr)
    "04cbe33f-29b7-4ef1-94fb-015929e5fc06",  # testing (okeanos.io)
    ]


def _run(cmd, verbose):
    """Run fabric with verbose level"""
    if verbose:
        args = ('running',)
    else:
        args = ('running', 'stdout',)
    with fabric.hide(*args):  # Used * or ** magic. pylint: disable-msg=W0142
        return fabric.run(cmd)


def _red(msg):
    """Red color"""
    #return "\x1b[31m" + str(msg) + "\x1b[0m"
    return str(msg)


def _yellow(msg):
    """Yellow color"""
    #return "\x1b[33m" + str(msg) + "\x1b[0m"
    return str(msg)


def _green(msg):
    """Green color"""
    #return "\x1b[32m" + str(msg) + "\x1b[0m"
    return str(msg)


def _check_fabric(fun):
    """Check if fabric env has been set"""
    def wrapper(self, *args, **kwargs):
        """wrapper function"""
        if not self.fabric_installed:
            self.setup_fabric()
        return fun(self, *args, **kwargs)
    return wrapper


def _check_kamaki(fun):
    """Check if kamaki has been initialized"""
    def wrapper(self, *args, **kwargs):
        """wrapper function"""
        if not self.kamaki_installed:
            self.setup_kamaki()
        return fun(self, *args, **kwargs)
    return wrapper


class _MyFormatter(logging.Formatter):
    """Logging Formatter"""
    def format(self, record):
        format_orig = self._fmt
        if record.levelno == logging.DEBUG:
            self._fmt = "  %(msg)s"
        elif record.levelno == logging.INFO:
            self._fmt = "%(msg)s"
        elif record.levelno == logging.WARNING:
            self._fmt = _yellow("[W] %(msg)s")
        elif record.levelno == logging.ERROR:
            self._fmt = _red("[E] %(msg)s")
        result = logging.Formatter.format(self, record)
        self._fmt = format_orig
        return result


# Too few public methods. pylint: disable-msg=R0903
class _InfoFilter(logging.Filter):
    """Logging Filter that allows DEBUG and INFO messages only"""
    def filter(self, rec):
        """The filter"""
        return rec.levelno in (logging.DEBUG, logging.INFO)


# Too many instance attributes. pylint: disable-msg=R0902
class SynnefoCI(object):
    """SynnefoCI python class"""

    def __init__(self, config_file=None, cleanup_config=False, cloud=None):
        """ Initialize SynnefoCI python class

        Setup logger, local_dir, config and kamaki
        """
        # Setup logger
        self.logger = logging.getLogger('synnefo-ci')
        self.logger.setLevel(logging.DEBUG)

        handler1 = logging.StreamHandler(sys.stdout)
        handler1.setLevel(logging.DEBUG)
        handler1.addFilter(_InfoFilter())
        handler1.setFormatter(_MyFormatter())
        handler2 = logging.StreamHandler(sys.stderr)
        handler2.setLevel(logging.WARNING)
        handler2.setFormatter(_MyFormatter())

        self.logger.addHandler(handler1)
        self.logger.addHandler(handler2)

        # Get our local dir
        self.ci_dir = os.path.dirname(os.path.abspath(__file__))
        self.repo_dir = os.path.dirname(self.ci_dir)

        # Read config file
        if config_file is None:
            config_file = DEFAULT_CONFIG_FILE
        if not os.path.isabs(config_file):
            config_file = os.path.join(self.ci_dir, config_file)

        self.config = ConfigParser()
        self.config.optionxform = str
        self.config.read(config_file)
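        # write_config() stores runtime values (server id, user, password,
        # ip, port) in the file named by 'temporary_config'; unless we are
        # cleaning up, read it on top of the base config so those values are
        # available to later steps (setup_fabric, destroy_server).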
        temp_config = self.config.get('Global', 'temporary_config')
        if cleanup_config:
            try:
                os.remove(temp_config)
            except OSError:
                pass
        else:
            self.config.read(temp_config)

        # Set kamaki cloud
        if cloud is not None:
            self.kamaki_cloud = cloud
        elif self.config.has_option("Deployment", "kamaki_cloud"):
            kamaki_cloud = self.config.get("Deployment", "kamaki_cloud")
            if kamaki_cloud == "":
                self.kamaki_cloud = None
            else:
                self.kamaki_cloud = kamaki_cloud
        else:
            self.kamaki_cloud = None

        # Initialize variables
        self.fabric_installed = False
        self.kamaki_installed = False
        self.cyclades_client = None
        self.image_client = None

    def setup_kamaki(self):
        """Initialize kamaki

        Setup cyclades_client and image_client
        """

        config = kamaki_config.Config()
        if self.kamaki_cloud is None:
            self.kamaki_cloud = config.get_global("default_cloud")

        self.logger.info("Setup kamaki client, using cloud '%s'.." %
                         self.kamaki_cloud)
        auth_url = config.get_cloud(self.kamaki_cloud, "url")
        self.logger.debug("Authentication URL is %s" % _green(auth_url))
        token = config.get_cloud(self.kamaki_cloud, "token")
        #self.logger.debug("Token is %s" % _green(token))

        astakos_client = AstakosClient(auth_url, token)
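
        # Resolve the Cyclades (compute) and Image API endpoints from the
        # Astakos service catalog instead of hard-coding them.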
        cyclades_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Cyclades API url is %s" % _green(cyclades_url))
        self.cyclades_client = CycladesClient(cyclades_url, token)
        self.cyclades_client.CONNECTION_RETRY_LIMIT = 2

        image_url = \
            astakos_client.get_service_endpoints('image')['publicURL']
        self.logger.debug("Images API url is %s" % _green(image_url))
        self.image_client = ImageClient(image_url, token)
        self.image_client.CONNECTION_RETRY_LIMIT = 2

    def _wait_transition(self, server_id, current_status, new_status):
        """Wait for server to go from current_status to new_status"""
        self.logger.debug("Waiting for server to become %s" % new_status)
        timeout = self.config.getint('Global', 'build_timeout')
        sleep_time = 5
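        # Poll Cyclades until the server reaches new_status; give up after
        # roughly 'build_timeout' seconds, and abort immediately if the
        # server enters any state other than current_status or new_status.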
        while True:
            server = self.cyclades_client.get_server_details(server_id)
            if server['status'] == new_status:
                return server
            elif timeout < 0:
                self.logger.error(
                    "Waiting for server to become %s timed out" % new_status)
                self.destroy_server(False)
                sys.exit(-1)
            elif server['status'] == current_status:
                # Sleep for sleep_time secs and continue polling
                timeout = timeout - sleep_time
                time.sleep(sleep_time)
            else:
                self.logger.error(
                    "Server failed with status %s" % server['status'])
                self.destroy_server(False)
                sys.exit(-1)

    @_check_kamaki
    def destroy_server(self, wait=True):
        """Destroy slave server"""
        server_id = self.config.getint('Temporary Options', 'server_id')
        self.logger.info("Destroying server with id %s" % server_id)
        self.cyclades_client.delete_server(server_id)
        if wait:
            self._wait_transition(server_id, "ACTIVE", "DELETED")

    @_check_kamaki
    def create_server(self, image_id=None, flavor_id=None):
        """Create slave server"""
        self.logger.info("Create a new server..")
        if image_id is None:
            image = self._find_image()
            self.logger.debug("Will use image \"%s\"" % _green(image['name']))
            image_id = image["id"]
        self.logger.debug("Image has id %s" % _green(image_id))
        if flavor_id is None:
            flavor_id = self.config.getint("Deployment", "flavor_id")
        server = self.cyclades_client.create_server(
            self.config.get('Deployment', 'server_name'),
            flavor_id,
            image_id)
        server_id = server['id']
        self.write_config('server_id', server_id)
        self.logger.debug("Server got id %s" % _green(server_id))
        server_user = server['metadata']['users']
        self.write_config('server_user', server_user)
        self.logger.debug("Server's admin user is %s" % _green(server_user))
        server_passwd = server['adminPass']
        self.write_config('server_passwd', server_passwd)

        server = self._wait_transition(server_id, "BUILD", "ACTIVE")
        self._get_server_ip_and_port(server)
        self._copy_ssh_keys()

        self.setup_fabric()
        self.logger.info("Setup firewall")
        accept_ssh_from = self.config.get('Global', 'filter_access_network')
        if accept_ssh_from != "":
            self.logger.debug("Block ssh except from %s" % accept_ssh_from)
            cmd = """
            local_ip=$(/sbin/ifconfig eth0 | grep 'inet addr:' | \
                cut -d':' -f2 | cut -d' ' -f1)
            iptables -A INPUT -s localhost -j ACCEPT
            iptables -A INPUT -s $local_ip -j ACCEPT
            iptables -A INPUT -s {0} -p tcp --dport 22 -j ACCEPT
            iptables -A INPUT -p tcp --dport 22 -j DROP
            """.format(accept_ssh_from)
            _run(cmd, False)

    def _find_image(self):
        """Find a suitable image to use

        It has to belong to one of the `DEFAULT_SYSTEM_IMAGES_UUID'
        users and contain the word given by `image_name' option.
        """
        image_name = self.config.get('Deployment', 'image_name').lower()
        images = self.image_client.list_public(detail=True)['images']
        # Select images by `system_uuid' user
        images = [x for x in images
                  if x['user_id'] in DEFAULT_SYSTEM_IMAGES_UUID]
        # Select images with `image_name' in their names
        images = [x for x in images
                  if x['name'].lower().find(image_name) != -1]
        # Let's select the first one
        return images[0]

    def _get_server_ip_and_port(self, server):
        """Compute server's IPv4 and ssh port number"""
        self.logger.info("Get server connection details..")
        server_ip = server['attachments'][0]['ipv4']
        if ".okeanos.io" in self.cyclades_client.base_url:
            tmp1 = int(server_ip.split(".")[2])
            tmp2 = int(server_ip.split(".")[3])
            server_ip = "gate.okeanos.io"
            server_port = 10000 + tmp1 * 256 + tmp2
        else:
            server_port = 22
        self.write_config('server_ip', server_ip)
        self.logger.debug("Server's IPv4 is %s" % _green(server_ip))
        self.write_config('server_port', server_port)
        self.logger.debug("Server's ssh port is %s" % _green(server_port))

    @_check_fabric
    def _copy_ssh_keys(self):
        """Upload/Install ssh keys to server"""
        if not self.config.has_option("Deployment", "ssh_keys"):
            return
        authorized_keys = self.config.get("Deployment",
                                          "ssh_keys")
        if authorized_keys != "" and os.path.exists(authorized_keys):
            keyfile = '/tmp/%s.pub' % fabric.env.user
            _run('mkdir -p ~/.ssh && chmod 700 ~/.ssh', False)
            fabric.put(authorized_keys, keyfile)
            _run('cat %s >> ~/.ssh/authorized_keys' % keyfile, False)
            _run('rm %s' % keyfile, False)
            self.logger.debug("Uploaded ssh authorized keys")
        else:
            self.logger.debug("No ssh keys found")

    def write_config(self, option, value, section="Temporary Options"):
        """Write changes back to config file"""
        try:
            self.config.add_section(section)
        except DuplicateSectionError:
            pass
        self.config.set(section, option, str(value))
        temp_conf_file = self.config.get('Global', 'temporary_config')
        with open(temp_conf_file, 'wb') as tcf:
            self.config.write(tcf)

    def setup_fabric(self):
        """Setup fabric environment"""
        self.logger.info("Setup fabric parameters..")
        fabric.env.user = self.config.get('Temporary Options', 'server_user')
        fabric.env.host_string = \
            self.config.get('Temporary Options', 'server_ip')
        fabric.env.port = self.config.getint('Temporary Options',
                                             'server_port')
        fabric.env.password = self.config.get('Temporary Options',
                                              'server_passwd')
        fabric.env.connection_attempts = 10
        fabric.env.shell = "/bin/bash -c"
        fabric.env.disable_known_hosts = True
        fabric.env.output_prefix = None

    def _check_hash_sum(self, localfile, remotefile):
        """Check hash sums of two files"""
        self.logger.debug("Check hash sum for local file %s" % localfile)
        hash1 = os.popen("sha256sum %s" % localfile).read().split(' ')[0]
        self.logger.debug("Local file has sha256 hash %s" % hash1)
        self.logger.debug("Check hash sum for remote file %s" % remotefile)
        hash2 = _run("sha256sum %s" % remotefile, False)
        hash2 = hash2.split(' ')[0]
        self.logger.debug("Remote file has sha256 hash %s" % hash2)
        if hash1 != hash2:
            self.logger.error("Hashes differ.. aborting")
            sys.exit(-1)

    @_check_fabric
    def clone_repo(self):
        """Clone the Synnefo repo on the slave server"""
        self.logger.info("Configure repositories on remote server..")
        self.logger.debug("Setup apt, install curl and git")
        cmd = """
        echo 'APT::Install-Suggests "false";' >> /etc/apt/apt.conf
        apt-get update
        apt-get install curl git --yes
        echo -e "\n\ndeb {0}" >> /etc/apt/sources.list
        curl https://dev.grnet.gr/files/apt-grnetdev.pub | apt-key add -
        apt-get update
        git config --global user.name {1}
        git config --global user.email {2}
        """.format(self.config.get('Global', 'apt_repo'),
                   self.config.get('Global', 'git_config_name'),
                   self.config.get('Global', 'git_config_mail'))
        _run(cmd, False)

        synnefo_repo = self.config.get('Global', 'synnefo_repo')
        synnefo_branch = self.config.get("Global", "synnefo_branch")
        if synnefo_branch == "":
            synnefo_branch = \
                subprocess.Popen(
                    ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                    stdout=subprocess.PIPE).communicate()[0].strip()
            if synnefo_branch == "HEAD":
                synnefo_branch = \
                    subprocess.Popen(
                        ["git", "rev-parse", "--short", "HEAD"],
                        stdout=subprocess.PIPE).communicate()[0].strip()
        self.logger.info("Will use branch %s" % synnefo_branch)
392
        # Currently clonning synnefo can fail unexpectedly
393
        cloned = False
394
        for i in range(10):
395
            self.logger.debug("Clone synnefo from %s" % synnefo_repo)
396
            try:
397
                _run("git clone %s synnefo" % synnefo_repo, False)
398
                cloned = True
399
                break
400
            except BaseException:
401
                self.logger.warning("Clonning synnefo failed.. retrying %s"
402
                                    % i)
403
        cmd = """
404
        cd synnefo
405
        for branch in `git branch -a | grep remotes | \
406
                       grep -v HEAD | grep -v master`; do
407
            git branch --track ${branch##*/} $branch
408
        done
409
        git checkout %s
410
        """ % (synnefo_branch)
411
        _run(cmd, False)
412

    
413
        if not cloned:
414
            self.logger.error("Can not clone Synnefo repo.")
415
            sys.exit(-1)
416

    
417
        deploy_repo = self.config.get('Global', 'deploy_repo')
418
        self.logger.debug("Clone snf-deploy from %s" % deploy_repo)
419
        _run("git clone --depth 1 %s" % deploy_repo, False)

    @_check_fabric
    def build_synnefo(self):
        """Build Synnefo packages"""
        self.logger.info("Build Synnefo packages..")
        self.logger.debug("Install development packages")
        cmd = """
        apt-get update
        apt-get install zlib1g-dev dpkg-dev debhelper git-buildpackage \
                python-dev python-all python-pip --yes
        pip install devflow
        """
        _run(cmd, False)

        if self.config.get('Global', 'patch_pydist') == "True":
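            # The sed below extends pydist.py's package-name regex so that
            # names containing '-' are also accepted.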
            self.logger.debug("Patch pydist.py module")
            cmd = r"""
            sed -r -i 's/(\(\?P<name>\[A-Za-z\]\[A-Za-z0-9_\.)/\1\\\-/' \
                /usr/share/python/debpython/pydist.py
            """
            _run(cmd, False)

        self.logger.debug("Build snf-deploy package")
        cmd = """
        git checkout -t origin/debian
        git-buildpackage --git-upstream-branch=master \
                --git-debian-branch=debian \
                --git-export-dir=../snf-deploy_build-area \
                -uc -us
        """
        with fabric.cd("snf-deploy"):
            _run(cmd, True)

        self.logger.debug("Install snf-deploy package")
        cmd = """
        dpkg -i snf-deploy*.deb
        apt-get -f install --yes
        """
        with fabric.cd("snf-deploy_build-area"):
            with fabric.settings(warn_only=True):
                _run(cmd, True)

        self.logger.debug("Build synnefo packages")
        cmd = """
        devflow-autopkg snapshot -b ~/synnefo_build-area --no-sign
        """
        with fabric.cd("synnefo"):
            _run(cmd, True)

        self.logger.debug("Copy synnefo debs to snf-deploy packages dir")
        cmd = """
        cp ~/synnefo_build-area/*.deb /var/lib/snf-deploy/packages/
        """
        _run(cmd, False)

    @_check_fabric
    def build_documentation(self):
        """Build Synnefo documentation"""
        self.logger.info("Build Synnefo documentation..")
        _run("pip install -U Sphinx", False)
        with fabric.cd("synnefo"):
            _run("devflow-update-version; "
                 "./ci/make_docs.sh synnefo_documentation", False)

    def fetch_documentation(self, dest=None):
        """Fetch Synnefo documentation"""
        self.logger.info("Fetch Synnefo documentation..")
        if dest is None:
            dest = "synnefo_documentation"
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo/synnefo_documentation", dest)
        self.logger.info("Downloaded documentation to %s" %
                         _green(dest))

    @_check_fabric
    def deploy_synnefo(self, schema=None):
        """Deploy Synnefo using snf-deploy"""
        self.logger.info("Deploy Synnefo..")
        if schema is None:
            schema = self.config.get('Global', 'schema')
        self.logger.debug("Will use %s schema" % schema)

        schema_dir = os.path.join(self.ci_dir, "schemas/%s" % schema)
        if not (os.path.exists(schema_dir) and os.path.isdir(schema_dir)):
            raise ValueError("Unknown schema: %s" % schema)

        self.logger.debug("Upload schema files to server")
        with fabric.quiet():
            fabric.put(os.path.join(schema_dir, "*"), "/etc/snf-deploy/")

        self.logger.debug("Change password in nodes.conf file")
        cmd = """
        sed -i 's/^password =.*/password = {0}/' /etc/snf-deploy/nodes.conf
        """.format(fabric.env.password)
        _run(cmd, False)

        self.logger.debug("Run snf-deploy")
        cmd = """
        snf-deploy all --autoconf
        """
        _run(cmd, True)

    @_check_fabric
    def unit_test(self):
        """Run Synnefo unit test suite"""
        self.logger.info("Run Synnefo unit test suite")
        component = self.config.get('Unit Tests', 'component')

        self.logger.debug("Install needed packages")
        cmd = """
        pip install mock
        pip install factory_boy
        """
        _run(cmd, False)

        self.logger.debug("Upload tests.sh file")
        unit_tests_file = os.path.join(self.ci_dir, "tests.sh")
        with fabric.quiet():
            fabric.put(unit_tests_file, ".")

        self.logger.debug("Run unit tests")
        cmd = """
        bash tests.sh {0}
        """.format(component)
        _run(cmd, True)

    @_check_fabric
    def run_burnin(self):
        """Run burnin functional test suite"""
        self.logger.info("Run Burnin functional test suite")
        cmd = """
        auth_url=$(grep -e '^url =' .kamakirc | cut -d' ' -f3)
        token=$(grep -e '^token =' .kamakirc | cut -d' ' -f3)
        images_user=$(kamaki image list -l | grep owner | \
                      cut -d':' -f2 | tr -d ' ')
        snf-burnin --auth-url=$auth_url --token=$token \
            --force-flavor=2 --image-id=all \
            --system-images-user=$images_user \
            {0}
        log_folder=$(ls -1d /var/log/burnin/* | tail -n1)
        for i in $(ls $log_folder/*/details*); do
            echo -e "\\n\\n"
            echo -e "***** $i\\n"
            cat $i
        done
        """.format(self.config.get('Burnin', 'cmd_options'))
        _run(cmd, True)

    @_check_fabric
    def fetch_compressed(self, src, dest=None):
        """Create a tarball and fetch it locally"""
        self.logger.debug("Creating tarball of %s" % src)
        basename = os.path.basename(src)
        tar_file = basename + ".tgz"
        cmd = "tar czf %s %s" % (tar_file, src)
        _run(cmd, False)
        if not os.path.exists(dest):
            os.makedirs(dest)

        tmp_dir = tempfile.mkdtemp()
        fabric.get(tar_file, tmp_dir)

        dest_file = os.path.join(tmp_dir, tar_file)
        self._check_hash_sum(dest_file, tar_file)
        self.logger.debug("Untar packages file %s" % dest_file)
        cmd = """
        cd %s
        tar xzf %s
        cp -r %s/* %s
        rm -r %s
        """ % (tmp_dir, tar_file, src, dest, tmp_dir)
        os.system(cmd)
        self.logger.info("Downloaded %s to %s" %
                         (src, _green(dest)))

    @_check_fabric
    def fetch_packages(self, dest=None):
        """Fetch Synnefo packages"""
        if dest is None:
            dest = self.config.get('Global', 'pkgs_dir')
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo_build-area", dest)
        self.logger.info("Downloaded debian packages to %s" %
                         _green(dest))
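

# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative only, not part of the original module):
# a possible end-to-end flow driving the public methods above. The exact
# steps and their options are expected to come from the ci config file and
# the driver scripts under ci/.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    ci = SynnefoCI(cleanup_config=True)  # start from a clean temporary config
    ci.create_server()       # spawn the slave VM and store its credentials
    ci.clone_repo()          # configure apt, clone synnefo and snf-deploy
    ci.build_synnefo()       # build the debian packages on the slave
    ci.deploy_synnefo()      # deploy with snf-deploy using the chosen schema
    ci.unit_test()           # run the unit test suite
    ci.run_burnin()          # run the burnin functional tests
    ci.fetch_packages()      # download the built packages locally
    ci.destroy_server()      # tear down the slave VM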