#!/usr/bin/env python

"""
Synnefo CI utils module
"""

import os
import sys
import time
import logging
import fabric.api as fabric
import subprocess
import tempfile
from ConfigParser import ConfigParser, DuplicateSectionError

from kamaki.cli import config as kamaki_config
from kamaki.clients.astakos import AstakosClient
from kamaki.clients.cyclades import CycladesClient
from kamaki.clients.image import ImageClient

DEFAULT_CONFIG_FILE = "new_config"
# UUIDs of the owners of the system images
DEFAULT_SYSTEM_IMAGES_UUID = [
    "25ecced9-bf53-4145-91ee-cf47377e9fb2",  # production (okeanos.grnet.gr)
    "04cbe33f-29b7-4ef1-94fb-015929e5fc06",  # testing (okeanos.io)
    ]
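
# The SynnefoCI class below is driven by a ConfigParser-style file (by
# default the `new_config' file in this directory).  As a quick reference,
# and not an exhaustive or authoritative sample, these are the sections and
# options that this module reads:
#
#   [Global]       temporary_config, build_timeout, accept_ssh_from,
#                  apt_repo, git_config_name, git_config_mail, synnefo_repo,
#                  synnefo_branch, deploy_repo, patch_pydist, schema, pkgs_dir
#   [Deployment]   kamaki_cloud, flavor_id, image_name, server_name, ssh_keys
#   [Unit Tests]   component
#   [Burnin]       cmd_options
#
# A `[Temporary Options]' section (server_id, server_user, server_passwd,
# server_ip, server_port) is created at runtime by write_config().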


def _run(cmd, verbose):
    """Run a command with fabric, hiding its stdout unless `verbose' is set"""
    if verbose:
        args = ('running',)
    else:
        args = ('running', 'stdout',)
    with fabric.hide(*args):  # Used * or ** magic. pylint: disable-msg=W0142
        return fabric.run(cmd)


def _put(local, remote):
    """Run fabric's put command without output"""
    with fabric.quiet():
        fabric.put(local, remote)


def _red(msg):
    """Red color"""
    #return "\x1b[31m" + str(msg) + "\x1b[0m"
    return str(msg)


def _yellow(msg):
    """Yellow color"""
    #return "\x1b[33m" + str(msg) + "\x1b[0m"
    return str(msg)


def _green(msg):
    """Green color"""
    #return "\x1b[32m" + str(msg) + "\x1b[0m"
    return str(msg)


def _check_fabric(fun):
    """Decorator that sets up the fabric environment, if needed, before `fun'"""
    def wrapper(self, *args, **kwargs):
        """wrapper function"""
        if not self.fabric_installed:
            self.setup_fabric()
        return fun(self, *args, **kwargs)
    return wrapper


def _check_kamaki(fun):
    """Decorator that initializes kamaki, if needed, before calling `fun'"""
    def wrapper(self, *args, **kwargs):
        """wrapper function"""
        if not self.kamaki_installed:
            self.setup_kamaki()
        return fun(self, *args, **kwargs)
    return wrapper


class _MyFormatter(logging.Formatter):
    """Logging Formatter"""
    def format(self, record):
        format_orig = self._fmt
        if record.levelno == logging.DEBUG:
            self._fmt = "  %(msg)s"
        elif record.levelno == logging.INFO:
            self._fmt = "%(msg)s"
        elif record.levelno == logging.WARNING:
            self._fmt = _yellow("[W] %(msg)s")
        elif record.levelno == logging.ERROR:
            self._fmt = _red("[E] %(msg)s")
        result = logging.Formatter.format(self, record)
        self._fmt = format_orig
        return result


# Too few public methods. pylint: disable-msg=R0903
class _InfoFilter(logging.Filter):
    """Logging Filter that allows DEBUG and INFO messages only"""
    def filter(self, rec):
        """The filter"""
        return rec.levelno in (logging.DEBUG, logging.INFO)


# Too many instance attributes. pylint: disable-msg=R0902
class SynnefoCI(object):
    """SynnefoCI python class"""

    def __init__(self, config_file=None, cleanup_config=False, cloud=None):
        """Initialize the SynnefoCI class

        Set up the logger, local dirs, config and kamaki.
        """
        # Setup logger
        self.logger = logging.getLogger('synnefo-ci')
        self.logger.setLevel(logging.DEBUG)

        handler1 = logging.StreamHandler(sys.stdout)
        handler1.setLevel(logging.DEBUG)
        handler1.addFilter(_InfoFilter())
        handler1.setFormatter(_MyFormatter())
        handler2 = logging.StreamHandler(sys.stderr)
        handler2.setLevel(logging.WARNING)
        handler2.setFormatter(_MyFormatter())

        self.logger.addHandler(handler1)
        self.logger.addHandler(handler2)

        # Get our local dir
        self.ci_dir = os.path.dirname(os.path.abspath(__file__))
        self.repo_dir = os.path.dirname(self.ci_dir)

        # Read config file
        if config_file is None:
            config_file = DEFAULT_CONFIG_FILE
        if not os.path.isabs(config_file):
            config_file = os.path.join(self.ci_dir, config_file)

        self.config = ConfigParser()
        self.config.optionxform = str
        self.config.read(config_file)
        temp_config = self.config.get('Global', 'temporary_config')
        if cleanup_config:
            try:
                os.remove(temp_config)
            except OSError:
                pass
        else:
            self.config.read(temp_config)

        # Set kamaki cloud
        if cloud is not None:
            self.kamaki_cloud = cloud
        elif self.config.has_option("Deployment", "kamaki_cloud"):
            kamaki_cloud = self.config.get("Deployment", "kamaki_cloud")
            if kamaki_cloud == "":
                self.kamaki_cloud = None
            else:
                self.kamaki_cloud = kamaki_cloud
        else:
            self.kamaki_cloud = None

        # Initialize variables
        self.fabric_installed = False
        self.kamaki_installed = False
        self.cyclades_client = None
        self.image_client = None

    def setup_kamaki(self):
        """Initialize kamaki

        Setup cyclades_client and image_client
        """

        config = kamaki_config.Config()
        if self.kamaki_cloud is None:
            self.kamaki_cloud = config.get_global("default_cloud")

        self.logger.info("Setup kamaki client, using cloud '%s'.." %
                         self.kamaki_cloud)
        auth_url = config.get_cloud(self.kamaki_cloud, "url")
        self.logger.debug("Authentication URL is %s" % _green(auth_url))
        token = config.get_cloud(self.kamaki_cloud, "token")
        #self.logger.debug("Token is %s" % _green(token))

        astakos_client = AstakosClient(auth_url, token)

        cyclades_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Cyclades API url is %s" % _green(cyclades_url))
        self.cyclades_client = CycladesClient(cyclades_url, token)
        self.cyclades_client.CONNECTION_RETRY_LIMIT = 2

        image_url = \
            astakos_client.get_service_endpoints('image')['publicURL']
        self.logger.debug("Images API url is %s" % _green(image_url))
        self.image_client = ImageClient(image_url, token)
        self.image_client.CONNECTION_RETRY_LIMIT = 2

        self.kamaki_installed = True

    def _wait_transition(self, server_id, current_status, new_status):
        """Wait for server to go from current_status to new_status"""
        self.logger.debug("Waiting for server to become %s" % new_status)
        timeout = self.config.getint('Global', 'build_timeout')
        sleep_time = 5
        while True:
            server = self.cyclades_client.get_server_details(server_id)
            if server['status'] == new_status:
                return server
            elif timeout < 0:
                self.logger.error(
                    "Waiting for server to become %s timed out" % new_status)
                self.destroy_server(False)
                sys.exit(-1)
            elif server['status'] == current_status:
                # Sleep for sleep_time seconds and continue
                timeout = timeout - sleep_time
                time.sleep(sleep_time)
            else:
                self.logger.error(
                    "Server failed with status %s" % server['status'])
                self.destroy_server(False)
                sys.exit(-1)
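
    # Note on _wait_transition(): the loop polls Cyclades every `sleep_time'
    # (5) seconds and gives up once the accumulated sleep exceeds the
    # `build_timeout' option from the [Global] section, so a build_timeout of
    # 300 allows roughly 60 polls before the build is aborted.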

    @_check_kamaki
    def destroy_server(self, wait=True):
        """Destroy slave server"""
        server_id = self.config.getint('Temporary Options', 'server_id')
        self.logger.info("Destroying server with id %s" % server_id)
        self.cyclades_client.delete_server(server_id)
        if wait:
            self._wait_transition(server_id, "ACTIVE", "DELETED")

    @_check_kamaki
    def create_server(self, image_id=None, flavor_id=None, ssh_keys=None):
        """Create slave server"""
        self.logger.info("Create a new server..")
        if image_id is None:
            image = self._find_image()
            self.logger.debug("Will use image \"%s\"" % _green(image['name']))
            image_id = image["id"]
        self.logger.debug("Image has id %s" % _green(image_id))
        if flavor_id is None:
            flavor_id = self.config.getint("Deployment", "flavor_id")
        server = self.cyclades_client.create_server(
            self.config.get('Deployment', 'server_name'),
            flavor_id,
            image_id)
        server_id = server['id']
        self.write_config('server_id', server_id)
        self.logger.debug("Server got id %s" % _green(server_id))
        server_user = server['metadata']['users']
        self.write_config('server_user', server_user)
        self.logger.debug("Server's admin user is %s" % _green(server_user))
        server_passwd = server['adminPass']
        self.write_config('server_passwd', server_passwd)

        server = self._wait_transition(server_id, "BUILD", "ACTIVE")
        self._get_server_ip_and_port(server)
        self._copy_ssh_keys(ssh_keys)

        self.setup_fabric()
        self.logger.info("Setup firewall")
        accept_ssh_from = self.config.get('Global', 'accept_ssh_from')
        if accept_ssh_from != "":
            self.logger.debug("Block ssh except from %s" % accept_ssh_from)
            cmd = """
            local_ip=$(/sbin/ifconfig eth0 | grep 'inet addr:' | \
                cut -d':' -f2 | cut -d' ' -f1)
            iptables -A INPUT -s localhost -j ACCEPT
            iptables -A INPUT -s $local_ip -j ACCEPT
            iptables -A INPUT -s {0} -p tcp --dport 22 -j ACCEPT
            iptables -A INPUT -p tcp --dport 22 -j DROP
            """.format(accept_ssh_from)
            _run(cmd, False)

    def _find_image(self):
        """Find a suitable image to use

        It has to belong to one of the `DEFAULT_SYSTEM_IMAGES_UUID'
        users and contain the word given by the `image_name' option.
        """
        image_name = self.config.get('Deployment', 'image_name').lower()
        images = self.image_client.list_public(detail=True)['images']
        # Select images owned by one of the `DEFAULT_SYSTEM_IMAGES_UUID' users
        images = [x for x in images
                  if x['user_id'] in DEFAULT_SYSTEM_IMAGES_UUID]
        # Select images with `image_name' in their names
        images = [x for x in images
                  if x['name'].lower().find(image_name) != -1]
        # Let's select the first one
        return images[0]

    def _get_server_ip_and_port(self, server):
        """Compute server's IPv4 and ssh port number"""
        self.logger.info("Get server connection details..")
        server_ip = server['attachments'][0]['ipv4']
        if ".okeanos.io" in self.cyclades_client.base_url:
            tmp1 = int(server_ip.split(".")[2])
            tmp2 = int(server_ip.split(".")[3])
            server_ip = "gate.okeanos.io"
            server_port = 10000 + tmp1 * 256 + tmp2
        else:
            server_port = 22
        self.write_config('server_ip', server_ip)
        self.logger.debug("Server's IPv4 is %s" % _green(server_ip))
        self.write_config('server_port', server_port)
        self.logger.debug("Server's ssh port is %s" % _green(server_port))
        self.logger.debug("Access server using \"ssh -p %s %s@%s\"" %
                          (server_port, fabric.env.user, server_ip))
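
    # The okeanos.io branch above appears to assume an ssh gateway: port
    # 10000 + 256 * C + D on gate.okeanos.io is expected to forward to port
    # 22 of the VM whose private IPv4 ends in .C.D, so an address like
    # 10.0.3.4 would be reached with "ssh -p 10772 user@gate.okeanos.io".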

    @_check_fabric
    def _copy_ssh_keys(self, ssh_keys):
        """Upload/Install ssh keys to server"""
        self.logger.debug("Check for authentication keys to upload")
        if ssh_keys is None:
            ssh_keys = self.config.get("Deployment", "ssh_keys")

        if ssh_keys != "" and os.path.exists(ssh_keys):
            keyfile = '/tmp/%s.pub' % fabric.env.user
            _run('mkdir -p ~/.ssh && chmod 700 ~/.ssh', False)
            _put(ssh_keys, keyfile)
            _run('cat %s >> ~/.ssh/authorized_keys' % keyfile, False)
            _run('rm %s' % keyfile, False)
            self.logger.debug("Uploaded ssh authorized keys")
        else:
            self.logger.debug("No ssh keys found")

    def write_config(self, option, value, section="Temporary Options"):
        """Write changes back to the temporary config file"""
        try:
            self.config.add_section(section)
        except DuplicateSectionError:
            pass
        self.config.set(section, option, str(value))
        temp_conf_file = self.config.get('Global', 'temporary_config')
        with open(temp_conf_file, 'wb') as tcf:
            self.config.write(tcf)
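
    # write_config() is what persists per-run state: create_server() and
    # _get_server_ip_and_port() use it to record server_id, server_user,
    # server_passwd, server_ip and server_port under [Temporary Options],
    # which setup_fabric() and destroy_server() later read back.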

    def setup_fabric(self):
        """Setup fabric environment"""
        self.logger.info("Setup fabric parameters..")
        fabric.env.user = self.config.get('Temporary Options', 'server_user')
        fabric.env.host_string = \
            self.config.get('Temporary Options', 'server_ip')
        fabric.env.port = self.config.getint('Temporary Options',
                                             'server_port')
        fabric.env.password = self.config.get('Temporary Options',
                                              'server_passwd')
        fabric.env.connection_attempts = 10
        fabric.env.shell = "/bin/bash -c"
        fabric.env.disable_known_hosts = True
        fabric.env.output_prefix = None
        self.fabric_installed = True

    def _check_hash_sum(self, localfile, remotefile):
        """Compare the sha256 sums of a local and a remote file"""
        self.logger.debug("Check hash sum for local file %s" % localfile)
        hash1 = os.popen("sha256sum %s" % localfile).read().split(' ')[0]
        self.logger.debug("Local file has sha256 hash %s" % hash1)
        self.logger.debug("Check hash sum for remote file %s" % remotefile)
        hash2 = _run("sha256sum %s" % remotefile, False)
        hash2 = hash2.split(' ')[0]
        self.logger.debug("Remote file has sha256 hash %s" % hash2)
        if hash1 != hash2:
            self.logger.error("Hashes differ.. aborting")
            sys.exit(-1)

    @_check_fabric
    def clone_repo(self):
        """Clone the Synnefo repo on the slave server"""
        self.logger.info("Configure repositories on remote server..")
        self.logger.debug("Setup apt, install curl and git")
        cmd = """
        echo 'APT::Install-Suggests "false";' >> /etc/apt/apt.conf
        apt-get update
        apt-get install curl git --yes
        echo -e "\n\ndeb {0}" >> /etc/apt/sources.list
        curl https://dev.grnet.gr/files/apt-grnetdev.pub | apt-key add -
        apt-get update
        git config --global user.name {1}
        git config --global user.email {2}
        """.format(self.config.get('Global', 'apt_repo'),
                   self.config.get('Global', 'git_config_name'),
                   self.config.get('Global', 'git_config_mail'))
        _run(cmd, False)

        synnefo_repo = self.config.get('Global', 'synnefo_repo')
        synnefo_branch = self.config.get("Global", "synnefo_branch")
        if synnefo_branch == "":
            synnefo_branch = \
                subprocess.Popen(
                    ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                    stdout=subprocess.PIPE).communicate()[0].strip()
            if synnefo_branch == "HEAD":
                synnefo_branch = \
                    subprocess.Popen(
                        ["git", "rev-parse", "--short", "HEAD"],
                        stdout=subprocess.PIPE).communicate()[0].strip()
        self.logger.info("Will use branch %s" % synnefo_branch)
        # Currently cloning synnefo can fail unexpectedly
        cloned = False
        for i in range(10):
            self.logger.debug("Clone synnefo from %s" % synnefo_repo)
            try:
                _run("git clone %s synnefo" % synnefo_repo, False)
                cloned = True
                break
            except BaseException:
                self.logger.warning("Cloning synnefo failed.. retrying %s"
                                    % i)
        if not cloned:
            self.logger.error("Cannot clone Synnefo repo.")
            sys.exit(-1)

        cmd = """
        cd synnefo
        for branch in `git branch -a | grep remotes | \
                       grep -v HEAD | grep -v master`; do
            git branch --track ${branch##*/} $branch
        done
        git checkout %s
        """ % (synnefo_branch)
        _run(cmd, False)

        deploy_repo = self.config.get('Global', 'deploy_repo')
        self.logger.debug("Clone snf-deploy from %s" % deploy_repo)
        _run("git clone --depth 1 %s" % deploy_repo, False)

    @_check_fabric
    def build_synnefo(self):
        """Build Synnefo packages"""
        self.logger.info("Build Synnefo packages..")
        self.logger.debug("Install development packages")
        cmd = """
        apt-get update
        apt-get install zlib1g-dev dpkg-dev debhelper git-buildpackage \
                python-dev python-all python-pip --yes
        pip install devflow
        """
        _run(cmd, False)

        if self.config.get('Global', 'patch_pydist') == "True":
            self.logger.debug("Patch pydist.py module")
            cmd = r"""
            sed -r -i 's/(\(\?P<name>\[A-Za-z\]\[A-Za-z0-9_\.)/\1\\\-/' \
                /usr/share/python/debpython/pydist.py
            """
            _run(cmd, False)

        self.logger.debug("Build snf-deploy package")
        cmd = """
        git checkout -t origin/debian
        git-buildpackage --git-upstream-branch=master \
                --git-debian-branch=debian \
                --git-export-dir=../snf-deploy_build-area \
                -uc -us
        """
        with fabric.cd("snf-deploy"):
            _run(cmd, True)

        self.logger.debug("Install snf-deploy package")
        cmd = """
        dpkg -i snf-deploy*.deb
        apt-get -f install --yes
        """
        with fabric.cd("snf-deploy_build-area"):
            with fabric.settings(warn_only=True):
                _run(cmd, True)

        self.logger.debug("Build synnefo packages")
        cmd = """
        devflow-autopkg snapshot -b ~/synnefo_build-area --no-sign
        """
        with fabric.cd("synnefo"):
            _run(cmd, True)

        self.logger.debug("Copy synnefo debs to snf-deploy packages dir")
        cmd = """
        cp ~/synnefo_build-area/*.deb /var/lib/snf-deploy/packages/
        """
        _run(cmd, False)

    @_check_fabric
    def build_documentation(self):
        """Build Synnefo documentation"""
        self.logger.info("Build Synnefo documentation..")
        _run("pip install -U Sphinx", False)
        with fabric.cd("synnefo"):
            _run("devflow-update-version; "
                 "./ci/make_docs.sh synnefo_documentation", False)

    def fetch_documentation(self, dest=None):
        """Fetch Synnefo documentation"""
        self.logger.info("Fetch Synnefo documentation..")
        if dest is None:
            dest = "synnefo_documentation"
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo/synnefo_documentation", dest)
        self.logger.info("Downloaded documentation to %s" %
                         _green(dest))

    @_check_fabric
    def deploy_synnefo(self, schema=None):
        """Deploy Synnefo using snf-deploy"""
        self.logger.info("Deploy Synnefo..")
        if schema is None:
            schema = self.config.get('Global', 'schema')
        self.logger.debug("Will use %s schema" % schema)

        schema_dir = os.path.join(self.ci_dir, "schemas/%s" % schema)
        if not (os.path.exists(schema_dir) and os.path.isdir(schema_dir)):
            raise ValueError("Unknown schema: %s" % schema)

        self.logger.debug("Upload schema files to server")
        _put(os.path.join(schema_dir, "*"), "/etc/snf-deploy/")

        self.logger.debug("Change password in nodes.conf file")
        cmd = """
        sed -i 's/^password =.*/password = {0}/' /etc/snf-deploy/nodes.conf
        """.format(fabric.env.password)
        _run(cmd, False)

        self.logger.debug("Run snf-deploy")
        cmd = """
        snf-deploy all --autoconf
        """
        _run(cmd, True)

    @_check_fabric
    def unit_test(self):
        """Run Synnefo unit test suite"""
        self.logger.info("Run Synnefo unit test suite")
        component = self.config.get('Unit Tests', 'component')

        self.logger.debug("Install needed packages")
        cmd = """
        pip install mock
        pip install factory_boy
        """
        _run(cmd, False)

        self.logger.debug("Upload tests.sh file")
        unit_tests_file = os.path.join(self.ci_dir, "tests.sh")
        _put(unit_tests_file, ".")

        self.logger.debug("Run unit tests")
        cmd = """
        bash tests.sh {0}
        """.format(component)
        _run(cmd, True)

    @_check_fabric
    def run_burnin(self):
        """Run burnin functional test suite"""
        self.logger.info("Run Burnin functional test suite")
        cmd = """
        auth_url=$(grep -e '^url =' .kamakirc | cut -d' ' -f3)
        token=$(grep -e '^token =' .kamakirc | cut -d' ' -f3)
        images_user=$(kamaki image list -l | grep owner | \
                      cut -d':' -f2 | tr -d ' ')
        snf-burnin --auth-url=$auth_url --token=$token \
            --force-flavor=2 --image-id=all \
            --system-images-user=$images_user \
            {0}
        log_folder=$(ls -1d /var/log/burnin/* | tail -n1)
        for i in $(ls $log_folder/*/details*); do
            echo -e "\n\n"
            echo -e "***** $i\n"
            cat $i
        done
        """.format(self.config.get('Burnin', 'cmd_options'))
        _run(cmd, True)

    @_check_fabric
    def fetch_compressed(self, src, dest=None):
        """Create a tarball and fetch it locally"""
        self.logger.debug("Creating tarball of %s" % src)
        basename = os.path.basename(src)
        tar_file = basename + ".tgz"
        cmd = "tar czf %s %s" % (tar_file, src)
        _run(cmd, False)
        if not os.path.exists(dest):
            os.makedirs(dest)

        tmp_dir = tempfile.mkdtemp()
        fabric.get(tar_file, tmp_dir)

        dest_file = os.path.join(tmp_dir, tar_file)
        self._check_hash_sum(dest_file, tar_file)
        self.logger.debug("Untar packages file %s" % dest_file)
        cmd = """
        cd %s
        tar xzf %s
        cp -r %s/* %s
        rm -r %s
        """ % (tmp_dir, tar_file, src, dest, tmp_dir)
        os.system(cmd)
        self.logger.info("Downloaded %s to %s" %
                         (src, _green(dest)))

    @_check_fabric
    def fetch_packages(self, dest=None):
        """Fetch Synnefo packages"""
        if dest is None:
            dest = self.config.get('Global', 'pkgs_dir')
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo_build-area", dest)
        self.logger.info("Downloaded debian packages to %s" %
                         _green(dest))
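

# Illustrative driver sketch: this is not part of the module's public
# interface, and the exact ordering of steps is an assumption, but it shows
# one plausible way to chain the SynnefoCI methods above, given a valid CI
# config file.  Note that executing this module directly would then really
# create (and finally destroy) a server on the configured cloud.
if __name__ == "__main__":
    synnefo_ci = SynnefoCI(cleanup_config=True)
    synnefo_ci.create_server()
    synnefo_ci.clone_repo()
    synnefo_ci.build_synnefo()
    synnefo_ci.fetch_packages()
    synnefo_ci.deploy_synnefo()
    synnefo_ci.unit_test()
    synnefo_ci.run_burnin()
    synnefo_ci.destroy_server()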