root / pithos / api / util.py @ 8cb45c13

# Copyright 2011 GRNET S.A. All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
#   1. Redistributions of source code must retain the above
#      copyright notice, this list of conditions and the following
#      disclaimer.
#
#   2. Redistributions in binary form must reproduce the above
#      copyright notice, this list of conditions and the following
#      disclaimer in the documentation and/or other materials
#      provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.

from functools import wraps
from time import time
from traceback import format_exc
from wsgiref.handlers import format_date_time
from binascii import hexlify

from django.conf import settings
from django.http import HttpResponse
from django.utils.http import http_date, parse_etags

from pithos.api.compat import parse_http_date_safe
from pithos.api.faults import (Fault, NotModified, BadRequest, ItemNotFound, LengthRequired,
                               PreconditionFailed, RangeNotSatisfiable, ServiceUnavailable)
from pithos.backends import backend

import datetime
import logging
import re
import hashlib
import uuid


logger = logging.getLogger(__name__)


def printable_meta_dict(d):
    """Format a meta dictionary for printing out json/xml.

    Convert all keys to lower case and replace dashes with underscores.
    Change the 'modified' key from the backend to 'last_modified' and format the date.
    """

    if 'modified' in d:
        d['last_modified'] = datetime.datetime.fromtimestamp(int(d['modified'])).isoformat()
        del(d['modified'])
    return dict([(k.lower().replace('-', '_'), v) for k, v in d.iteritems()])

def format_meta_key(k):
    """Convert underscores to dashes and capitalize intra-dash strings."""

    return '-'.join([x.capitalize() for x in k.replace('_', '-').split('-')])

def get_meta_prefix(request, prefix):
    """Get all prefix-* request headers in a dict. Reformat keys with format_meta_key()."""

    prefix = 'HTTP_' + prefix.upper().replace('-', '_')
    return dict([(format_meta_key(k[5:]), v) for k, v in request.META.iteritems() if k.startswith(prefix)])
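
# Illustrative example (not part of the original module): a request carrying
# the header 'X-Object-Meta-Color: blue' appears in request.META as
# 'HTTP_X_OBJECT_META_COLOR', so
#
#     get_meta_prefix(request, 'X-Object-Meta-')
#
# would return {'X-Object-Meta-Color': 'blue'}, with format_meta_key()
# restoring the dashed, capitalized form of the key.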

def get_account_meta(request):
    """Get metadata from an account request."""

    meta = get_meta_prefix(request, 'X-Account-Meta-')
    return meta

def put_account_meta(response, meta):
    """Put metadata in an account response."""

    response['X-Account-Container-Count'] = meta['count']
    response['X-Account-Bytes-Used'] = meta['bytes']
    if 'modified' in meta:
        response['Last-Modified'] = http_date(int(meta['modified']))
    for k in [x for x in meta.keys() if x.startswith('X-Account-Meta-')]:
        response[k.encode('utf-8')] = meta[k].encode('utf-8')
    if 'until_timestamp' in meta:
        response['X-Account-Until-Timestamp'] = http_date(int(meta['until_timestamp']))

def get_container_meta(request):
    """Get metadata from a container request."""

    meta = get_meta_prefix(request, 'X-Container-Meta-')
    return meta

def put_container_meta(response, meta):
    """Put metadata in a container response."""

    response['X-Container-Object-Count'] = meta['count']
    response['X-Container-Bytes-Used'] = meta['bytes']
    response['Last-Modified'] = http_date(int(meta['modified']))
    for k in [x for x in meta.keys() if x.startswith('X-Container-Meta-')]:
        response[k.encode('utf-8')] = meta[k].encode('utf-8')
    response['X-Container-Object-Meta'] = [x[14:] for x in meta['object_meta'] if x.startswith('X-Object-Meta-')]
    response['X-Container-Block-Size'] = backend.block_size
    response['X-Container-Block-Hash'] = backend.hash_algorithm
    if 'until_timestamp' in meta:
        response['X-Container-Until-Timestamp'] = http_date(int(meta['until_timestamp']))

def get_object_meta(request):
    """Get metadata from an object request."""

    meta = get_meta_prefix(request, 'X-Object-Meta-')
    if request.META.get('CONTENT_TYPE'):
        meta['Content-Type'] = request.META['CONTENT_TYPE']
    if request.META.get('HTTP_CONTENT_ENCODING'):
        meta['Content-Encoding'] = request.META['HTTP_CONTENT_ENCODING']
    if request.META.get('HTTP_CONTENT_DISPOSITION'):
        meta['Content-Disposition'] = request.META['HTTP_CONTENT_DISPOSITION']
    if request.META.get('HTTP_X_OBJECT_MANIFEST'):
        meta['X-Object-Manifest'] = request.META['HTTP_X_OBJECT_MANIFEST']
    if request.META.get('HTTP_X_OBJECT_PUBLIC'):
        meta['X-Object-Public'] = request.META['HTTP_X_OBJECT_PUBLIC']
    return meta

def put_object_meta(response, meta, public=False):
    """Put metadata in an object response."""

    response['ETag'] = meta['hash']
    response['Content-Length'] = meta['bytes']
    response['Content-Type'] = meta.get('Content-Type', 'application/octet-stream')
    response['Last-Modified'] = http_date(int(meta['modified']))
    if not public:
        response['X-Object-Version'] = meta['version']
        response['X-Object-Version-Timestamp'] = meta['version_timestamp']
        for k in [x for x in meta.keys() if x.startswith('X-Object-Meta-')]:
            response[k.encode('utf-8')] = meta[k].encode('utf-8')
        for k in ('Content-Encoding', 'Content-Disposition', 'X-Object-Manifest', 'X-Object-Public'):
            if k in meta:
                response[k] = meta[k]
    else:
        for k in ('Content-Encoding', 'Content-Disposition'):
            if k in meta:
                response[k] = meta[k]

def update_manifest_meta(request, v_account, meta):
    """Update metadata if the object has an X-Object-Manifest."""

    if 'X-Object-Manifest' in meta:
        hash = ''
        bytes = 0
        try:
            src_container, src_name = split_container_object_string(meta['X-Object-Manifest'])
            objects = backend.list_objects(request.user, v_account, src_container, prefix=src_name, virtual=False)
            for x in objects:
                src_meta = backend.get_object_meta(request.user, v_account, src_container, x[0], x[1])
                hash += src_meta['hash']
                bytes += src_meta['bytes']
        except:
            # Ignore errors.
            return
        meta['bytes'] = bytes
        md5 = hashlib.md5()
        md5.update(hash)
        meta['hash'] = md5.hexdigest().lower()
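
# Illustrative example (assumed data, not from the original module): for an
# object carrying 'X-Object-Manifest: segments/video-', the loop above sums
# the 'bytes' of every object in container 'segments' whose name starts with
# 'video-', and sets 'hash' to the MD5 hex digest of their concatenated hashes.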

def validate_modification_preconditions(request, meta):
    """Check that the modified timestamp conforms with the preconditions set."""

    if 'modified' not in meta:
        return # TODO: Always return?

    if_modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE')
    if if_modified_since is not None:
        if_modified_since = parse_http_date_safe(if_modified_since)
    if if_modified_since is not None and int(meta['modified']) <= if_modified_since:
        raise NotModified('Resource has not been modified')

    if_unmodified_since = request.META.get('HTTP_IF_UNMODIFIED_SINCE')
    if if_unmodified_since is not None:
        if_unmodified_since = parse_http_date_safe(if_unmodified_since)
    if if_unmodified_since is not None and int(meta['modified']) > if_unmodified_since:
        raise PreconditionFailed('Resource has been modified')

def validate_matching_preconditions(request, meta):
    """Check that the ETag conforms with the preconditions set."""

    if 'hash' not in meta:
        return # TODO: Always return?

    if_match = request.META.get('HTTP_IF_MATCH')
    if if_match is not None and if_match != '*':
        if meta['hash'] not in [x.lower() for x in parse_etags(if_match)]:
            raise PreconditionFailed('Resource Etag does not match')

    if_none_match = request.META.get('HTTP_IF_NONE_MATCH')
    if if_none_match is not None:
        if if_none_match == '*' or meta['hash'] in [x.lower() for x in parse_etags(if_none_match)]:
            raise NotModified('Resource Etag matches')

def split_container_object_string(s):
    pos = s.find('/')
    if pos == -1:
        raise ValueError
    return s[:pos], s[(pos + 1):]
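
# Illustrative example: split_container_object_string('photos/summer/beach.jpg')
# returns ('photos', 'summer/beach.jpg'); a string without a '/' raises ValueError.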

def copy_or_move_object(request, v_account, src_container, src_name, dest_container, dest_name, move=False):
    """Copy or move an object."""

    meta = get_object_meta(request)
    # Keep previous values of 'Content-Type' (if a new one is absent) and 'hash'.
    try:
        src_meta = backend.get_object_meta(request.user, v_account, src_container, src_name)
    except NameError:
        raise ItemNotFound('Container or object does not exist')
    if 'Content-Type' in meta and 'Content-Type' in src_meta:
        del(src_meta['Content-Type'])
    for k in ('Content-Type', 'hash'):
        if k in src_meta:
            meta[k] = src_meta[k]

    src_version = request.META.get('HTTP_X_SOURCE_VERSION')
    try:
        if move:
            backend.move_object(request.user, v_account, src_container, src_name, dest_container, dest_name, meta, True, src_version)
        else:
            backend.copy_object(request.user, v_account, src_container, src_name, dest_container, dest_name, meta, True, src_version)
    except NameError:
        raise ItemNotFound('Container or object does not exist')

def get_int_parameter(request, name):
    p = request.GET.get(name)
    if p is not None:
        try:
            p = int(p)
        except ValueError:
            return None
        if p < 0:
            return None
    return p

def get_content_length(request):
    content_length = request.META.get('CONTENT_LENGTH')
    if not content_length:
        raise LengthRequired('Missing Content-Length header')
    try:
        content_length = int(content_length)
        if content_length < 0:
            raise ValueError
    except ValueError:
        raise BadRequest('Invalid Content-Length header')
    return content_length

def get_range(request, size):
    """Parse a Range header from the request.

    Either returns None, when the header is absent or should be ignored,
    or a list of (offset, length) tuples - these should be further checked.
    """

    ranges = request.META.get('HTTP_RANGE', '').replace(' ', '')
    if not ranges.startswith('bytes='):
        return None

    ret = []
    for r in (x.strip() for x in ranges[6:].split(',')):
        p = re.compile('^(?P<offset>\d*)-(?P<upto>\d*)$')
        m = p.match(r)
        if not m:
            return None
        offset = m.group('offset')
        upto = m.group('upto')
        if offset == '' and upto == '':
            return None

        if offset != '':
            offset = int(offset)
            if upto != '':
                upto = int(upto)
                if offset > upto:
                    return None
                ret.append((offset, upto - offset + 1))
            else:
                ret.append((offset, size - offset))
        else:
            length = int(upto)
            ret.append((size - length, length))

    return ret
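
# Illustrative example (assumed header value): for a 1000-byte object and
# 'Range: bytes=0-499,-100', get_range() returns the (offset, length) pairs
# [(0, 500), (900, 100)]; a malformed or empty specification returns None.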

def get_content_range(request):
    """Parse a Content-Range header from the request.

    Either returns None, when the header is absent or should be ignored,
    or an (offset, length, total) tuple - check it, as length and total may be None.
    Returns (None, None, None) if the provided range is '*/*'.
    """

    ranges = request.META.get('HTTP_CONTENT_RANGE', '')
    if not ranges:
        return None

    p = re.compile('^bytes (?P<offset>\d+)-(?P<upto>\d*)/(?P<total>(\d+|\*))$')
    m = p.match(ranges)
    if not m:
        if ranges == 'bytes */*':
            return (None, None, None)
        return None
    offset = int(m.group('offset'))
    upto = m.group('upto')
    total = m.group('total')
    if upto != '':
        upto = int(upto)
    else:
        upto = None
    if total != '*':
        total = int(total)
    else:
        total = None
    if (upto is not None and offset > upto) or \
        (total is not None and offset >= total) or \
        (total is not None and upto is not None and upto >= total):
        return None

    if upto is None:
        length = None
    else:
        length = upto - offset + 1
    return (offset, length, total)
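
# Illustrative example (assumed header values): 'Content-Range: bytes 0-499/1000'
# parses to (0, 500, 1000), 'bytes 500-/1000' to (500, None, 1000), and the
# special form 'bytes */*' to (None, None, None).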

def raw_input_socket(request):
    """Return the socket for reading the rest of the request."""

    server_software = request.META.get('SERVER_SOFTWARE')
    if not server_software:
        if 'wsgi.input' in request.environ:
            return request.environ['wsgi.input']
        raise ServiceUnavailable('Unknown server software')
    if server_software.startswith('WSGIServer'):
        return request.environ['wsgi.input']
    elif server_software.startswith('mod_python'):
        return request._req
    raise ServiceUnavailable('Unknown server software')

MAX_UPLOAD_SIZE = 10 * (1024 * 1024) # 10MB

def socket_read_iterator(sock, length=0, blocksize=4096):
    """Return at most blocksize bytes read from the socket in each iteration.

    Read up to 'length' bytes. If 'length' is negative, attempt a chunked read.
    The maximum amount of data read is controlled by MAX_UPLOAD_SIZE.
    """

    if length < 0: # Chunked transfers
        data = ''
        while length < MAX_UPLOAD_SIZE:
            # Get chunk size.
            if hasattr(sock, 'readline'):
                chunk_length = sock.readline()
            else:
                chunk_length = ''
                while chunk_length[-1:] != '\n':
                    chunk_length += sock.read(1)
                chunk_length = chunk_length.strip()
            pos = chunk_length.find(';')
            if pos >= 0:
                chunk_length = chunk_length[:pos]
            try:
                chunk_length = int(chunk_length, 16)
            except Exception, e:
                raise BadRequest('Bad chunk size') # TODO: Change to something more appropriate.
            # Check if done.
            if chunk_length == 0:
                if len(data) > 0:
                    yield data
                return
            # Get the actual data.
            while chunk_length > 0:
                chunk = sock.read(min(chunk_length, blocksize))
                chunk_length -= len(chunk)
                if length > 0:
                    length += len(chunk)
                data += chunk
                if len(data) >= blocksize:
                    ret = data[:blocksize]
                    data = data[blocksize:]
                    yield ret
            sock.read(2) # CRLF
        # TODO: Raise something to note that maximum size is reached.
    else:
        if length > MAX_UPLOAD_SIZE:
            # TODO: Raise something to note that maximum size is reached.
            pass
        while length > 0:
            data = sock.read(min(length, blocksize))
            length -= len(data)
            yield data
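
# Typical usage sketch (assumed caller, not from the original module): a PUT
# handler would stream the request body in block-sized pieces, e.g.
#
#     sock = raw_input_socket(request)
#     for data in socket_read_iterator(sock, content_length, backend.block_size):
#         ...  # hash/store each piece via the backend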

class ObjectWrapper(object):
    """Return the object's data block by block in each iteration.

    Read from the object using the offset and length provided in each entry of the range list.
    """

    def __init__(self, ranges, sizes, hashmaps, boundary):
        self.ranges = ranges
        self.sizes = sizes
        self.hashmaps = hashmaps
        self.boundary = boundary
        self.size = sum(self.sizes)

        self.file_index = 0
        self.block_index = 0
        self.block_hash = -1
        self.block = ''

        self.range_index = -1
        self.offset, self.length = self.ranges[0]

    def __iter__(self):
        return self

    def part_iterator(self):
        if self.length > 0:
            # Get the file for the current offset.
            file_size = self.sizes[self.file_index]
            while self.offset >= file_size:
                self.offset -= file_size
                self.file_index += 1
                file_size = self.sizes[self.file_index]

            # Get the block for the current position.
            self.block_index = int(self.offset / backend.block_size)
            if self.block_hash != self.hashmaps[self.file_index][self.block_index]:
                self.block_hash = self.hashmaps[self.file_index][self.block_index]
                try:
                    self.block = backend.get_block(self.block_hash)
                except NameError:
                    raise ItemNotFound('Block does not exist')

            # Get the data from the block.
            bo = self.offset % backend.block_size
            bl = min(self.length, len(self.block) - bo)
            data = self.block[bo:bo + bl]
            self.offset += bl
            self.length -= bl
            return data
        else:
            raise StopIteration

    def next(self):
        if len(self.ranges) == 1:
            return self.part_iterator()
        if self.range_index == len(self.ranges):
            raise StopIteration
        try:
            if self.range_index == -1:
                raise StopIteration
            return self.part_iterator()
        except StopIteration:
            self.range_index += 1
            out = []
            if self.range_index < len(self.ranges):
                # Part header.
                self.offset, self.length = self.ranges[self.range_index]
                self.file_index = 0
                if self.range_index > 0:
                    out.append('')
                out.append('--' + self.boundary)
                out.append('Content-Range: bytes %d-%d/%d' % (self.offset, self.offset + self.length - 1, self.size))
                out.append('Content-Transfer-Encoding: binary')
                out.append('')
                out.append('')
                return '\r\n'.join(out)
            else:
                # Footer.
                out.append('')
                out.append('--' + self.boundary + '--')
                out.append('')
                return '\r\n'.join(out)
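
# Illustrative example (assumed values): for ranges [(0, 10), (90, 10)] over a
# 100-byte object, iteration yields for each range a MIME part header such as
#
#     --<boundary>\r\n
#     Content-Range: bytes 0-9/100\r\n
#     Content-Transfer-Encoding: binary\r\n\r\n
#
# followed by the range's data read block by block, and finally the closing
# '--<boundary>--' footer, as expected for multipart/byteranges responses.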

def object_data_response(request, sizes, hashmaps, meta, public=False):
    """Get the HttpResponse object for replying with the object's data."""

    # Range handling.
    size = sum(sizes)
    ranges = get_range(request, size)
    if ranges is None:
        ranges = [(0, size)]
        ret = 200
    else:
        check = [True for offset, length in ranges if
                    length <= 0 or length > size or
                    offset < 0 or offset >= size or
                    offset + length > size]
        if len(check) > 0:
            raise RangeNotSatisfiable('Requested range exceeds object limits')
        ret = 206

    if ret == 206 and len(ranges) > 1:
        boundary = uuid.uuid4().hex
    else:
        boundary = ''
    wrapper = ObjectWrapper(ranges, sizes, hashmaps, boundary)
    response = HttpResponse(wrapper, status=ret)
    put_object_meta(response, meta, public)
    if ret == 206:
        if len(ranges) == 1:
            offset, length = ranges[0]
            response['Content-Length'] = length # Update with the correct length.
            response['Content-Range'] = 'bytes %d-%d/%d' % (offset, offset + length - 1, size)
        else:
            del(response['Content-Length'])
            response['Content-Type'] = 'multipart/byteranges; boundary=%s' % (boundary,)
    return response
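
# Behaviour summary (derived from the code above): no Range header yields a
# 200 response with the whole object; a single valid range yields 206 with a
# Content-Range header; multiple ranges yield 206 with Content-Type
# multipart/byteranges and the generated boundary; an unsatisfiable range
# raises RangeNotSatisfiable.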

def hashmap_hash(hashmap):
    """Produce the root hash, treating the hashmap as a Merkle-like tree."""

    def subhash(d):
        h = hashlib.new(backend.hash_algorithm)
        h.update(d)
        return h.digest()

    if len(hashmap) == 0:
        return hexlify(subhash(''))
    if len(hashmap) == 1:
        return hexlify(subhash(hashmap[0]))
    s = 2
    while s < len(hashmap):
        s = s * 2
    h = hashmap + ([('\x00' * len(hashmap[0]))] * (s - len(hashmap)))
    h = [subhash(h[x] + (h[x + 1] if x + 1 < len(h) else '')) for x in range(0, len(h), 2)]
    while len(h) > 1:
        h = [subhash(h[x] + (h[x + 1] if x + 1 < len(h) else '')) for x in range(0, len(h), 2)]
    return hexlify(h[0])
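
# Illustrative example (assumed input): for a hashmap of three block hashes
# [h0, h1, h2], the list is padded with a zero block z to the next power of
# two and reduced pairwise, so the root is
#
#     subhash(subhash(h0 + h1) + subhash(h2 + z))
#
# returned as a hex string.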

def update_response_headers(request, response):
    if request.serialization == 'xml':
        response['Content-Type'] = 'application/xml; charset=UTF-8'
    elif request.serialization == 'json':
        response['Content-Type'] = 'application/json; charset=UTF-8'
    elif not response['Content-Type']:
        response['Content-Type'] = 'text/plain; charset=UTF-8'

    if settings.TEST:
        response['Date'] = format_date_time(time())

def render_fault(request, fault):
    if settings.DEBUG or settings.TEST:
        fault.details = format_exc(fault)

    request.serialization = 'text'
    data = '\n'.join((fault.message, fault.details)) + '\n'
    response = HttpResponse(data, status=fault.code)
    update_response_headers(request, response)
    return response

def request_serialization(request, format_allowed=False):
    """Return the serialization format requested.

    Valid formats are 'text' and 'json'; 'xml' is also valid if 'format_allowed' is True.
    """

    if not format_allowed:
        return 'text'

    format = request.GET.get('format')
    if format == 'json':
        return 'json'
    elif format == 'xml':
        return 'xml'

    for item in request.META.get('HTTP_ACCEPT', '').split(','):
        accept, sep, rest = item.strip().partition(';')
        if accept == 'application/json':
            return 'json'
        elif accept == 'application/xml' or accept == 'text/xml':
            return 'xml'

    return 'text'

def api_method(http_method=None, format_allowed=False):
    """Decorator function for views that implement an API method."""

    def decorator(func):
        @wraps(func)
        def wrapper(request, *args, **kwargs):
            try:
                if http_method and request.method != http_method:
                    raise BadRequest('Method not allowed.')

                # The args variable may contain up to (account, container, object).
                if len(args) > 1 and len(args[1]) > 256:
                    raise BadRequest('Container name too large.')
                if len(args) > 2 and len(args[2]) > 1024:
                    raise BadRequest('Object name too large.')

                # Fill in custom request variables.
                request.serialization = request_serialization(request, format_allowed)
                # TODO: Authenticate.
                request.user = "test"

                response = func(request, *args, **kwargs)
                update_response_headers(request, response)
                return response
            except Fault, fault:
                return render_fault(request, fault)
            except BaseException, e:
                logger.exception('Unexpected error: %s' % e)
                fault = ServiceUnavailable('Unexpected error')
                return render_fault(request, fault)
        return wrapper
    return decorator
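
# Usage sketch (assumed view, not from the original module): API views wrap
# themselves with the decorator, e.g.
#
#     @api_method('GET', format_allowed=True)
#     def container_list(request, v_account):
#         ...
#         return HttpResponse(...)
#
# so that method checking, serialization selection and fault rendering are
# handled uniformly.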