root / pithos / api / util.py @ 6d1e6dce

# Copyright 2011 GRNET S.A. All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
#   1. Redistributions of source code must retain the above
#      copyright notice, this list of conditions and the following
#      disclaimer.
#
#   2. Redistributions in binary form must reproduce the above
#      copyright notice, this list of conditions and the following
#      disclaimer in the documentation and/or other materials
#      provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.

from functools import wraps
from time import time
from traceback import format_exc
from wsgiref.handlers import format_date_time
from binascii import hexlify

from django.conf import settings
from django.http import HttpResponse
from django.utils.http import http_date, parse_etags

from pithos.api.compat import parse_http_date_safe, parse_http_date
from pithos.api.faults import (Fault, NotModified, BadRequest, Unauthorized, ItemNotFound,
                                Conflict, LengthRequired, PreconditionFailed, RangeNotSatisfiable,
                                ServiceUnavailable)
from pithos.backends import backend
from pithos.backends.base import NotAllowedError

import datetime
import logging
import re
import hashlib
import uuid


logger = logging.getLogger(__name__)


def printable_header_dict(d):
    """Format a meta dictionary for printing out json/xml.

    Convert all keys to lower case and replace dashes with underscores.
    Change the 'modified' key from the backend to 'last_modified' and format the date.
    """

    if 'modified' in d:
        d['last_modified'] = datetime.datetime.fromtimestamp(int(d['modified'])).isoformat()
        del(d['modified'])
    return dict([(k.lower().replace('-', '_'), v) for k, v in d.iteritems()])

def format_header_key(k):
    """Convert underscores to dashes and capitalize intra-dash strings."""

    return '-'.join([x.capitalize() for x in k.replace('_', '-').split('-')])

def get_header_prefix(request, prefix):
    """Get all prefix-* request headers in a dict. Reformat keys with format_header_key()."""

    prefix = 'HTTP_' + prefix.upper().replace('-', '_')
    return dict([(format_header_key(k[5:]), v.replace('_', '')) for k, v in request.META.iteritems() if k.startswith(prefix) and len(k) > len(prefix)])
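
# Illustrative examples of the header-key helpers above (added commentary,
# not part of the original module):
#
#   format_header_key('x_account_meta_color')  ->  'X-Account-Meta-Color'
#
#   With request.META containing {'HTTP_X_ACCOUNT_META_COLOR': 'blue'},
#   get_header_prefix(request, 'X-Account-Meta-') would return
#   {'X-Account-Meta-Color': 'blue'}.  Note that underscores are stripped
#   from header values by the v.replace('_', '') above.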

def get_account_headers(request):
    meta = get_header_prefix(request, 'X-Account-Meta-')
    groups = {}
    for k, v in get_header_prefix(request, 'X-Account-Group-').iteritems():
        n = k[16:].lower()
        if '-' in n or '_' in n:
            raise BadRequest('Bad characters in group name')
        groups[n] = v.replace(' ', '').split(',')
        if '' in groups[n]:
            groups[n].remove('')
    return meta, groups

def put_account_headers(response, meta, groups):
    response['X-Account-Container-Count'] = meta['count']
    response['X-Account-Bytes-Used'] = meta['bytes']
    if 'modified' in meta:
        response['Last-Modified'] = http_date(int(meta['modified']))
    for k in [x for x in meta.keys() if x.startswith('X-Account-Meta-')]:
        response[k.encode('utf-8')] = meta[k].encode('utf-8')
    if 'until_timestamp' in meta:
        response['X-Account-Until-Timestamp'] = http_date(int(meta['until_timestamp']))
    for k, v in groups.iteritems():
        response[format_header_key('X-Account-Group-' + k).encode('utf-8')] = (','.join(v)).encode('utf-8')
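
# Illustrative example (added commentary, not part of the original module):
# a request header 'X-Account-Group-Dev: user1,user2' arrives in
# request.META as 'HTTP_X_ACCOUNT_GROUP_DEV', and get_account_headers()
# would return groups == {'dev': ['user1', 'user2']}; put_account_headers()
# writes it back out as 'X-Account-Group-Dev: user1,user2'.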

def get_container_headers(request):
    meta = get_header_prefix(request, 'X-Container-Meta-')
    return meta

def put_container_headers(response, meta):
    response['X-Container-Object-Count'] = meta['count']
    response['X-Container-Bytes-Used'] = meta['bytes']
    response['Last-Modified'] = http_date(int(meta['modified']))
    for k in [x for x in meta.keys() if x.startswith('X-Container-Meta-')]:
        response[k.encode('utf-8')] = meta[k].encode('utf-8')
    response['X-Container-Object-Meta'] = [x[14:] for x in meta['object_meta'] if x.startswith('X-Object-Meta-')]
    response['X-Container-Block-Size'] = backend.block_size
    response['X-Container-Block-Hash'] = backend.hash_algorithm
    if 'until_timestamp' in meta:
        response['X-Container-Until-Timestamp'] = http_date(int(meta['until_timestamp']))

def get_object_headers(request):
    meta = get_header_prefix(request, 'X-Object-Meta-')
    if request.META.get('CONTENT_TYPE'):
        meta['Content-Type'] = request.META['CONTENT_TYPE']
    if request.META.get('HTTP_CONTENT_ENCODING'):
        meta['Content-Encoding'] = request.META['HTTP_CONTENT_ENCODING']
    if request.META.get('HTTP_CONTENT_DISPOSITION'):
        meta['Content-Disposition'] = request.META['HTTP_CONTENT_DISPOSITION']
    if request.META.get('HTTP_X_OBJECT_MANIFEST'):
        meta['X-Object-Manifest'] = request.META['HTTP_X_OBJECT_MANIFEST']
    return meta

def put_object_headers(response, meta, public=False):
    response['ETag'] = meta['hash']
    response['Content-Length'] = meta['bytes']
    response['Content-Type'] = meta.get('Content-Type', 'application/octet-stream')
    response['Last-Modified'] = http_date(int(meta['modified']))
    if not public:
        response['X-Object-Modified-By'] = meta['modified_by']
        response['X-Object-Version'] = meta['version']
        response['X-Object-Version-Timestamp'] = http_date(int(meta['version_timestamp']))
        for k in [x for x in meta.keys() if x.startswith('X-Object-Meta-')]:
            response[k.encode('utf-8')] = meta[k].encode('utf-8')
        for k in ('Content-Encoding', 'Content-Disposition', 'X-Object-Manifest', 'X-Object-Sharing', 'X-Object-Shared-By'):
            if k in meta:
                response[k] = meta[k]
    else:
        for k in ('Content-Encoding', 'Content-Disposition'):
            if k in meta:
                response[k] = meta[k]

def update_manifest_meta(request, v_account, meta):
    """Update metadata if the object has an X-Object-Manifest."""

    if 'X-Object-Manifest' in meta:
        hash = ''
        bytes = 0
        try:
            src_container, src_name = split_container_object_string('/' + meta['X-Object-Manifest'])
            objects = backend.list_objects(request.user, v_account, src_container, prefix=src_name, virtual=False)
            for x in objects:
                src_meta = backend.get_object_meta(request.user, v_account, src_container, x[0], x[1])
                hash += src_meta['hash']
                bytes += src_meta['bytes']
        except:
            # Ignore errors.
            return
        meta['bytes'] = bytes
        md5 = hashlib.md5()
        md5.update(hash)
        meta['hash'] = md5.hexdigest().lower()

def update_sharing_meta(permissions, v_account, v_container, v_object, meta):
    if permissions is None:
        return
    perm_path, perms = permissions
    if len(perms) == 0:
        return
    ret = []
    r = ','.join(perms.get('read', []))
    if r:
        ret.append('read=' + r)
    w = ','.join(perms.get('write', []))
    if w:
        ret.append('write=' + w)
    meta['X-Object-Sharing'] = '; '.join(ret)
    if '/'.join((v_account, v_container, v_object)) != perm_path:
        meta['X-Object-Shared-By'] = perm_path

def validate_modification_preconditions(request, meta):
    """Check that the modified timestamp conforms with the preconditions set."""

    if 'modified' not in meta:
        return # TODO: Always return?

    if_modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE')
    if if_modified_since is not None:
        if_modified_since = parse_http_date_safe(if_modified_since)
    if if_modified_since is not None and int(meta['modified']) <= if_modified_since:
        raise NotModified('Resource has not been modified')

    if_unmodified_since = request.META.get('HTTP_IF_UNMODIFIED_SINCE')
    if if_unmodified_since is not None:
        if_unmodified_since = parse_http_date_safe(if_unmodified_since)
    if if_unmodified_since is not None and int(meta['modified']) > if_unmodified_since:
        raise PreconditionFailed('Resource has been modified')

def validate_matching_preconditions(request, meta):
    """Check that the ETag conforms with the preconditions set."""

    if 'hash' not in meta:
        return # TODO: Always return?

    if_match = request.META.get('HTTP_IF_MATCH')
    if if_match is not None and if_match != '*':
        if meta['hash'] not in [x.lower() for x in parse_etags(if_match)]:
            raise PreconditionFailed('Resource Etag does not match')

    if_none_match = request.META.get('HTTP_IF_NONE_MATCH')
    if if_none_match is not None:
        if if_none_match == '*' or meta['hash'] in [x.lower() for x in parse_etags(if_none_match)]:
            raise NotModified('Resource Etag matches')

def split_container_object_string(s):
    if not len(s) > 0 or s[0] != '/':
        raise ValueError
    s = s[1:]
    pos = s.find('/')
    if pos == -1:
        raise ValueError
    return s[:pos], s[(pos + 1):]
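
# Illustrative example (added commentary, not part of the original module):
#
#   split_container_object_string('/pics/summer/photo.jpg')
#       ->  ('pics', 'summer/photo.jpg')
#
# Strings without a leading slash, or without a '/' separating the container
# from the object part, raise ValueError.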

def copy_or_move_object(request, v_account, src_container, src_name, dest_container, dest_name, move=False):
    """Copy or move an object."""

    meta = get_object_headers(request)
    permissions = get_sharing(request)
    src_version = request.META.get('HTTP_X_SOURCE_VERSION')
    try:
        if move:
            backend.move_object(request.user, v_account, src_container, src_name, dest_container, dest_name, meta, False, permissions)
        else:
            backend.copy_object(request.user, v_account, src_container, src_name, dest_container, dest_name, meta, False, permissions, src_version)
    except NotAllowedError:
        raise Unauthorized('Access denied')
    except (NameError, IndexError):
        raise ItemNotFound('Container or object does not exist')
    except ValueError:
        raise BadRequest('Invalid sharing header')
    except AttributeError:
        raise Conflict('Sharing already set above or below this path in the hierarchy')

def get_int_parameter(request, name):
    p = request.GET.get(name)
    if p is not None:
        try:
            p = int(p)
        except ValueError:
            return None
        if p < 0:
            return None
    return p

def get_content_length(request):
    content_length = request.META.get('CONTENT_LENGTH')
    if not content_length:
        raise LengthRequired('Missing Content-Length header')
    try:
        content_length = int(content_length)
        if content_length < 0:
            raise ValueError
    except ValueError:
        raise BadRequest('Invalid Content-Length header')
    return content_length

def get_range(request, size):
    """Parse a Range header from the request.

    Either returns None, when the header does not exist or should be ignored,
    or a list of (offset, length) tuples, which should be checked further.
    """

    ranges = request.META.get('HTTP_RANGE', '').replace(' ', '')
    if not ranges.startswith('bytes='):
        return None

    ret = []
    for r in (x.strip() for x in ranges[6:].split(',')):
        p = re.compile('^(?P<offset>\d*)-(?P<upto>\d*)$')
        m = p.match(r)
        if not m:
            return None
        offset = m.group('offset')
        upto = m.group('upto')
        if offset == '' and upto == '':
            return None

        if offset != '':
            offset = int(offset)
            if upto != '':
                upto = int(upto)
                if offset > upto:
                    return None
                ret.append((offset, upto - offset + 1))
            else:
                ret.append((offset, size - offset))
        else:
            length = int(upto)
            ret.append((size - length, length))

    return ret
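
# Illustrative examples (added commentary, not part of the original module),
# for an object of size 10000:
#
#   'Range: bytes=0-499'       ->  [(0, 500)]
#   'Range: bytes=500-'        ->  [(500, 9500)]
#   'Range: bytes=-500'        ->  [(9500, 500)]
#   'Range: bytes=0-499,-500'  ->  [(0, 500), (9500, 500)]
#
# Anything that does not start with 'bytes=' or fails to parse yields None.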

def get_content_range(request):
    """Parse a Content-Range header from the request.

    Either returns None, when the header does not exist or should be ignored,
    or an (offset, length, total) tuple - check it, as length and total may be None.
    Returns (None, None, None) if the provided range is '*/*'.
    """

    ranges = request.META.get('HTTP_CONTENT_RANGE', '')
    if not ranges:
        return None

    p = re.compile('^bytes (?P<offset>\d+)-(?P<upto>\d*)/(?P<total>(\d+|\*))$')
    m = p.match(ranges)
    if not m:
        if ranges == 'bytes */*':
            return (None, None, None)
        return None
    offset = int(m.group('offset'))
    upto = m.group('upto')
    total = m.group('total')
    if upto != '':
        upto = int(upto)
    else:
        upto = None
    if total != '*':
        total = int(total)
    else:
        total = None
    if (upto is not None and offset > upto) or \
        (total is not None and offset >= total) or \
        (total is not None and upto is not None and upto >= total):
        return None

    if upto is None:
        length = None
    else:
        length = upto - offset + 1
    return (offset, length, total)
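
# Illustrative examples (added commentary, not part of the original module):
#
#   'Content-Range: bytes 0-499/1234'  ->  (0, 500, 1234)
#   'Content-Range: bytes 500-/*'      ->  (500, None, None)
#   'Content-Range: bytes */*'         ->  (None, None, None)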

def get_sharing(request):
    """Parse an X-Object-Sharing header from the request.

    Raises BadRequest on error.
    """

    permissions = request.META.get('HTTP_X_OBJECT_SHARING')
    if permissions is None:
        return None

    ret = {}
    permissions = permissions.replace(' ', '')
    if permissions == '':
        return ret
    for perm in (x for x in permissions.split(';')):
        if perm.startswith('read='):
            ret['read'] = [v.replace(' ','').lower() for v in perm[5:].split(',')]
            if '' in ret['read']:
                ret['read'].remove('')
            if '*' in ret['read']:
                ret['read'] = ['*']
            if len(ret['read']) == 0:
                raise BadRequest('Bad X-Object-Sharing header value')
        elif perm.startswith('write='):
            ret['write'] = [v.replace(' ','').lower() for v in perm[6:].split(',')]
            if '' in ret['write']:
                ret['write'].remove('')
            if '*' in ret['write']:
                ret['write'] = ['*']
            if len(ret['write']) == 0:
                raise BadRequest('Bad X-Object-Sharing header value')
        else:
            raise BadRequest('Bad X-Object-Sharing header value')
    return ret
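
# Illustrative example (added commentary, not part of the original module):
#
#   'X-Object-Sharing: read=user1,user2; write=*'
#       ->  {'read': ['user1', 'user2'], 'write': ['*']}
#
# An empty header value returns {}; a missing header returns None; any other
# malformed value raises BadRequest.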

def raw_input_socket(request):
    """Return the socket for reading the rest of the request."""

    server_software = request.META.get('SERVER_SOFTWARE')
    if not server_software:
        if 'wsgi.input' in request.environ:
            return request.environ['wsgi.input']
        raise ServiceUnavailable('Unknown server software')
    if server_software.startswith('WSGIServer'):
        return request.environ['wsgi.input']
    elif server_software.startswith('mod_python'):
        return request._req
    raise ServiceUnavailable('Unknown server software')

MAX_UPLOAD_SIZE = 10 * (1024 * 1024) # 10MB

def socket_read_iterator(sock, length=0, blocksize=4096):
    """Return at most 'blocksize' bytes read from the socket in each iteration.

    Read up to 'length'. If 'length' is negative, will attempt a chunked read.
    The maximum amount of data read is controlled by MAX_UPLOAD_SIZE.
    """

    if length < 0: # Chunked transfers
        data = ''
        while length < MAX_UPLOAD_SIZE:
            # Get chunk size.
            if hasattr(sock, 'readline'):
                chunk_length = sock.readline()
            else:
                chunk_length = ''
                while chunk_length[-1:] != '\n':
                    chunk_length += sock.read(1)
                chunk_length = chunk_length.strip()
            pos = chunk_length.find(';')
            if pos >= 0:
                chunk_length = chunk_length[:pos]
            try:
                chunk_length = int(chunk_length, 16)
            except Exception, e:
                raise BadRequest('Bad chunk size') # TODO: Change to something more appropriate.
            # Check if done.
            if chunk_length == 0:
                if len(data) > 0:
                    yield data
                return
            # Get the actual data.
            while chunk_length > 0:
                chunk = sock.read(min(chunk_length, blocksize))
                chunk_length -= len(chunk)
                if length > 0:
                    length += len(chunk)
                data += chunk
                if len(data) >= blocksize:
                    ret = data[:blocksize]
                    data = data[blocksize:]
                    yield ret
            sock.read(2) # CRLF
        # TODO: Raise something to note that maximum size is reached.
    else:
        if length > MAX_UPLOAD_SIZE:
            # TODO: Raise something to note that maximum size is reached.
            pass
        while length > 0:
            data = sock.read(min(length, blocksize))
            length -= len(data)
            yield data
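
# Illustrative example (added commentary, not part of the original module):
# with length < 0 the generator parses standard HTTP/1.1 chunked encoding,
# i.e. a hex chunk-size line, the chunk data, and a trailing CRLF, ending
# with a zero-size chunk.  A socket delivering
#
#   '4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n'
#
# would yield the body 'Wikipedia' (in pieces of at most 'blocksize' bytes).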

class ObjectWrapper(object):
    """Return the object's data block-per-block in each iteration.

    Read from the object using the offset and length provided in each entry of the range list.
    """

    def __init__(self, ranges, sizes, hashmaps, boundary):
        self.ranges = ranges
        self.sizes = sizes
        self.hashmaps = hashmaps
        self.boundary = boundary
        self.size = sum(self.sizes)

        self.file_index = 0
        self.block_index = 0
        self.block_hash = -1
        self.block = ''

        self.range_index = -1
        self.offset, self.length = self.ranges[0]

    def __iter__(self):
        return self

    def part_iterator(self):
        if self.length > 0:
            # Get the file for the current offset.
            file_size = self.sizes[self.file_index]
            while self.offset >= file_size:
                self.offset -= file_size
                self.file_index += 1
                file_size = self.sizes[self.file_index]

            # Get the block for the current position.
            self.block_index = int(self.offset / backend.block_size)
            if self.block_hash != self.hashmaps[self.file_index][self.block_index]:
                self.block_hash = self.hashmaps[self.file_index][self.block_index]
                try:
                    self.block = backend.get_block(self.block_hash)
                except NameError:
                    raise ItemNotFound('Block does not exist')

            # Get the data from the block.
            bo = self.offset % backend.block_size
            bl = min(self.length, len(self.block) - bo)
            data = self.block[bo:bo + bl]
            self.offset += bl
            self.length -= bl
            return data
        else:
            raise StopIteration

    def next(self):
        if len(self.ranges) == 1:
            return self.part_iterator()
        if self.range_index == len(self.ranges):
            raise StopIteration
        try:
            if self.range_index == -1:
                raise StopIteration
            return self.part_iterator()
        except StopIteration:
            self.range_index += 1
            out = []
            if self.range_index < len(self.ranges):
                # Part header.
                self.offset, self.length = self.ranges[self.range_index]
                self.file_index = 0
                if self.range_index > 0:
                    out.append('')
                out.append('--' + self.boundary)
                out.append('Content-Range: bytes %d-%d/%d' % (self.offset, self.offset + self.length - 1, self.size))
                out.append('Content-Transfer-Encoding: binary')
                out.append('')
                out.append('')
                return '\r\n'.join(out)
            else:
                # Footer.
                out.append('')
                out.append('--' + self.boundary + '--')
                out.append('')
                return '\r\n'.join(out)

def object_data_response(request, sizes, hashmaps, meta, public=False):
    """Get the HttpResponse object for replying with the object's data."""

    # Range handling.
    size = sum(sizes)
    ranges = get_range(request, size)
    if ranges is None:
        ranges = [(0, size)]
        ret = 200
    else:
        check = [True for offset, length in ranges if
                    length <= 0 or length > size or
                    offset < 0 or offset >= size or
                    offset + length > size]
        if len(check) > 0:
            raise RangeNotSatisfiable('Requested range exceeds object limits')
        ret = 206
        if_range = request.META.get('HTTP_IF_RANGE', '')
        if if_range and if_range.startswith('If-Range:'):
            if_range = if_range.split('If-Range:')[1]
            try:
                # A modification time was passed instead of an ETag.
                last_modified = parse_http_date(if_range)
                if last_modified != meta['modified']:
                    ranges = [(0, size)]
                    ret = 200
            except ValueError:
                if if_range != meta['hash']:
                    ranges = [(0, size)]
                    ret = 200

    if ret == 206 and len(ranges) > 1:
        boundary = uuid.uuid4().hex
    else:
        boundary = ''
    wrapper = ObjectWrapper(ranges, sizes, hashmaps, boundary)
    response = HttpResponse(wrapper, status=ret)
    put_object_headers(response, meta, public)
    if ret == 206:
        if len(ranges) == 1:
            offset, length = ranges[0]
            response['Content-Length'] = length # Update with the correct length.
            response['Content-Range'] = 'bytes %d-%d/%d' % (offset, offset + length - 1, size)
        else:
            del(response['Content-Length'])
            response['Content-Type'] = 'multipart/byteranges; boundary=%s' % (boundary,)
    return response

def put_object_block(hashmap, data, offset):
    """Put one block of data at the given offset."""

    bi = int(offset / backend.block_size)
    bo = offset % backend.block_size
    bl = min(len(data), backend.block_size - bo)
    if bi < len(hashmap):
        hashmap[bi] = backend.update_block(hashmap[bi], data[:bl], bo)
    else:
        hashmap.append(backend.put_block(('\x00' * bo) + data[:bl]))
    return bl # Return amount of data written.

def hashmap_hash(hashmap):
    """Produce the root hash, treating the hashmap as a Merkle-like tree."""

    def subhash(d):
        h = hashlib.new(backend.hash_algorithm)
        h.update(d)
        return h.digest()

    if len(hashmap) == 0:
        return hexlify(subhash(''))
    if len(hashmap) == 1:
        return hexlify(subhash(hashmap[0]))
    s = 2
    while s < len(hashmap):
        s = s * 2
    h = hashmap + ([('\x00' * len(hashmap[0]))] * (s - len(hashmap)))
    h = [subhash(h[x] + (h[x + 1] if x + 1 < len(h) else '')) for x in range(0, len(h), 2)]
    while len(h) > 1:
        h = [subhash(h[x] + (h[x + 1] if x + 1 < len(h) else '')) for x in range(0, len(h), 2)]
    return hexlify(h[0])
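
# Illustrative example (added commentary, not part of the original module):
# the block-hash list is padded with all-zero entries up to the next power
# of two and then hashed pairwise until a single digest remains.  For three
# block hashes [h0, h1, h2] and a zero block z of the same length:
#
#   level 0:  [h0, h1, h2, z]
#   level 1:  [H(h0 + h1), H(h2 + z)]
#   root:     hexlify(H(H(h0 + h1) + H(h2 + z)))
#
# where H is backend.hash_algorithm.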

def update_response_headers(request, response):
    if request.serialization == 'xml':
        response['Content-Type'] = 'application/xml; charset=UTF-8'
    elif request.serialization == 'json':
        response['Content-Type'] = 'application/json; charset=UTF-8'
    elif not response['Content-Type']:
        response['Content-Type'] = 'text/plain; charset=UTF-8'

    if settings.TEST:
        response['Date'] = format_date_time(time())

def render_fault(request, fault):
    if settings.DEBUG or settings.TEST:
        fault.details = format_exc()

    request.serialization = 'text'
    data = '\n'.join((fault.message, fault.details)) + '\n'
    response = HttpResponse(data, status=fault.code)
    update_response_headers(request, response)
    return response

def request_serialization(request, format_allowed=False):
    """Return the serialization format requested.

    Valid formats are 'text' and 'json'; 'xml' is allowed if 'format_allowed' is True.
    """

    if not format_allowed:
        return 'text'

    format = request.GET.get('format')
    if format == 'json':
        return 'json'
    elif format == 'xml':
        return 'xml'

    for item in request.META.get('HTTP_ACCEPT', '').split(','):
        accept, sep, rest = item.strip().partition(';')
        if accept == 'application/json':
            return 'json'
        elif accept == 'application/xml' or accept == 'text/xml':
            return 'xml'

    return 'text'

def api_method(http_method=None, format_allowed=False):
    """Decorator function for views that implement an API method."""

    def decorator(func):
        @wraps(func)
        def wrapper(request, *args, **kwargs):
            try:
                if http_method and request.method != http_method:
                    raise BadRequest('Method not allowed.')

                # The args variable may contain up to (account, container, object).
                if len(args) > 1 and len(args[1]) > 256:
                    raise BadRequest('Container name too large.')
                if len(args) > 2 and len(args[2]) > 1024:
                    raise BadRequest('Object name too large.')

                # Fill in custom request variables.
                request.serialization = request_serialization(request, format_allowed)

                response = func(request, *args, **kwargs)
                update_response_headers(request, response)
                return response
            except Fault, fault:
                return render_fault(request, fault)
            except BaseException, e:
                logger.exception('Unexpected error: %s' % e)
                fault = ServiceUnavailable('Unexpected error')
                return render_fault(request, fault)
        return wrapper
    return decorator
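
# Illustrative usage (added commentary, not part of the original module;
# the view name and argument are hypothetical):
#
#   @api_method('GET', format_allowed=True)
#   def container_list(request, v_account):
#       ...
#
# The decorator rejects requests with the wrong HTTP method, bounds the
# container and object name lengths, sets request.serialization, and turns
# Fault exceptions (or any unexpected error) into rendered fault responses.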