# Copyright 2011 GRNET S.A. All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
#   1. Redistributions of source code must retain the above
#      copyright notice, this list of conditions and the following
#      disclaimer.
#
#   2. Redistributions in binary form must reproduce the above
#      copyright notice, this list of conditions and the following
#      disclaimer in the documentation and/or other materials
#      provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.

from functools import wraps
from time import time
from traceback import format_exc
from wsgiref.handlers import format_date_time
from binascii import hexlify

from django.conf import settings
from django.http import HttpResponse
from django.utils import simplejson as json
from django.utils.http import http_date, parse_etags
from django.utils.encoding import smart_str

from pithos.api.compat import parse_http_date_safe, parse_http_date
from pithos.api.faults import (Fault, NotModified, BadRequest, Unauthorized, ItemNotFound,
                               Conflict, LengthRequired, PreconditionFailed, RangeNotSatisfiable,
                               ServiceUnavailable)
from pithos.backends import backend
from pithos.backends.base import NotAllowedError

import datetime
import logging
import re
import hashlib
import uuid


logger = logging.getLogger(__name__)


def rename_meta_key(d, old, new):
    if old not in d:
        return
    d[new] = d[old]
    del(d[old])

def printable_header_dict(d):
    """Format a meta dictionary for printing out json/xml.

    Convert all keys to lower case and replace dashes with underscores.
    Format 'last_modified' timestamp.
    """

    d['last_modified'] = datetime.datetime.fromtimestamp(int(d['last_modified'])).isoformat()
    return dict([(k.lower().replace('-', '_'), v) for k, v in d.iteritems()])

def format_header_key(k):
    """Convert underscores to dashes and capitalize intra-dash strings."""
    return '-'.join([x.capitalize() for x in k.replace('_', '-').split('-')])
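
# Illustrative example (not part of the original module): format_header_key()
# turns normalized metadata keys back into canonical HTTP header form.
#
#   >>> format_header_key('x-account-group-dev')
#   'X-Account-Group-Dev'
#   >>> format_header_key('content_type')
#   'Content-Type'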

def get_header_prefix(request, prefix):
    """Get all prefix-* request headers in a dict. Reformat keys with format_header_key()."""

    prefix = 'HTTP_' + prefix.upper().replace('-', '_')
    # TODO: Document or remove '~' replacing.
    return dict([(format_header_key(k[5:]), v.replace('~', '')) for k, v in request.META.iteritems() if k.startswith(prefix) and len(k) > len(prefix)])
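
# Illustrative example (not part of the original module): with a request whose
# META contains 'HTTP_X_ACCOUNT_META_COLOR': 'blue', calling
# get_header_prefix(request, 'X-Account-Meta-') would return
# {'X-Account-Meta-Color': 'blue'} - the 'HTTP_' prefix is dropped and the key
# is re-formatted with format_header_key().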

def get_account_headers(request):
    meta = get_header_prefix(request, 'X-Account-Meta-')
    groups = {}
    for k, v in get_header_prefix(request, 'X-Account-Group-').iteritems():
        n = k[16:].lower()
        if '-' in n or '_' in n:
            raise BadRequest('Bad characters in group name')
        groups[n] = v.replace(' ', '').split(',')
        if '' in groups[n]:
            groups[n].remove('')
    return meta, groups
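
# Illustrative example (not part of the original module): a request carrying
# the header 'X-Account-Group-Dev: user1,user2' would yield
# groups == {'dev': ['user1', 'user2']}, while 'X-Account-Meta-*' headers are
# returned unchanged in meta.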

def put_account_headers(response, meta, groups):
    if 'count' in meta:
        response['X-Account-Container-Count'] = meta['count']
    if 'bytes' in meta:
        response['X-Account-Bytes-Used'] = meta['bytes']
    response['Last-Modified'] = http_date(int(meta['modified']))
    for k in [x for x in meta.keys() if x.startswith('X-Account-Meta-')]:
        response[smart_str(k, strings_only=True)] = smart_str(meta[k], strings_only=True)
    if 'until_timestamp' in meta:
        response['X-Account-Until-Timestamp'] = http_date(int(meta['until_timestamp']))
    for k, v in groups.iteritems():
        k = smart_str(k, strings_only=True)
        k = format_header_key('X-Account-Group-' + k)
        v = smart_str(','.join(v), strings_only=True)
        response[k] = v

def get_container_headers(request):
    meta = get_header_prefix(request, 'X-Container-Meta-')
    policy = dict([(k[19:].lower(), v.replace(' ', '')) for k, v in get_header_prefix(request, 'X-Container-Policy-').iteritems()])
    return meta, policy

def put_container_headers(response, meta, policy):
    if 'count' in meta:
        response['X-Container-Object-Count'] = meta['count']
    if 'bytes' in meta:
        response['X-Container-Bytes-Used'] = meta['bytes']
    response['Last-Modified'] = http_date(int(meta['modified']))
    for k in [x for x in meta.keys() if x.startswith('X-Container-Meta-')]:
        response[smart_str(k, strings_only=True)] = smart_str(meta[k], strings_only=True)
    l = [smart_str(x, strings_only=True) for x in meta['object_meta'] if x.startswith('X-Object-Meta-')]
    response['X-Container-Object-Meta'] = ','.join([x[14:] for x in l])
    response['X-Container-Block-Size'] = backend.block_size
    response['X-Container-Block-Hash'] = backend.hash_algorithm
    if 'until_timestamp' in meta:
        response['X-Container-Until-Timestamp'] = http_date(int(meta['until_timestamp']))
    for k, v in policy.iteritems():
        response[smart_str(format_header_key('X-Container-Policy-' + k), strings_only=True)] = smart_str(v, strings_only=True)

def get_object_headers(request):
    meta = get_header_prefix(request, 'X-Object-Meta-')
    if request.META.get('CONTENT_TYPE'):
        meta['Content-Type'] = request.META['CONTENT_TYPE']
    if request.META.get('HTTP_CONTENT_ENCODING'):
        meta['Content-Encoding'] = request.META['HTTP_CONTENT_ENCODING']
    if request.META.get('HTTP_CONTENT_DISPOSITION'):
        meta['Content-Disposition'] = request.META['HTTP_CONTENT_DISPOSITION']
    if request.META.get('HTTP_X_OBJECT_MANIFEST'):
        meta['X-Object-Manifest'] = request.META['HTTP_X_OBJECT_MANIFEST']
    return meta, get_sharing(request), get_public(request)

def put_object_headers(response, meta, restricted=False):
    response['ETag'] = meta['hash']
    response['Content-Length'] = meta['bytes']
    response['Content-Type'] = meta.get('Content-Type', 'application/octet-stream')
    response['Last-Modified'] = http_date(int(meta['modified']))
    if not restricted:
        response['X-Object-Modified-By'] = smart_str(meta['modified_by'], strings_only=True)
        response['X-Object-Version'] = meta['version']
        response['X-Object-Version-Timestamp'] = http_date(int(meta['version_timestamp']))
        for k in [x for x in meta.keys() if x.startswith('X-Object-Meta-')]:
            response[smart_str(k, strings_only=True)] = smart_str(meta[k], strings_only=True)
        for k in ('Content-Encoding', 'Content-Disposition', 'X-Object-Manifest', 'X-Object-Sharing', 'X-Object-Shared-By', 'X-Object-Public'):
            if k in meta:
                response[k] = smart_str(meta[k], strings_only=True)
    else:
        for k in ('Content-Encoding', 'Content-Disposition'):
            if k in meta:
                response[k] = meta[k]

def update_manifest_meta(request, v_account, meta):
    """Update metadata if the object has an X-Object-Manifest."""

    if 'X-Object-Manifest' in meta:
        hash = ''
        bytes = 0
        try:
            src_container, src_name = split_container_object_string('/' + meta['X-Object-Manifest'])
            objects = backend.list_objects(request.user, v_account, src_container, prefix=src_name, virtual=False)
            for x in objects:
                src_meta = backend.get_object_meta(request.user, v_account, src_container, x[0], x[1])
                hash += src_meta['hash']
                bytes += src_meta['bytes']
        except:
            # Ignore errors.
            return
        meta['bytes'] = bytes
        md5 = hashlib.md5()
        md5.update(hash)
        meta['hash'] = md5.hexdigest().lower()

def update_sharing_meta(permissions, v_account, v_container, v_object, meta):
    if permissions is None:
        return
    perm_path, perms = permissions
    if len(perms) == 0:
        return
    ret = []
    r = ','.join(perms.get('read', []))
    if r:
        ret.append('read=' + r)
    w = ','.join(perms.get('write', []))
    if w:
        ret.append('write=' + w)
    meta['X-Object-Sharing'] = '; '.join(ret)
    if '/'.join((v_account, v_container, v_object)) != perm_path:
        meta['X-Object-Shared-By'] = perm_path
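
# Illustrative example (not part of the original module): permissions of
# ('account/container/object', {'read': ['*'], 'write': ['user1']}) would set
# meta['X-Object-Sharing'] to 'read=*; write=user1'; X-Object-Shared-By is
# only added when the permissions are inherited from a different path.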

def update_public_meta(public, meta):
    if not public:
        return
    meta['X-Object-Public'] = public

def validate_modification_preconditions(request, meta):
    """Check that the modified timestamp conforms with the preconditions set."""

    if 'modified' not in meta:
        return # TODO: Always return?

    if_modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE')
    if if_modified_since is not None:
        if_modified_since = parse_http_date_safe(if_modified_since)
    if if_modified_since is not None and int(meta['modified']) <= if_modified_since:
        raise NotModified('Resource has not been modified')

    if_unmodified_since = request.META.get('HTTP_IF_UNMODIFIED_SINCE')
    if if_unmodified_since is not None:
        if_unmodified_since = parse_http_date_safe(if_unmodified_since)
    if if_unmodified_since is not None and int(meta['modified']) > if_unmodified_since:
        raise PreconditionFailed('Resource has been modified')

def validate_matching_preconditions(request, meta):
    """Check that the ETag conforms with the preconditions set."""

    hash = meta.get('hash', None)

    if_match = request.META.get('HTTP_IF_MATCH')
    if if_match is not None:
        if hash is None:
            raise PreconditionFailed('Resource does not exist')
        if if_match != '*' and hash not in [x.lower() for x in parse_etags(if_match)]:
            raise PreconditionFailed('Resource ETag does not match')

    if_none_match = request.META.get('HTTP_IF_NONE_MATCH')
    if if_none_match is not None:
        # TODO: If this passes, must ignore If-Modified-Since header.
        if hash is not None:
            if if_none_match == '*' or hash in [x.lower() for x in parse_etags(if_none_match)]:
                # TODO: Continue if an If-Modified-Since header is present.
                if request.method in ('HEAD', 'GET'):
                    raise NotModified('Resource ETag matches')
                raise PreconditionFailed('Resource exists or ETag matches')

def split_container_object_string(s):
    if not len(s) > 0 or s[0] != '/':
        raise ValueError
    s = s[1:]
    pos = s.find('/')
    if pos == -1:
        raise ValueError
    return s[:pos], s[(pos + 1):]
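
# Illustrative example (not part of the original module): the path must start
# with a slash and contain at least one more slash separating the container
# from the object name.
#
#   >>> split_container_object_string('/photos/summer/beach.jpg')
#   ('photos', 'summer/beach.jpg')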

def copy_or_move_object(request, v_account, src_container, src_name, dest_container, dest_name, move=False):
    """Copy or move an object."""

    meta, permissions, public = get_object_headers(request)
    src_version = request.META.get('HTTP_X_SOURCE_VERSION')
    try:
        if move:
            backend.move_object(request.user, v_account, src_container, src_name, dest_container, dest_name, meta, False, permissions)
        else:
            backend.copy_object(request.user, v_account, src_container, src_name, dest_container, dest_name, meta, False, permissions, src_version)
    except NotAllowedError:
        raise Unauthorized('Access denied')
    except (NameError, IndexError):
        raise ItemNotFound('Container or object does not exist')
    except ValueError:
        raise BadRequest('Invalid sharing header')
    except AttributeError, e:
        raise Conflict(json.dumps(e.data))
    if public is not None:
        try:
            backend.update_object_public(request.user, v_account, dest_container, dest_name, public)
        except NotAllowedError:
            raise Unauthorized('Access denied')
        except NameError:
            raise ItemNotFound('Object does not exist')

def get_int_parameter(p):
    if p is not None:
        try:
            p = int(p)
        except ValueError:
            return None
        if p < 0:
            return None
    return p

def get_content_length(request):
    content_length = get_int_parameter(request.META.get('CONTENT_LENGTH'))
    if content_length is None:
        raise LengthRequired('Missing or invalid Content-Length header')
    return content_length

def get_range(request, size):
    """Parse a Range header from the request.

    Returns None when the header is absent or should be ignored, otherwise a
    list of (offset, length) tuples that the caller should validate further.
    """

    ranges = request.META.get('HTTP_RANGE', '').replace(' ', '')
    if not ranges.startswith('bytes='):
        return None

    ret = []
    for r in (x.strip() for x in ranges[6:].split(',')):
        p = re.compile('^(?P<offset>\d*)-(?P<upto>\d*)$')
        m = p.match(r)
        if not m:
            return None
        offset = m.group('offset')
        upto = m.group('upto')
        if offset == '' and upto == '':
            return None

        if offset != '':
            offset = int(offset)
            if upto != '':
                upto = int(upto)
                if offset > upto:
                    return None
                ret.append((offset, upto - offset + 1))
            else:
                ret.append((offset, size - offset))
        else:
            length = int(upto)
            ret.append((size - length, length))

    return ret
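
# Illustrative example (not part of the original module): for an object of
# size 1000, a request header of 'Range: bytes=0-499,-100' would produce
# [(0, 500), (900, 100)] - an explicit first range plus a suffix range of the
# final 100 bytes.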

def get_content_range(request):
    """Parse a Content-Range header from the request.

    Returns None when the header is absent or should be ignored, otherwise an
    (offset, length, total) tuple - note that length and total may be None.
    Returns (None, None, None) if the provided range is '*/*'.
    """

    ranges = request.META.get('HTTP_CONTENT_RANGE', '')
    if not ranges:
        return None

    p = re.compile('^bytes (?P<offset>\d+)-(?P<upto>\d*)/(?P<total>(\d+|\*))$')
    m = p.match(ranges)
    if not m:
        if ranges == 'bytes */*':
            return (None, None, None)
        return None
    offset = int(m.group('offset'))
    upto = m.group('upto')
    total = m.group('total')
    if upto != '':
        upto = int(upto)
    else:
        upto = None
    if total != '*':
        total = int(total)
    else:
        total = None
    if (upto is not None and offset > upto) or \
        (total is not None and offset >= total) or \
        (total is not None and upto is not None and upto >= total):
        return None

    if upto is None:
        length = None
    else:
        length = upto - offset + 1
    return (offset, length, total)
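
# Illustrative example (not part of the original module): a header of
# 'Content-Range: bytes 0-499/1234' parses to (0, 500, 1234), while
# 'Content-Range: bytes 500-/*' parses to (500, None, None) - an open-ended
# update at offset 500 with unknown total size.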

def get_sharing(request):
    """Parse an X-Object-Sharing header from the request.

    Raises BadRequest on error.
    """

    permissions = request.META.get('HTTP_X_OBJECT_SHARING')
    if permissions is None:
        return None

    # TODO: Document or remove '~' replacing.
    permissions = permissions.replace('~', '')

    ret = {}
    permissions = permissions.replace(' ', '')
    if permissions == '':
        return ret
    for perm in (x for x in permissions.split(';')):
        if perm.startswith('read='):
            ret['read'] = list(set([v.replace(' ', '').lower() for v in perm[5:].split(',')]))
            if '' in ret['read']:
                ret['read'].remove('')
            if '*' in ret['read']:
                ret['read'] = ['*']
            if len(ret['read']) == 0:
                raise BadRequest('Bad X-Object-Sharing header value')
        elif perm.startswith('write='):
            ret['write'] = list(set([v.replace(' ', '').lower() for v in perm[6:].split(',')]))
            if '' in ret['write']:
                ret['write'].remove('')
            if '*' in ret['write']:
                ret['write'] = ['*']
            if len(ret['write']) == 0:
                raise BadRequest('Bad X-Object-Sharing header value')
        else:
            raise BadRequest('Bad X-Object-Sharing header value')
    return ret
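
# Illustrative example (not part of the original module): a header of
# 'X-Object-Sharing: read=*; write=user1' parses to
# {'read': ['*'], 'write': ['user1']}; duplicate names are collapsed and a
# wildcard '*' overrides any other names in the same clause.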

def get_public(request):
    """Parse an X-Object-Public header from the request.

    Raises BadRequest on error.
    """

    public = request.META.get('HTTP_X_OBJECT_PUBLIC')
    if public is None:
        return None

    public = public.replace(' ', '').lower()
    if public == 'true':
        return True
    elif public == 'false' or public == '':
        return False
    raise BadRequest('Bad X-Object-Public header value')

def raw_input_socket(request):
    """Return the socket for reading the rest of the request."""

    server_software = request.META.get('SERVER_SOFTWARE')
    if server_software and server_software.startswith('mod_python'):
        return request._req
    if 'wsgi.input' in request.environ:
        return request.environ['wsgi.input']
    raise ServiceUnavailable('Unknown server software')

MAX_UPLOAD_SIZE = 10 * (1024 * 1024) # 10MB

def socket_read_iterator(request, length=0, blocksize=4096):
    """Return a maximum of blocksize data read from the socket in each iteration.

    Read up to 'length'. If 'length' is negative, will attempt a chunked read.
    The maximum amount of data read is controlled by MAX_UPLOAD_SIZE.
    """

    sock = raw_input_socket(request)
    if length < 0: # Chunked transfers
        # Small version (server does the dechunking).
        if request.environ.get('mod_wsgi.input_chunked', None):
            while length < MAX_UPLOAD_SIZE:
                data = sock.read(blocksize)
                if data == '':
                    return
                yield data
            raise BadRequest('Maximum size is reached')

        # Long version (do the dechunking).
        data = ''
        while length < MAX_UPLOAD_SIZE:
            # Get chunk size.
            if hasattr(sock, 'readline'):
                chunk_length = sock.readline()
            else:
                chunk_length = ''
                while chunk_length[-1:] != '\n':
                    chunk_length += sock.read(1)
                chunk_length = chunk_length.strip()
            pos = chunk_length.find(';')
            if pos >= 0:
                chunk_length = chunk_length[:pos]
            try:
                chunk_length = int(chunk_length, 16)
            except Exception, e:
                raise BadRequest('Bad chunk size') # TODO: Change to something more appropriate.
            # Check if done.
            if chunk_length == 0:
                if len(data) > 0:
                    yield data
                return
            # Get the actual data.
            while chunk_length > 0:
                chunk = sock.read(min(chunk_length, blocksize))
                chunk_length -= len(chunk)
                if length > 0:
                    length += len(chunk)
                data += chunk
                if len(data) >= blocksize:
                    ret = data[:blocksize]
                    data = data[blocksize:]
                    yield ret
            sock.read(2) # CRLF
        raise BadRequest('Maximum size is reached')
    else:
        if length > MAX_UPLOAD_SIZE:
            raise BadRequest('Maximum size is reached')
        while length > 0:
            data = sock.read(min(length, blocksize))
            length -= len(data)
            yield data
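
# Illustrative sketch (not part of the original module): with HTTP chunked
# transfer encoding, the raw body on the socket looks like
#
#   4\r\n
#   Wiki\r\n
#   5\r\n
#   pedia\r\n
#   0\r\n
#   \r\n
#
# and the "long version" above reads each hexadecimal size line, then the
# chunk payload, and re-yields the data in blocksize-sized pieces.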

class ObjectWrapper(object):
    """Return the object's data block-per-block in each iteration.

    Read from the object using the offset and length provided in each entry of the range list.
    """

    def __init__(self, ranges, sizes, hashmaps, boundary):
        self.ranges = ranges
        self.sizes = sizes
        self.hashmaps = hashmaps
        self.boundary = boundary
        self.size = sum(self.sizes)

        self.file_index = 0
        self.block_index = 0
        self.block_hash = -1
        self.block = ''

        self.range_index = -1
        self.offset, self.length = self.ranges[0]

    def __iter__(self):
        return self

    def part_iterator(self):
        if self.length > 0:
            # Get the file for the current offset.
            file_size = self.sizes[self.file_index]
            while self.offset >= file_size:
                self.offset -= file_size
                self.file_index += 1
                file_size = self.sizes[self.file_index]

            # Get the block for the current position.
            self.block_index = int(self.offset / backend.block_size)
            if self.block_hash != self.hashmaps[self.file_index][self.block_index]:
                self.block_hash = self.hashmaps[self.file_index][self.block_index]
                try:
                    self.block = backend.get_block(self.block_hash)
                except NameError:
                    raise ItemNotFound('Block does not exist')

            # Get the data from the block.
            bo = self.offset % backend.block_size
            bl = min(self.length, len(self.block) - bo)
            data = self.block[bo:bo + bl]
            self.offset += bl
            self.length -= bl
            return data
        else:
            raise StopIteration

    def next(self):
        if len(self.ranges) == 1:
            return self.part_iterator()
        if self.range_index == len(self.ranges):
            raise StopIteration
        try:
            if self.range_index == -1:
                raise StopIteration
            return self.part_iterator()
        except StopIteration:
            self.range_index += 1
            out = []
            if self.range_index < len(self.ranges):
                # Part header.
                self.offset, self.length = self.ranges[self.range_index]
                self.file_index = 0
                if self.range_index > 0:
                    out.append('')
                out.append('--' + self.boundary)
                out.append('Content-Range: bytes %d-%d/%d' % (self.offset, self.offset + self.length - 1, self.size))
                out.append('Content-Transfer-Encoding: binary')
                out.append('')
                out.append('')
                return '\r\n'.join(out)
            else:
                # Footer.
                out.append('')
                out.append('--' + self.boundary + '--')
                out.append('')
                return '\r\n'.join(out)
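
# Illustrative sketch (not part of the original module): when more than one
# range is requested, the wrapper emits a multipart/byteranges body shaped like
#
#   --<boundary>\r\n
#   Content-Range: bytes 0-499/1234\r\n
#   Content-Transfer-Encoding: binary\r\n
#   \r\n
#   <first 500 bytes>
#   \r\n--<boundary>\r\n
#   ...
#   \r\n--<boundary>--\r\n
#
# with each part's data produced block-per-block by part_iterator().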

def object_data_response(request, sizes, hashmaps, meta, public=False):
    """Get the HttpResponse object for replying with the object's data."""

    # Range handling.
    size = sum(sizes)
    ranges = get_range(request, size)
    if ranges is None:
        ranges = [(0, size)]
        ret = 200
    else:
        check = [True for offset, length in ranges if
                    length <= 0 or length > size or
                    offset < 0 or offset >= size or
                    offset + length > size]
        if len(check) > 0:
            raise RangeNotSatisfiable('Requested range exceeds object limits')
        ret = 206
        if_range = request.META.get('HTTP_IF_RANGE')
        if if_range:
            try:
                # If the If-Range value parses as a date, validate against the
                # modification time; otherwise treat it as an ETag.
                last_modified = parse_http_date(if_range)
                if last_modified != meta['modified']:
                    ranges = [(0, size)]
                    ret = 200
            except ValueError:
                if if_range != meta['hash']:
                    ranges = [(0, size)]
                    ret = 200

    if ret == 206 and len(ranges) > 1:
        boundary = uuid.uuid4().hex
    else:
        boundary = ''
    wrapper = ObjectWrapper(ranges, sizes, hashmaps, boundary)
    response = HttpResponse(wrapper, status=ret)
    put_object_headers(response, meta, public)
    if ret == 206:
        if len(ranges) == 1:
            offset, length = ranges[0]
            response['Content-Length'] = length # Update with the correct length.
            response['Content-Range'] = 'bytes %d-%d/%d' % (offset, offset + length - 1, size)
        else:
            del(response['Content-Length'])
            response['Content-Type'] = 'multipart/byteranges; boundary=%s' % (boundary,)
    return response

def put_object_block(hashmap, data, offset):
    """Put one block of data at the given offset."""

    bi = int(offset / backend.block_size)
    bo = offset % backend.block_size
    bl = min(len(data), backend.block_size - bo)
    if bi < len(hashmap):
        hashmap[bi] = backend.update_block(hashmap[bi], data[:bl], bo)
    else:
        hashmap.append(backend.put_block(('\x00' * bo) + data[:bl]))
    return bl # Return amount of data written.
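
# Illustrative example (not part of the original module): with a 4 MiB block
# size, writing data at offset 5 MiB lands in block index 1 (bi == 1) at a
# block offset of 1 MiB (bo), and at most block_size - bo bytes are consumed
# in this call; the caller loops to write the remainder.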

def hashmap_hash(hashmap):
    """Produce the root hash, treating the hashmap as a Merkle-like tree."""

    def subhash(d):
        h = hashlib.new(backend.hash_algorithm)
        h.update(d)
        return h.digest()

    if len(hashmap) == 0:
        return hexlify(subhash(''))
    if len(hashmap) == 1:
        return hashmap[0]
    s = 2
    while s < len(hashmap):
        s = s * 2
    h = hashmap + ([('\x00' * len(hashmap[0]))] * (s - len(hashmap)))
    h = [subhash(h[x] + (h[x + 1] if x + 1 < len(h) else '')) for x in range(0, len(h), 2)]
    while len(h) > 1:
        h = [subhash(h[x] + (h[x + 1] if x + 1 < len(h) else '')) for x in range(0, len(h), 2)]
    return hexlify(h[0])
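
# Illustrative sketch (not part of the original module): for a hashmap of
# three block hashes [h0, h1, h2], the list is first padded with zero bytes to
# the next power of two, [h0, h1, h2, pad], then hashed pairwise:
#
#   root = subhash(subhash(h0 + h1) + subhash(h2 + pad))
#
# and the root is returned hex-encoded. A single-block object simply uses its
# block hash as the root.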

def update_response_headers(request, response):
    if request.serialization == 'xml':
        response['Content-Type'] = 'application/xml; charset=UTF-8'
    elif request.serialization == 'json':
        response['Content-Type'] = 'application/json; charset=UTF-8'
    elif not response['Content-Type']:
        response['Content-Type'] = 'text/plain; charset=UTF-8'

    if not response.has_header('Content-Length') and not (response.has_header('Content-Type') and response['Content-Type'].startswith('multipart/byteranges')):
        response['Content-Length'] = len(response.content)

    if settings.TEST:
        response['Date'] = format_date_time(time())

def render_fault(request, fault):
    if settings.DEBUG or settings.TEST:
        fault.details = format_exc(fault)

    request.serialization = 'text'
    data = '\n'.join((fault.message, fault.details)) + '\n'
    response = HttpResponse(data, status=fault.code)
    update_response_headers(request, response)
    return response

def request_serialization(request, format_allowed=False):
    """Return the serialization format requested.

    Returns 'text' unless 'format_allowed' is True, in which case the
    'format' query parameter may select 'json' or 'xml'.
    """

    if not format_allowed:
        return 'text'

    format = request.GET.get('format')
    if format == 'json':
        return 'json'
    elif format == 'xml':
        return 'xml'

#     for item in request.META.get('HTTP_ACCEPT', '').split(','):
#         accept, sep, rest = item.strip().partition(';')
#         if accept == 'application/json':
#             return 'json'
#         elif accept == 'application/xml' or accept == 'text/xml':
#             return 'xml'

    return 'text'

def api_method(http_method=None, format_allowed=False):
    """Decorator function for views that implement an API method."""

    def decorator(func):
        @wraps(func)
        def wrapper(request, *args, **kwargs):
            try:
                if http_method and request.method != http_method:
                    raise BadRequest('Method not allowed.')

                # The args variable may contain up to (account, container, object).
                if len(args) > 1 and len(args[1]) > 256:
                    raise BadRequest('Container name too large.')
                if len(args) > 2 and len(args[2]) > 1024:
                    raise BadRequest('Object name too large.')

                # Fill in custom request variables.
                request.serialization = request_serialization(request, format_allowed)

                response = func(request, *args, **kwargs)
                update_response_headers(request, response)
                return response
            except Fault, fault:
                return render_fault(request, fault)
            except BaseException, e:
                logger.exception('Unexpected error: %s' % e)
                fault = ServiceUnavailable('Unexpected error')
                return render_fault(request, fault)
        return wrapper
    return decorator
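
# Illustrative usage sketch (not part of the original module): a hypothetical
# view would be wrapped like
#
#   @api_method('GET', format_allowed=True)
#   def container_list(request, v_account):
#       ...
#       return HttpResponse(status=200)
#
# so that method checking, name-length validation, serialization selection and
# fault rendering are all handled by the decorator.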