Use a 401 error when the token is missing and 403 when access is forbidden.
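
In this change, a request with no authenticated user raises Unauthorized ('Access denied'), while a backend NotAllowedError (the user is known but lacks permission) is translated to Forbidden ('Not allowed'); render_fault() then uses the fault's code as the HTTP status. A minimal illustrative sketch of that mapping (the real classes live in pithos.api.faults and pithos.backends.base; the Fault base class and the guarded_call helper below are assumptions for illustration only):

    class Fault(Exception):
        code = 500  # assumed default; pithos.api.faults defines the real hierarchy

    class Unauthorized(Fault):
        code = 401  # no usable token, so no request.user

    class Forbidden(Fault):
        code = 403  # user is known, but the backend refused the action

    class NotAllowedError(Exception):
        """Stand-in for pithos.backends.base.NotAllowedError."""

    def guarded_call(request, backend_call):
        # Mirrors the checks added in api_method() and the view helpers in the diff.
        if getattr(request, 'user', None) is None:
            raise Unauthorized('Access denied')   # rendered as HTTP 401
        try:
            return backend_call(request.user)
        except NotAllowedError:
            raise Forbidden('Not allowed')        # rendered as HTTP 403
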
pithos/api/util.py
index 175e1cd..296be84 100644
@@ -35,78 +35,132 @@ from functools import wraps
 from time import time
 from traceback import format_exc
 from wsgiref.handlers import format_date_time
+from binascii import hexlify, unhexlify
+from datetime import datetime, tzinfo, timedelta
 
 from django.conf import settings
 from django.http import HttpResponse
+from django.utils import simplejson as json
 from django.utils.http import http_date, parse_etags
+from django.utils.encoding import smart_str
 
-from pithos.api.compat import parse_http_date_safe
-from pithos.api.faults import (Fault, NotModified, BadRequest, ItemNotFound, LengthRequired,
-                                PreconditionFailed, ServiceUnavailable)
-from pithos.backends import backend
+from pithos.api.compat import parse_http_date_safe, parse_http_date
+from pithos.api.faults import (Fault, NotModified, BadRequest, Unauthorized, Forbidden, ItemNotFound,
+                                Conflict, LengthRequired, PreconditionFailed, RequestEntityTooLarge,
+                                RangeNotSatisfiable, ServiceUnavailable)
+from pithos.backends import connect_backend
+from pithos.backends.base import NotAllowedError, QuotaError
 
-import datetime
 import logging
 import re
-
+import hashlib
+import uuid
+import decimal
 
 logger = logging.getLogger(__name__)
 
 
-def printable_meta_dict(d):
+class UTC(tzinfo):
+   def utcoffset(self, dt):
+       return timedelta(0)
+
+   def tzname(self, dt):
+       return 'UTC'
+
+   def dst(self, dt):
+       return timedelta(0)
+
+def json_encode_decimal(obj):
+    if isinstance(obj, decimal.Decimal):
+        return str(obj)
+    raise TypeError(repr(obj) + " is not JSON serializable")
+
+def isoformat(d):
+   """Return an ISO8601 date string that includes a timezone."""
+
+   return d.replace(tzinfo=UTC()).isoformat()
+
+def rename_meta_key(d, old, new):
+    if old not in d:
+        return
+    d[new] = d[old]
+    del(d[old])
+
+def printable_header_dict(d):
     """Format a meta dictionary for printing out json/xml.
     
     """Format a meta dictionary for printing out json/xml.
     
-    Convert all keys to lower case and replace dashes to underscores.
-    Change 'modified' key from backend to 'last_modified' and format date.
+    Convert all keys to lower case and replace dashes with underscores.
+    Format 'last_modified' timestamp.
     """
     """
-    if 'modified' in d:
-        d['last_modified'] = datetime.datetime.fromtimestamp(int(d['modified'])).isoformat()
-        del(d['modified'])
+    
+    d['last_modified'] = isoformat(datetime.fromtimestamp(d['last_modified']))
     return dict([(k.lower().replace('-', '_'), v) for k, v in d.iteritems()])
 
-def format_meta_key(k):
-    """Convert underscores to dashes and capitalize intra-dash strings"""
+def format_header_key(k):
+    """Convert underscores to dashes and capitalize intra-dash strings."""
     return '-'.join([x.capitalize() for x in k.replace('_', '-').split('-')])
 
-def get_meta_prefix(request, prefix):
-    """Get all prefix-* request headers in a dict. Reformat keys with format_meta_key()"""
+def get_header_prefix(request, prefix):
+    """Get all prefix-* request headers in a dict. Reformat keys with format_header_key()."""
+    
     prefix = 'HTTP_' + prefix.upper().replace('-', '_')
-    return dict([(format_meta_key(k[5:]), v) for k, v in request.META.iteritems() if k.startswith(prefix)])
-
-def get_account_meta(request):
-    """Get metadata from an account request"""
-    meta = get_meta_prefix(request, 'X-Account-Meta-')    
-    return meta
-
-def put_account_meta(response, meta):
-    """Put metadata in an account response"""
-    response['X-Account-Container-Count'] = meta['count']
-    response['X-Account-Bytes-Used'] = meta['bytes']
-    if 'modified' in meta:
-        response['Last-Modified'] = http_date(int(meta['modified']))
+    # TODO: Document or remove '~' replacing.
+    return dict([(format_header_key(k[5:]), v.replace('~', '')) for k, v in request.META.iteritems() if k.startswith(prefix) and len(k) > len(prefix)])
+
+def get_account_headers(request):
+    meta = get_header_prefix(request, 'X-Account-Meta-')
+    groups = {}
+    for k, v in get_header_prefix(request, 'X-Account-Group-').iteritems():
+        n = k[16:].lower()
+        if '-' in n or '_' in n:
+            raise BadRequest('Bad characters in group name')
+        groups[n] = v.replace(' ', '').split(',')
+        while '' in groups[n]:
+            groups[n].remove('')
+    return meta, groups
+
+def put_account_headers(response, meta, groups, policy):
+    if 'count' in meta:
+        response['X-Account-Container-Count'] = meta['count']
+    if 'bytes' in meta:
+        response['X-Account-Bytes-Used'] = meta['bytes']
+    response['Last-Modified'] = http_date(int(meta['modified']))
     for k in [x for x in meta.keys() if x.startswith('X-Account-Meta-')]:
-        response[k.encode('utf-8')] = meta[k].encode('utf-8')
-
-def get_container_meta(request):
-    """Get metadata from a container request"""
-    meta = get_meta_prefix(request, 'X-Container-Meta-')
-    return meta
-
-def put_container_meta(response, meta):
-    """Put metadata in a container response"""
-    response['X-Container-Object-Count'] = meta['count']
-    response['X-Container-Bytes-Used'] = meta['bytes']
-    if 'modified' in meta:
-        response['Last-Modified'] = http_date(int(meta['modified']))
+        response[smart_str(k, strings_only=True)] = smart_str(meta[k], strings_only=True)
+    if 'until_timestamp' in meta:
+        response['X-Account-Until-Timestamp'] = http_date(int(meta['until_timestamp']))
+    for k, v in groups.iteritems():
+        k = smart_str(k, strings_only=True)
+        k = format_header_key('X-Account-Group-' + k)
+        v = smart_str(','.join(v), strings_only=True)
+        response[k] = v
+    for k, v in policy.iteritems():
+        response[smart_str(format_header_key('X-Account-Policy-' + k), strings_only=True)] = smart_str(v, strings_only=True)
+
+def get_container_headers(request):
+    meta = get_header_prefix(request, 'X-Container-Meta-')
+    policy = dict([(k[19:].lower(), v.replace(' ', '')) for k, v in get_header_prefix(request, 'X-Container-Policy-').iteritems()])
+    return meta, policy
+
+def put_container_headers(request, response, meta, policy):
+    if 'count' in meta:
+        response['X-Container-Object-Count'] = meta['count']
+    if 'bytes' in meta:
+        response['X-Container-Bytes-Used'] = meta['bytes']
+    response['Last-Modified'] = http_date(int(meta['modified']))
     for k in [x for x in meta.keys() if x.startswith('X-Container-Meta-')]:
-        response[k.encode('utf-8')] = meta[k].encode('utf-8')
-    response['X-Container-Object-Meta'] = [x[14:] for x in meta['object_meta'] if x.startswith('X-Object-Meta-')]
-    response['X-Container-Block-Size'] = backend.block_size
-    response['X-Container-Block-Hash'] = backend.hash_algorithm
-
-def get_object_meta(request):
-    """Get metadata from an object request"""
-    meta = get_meta_prefix(request, 'X-Object-Meta-')
+        response[smart_str(k, strings_only=True)] = smart_str(meta[k], strings_only=True)
+    l = [smart_str(x, strings_only=True) for x in meta['object_meta'] if x.startswith('X-Object-Meta-')]
+    response['X-Container-Object-Meta'] = ','.join([x[14:] for x in l])
+    response['X-Container-Block-Size'] = request.backend.block_size
+    response['X-Container-Block-Hash'] = request.backend.hash_algorithm
+    if 'until_timestamp' in meta:
+        response['X-Container-Until-Timestamp'] = http_date(int(meta['until_timestamp']))
+    for k, v in policy.iteritems():
+        response[smart_str(format_header_key('X-Container-Policy-' + k), strings_only=True)] = smart_str(v, strings_only=True)
+
+def get_object_headers(request):
+    meta = get_header_prefix(request, 'X-Object-Meta-')
     if request.META.get('CONTENT_TYPE'):
         meta['Content-Type'] = request.META['CONTENT_TYPE']
     if request.META.get('HTTP_CONTENT_ENCODING'):
@@ -115,22 +169,79 @@ def get_object_meta(request):
         meta['Content-Disposition'] = request.META['HTTP_CONTENT_DISPOSITION']
     if request.META.get('HTTP_X_OBJECT_MANIFEST'):
         meta['X-Object-Manifest'] = request.META['HTTP_X_OBJECT_MANIFEST']
-    return meta
+    return meta, get_sharing(request), get_public(request)
 
-def put_object_meta(response, meta):
-    """Put metadata in an object response"""
+def put_object_headers(response, meta, restricted=False):
     response['ETag'] = meta['hash']
     response['Content-Length'] = meta['bytes']
     response['Content-Type'] = meta.get('Content-Type', 'application/octet-stream')
     response['Last-Modified'] = http_date(int(meta['modified']))
-    for k in [x for x in meta.keys() if x.startswith('X-Object-Meta-')]:
-        response[k.encode('utf-8')] = meta[k].encode('utf-8')
-    for k in ('Content-Encoding', 'Content-Disposition', 'X-Object-Manifest'):
-        if k in meta:
-            response[k] = meta[k]
+    if not restricted:
+        response['X-Object-Modified-By'] = smart_str(meta['modified_by'], strings_only=True)
+        response['X-Object-Version'] = meta['version']
+        response['X-Object-Version-Timestamp'] = http_date(int(meta['version_timestamp']))
+        for k in [x for x in meta.keys() if x.startswith('X-Object-Meta-')]:
+            response[smart_str(k, strings_only=True)] = smart_str(meta[k], strings_only=True)
+        for k in ('Content-Encoding', 'Content-Disposition', 'X-Object-Manifest',
+                  'X-Object-Sharing', 'X-Object-Shared-By', 'X-Object-Allowed-To',
+                  'X-Object-Public'):
+            if k in meta:
+                response[k] = smart_str(meta[k], strings_only=True)
+    else:
+        for k in ('Content-Encoding', 'Content-Disposition'):
+            if k in meta:
+                response[k] = meta[k]
+
+def update_manifest_meta(request, v_account, meta):
+    """Update metadata if the object has an X-Object-Manifest."""
+    
+    if 'X-Object-Manifest' in meta:
+        hash = ''
+        bytes = 0
+        try:
+            src_container, src_name = split_container_object_string('/' + meta['X-Object-Manifest'])
+            objects = request.backend.list_objects(request.user, v_account,
+                                src_container, prefix=src_name, virtual=False)
+            for x in objects:
+                src_meta = request.backend.get_object_meta(request.user,
+                                        v_account, src_container, x[0], x[1])
+                hash += src_meta['hash']
+                bytes += src_meta['bytes']
+        except:
+            # Ignore errors.
+            return
+        meta['bytes'] = bytes
+        md5 = hashlib.md5()
+        md5.update(hash)
+        meta['hash'] = md5.hexdigest().lower()
+
+def update_sharing_meta(request, permissions, v_account, v_container, v_object, meta):
+    if permissions is None:
+        return
+    allowed, perm_path, perms = permissions
+    if len(perms) == 0:
+        return
+    ret = []
+    r = ','.join(perms.get('read', []))
+    if r:
+        ret.append('read=' + r)
+    w = ','.join(perms.get('write', []))
+    if w:
+        ret.append('write=' + w)
+    meta['X-Object-Sharing'] = '; '.join(ret)
+    if '/'.join((v_account, v_container, v_object)) != perm_path:
+        meta['X-Object-Shared-By'] = perm_path
+    if request.user != v_account:
+        meta['X-Object-Allowed-To'] = allowed
+
+def update_public_meta(public, meta):
+    if not public:
+        return
+    meta['X-Object-Public'] = public
 
 def validate_modification_preconditions(request, meta):
-    """Check that the modified timestamp conforms with the preconditions set"""
+    """Check that the modified timestamp conforms with the preconditions set."""
+    
     if 'modified' not in meta:
         return # TODO: Always return?
     
@@ -138,86 +249,101 @@ def validate_modification_preconditions(request, meta):
     if if_modified_since is not None:
         if_modified_since = parse_http_date_safe(if_modified_since)
     if if_modified_since is not None and int(meta['modified']) <= if_modified_since:
-        raise NotModified('Object has not been modified')
+        raise NotModified('Resource has not been modified')
     
     if_unmodified_since = request.META.get('HTTP_IF_UNMODIFIED_SINCE')
     if if_unmodified_since is not None:
         if_unmodified_since = parse_http_date_safe(if_unmodified_since)
     if if_unmodified_since is not None and int(meta['modified']) > if_unmodified_since:
-        raise PreconditionFailed('Object has been modified')
+        raise PreconditionFailed('Resource has been modified')
 
 def validate_matching_preconditions(request, meta):
-    """Check that the ETag conforms with the preconditions set"""
-    if 'hash' not in meta:
-        return # TODO: Always return?
+    """Check that the ETag conforms with the preconditions set."""
+    
+    hash = meta.get('hash', None)
     
     if_match = request.META.get('HTTP_IF_MATCH')
-    if if_match is not None and if_match != '*':
-        if meta['hash'] not in [x.lower() for x in parse_etags(if_match)]:
-            raise PreconditionFailed('Object Etag does not match')
+    if if_match is not None:
+        if hash is None:
+            raise PreconditionFailed('Resource does not exist')
+        if if_match != '*' and hash not in [x.lower() for x in parse_etags(if_match)]:
+            raise PreconditionFailed('Resource ETag does not match')
     
     if_none_match = request.META.get('HTTP_IF_NONE_MATCH')
     if if_none_match is not None:
-        if if_none_match == '*' or meta['hash'] in [x.lower() for x in parse_etags(if_none_match)]:
-            raise NotModified('Object Etag matches')
-
-def copy_or_move_object(request, src_path, dest_path, move=False):
-    """Copy or move an object"""
-    if type(src_path) == str:
-        parts = src_path.split('/')
-        if len(parts) < 3 or parts[0] != '':
-            raise BadRequest('Invalid X-Copy-From or X-Move-From header')
-        src_container = parts[1]
-        src_name = '/'.join(parts[2:])
-    elif type(src_path) == tuple and len(src_path) == 2:
-        src_container, src_name = src_path
-    if type(dest_path) == str:
-        parts = dest_path.split('/')
-        if len(parts) < 3 or parts[0] != '':
-            raise BadRequest('Invalid Destination header')
-        dest_container = parts[1]
-        dest_name = '/'.join(parts[2:])
-    elif type(dest_path) == tuple and len(dest_path) == 2:
-        dest_container, dest_name = dest_path
-    
-    meta = get_object_meta(request)
-    # Keep previous values of 'Content-Type' (if a new one is absent) and 'hash'.
-    try:
-        src_meta = backend.get_object_meta(request.user, src_container, src_name)
-    except NameError:
-        raise ItemNotFound('Container or object does not exist')
-    if 'Content-Type' in meta and 'Content-Type' in src_meta:
-        del(src_meta['Content-Type'])
-    for k in ('Content-Type', 'hash'):
-        if k in src_meta:
-            meta[k] = src_meta[k]
+        # TODO: If this passes, must ignore If-Modified-Since header.
+        if hash is not None:
+            if if_none_match == '*' or hash in [x.lower() for x in parse_etags(if_none_match)]:
+                # TODO: Continue if an If-Modified-Since header is present.
+                if request.method in ('HEAD', 'GET'):
+                    raise NotModified('Resource ETag matches')
+                raise PreconditionFailed('Resource exists or ETag matches')
+
+def split_container_object_string(s):
+    if not len(s) > 0 or s[0] != '/':
+        raise ValueError
+    s = s[1:]
+    pos = s.find('/')
+    if pos == -1:
+        raise ValueError
+    return s[:pos], s[(pos + 1):]
+
+def copy_or_move_object(request, src_account, src_container, src_name, dest_account, dest_container, dest_name, move=False):
+    """Copy or move an object."""
     
+    meta, permissions, public = get_object_headers(request)
+    src_version = request.META.get('HTTP_X_SOURCE_VERSION')
     try:
         if move:
-            backend.move_object(request.user, src_container, src_name, dest_container, dest_name, meta, replace_meta=True)
+            version_id = request.backend.move_object(request.user, src_account, src_container, src_name,
+                                                        dest_account, dest_container, dest_name,
+                                                        meta, False, permissions)
         else:
-            backend.copy_object(request.user, src_container, src_name, dest_container, dest_name, meta, replace_meta=True)
-    except NameError:
+            version_id = request.backend.copy_object(request.user, src_account, src_container, src_name,
+                                                        dest_account, dest_container, dest_name,
+                                                        meta, False, permissions, src_version)
+    except NotAllowedError:
+        raise Forbidden('Not allowed')
+    except (NameError, IndexError):
         raise ItemNotFound('Container or object does not exist')
+    except ValueError:
+        raise BadRequest('Invalid sharing header')
+    except AttributeError, e:
+        raise Conflict('\n'.join(e.data) + '\n')
+    except QuotaError:
+        raise RequestEntityTooLarge('Quota exceeded')
+    if public is not None:
+        try:
+            request.backend.update_object_public(request.user, dest_account, dest_container, dest_name, public)
+        except NotAllowedError:
+            raise Forbidden('Not allowed')
+        except NameError:
+            raise ItemNotFound('Object does not exist')
+    return version_id
+
+def get_int_parameter(p):
+    if p is not None:
+        try:
+            p = int(p)
+        except ValueError:
+            return None
+        if p < 0:
+            return None
+    return p
 
 def get_content_length(request):
-    content_length = request.META.get('CONTENT_LENGTH')
-    if not content_length:
-        raise LengthRequired('Missing Content-Length header')
-    try:
-        content_length = int(content_length)
-        if content_length < 0:
-            raise ValueError
-    except ValueError:
-        raise BadRequest('Invalid Content-Length header')
+    content_length = get_int_parameter(request.META.get('CONTENT_LENGTH'))
+    if content_length is None:
+        raise LengthRequired('Missing or invalid Content-Length header')
     return content_length
 
 def get_range(request, size):
-    """Parse a Range header from the request
+    """Parse a Range header from the request.
     
     Either returns None, when the header is not existent or should be ignored,
     or a list of (offset, length) tuples - should be further checked.
     """
+    
     ranges = request.META.get('HTTP_RANGE', '').replace(' ', '')
     if not ranges.startswith('bytes='):
         return None
@@ -249,7 +375,7 @@ def get_range(request, size):
     return ret
 
 def get_content_range(request):
-    """Parse a Content-Range header from the request
+    """Parse a Content-Range header from the request.
     
     Either returns None, when the header is not existent or should be ignored,
     or an (offset, length, total) tuple - check as length, total may be None.
@@ -277,39 +403,112 @@ def get_content_range(request):
         total = int(total)
     else:
         total = None
-    if (upto and offset > upto) or \
-        (total and offset >= total) or \
-        (total and upto and upto >= total):
+    if (upto is not None and offset > upto) or \
+        (total is not None and offset >= total) or \
+        (total is not None and upto is not None and upto >= total):
         return None
     
-    if not upto:
+    if upto is None:
         length = None
     else:
         length = upto - offset + 1
     return (offset, length, total)
 
+def get_sharing(request):
+    """Parse an X-Object-Sharing header from the request.
+    
+    Raises BadRequest on error.
+    """
+    
+    permissions = request.META.get('HTTP_X_OBJECT_SHARING')
+    if permissions is None:
+        return None
+    
+    # TODO: Document or remove '~' replacing.
+    permissions = permissions.replace('~', '')
+    
+    ret = {}
+    permissions = permissions.replace(' ', '')
+    if permissions == '':
+        return ret
+    for perm in (x for x in permissions.split(';')):
+        if perm.startswith('read='):
+            ret['read'] = list(set([v.replace(' ','').lower() for v in perm[5:].split(',')]))
+            if '' in ret['read']:
+                ret['read'].remove('')
+            if '*' in ret['read']:
+                ret['read'] = ['*']
+            if len(ret['read']) == 0:
+                raise BadRequest('Bad X-Object-Sharing header value')
+        elif perm.startswith('write='):
+            ret['write'] = list(set([v.replace(' ','').lower() for v in perm[6:].split(',')]))
+            if '' in ret['write']:
+                ret['write'].remove('')
+            if '*' in ret['write']:
+                ret['write'] = ['*']
+            if len(ret['write']) == 0:
+                raise BadRequest('Bad X-Object-Sharing header value')
+        else:
+            raise BadRequest('Bad X-Object-Sharing header value')
+    
+    # Keep duplicates only in write list.
+    dups = [x for x in ret.get('read', []) if x in ret.get('write', []) and x != '*']
+    if dups:
+        for x in dups:
+            ret['read'].remove(x)
+        if len(ret['read']) == 0:
+            del(ret['read'])
+    
+    return ret
+
+def get_public(request):
+    """Parse an X-Object-Public header from the request.
+    
+    Raises BadRequest on error.
+    """
+    
+    public = request.META.get('HTTP_X_OBJECT_PUBLIC')
+    if public is None:
+        return None
+    
+    public = public.replace(' ', '').lower()
+    if public == 'true':
+        return True
+    elif public == 'false' or public == '':
+        return False
+    raise BadRequest('Bad X-Object-Public header value')
+
 def raw_input_socket(request):
-    """Return the socket for reading the rest of the request"""
+    """Return the socket for reading the rest of the request."""
+    
     server_software = request.META.get('SERVER_SOFTWARE')
-    if not server_software:
-        if 'wsgi.input' in request.environ:
-            return request.environ['wsgi.input']
-        raise ServiceUnavailable('Unknown server software')
-    if server_software.startswith('WSGIServer'):
-        return request.environ['wsgi.input']
-    elif server_software.startswith('mod_python'):
+    if server_software and server_software.startswith('mod_python'):
         return request._req
+    if 'wsgi.input' in request.environ:
+        return request.environ['wsgi.input']
     raise ServiceUnavailable('Unknown server software')
 
-MAX_UPLOAD_SIZE = 10 * (1024 * 1024) # 10MB
+MAX_UPLOAD_SIZE = 5 * (1024 * 1024 * 1024) # 5GB
 
-def socket_read_iterator(sock, length=0, blocksize=4096):
-    """Return a maximum of blocksize data read from the socket in each iteration
+def socket_read_iterator(request, length=0, blocksize=4096):
+    """Return a maximum of blocksize data read from the socket in each iteration.
     
     Read up to 'length'. If 'length' is negative, will attempt a chunked read.
     The maximum ammount of data read is controlled by MAX_UPLOAD_SIZE.
     """
+    
+    sock = raw_input_socket(request)
     if length < 0: # Chunked transfers
+        # Small version (server does the dechunking).
+        if request.environ.get('mod_wsgi.input_chunked', None) or request.META['SERVER_SOFTWARE'].startswith('gunicorn'):
+            while length < MAX_UPLOAD_SIZE:
+                data = sock.read(blocksize)
+                if data == '':
+                    return
+                yield data
+            raise BadRequest('Maximum size is reached')
+        
+        # Long version (do the dechunking).
         data = ''
         while length < MAX_UPLOAD_SIZE:
             # Get chunk size.
@@ -336,39 +535,42 @@ def socket_read_iterator(sock, length=0, blocksize=4096):
             while chunk_length > 0:
                 chunk = sock.read(min(chunk_length, blocksize))
                 chunk_length -= len(chunk)
-                length += len(chunk)
+                if length > 0:
+                    length += len(chunk)
                 data += chunk
                 if len(data) >= blocksize:
                     ret = data[:blocksize]
                     data = data[blocksize:]
                     yield ret
             sock.read(2) # CRLF
-        # TODO: Raise something to note that maximum size is reached.
+        raise BadRequest('Maximum size is reached')
     else:
         if length > MAX_UPLOAD_SIZE:
-            # TODO: Raise something to note that maximum size is reached.
-            pass
+            raise BadRequest('Maximum size is reached')
         while length > 0:
             data = sock.read(min(length, blocksize))
+            if not data:
+                raise BadRequest()
             length -= len(data)
             yield data
 
 class ObjectWrapper(object):
-    """Return the object's data block-per-block in each iteration
+    """Return the object's data block-per-block in each iteration.
     
     Read from the object using the offset and length provided in each entry of the range list.
     """
     
-    def __init__(self, v_account, v_container, v_object, ranges, size, hashmap, boundary):
-        self.v_account = v_account
-        self.v_container = v_container
-        self.v_object = v_object
+    def __init__(self, backend, ranges, sizes, hashmaps, boundary):
+        self.backend = backend
         self.ranges = ranges
-        self.size = size
-        self.hashmap = hashmap
+        self.sizes = sizes
+        self.hashmaps = hashmaps
         self.boundary = boundary
+        self.size = sum(self.sizes)
         
-        self.block_index = -1
+        self.file_index = 0
+        self.block_index = 0
+        self.block_hash = -1
         self.block = ''
         
         self.range_index = -1
@@ -379,17 +581,25 @@ class ObjectWrapper(object):
     
     def part_iterator(self):
         if self.length > 0:
-            # Get the block for the current offset.
-            bi = int(self.offset / backend.block_size)
-            if self.block_index != bi:
+            # Get the file for the current offset.
+            file_size = self.sizes[self.file_index]
+            while self.offset >= file_size:
+                self.offset -= file_size
+                self.file_index += 1
+                file_size = self.sizes[self.file_index]
+            
+            # Get the block for the current position.
+            self.block_index = int(self.offset / self.backend.block_size)
+            if self.block_hash != self.hashmaps[self.file_index][self.block_index]:
+                self.block_hash = self.hashmaps[self.file_index][self.block_index]
                 try:
                 try:
-                    self.block = backend.get_block(self.hashmap[bi])
+                    self.block = self.backend.get_block(self.block_hash)
                 except NameError:
                     raise ItemNotFound('Block does not exist')
                 except NameError:
                     raise ItemNotFound('Block does not exist')
-                self.block_index = bi
+            
             # Get the data from the block.
-            bo = self.offset % backend.block_size
-            bl = min(self.length, backend.block_size - bo)
+            bo = self.offset % self.backend.block_size
+            bl = min(self.length, len(self.block) - bo)
             data = self.block[bo:bo + bl]
             self.offset += bl
             self.length -= bl
@@ -412,6 +622,7 @@ class ObjectWrapper(object):
             if self.range_index < len(self.ranges):
                 # Part header.
                 self.offset, self.length = self.ranges[self.range_index]
+                self.file_index = 0
                 if self.range_index > 0:
                     out.append('')
                 out.append('--' + self.boundary)
@@ -427,6 +638,87 @@ class ObjectWrapper(object):
                 out.append('')
                 return '\r\n'.join(out)
 
+def object_data_response(request, sizes, hashmaps, meta, public=False):
+    """Get the HttpResponse object for replying with the object's data."""
+    
+    # Range handling.
+    size = sum(sizes)
+    ranges = get_range(request, size)
+    if ranges is None:
+        ranges = [(0, size)]
+        ret = 200
+    else:
+        check = [True for offset, length in ranges if
+                    length <= 0 or length > size or
+                    offset < 0 or offset >= size or
+                    offset + length > size]
+        if len(check) > 0:
+            raise RangeNotSatisfiable('Requested range exceeds object limits')
+        ret = 206
+        if_range = request.META.get('HTTP_IF_RANGE')
+        if if_range:
+            try:
+                # Modification time has passed instead.
+                last_modified = parse_http_date(if_range)
+                if last_modified != meta['modified']:
+                    ranges = [(0, size)]
+                    ret = 200
+            except ValueError:
+                if if_range != meta['hash']:
+                    ranges = [(0, size)]
+                    ret = 200
+    
+    if ret == 206 and len(ranges) > 1:
+        boundary = uuid.uuid4().hex
+    else:
+        boundary = ''
+    wrapper = ObjectWrapper(request.backend, ranges, sizes, hashmaps, boundary)
+    response = HttpResponse(wrapper, status=ret)
+    put_object_headers(response, meta, public)
+    if ret == 206:
+        if len(ranges) == 1:
+            offset, length = ranges[0]
+            response['Content-Length'] = length # Update with the correct length.
+            response['Content-Range'] = 'bytes %d-%d/%d' % (offset, offset + length - 1, size)
+        else:
+            del(response['Content-Length'])
+            response['Content-Type'] = 'multipart/byteranges; boundary=%s' % (boundary,)
+    return response
+
+def put_object_block(request, hashmap, data, offset):
+    """Put one block of data at the given offset."""
+    
+    bi = int(offset / request.backend.block_size)
+    bo = offset % request.backend.block_size
+    bl = min(len(data), request.backend.block_size - bo)
+    if bi < len(hashmap):
+        hashmap[bi] = request.backend.update_block(hashmap[bi], data[:bl], bo)
+    else:
+        hashmap.append(request.backend.put_block(('\x00' * bo) + data[:bl]))
+    return bl # Return ammount of data written.
+
+def hashmap_hash(request, hashmap):
+    """Produce the root hash, treating the hashmap as a Merkle-like tree."""
+    
+    def subhash(d):
+        h = hashlib.new(request.backend.hash_algorithm)
+        h.update(d)
+        return h.digest()
+    
+    if len(hashmap) == 0:
+        return hexlify(subhash(''))
+    if len(hashmap) == 1:
+        return hashmap[0]
+    
+    s = 2
+    while s < len(hashmap):
+        s = s * 2
+    h = [unhexlify(x) for x in hashmap]
+    h += [('\x00' * len(h[0]))] * (s - len(hashmap))
+    while len(h) > 1:
+        h = [subhash(h[x] + h[x + 1]) for x in range(0, len(h), 2)]
+    return hexlify(h[0])
+
 def update_response_headers(request, response):
     if request.serialization == 'xml':
         response['Content-Type'] = 'application/xml; charset=UTF-8'
 def update_response_headers(request, response):
     if request.serialization == 'xml':
         response['Content-Type'] = 'application/xml; charset=UTF-8'
@@ -434,14 +726,17 @@ def update_response_headers(request, response):
         response['Content-Type'] = 'application/json; charset=UTF-8'
     elif not response['Content-Type']:
         response['Content-Type'] = 'text/plain; charset=UTF-8'
         response['Content-Type'] = 'application/json; charset=UTF-8'
     elif not response['Content-Type']:
         response['Content-Type'] = 'text/plain; charset=UTF-8'
-
+    
+    if not response.has_header('Content-Length') and not (response.has_header('Content-Type') and response['Content-Type'].startswith('multipart/byteranges')):
+        response['Content-Length'] = len(response.content)
+    
     if settings.TEST:
         response['Date'] = format_date_time(time())
 
 def render_fault(request, fault):
     if settings.DEBUG or settings.TEST:
         fault.details = format_exc(fault)
     if settings.TEST:
         response['Date'] = format_date_time(time())
 
 def render_fault(request, fault):
     if settings.DEBUG or settings.TEST:
         fault.details = format_exc(fault)
-
+    
     request.serialization = 'text'
     data = '\n'.join((fault.message, fault.details)) + '\n'
     response = HttpResponse(data, status=fault.code)
     request.serialization = 'text'
     data = '\n'.join((fault.message, fault.details)) + '\n'
     response = HttpResponse(data, status=fault.code)
@@ -449,10 +744,11 @@ def render_fault(request, fault):
     return response
 
 def request_serialization(request, format_allowed=False):
     return response
 
 def request_serialization(request, format_allowed=False):
-    """Return the serialization format requested
+    """Return the serialization format requested.
     
     Valid formats are 'text' and 'json', 'xml' if 'format_allowed' is True.
     """
     
     if not format_allowed:
         return 'text'
     
     if not format_allowed:
         return 'text'
     
@@ -471,15 +767,18 @@ def request_serialization(request, format_allowed=False):
     
     return 'text'
 
     
-    """Decorator function for views that implement an API method"""
+def api_method(http_method=None, format_allowed=False, user_required=True):
+    """Decorator function for views that implement an API method."""
+    
     def decorator(func):
         @wraps(func)
         def wrapper(request, *args, **kwargs):
             try:
                 if http_method and request.method != http_method:
                     raise BadRequest('Method not allowed.')
     def decorator(func):
         @wraps(func)
         def wrapper(request, *args, **kwargs):
             try:
                 if http_method and request.method != http_method:
                     raise BadRequest('Method not allowed.')
-
+                if user_required and getattr(request, 'user', None) is None:
+                    raise Unauthorized('Access denied')
+                
                 # The args variable may contain up to (account, container, object).
                 if len(args) > 1 and len(args[1]) > 256:
                     raise BadRequest('Container name too large.')
@@ -488,9 +787,8 @@ def api_method(http_method=None, format_allowed=False):
                 
                 # Fill in custom request variables.
                 request.serialization = request_serialization(request, format_allowed)
-                # TODO: Authenticate.
-                request.user = "test"
-                
+                request.backend = connect_backend()
+
                 response = func(request, *args, **kwargs)
                 update_response_headers(request, response)
                 return response
@@ -500,5 +798,8 @@ def api_method(http_method=None, format_allowed=False):
                 logger.exception('Unexpected error: %s' % e)
                 fault = ServiceUnavailable('Unexpected error')
                 return render_fault(request, fault)
+            finally:
+                if getattr(request, 'backend', None) is not None:
+                    request.backend.wrapper.conn.close()
         return wrapper
     return decorator
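
For reference, the X-Object-Sharing header parsed by get_sharing() above takes semicolon-separated read=/write= lists of users or groups. A rough illustration of the expected parse under this patch's rules (list order is not guaranteed, since values pass through a set; the names are made up):

    # 'read=alice,bob; write=group1,*'  ->  {'read': ['alice', 'bob'], 'write': ['*']}
    #   (a '*' entry collapses that list to just ['*'])
    # ''                                ->  {}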