Rename tests to livetest in kamaki.clients
diff --git a/kamaki/clients/pithos.py b/kamaki/clients/pithos.py
index 2908fd9..e8b4b6d 100644
--- a/kamaki/clients/pithos.py
+++ b/kamaki/clients/pithos.py
@@ -1,4 +1,4 @@
-# Copyright 2011-2012 GRNET S.A. All rights reserved.
+# Copyright 2011-2013 GRNET S.A. All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or
 # without modification, are permitted provided that the following
 # interpreted as representing official policies, either expressed
 # or implied, of GRNET S.A.
 
-import gevent
-import gevent.monkey
-# Monkey-patch everything for gevent early on
-gevent.monkey.patch_all()
-
-import hashlib, os, gevent.pool
+from threading import enumerate as activethreads
 
+from os import fstat
+from hashlib import new as newhashlib
 from time import time
 
-from .storage import StorageClient, ClientError
-from .utils import path4url, params4url, prefix_keys, filter_in, filter_out, list2str
+from binascii import hexlify
+
+from kamaki.clients import SilentEvent, sendlog
+from kamaki.clients.pithos_rest_api import PithosRestAPI
+from kamaki.clients.storage import ClientError
+from kamaki.clients.utils import path4url, filter_in
+from StringIO import StringIO
 
 
-def pithos_hash(block, blockhash):
-    h = hashlib.new(blockhash)
+def _pithos_hash(block, blockhash):
+    h = newhashlib(blockhash)
     h.update(block.rstrip('\x00'))
     return h.hexdigest()
 
-class PithosClient(StorageClient):
+
+def _range_up(start, end, a_range):
+    if a_range:
+        (rstart, rend) = a_range.split('-')
+        (rstart, rend) = (int(rstart), int(rend))
+        if rstart > end or rend < start:
+            return (0, 0)
+        if rstart > start:
+            start = rstart
+        if rend < end:
+            end = rend
+    return (start, end)
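
A quick, hedged illustration of what _range_up computes: given a block's byte interval [start, end] and an optional 'from-to' range string, it returns the overlapping sub-interval, or (0, 0) when the block lies entirely outside the range (values below are made up):

    # block 0 covers bytes 0..4095; the caller only wants bytes 1000..9999
    _range_up(0, 4095, '1000-9999')      # -> (1000, 4095)
    # a block entirely outside the requested range collapses to (0, 0)
    _range_up(8192, 12287, '1000-4095')  # -> (0, 0)
    # with no range at all, the block interval is returned unchanged
    _range_up(0, 4095, None)             # -> (0, 4095)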
+
+
+class PithosClient(PithosRestAPI):
     """GRNet Pithos API client"""
 
-    def __init__(self, base_url, token, account=None, container = None):
-        super(PithosClient, self).__init__(base_url, token,
-            account = account, container = container)
-        self.async_pool = None
-
-    def account_head(self, until = None,
-        if_modified_since=None, if_unmodified_since=None, *args, **kwargs):
-        """ Full Pithos+ HEAD at account level
-        --- request parameters ---
-        @param until (string): optional timestamp
-        --- --- optional request headers ---
-        @param if_modified_since (string): Retrieve if account has changed since provided timestamp
-        @param if_unmodified_since (string): Retrieve if account has not changed since provided timestamp
-        """
-        self.assert_account()
-        path = path4url(self.account)
-
-        path += '' if until is None else params4url({'until':until})
-        self.set_header('If-Modified-Since', if_modified_since)
-        self.set_header('If-Unmodified-Since', if_unmodified_since)
-
-        success = kwargs.pop('success', 204)
-        return self.head(path, *args, success=success, **kwargs)
-
-    def account_get(self, limit=None, marker=None, format='json', show_only_shared=False, until=None,
-        if_modified_since=None, if_unmodified_since=None, *args, **kwargs):
-        """  Full Pithos+ GET at account level
-        --- request parameters ---
-        @param limit (integer): The amount of results requested (server will use default value if None)
-        @param marker (string): Return containers with name lexicographically after marker
-        @param format (string): reply format can be json or xml (default: json)
-        @param shared (bool): If true, only shared containers will be included in results
-        @param until (string): optional timestamp
-        --- --- optional request headers ---
-        @param if_modified_since (string): Retrieve if account has changed since provided timestamp
-        @param if_unmodified_since (string): Retrieve if account has not changed since provided timestamp
-        """
-        self.assert_account()
-
-        param_dict = {} if format is None else dict(format=format)
-        if limit is not None:
-            param_dict['limit'] = limit
-        if marker is not None:
-            param_dict['marker'] = marker
-        if show_only_shared:
-            param_dict['shared'] = None
-        if until is not None:
-            param_dict['until'] = until
-
-        path = path4url(self.account)+params4url(param_dict)
-        self.set_header('If-Modified-Since', if_modified_since)
-        self.set_header('If-Unmodified-Since', if_unmodified_since)
+    _thread_exceptions = []
 
-        success = kwargs.pop('success', (200, 204))
-        return self.get(path, *args, success = success, **kwargs)
-
-    def account_post(self, update=True,
-        groups={}, metadata={}, quota=None, versioning=None, *args, **kwargs):
-        """ Full Pithos+ POST at account level
-        --- request parameters ---
-        @param update (bool): if True, Do not replace metadata/groups
-        --- request headers ---
-        @groups (dict): Optional user defined groups in the form
-                    {   'group1':['user1', 'user2', ...], 
-                        'group2':['userA', 'userB', ...], ...
-                    }
-        @metadata (dict): Optional user defined metadata in the form
-                    {   'name1': 'value1',
-                        'name2': 'value2', ...
-                    }
-        @param quota(integer): If supported, sets the Account quota
-        @param versioning(string): If supported, sets the Account versioning
-                    to 'auto' or some other supported versioning string
-        """
-        self.assert_account()
-        path = path4url(self.account) + params4url({'update':None}) if update else ''
-        for group, usernames in groups.items():
-            userstr = ''
-            dlm = ''
-            for user in usernames:
-                userstr = userstr + dlm + user
-                dlm = ','
-            self.set_header('X-Account-Group-'+group, userstr)
-        for metaname, metaval in metadata.items():
-            self.set_header('X-Account-Meta-'+metaname, metaval)
-        self.set_header('X-Account-Policy-Quota', quota)
-        self.set_header('X-Account-Policy-Versioning', versioning)
-
-        success = kwargs.pop('success', 202)
-        return self.post(path, *args, success=success, **kwargs)
-
-    def container_head(self, until=None,
-        if_modified_since=None, if_unmodified_since=None, *args, **kwargs):
-        """ Full Pithos+ HEAD at container level
-        --- request params ---
-        @param until (string): optional timestamp
-        --- optional request headers --- 
-        @param if_modified_since (string): Retrieve if account has changed since provided timestamp
-        @param if_unmodified_since (string): Retrieve if account has not changed since provided timestamp
-        """
-        self.assert_container()
-        path = path4url(self.account, self.container)
-        path += '' if until is None else params4url(dict(until=until))
-        self.set_header('If-Modified-Since', if_modified_since)
-        self.set_header('If-Unmodified-Since', if_unmodified_since)
-        success = kwargs.pop('success', 204)
-        return self.head(path, *args, success=success, **kwargs)
-
-    def container_get(self, limit = None, marker = None, prefix=None, delimiter=None, path = None,
-        format='json', meta=[], show_only_shared=False, until=None,
-        if_modified_since=None, if_unmodified_since=None, *args, **kwargs):
-        """ Full Pithos+ GET at container level
-        --- request parameters ---
-        @param limit (integer): The amount of results requested (server qill use default value if None)
-        @param marker (string): Return containers with name lexicographically after marker
-        @param prefix (string): Return objects starting with prefix
-        @param delimiter (string): Return objects up to the delimiter
-        @param path (string): assume prefix = path and delimiter = / (overwrites prefix
-        and delimiter)
-        @param format (string): reply format can be json or xml (default: json)
-        @param meta (list): Return objects that satisfy the key queries in the specified
-        comma separated list (use <key>, !<key> for existence queries, <key><op><value>
-        for value queries, where <op> can be one of =, !=, <=, >=, <, >)
-        @param shared (bool): If true, only shared containers will be included in results
-        @param until (string): optional timestamp
-        --- --- optional request headers ---
-        @param if_modified_since (string): Retrieve if account has changed since provided timestamp
-        @param if_unmodified_since (string): Retrieve if account has not changed since provided timestamp
-        """
-        self.assert_container()
-
-        param_dict = {} if format is None else dict(format=format)
-        if limit is not None:
-            param_dict['limit'] = limit
-        if marker is not None:
-            param_dict['marker'] = marker
-        if path is not None:
-                param_dict['path'] = path
-        else:
-            if prefix is not None:
-                param_dict['prefix'] = prefix
-            if delimiter is not None:
-                param_dict['delimiter'] = delimiter
-        if show_only_shared:
-            param_dict['shared'] = None
-        if meta is not None and len(meta) > 0:
-            param_dict['meta'] = list2str(meta)
-        if until is not None:
-            param_dict['until'] = until
-        path = path4url(self.account, self.container)+params4url(param_dict)
-        self.set_header('If-Modified-Since', if_modified_since)
-        self.set_header('If-Unmodified-Since', if_unmodified_since)
-        success = kwargs.pop('success', 200)
-        return self.get(path, *args, success=success, **kwargs)
-
-    def container_put(self, quota=None, versioning=None, metadata={}, *args, **kwargs):
-        """ Full Pithos+ PUT at container level
-        --- request headers ---
-        @param quota (integer): Size limit in KB
-        @param versioning (string): 'auto' or other string supported by server
-        @metadata (dict): Optional user defined metadata in the form
-        {   'name1': 'value1',
-        'name2': 'value2', ...
-        }
-        """
-        self.assert_container()
-        path = path4url(self.account, self.container)
-        for metaname, metaval in metadata.items():
-            self.set_header('X-Container-Meta-'+metaname, metaval)
-        self.set_header('X-Container-Policy-Quota', quota)
-        self.set_header('X-Container-Policy-Versioning', versioning)
-        success = kwargs.pop('success',(201, 202))
-        return self.put(path, *args, success=success, **kwargs)
-
-    def container_post(self, update=True, format='json',
-        quota=None, versioning=None, metadata={}, content_type=None, content_length=None, transfer_encoding=None,
-        *args, **kwargs):
-        """ Full Pithos+ POST at container level
-        --- request params ---
-        @param update (bool):  if True, Do not replace metadata/groups
-        @param format(string): json (default) or xml
-        --- request headers ---
-        @param quota (integer): Size limit in KB
-        @param versioning (string): 'auto' or other string supported by server
-        @metadata (dict): Optional user defined metadata in the form
-        {   'name1': 'value1',
-        'name2': 'value2', ...
-        }
-        @param content_type (string): set a custom content type
-        @param content_length (string): set a custrom content length
-        @param transfer_encoding (string): set a custrom transfer encoding
-        """
-        self.assert_container()
-        param_dict = {} if format is None else dict(format=format)
-        if update:
-            param_dict['update'] = None
-        path = path4url(self.account, self.container)+params4url(param_dict)
-
-        for metaname, metaval in metadata.items():
-            self.set_header('X-Container-Meta-'+metaname, metaval)
-        self.set_header('X-Container-Policy-Quota', quota)
-        self.set_header('X-Container-Policy-Versioning', versioning)
-        self.set_header('Content-Type', content_type)
-        self.set_header('Content-Length', content_length)
-        self.set_header('Transfer-Encoding', transfer_encoding)
-        success = kwargs.pop('success', 202)
-        return self.post(path, *args, success=success, **kwargs)
-
-    def container_delete(self, until=None, delimiter=None, *args, **kwargs):
-        """ Full Pithos+ DELETE at container level
-        --- request parameters ---
-        @param until (timestamp string): if defined, container is purged up to that time
-        """
-        self.assert_container()
-        param_dict = {} 
-        if until is not None:
-            param_dict['until']=until
-        if delimiter is not None:
-            param_dict['delimiter'] = delimiter
-        path=path4url(self.account, self.container)+params4url(param_dict)
-        success = kwargs.pop('success', 204)
-        return self.delete(path, success=success)
-
-    def object_head(self, object, version=None, if_etag_match=None, if_etag_not_match = None,
-        if_modified_since = None, if_unmodified_since = None, *args, **kwargs):
-        """ Full Pithos+ HEAD at object level
-        --- request parameters ---
-        @param version (string): optional version identified
-        --- request headers ---
-        @param if_etag_match (string): if provided, return only results
-                with etag matching with this
-        @param if_etag_not_match (string): if provided, return only results
-                with etag not matching with this
-        @param if_modified_since (string): Retrieve if account has changed since provided timestamp
-        @param if_unmodified_since (string): Retrieve if account has not changed since provided timestamp
-        """
-        self.assert_container()
-        path=path4url(self.account, self.container, object)
-        path += '' if version is None else params4url(dict(version=version))
-        self.set_header('If-Match', if_etag_match)
-        self.set_header('If-None-Match', if_etag_not_match)
-        self.set_header('If-Modified-Since', if_modified_since)
-        self.set_header('If-Unmodified-Since', if_unmodified_since)
-        success = kwargs.pop('success', 200)
-        return self.head(path, *args, success=success, **kwargs)
-
-    def object_get(self, object, format='json', hashmap=False, version=None,
-        data_range=None, if_range=False, if_etag_match=None, if_etag_not_match = None, if_modified_since = None, if_unmodified_since = None, *args, **kwargs):
-        """ Full Pithos+ GET at object level
-        --- request parameters ---
-        @param format (string): json (default) or xml
-        @param hashmap (bool): Optional request for hashmap
-        @param version (string): optional version identified
-        --- request headers ---
-        @param data_range (string): Optional range of data to retrieve
-        @param if_range (bool): 
-        @param if_etag_match (string): if provided, return only results
-                with etag matching with this
-        @param if_etag_not_match (string): if provided, return only results
-                with etag not matching with this
-        @param if_modified_since (string): Retrieve if account has changed since provided timestamp
-        @param if_unmodified_since (string): Retrieve if account has not changed since provided timestamp
-        """
-        self.assert_container()
-        param_dict = {} if format is None else dict(format=format)
-        if hashmap:
-            param_dict['hashmap']=None
-        if version is not None:
-            param_dict['version']=version
-        path=path4url(self.account, self.container, object)+params4url(param_dict)
-        self.set_header('Range', data_range)
-        self.set_header('If-Range', '', if_range is True and data_range is not None)
-        self.set_header('If-Match', if_etag_match, )
-        self.set_header('If-None-Match', if_etag_not_match)
-        self.set_header('If-Modified-Since', if_modified_since)
-        self.set_header('If-Unmodified-Since', if_unmodified_since)
-        success = kwargs.pop('success', 200)
-        return self.get(path, *args, success=success, **kwargs)
-
-    def object_put(self, object, format='json', hashmap=False,
-        if_etag_match=None, if_etag_not_match = None, etag=None, content_length = None, content_type=None, transfer_encoding=None,
-        copy_from=None, move_from=None, source_account=None, source_version=None, content_encoding = None, content_disposition=None,
-        manifest = None, permitions = {}, public = None, metadata={}, *args, **kwargs):
-        """ Full Pithos+ PUT at object level
-        --- request parameters ---
-        @param format (string): json (default) or xml
-        @param hashmap (bool): Optional hashmap provided instead of data
-        --- request headers ---
-        @param if_etag_match (string): if provided, return only results
-                with etag matching with this
-        @param if_etag_not_match (string): if provided, return only results
-                with etag not matching with this
-        @param etag (string): The MD5 hash of the object (optional to check written data)
-        @param content_length (integer): The size of the data written
-        @param content_type (string): The MIME content type of the object
-        @param transfer_encoding (string): Set to chunked to specify incremental uploading (if used, Content-Length is ignored)
-        @param copy_from (string): The source path in the form /<container>/<object>
-        @param move_from (string): The source path in the form /<container>/<object>
-        @param source_account (string): The source account to copy/move from
-        @param source_version (string): The source version to copy from
-        @param conent_encoding (string): The encoding of the object
-        @param content_disposition (string): The presentation style of the object
-        @param manifest (string): Object parts prefix in /<container>/<object> form
-        @param permitions (dict): Object permissions in the form (all fields are optional)
-                {'read':[user1, group1, user2, ...], 'write':['user3, group2, group3, ...]}
-        @param public (bool): If true, Object is publicly accessible, if false, not
-        @param metadata (dict): Optional user defined metadata in the form
-                {'meta-key-1':'meta-value-1', 'meta-key-2':'meta-value-2', ...}
-        """
-        self.assert_container()
-        param_dict = {} if format is None else dict(format=format)
-        if hashmap:
-            param_dict['hashmap'] = None
-        path=path4url(self.account, self.container, object)+params4url(param_dict)
-        self.set_header('If-Match', if_etag_match)
-        self.set_header('If-None-Match', if_etag_not_match)
-        self.set_header('ETag', etag)
-        self.set_header('Content-Length', content_length)
-        self.set_header('Content-Type', content_type)
-        self.set_header('Transfer-Encoding', transfer_encoding)
-        self.set_header('X-Copy-From', copy_from)
-        self.set_header('X-Move-From', move_from)
-        self.set_header('X-Source-Account', source_account)
-        self.set_header('X-Source-Version', source_version)
-        self.set_header('Content-Encoding', content_encoding)
-        self.set_header('Content-Disposition', content_disposition)
-        self.set_header('X-Object-Manifest', manifest)
-        perms = None
-        for permition_type, permition_list in permitions.items():
-            if perms is None:
-                perms = '' #Remove permitions
-            if len(permition_list) == 0:
-                continue
-            perms += ';'+permition_type if len(perms) > 0 else permition_type
-            perms += '='+list2str(permition_list, seperator=',')
-        self.set_header('X-Object-Sharing', perms)
-        self.set_header('X-Object-Public', public)
-        for key, val in metadata.items():
-            self.set_header('X-Object-Meta-'+key, val)
-
-        success = kwargs.pop('success', 201)
-        return self.put(path, *args, success=success, **kwargs)
-
-    def object_copy(self, object, destination, format='json', ignore_content_type=False,
-        if_etag_match=None, if_etag_not_match=None, destination_account=None,
-        content_type=None, content_encoding=None, content_disposition=None, source_version=None,
-        permitions={}, public=False, metadata={}, *args, **kwargs):
-        """ Full Pithos+ COPY at object level
-        --- request parameters ---
-        @param format (string): json (default) or xml
-        @param ignore_content_type (bool): Ignore the supplied Content-Type
-        --- request headers ---
-         @param if_etag_match (string): if provided, copy only results
-                with etag matching with this
-        @param if_etag_not_match (string): if provided, copy only results
-                with etag not matching with this
-        @param destination (string): The destination path in the form /<container>/<object>
-        @param destination_account (string): The destination account to copy to
-        @param content_type (string): The MIME content type of the object
-        @param content_encoding (string): The encoding of the object
-        @param content_disposition (string): The presentation style of the object
-        @param source_version (string): The source version to copy from
-        @param permitions (dict): Object permissions in the form (all fields are optional)
-                {'read':[user1, group1, user2, ...], 'write':['user3, group2, group3, ...]}
-                permitions override source permitions, removing any old permitions
-        @param public (bool): If true, Object is publicly accessible, if else, not
-        @param metadata (dict): Optional user defined metadata in the form
-                {'meta-key-1':'meta-value-1', 'meta-key-2':'meta-value-2', ...}
-                Metadata are appended to the source metadata. In case of same keys, they
-                replace the old metadata
-        """
-        self.assert_container()
-        param_dict = {} if format is None else dict(format=format)
-        if ignore_content_type:
-            param_dict['ignore_content_type'] = None
-        path = path4url(self.account, self.container, object)+params4url(param_dict)
-        self.set_header('If-Match', if_etag_match)
-        self.set_header('If-None-Match', if_etag_not_match)
-        self.set_header('Destination', destination)
-        self.set_header('Destination-Account', destination_account)
-        self.set_header('Content-Type', content_type)
-        self.set_header('Content-Encoding', content_encoding)
-        self.set_header('Content-Disposition', content_disposition)
-        self.set_header('X-Source-Version', source_version)
-        perms = None
-        for permition_type, permition_list in permitions.items():
-            if perms is None:
-                perms = '' #Remove permitions
-            if len(permition_list) == 0:
-                continue
-            perms += ';'+permition_type if len(perms) > 0 else permition_type
-            perms += '='+list2str(permition_list, seperator=',')
-        self.set_header('X-Object-Sharing', perms)
-        self.set_header('X-Object-Public', public)
-        for key, val in metadata.items():
-            self.set_header('X-Object-Meta-'+key, val)
-        success = kwargs.pop('success', 201)
-        return self.copy(path, *args, success=success, **kwargs)
-
-    def object_move(self, object, format='json', ignore_content_type=False,
-        if_etag_match=None, if_etag_not_match=None, destination=None, destination_account=None,
-        content_type=None, content_encoding=None, content_disposition=None, permitions={},
-        public=False, metadata={}, *args, **kwargs):
-        """ Full Pithos+ COPY at object level
-        --- request parameters ---
-        @param format (string): json (default) or xml
-        @param ignore_content_type (bool): Ignore the supplied Content-Type
-        --- request headers ---
-         @param if_etag_match (string): if provided, return only results
-                with etag matching with this
-        @param if_etag_not_match (string): if provided, return only results
-                with etag not matching with this
-        @param destination (string): The destination path in the form /<container>/<object>
-        @param destination_account (string): The destination account to copy to
-        @param content_type (string): The MIME content type of the object
-        @param content_encoding (string): The encoding of the object
-        @param content_disposition (string): The presentation style of the object
-        @param source_version (string): The source version to copy from
-        @param permitions (dict): Object permissions in the form (all fields are optional)
-                {'read':[user1, group1, user2, ...], 'write':['user3, group2, group3, ...]}
-        @param public (bool): If true, Object is publicly accessible, if false, not
-        @param metadata (dict): Optional user defined metadata in the form
-                {'meta-key-1':'meta-value-1', 'meta-key-2':'meta-value-2', ...}
-        """
-        self.assert_container()
-        param_dict = {} if format is None else dict(format=format)
-        if ignore_content_type:
-            param_dict['ignore_content_type']=None
-        path = path4url(self.account, self.container, object)+params4url(param_dict)
-        self.set_header('If-Match', if_etag_match)
-        self.set_header('If-None-Match', if_etag_not_match)
-        self.set_header('Destination', destination)
-        self.set_header('Destination-Account', destination_account)
-        self.set_header('Content-Type', content_type)
-        self.set_header('Content-Encoding', content_encoding)
-        self.set_header('Content-Disposition', content_disposition)
-        perms = None
-        for permition_type, permition_list in permitions.items():
-            if perms is None:
-                perms = '' #Remove permitions
-            if len(permition_list) == 0:
-                continue
-            perms += ';'+permition_type if len(perms) > 0 else permition_type
-            perms += '='+list2str(permition_list, seperator=',')
-        self.set_header('X-Object-Sharing', perms)
-        self.set_header('X-Object-Public', public)
-        for key, val in metadata.items():
-            self.set_header('X-Object-Meta-'+key, val)
-        success = kwargs.pop('success', 201)
-        return self.move(path, *args, success=success, **kwargs)
-
-    def object_post(self, object, format='json', update=True,
-        if_etag_match=None, if_etag_not_match=None, content_length=None, content_type=None,
-        content_range=None, transfer_encoding=None, content_encoding=None, content_disposition=None,
-        source_object=None, source_account=None, source_version=None, object_bytes=None,
-        manifest=None, permitions={}, public=False, metadata={}, *args, **kwargs):
-        """ Full Pithos+ POST at object level
-        --- request parameters ---
-        @param format (string): json (default) or xml
-        @param update (bool): Do not replace metadata
-        --- request headers ---
-        @param if_etag_match (string): if provided, return only results
-                with etag matching with this
-        @param if_etag_not_match (string): if provided, return only results
-                with etag not matching with this
-        @param content_length (string): The size of the data written
-        @param content_type (string): The MIME content type of the object
-        @param content_range (string): The range of data supplied
-        @param transfer_encoding (string): Set to chunked to specify incremental uploading
-                (if used, Content-Length is ignored)
-        @param content_encoding (string): The encoding of the object
-        @param content_disposition (string): The presentation style of the object
-        @param source_object (string): Update with data from the object at path /<container>/<object>
-        @param source_account (string): The source account to update from
-        @param source_version (string): The source version to copy from
-        @param object_bytes (integer): The updated objects final size
-        @param manifest (string): Object parts prefix in /<container>/<object> form
-        @param permitions (dict): Object permissions in the form (all fields are optional)
-                {'read':[user1, group1, user2, ...], 'write':['user3, group2, group3, ...]}
-        @param public (bool): If true, Object is publicly accessible, if false, not
-        @param metadata (dict): Optional user defined metadata in the form
-                {'meta-key-1':'meta-value-1', 'meta-key-2':'meta-value-2', ...}
-        """
-        self.assert_container()
-        param_dict = {} if format is None else dict(format=format)
-        if update:
-            param_dict['update'] = None
-        path = path4url(self.account, self.container, object)+params4url(param_dict)
-        self.set_header('If-Match', if_etag_match)
-        self.set_header('If-None-Match', if_etag_not_match)
-        self.set_header('Content-Length', content_length, iff=transfer_encoding is None)
-        self.set_header('Content-Type', content_type)
-        self.set_header('Content-Range', content_range)
-        self.set_header('Transfer-Encoding', transfer_encoding)
-        self.set_header('Content-Encoding', content_encoding)
-        self.set_header('Content-Disposition', content_disposition)
-        self.set_header('X-Source-Object', source_object)
-        self.set_header('X-Source-Account', source_account)
-        self.set_header('X-Source-Version', source_version)
-        self.set_header('X-Object-Bytes', object_bytes)
-        self.set_header('X-Object-Manifest', manifest)
-        perms = None
-        for permition_type, permition_list in permitions.items():
-            if perms is None:
-                perms = '' #Remove permitions
-            if len(permition_list) == 0:
-                continue
-            perms += ';'+permition_type if len(perms) > 0 else permition_type
-            perms += '='+list2str(permition_list, seperator=',')
-        self.set_header('X-Object-Sharing', perms)
-        self.set_header('X-Object-Public', public)
-        for key, val in metadata.items():
-            self.set_header('X-Object-Meta-'+key, val)
-        success=kwargs.pop('success', (202, 204))
-        return self.post(path, *args, success=success, **kwargs)
-       
-    def object_delete(self, object, until=None, delimiter=None, *args, **kwargs):
-        """ Full Pithos+ DELETE at object level
-        --- request parameters --- 
-        @param until (string): Optional timestamp
-        """
-        self.assert_container()
-        param_dict = {} 
-        if until is not None:
-            param_dict['until']=until
-        if delimiter is not None:
-            param_dict['delimiter'] = delimiter
-        path = path4url(self.account, self.container, object)+params4url(param_dict)
-        success = kwargs.pop('success', 204)
-        return self.delete(path, *args, success=success, **kwargs)
+    def __init__(self, base_url, token, account=None, container=None):
+        super(PithosClient, self).__init__(base_url, token, account, container)
 
     def purge_container(self):
-        self.container_delete(until=unicode(time()))
+        """Delete an empty container and destroy associated blocks
+        """
+        r = self.container_delete(until=unicode(time()))
+        r.release()
+
+    def upload_object_unchunked(
+            self, obj, f,
+            withHashFile=False,
+            size=None,
+            etag=None,
+            content_encoding=None,
+            content_disposition=None,
+            content_type=None,
+            sharing=None,
+            public=None):
+        """
+        :param obj: (str) remote object path
 
-    def put_block_async(self, data, hash):
-        class SilentGreenlet(gevent.Greenlet):
-            def _report_error(self, exc_info):
-                _stderr = sys._stderr
-                try:
-                    sys.stderr = StringIO()
-                    gevent.Greenlet._report_error(self, exc_info)
-                finally:
-                    sys.stderr = _stderr
-        POOL_SIZE = 5
-        if self.async_pool is None:
-            self.async_pool = gevent.pool.Pool(size=POOL_SIZE)
-        g = SilentGreenlet(self.put_block, data, hash)
-        self.async_pool.start(g)
-        return g
-
-    def put_block(self, data, hash):
-        r = self.container_post(update=True, content_type='application/octet-stream',
-            content_length=len(data), data=data, format='json')
-        self.reset_headers()
-        assert r.json[0] == hash, 'Local hash does not match server'
+        :param f: open file descriptor
 
-    def async_upload_object(self, object, f, size=None, hash_cb=None,
-        upload_cb=None):
-        """Like upload_object object but it sends blocks of data asynchronously
-        using geven/greenlet
+        :param withHashFile: (bool)
+
+        :param size: (int) size of data to upload
+
+        :param etag: (str)
+
+        :param content_encoding: (str)
+
+        :param content_disposition: (str)
+
+        :param content_type: (str)
+
+        :param sharing: {'read':[user and/or grp names],
+            'write':[usr and/or grp names]}
+
+        :param public: (bool)
         """
-        self.assert_container()
+        self._assert_container()
+
+        if withHashFile:
+            data = f.read()
+            try:
+                import json
+                data = json.dumps(json.loads(data))
+            except ValueError:
+                raise ClientError('"%s" is not json-formated' % f.name, 1)
+            except SyntaxError:
+                msg = '"%s" is not a valid hashmap file' % f.name
+                raise ClientError(msg, 1)
+            f = StringIO(data)
+        data = f.read(size) if size is not None else f.read()
+        r = self.object_put(
+            obj,
+            data=data,
+            etag=etag,
+            content_encoding=content_encoding,
+            content_disposition=content_disposition,
+            content_type=content_type,
+            permissions=sharing,
+            public=public,
+            success=201)
+        r.release()
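
A minimal usage sketch for the single-request upload path; the endpoint, token, account and paths are placeholders, not values from this repository:

    from kamaki.clients.pithos import PithosClient

    client = PithosClient(
        'https://pithos.example.org/v1',    # placeholder base_url
        'MY-TOKEN',                         # placeholder token
        account='user@example.org',
        container='pithos')
    with open('notes.txt', 'rb') as f:
        client.upload_object_unchunked(
            'backups/notes.txt', f, content_type='text/plain')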
+
+    def create_object_by_manifestation(
+            self, obj,
+            etag=None,
+            content_encoding=None,
+            content_disposition=None,
+            content_type=None,
+            sharing=None,
+            public=None):
+        """
+        :param obj: (str) remote object path
+
+        :param etag: (str)
+
+        :param content_encoding: (str)
+
+        :param content_disposition: (str)
 
-        meta = self.get_container_info(self.container)
+        :param content_type: (str)
+
+        :param sharing: {'read':[user and/or grp names],
+            'write':[usr and/or grp names]}
+
+        :param public: (bool)
+        """
+        self._assert_container()
+        r = self.object_put(
+            obj,
+            content_length=0,
+            etag=etag,
+            content_encoding=content_encoding,
+            content_disposition=content_disposition,
+            content_type=content_type,
+            permissions=sharing,
+            public=public,
+            manifest='%s/%s' % (self.container, obj))
+        r.release()
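
A sketch of assembling a large object from parts already uploaded to the same container; the only requirement implied by the code above is that the part objects share the '<container>/<obj>' prefix that is sent as the manifest (object names are illustrative, client is the PithosClient from the earlier sketch):

    # parts uploaded earlier, e.g. 'video.mp4/0001', 'video.mp4/0002', ...
    client.create_object_by_manifestation(
        'video.mp4', content_type='video/mp4')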
+
+    # upload_* auxiliary methods
+    def _put_block_async(self, data, hash, upload_gen=None):
+        event = SilentEvent(method=self._put_block, data=data, hash=hash)
+        event.start()
+        return event
+
+    def _put_block(self, data, hash):
+        r = self.container_post(
+            update=True,
+            content_type='application/octet-stream',
+            content_length=len(data),
+            data=data,
+            format='json')
+        assert r.json[0] == hash, 'Local hash does not match server'
+
+    def _get_file_block_info(self, fileobj, size=None):
+        meta = self.get_container_info()
         blocksize = int(meta['x-container-block-size'])
         blockhash = meta['x-container-block-hash']
-
-        size = size if size is not None else os.fstat(f.fileno()).st_size
+        size = size if size is not None else fstat(fileobj.fileno()).st_size
         nblocks = 1 + (size - 1) // blocksize
-        hashes = []
-        map = {}
+        return (blocksize, blockhash, size, nblocks)
+
+    def _get_missing_hashes(
+            self, obj, json,
+            size=None,
+            format='json',
+            hashmap=True,
+            content_type=None,
+            etag=None,
+            content_encoding=None,
+            content_disposition=None,
+            permissions=None,
+            public=None,
+            success=(201, 409)):
+        r = self.object_put(
+            obj,
+            format='json',
+            hashmap=True,
+            content_type=content_type,
+            json=json,
+            etag=etag,
+            content_encoding=content_encoding,
+            content_disposition=content_disposition,
+            permissions=permissions,
+            public=public,
+            success=success)
+        if r.status_code == 201:
+            r.release()
+            return None
+        return r.json
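
For reference, the json argument here is the same hashmap structure that upload_object builds further down: the total size plus the ordered list of block hashes. A hedged sketch of the payload and the two outcomes (hash values are made up):

    example_hashmap = {
        'bytes': 8388608,                          # total object size
        'hashes': ['7d865e95...', '3a5b1c02...'],  # one hash per block, in order
    }
    # 201 -> the server already has every block; nothing left to upload
    # 409 -> r.json is the list of block hashes the server is still missing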
 
+    def _calculate_uploaded_blocks(
+            self, blocksize, blockhash, size, nblocks, hashes, hmap, fileobj,
+            hash_cb=None):
         offset = 0
-
         if hash_cb:
             hash_gen = hash_cb(nblocks)
             hash_gen.next()
 
         for i in range(nblocks):
-            block = f.read(min(blocksize, size - offset))
+            block = fileobj.read(min(blocksize, size - offset))
             bytes = len(block)
-            hash = pithos_hash(block, blockhash)
+            hash = _pithos_hash(block, blockhash)
             hashes.append(hash)
-            map[hash] = (offset, bytes)
+            hmap[hash] = (offset, bytes)
             offset += bytes
             if hash_cb:
                 hash_gen.next()
+        msg = 'Failed to calculate uploaded blocks:'
+        msg += ' Offset and object size do not match'
+        assert offset == size, msg
 
-        assert offset == size
-
-        hashmap = dict(bytes=size, hashes=hashes)
-        r = self.object_put(object, format='json', hashmap=True,
-            content_type='application/octet-stream', json=hashmap, success=(201, 409))
-        self.reset_headers()
-
-        if r.status_code == 201:
-            return
+    def _upload_missing_blocks(self, missing, hmap, fileobj, upload_gen=None):
+        """upload missing blocks asynchronously"""
 
-        missing = r.json
-
-        if upload_cb:
-            upload_gen = upload_cb(len(missing))
-            upload_gen.next()
+        self._init_thread_limit()
 
         flying = []
+        failures = []
         for hash in missing:
-            offset, bytes = map[hash]
-            f.seek(offset)
-            data = f.read(bytes)
-            #self.put_block(data, hash)
-            r = self.put_block_async(data, hash)
+            offset, bytes = hmap[hash]
+            fileobj.seek(offset)
+            data = fileobj.read(bytes)
+            r = self._put_block_async(data, hash, upload_gen)
             flying.append(r)
-            for r in flying:
-                if r.ready():
-                    if r.exception:
-                        raise r.exception
-                    if upload_cb:
+            unfinished = self._watch_thread_limit(flying)
+            for thread in set(flying).difference(unfinished):
+                if thread.exception:
+                    failures.append(thread)
+                    if isinstance(
+                            thread.exception,
+                            ClientError) and thread.exception.status == 502:
+                        self.POOLSIZE = self._thread_limit
+                elif thread.isAlive():
+                    flying.append(thread)
+                elif upload_gen:
+                    try:
                         upload_gen.next()
-            flying = [r for r in flying if not r.ready()]
+                    except:
+                        pass
+            flying = unfinished
+
+        for thread in flying:
+            thread.join()
+            if thread.exception:
+                failures.append(thread)
+            elif upload_gen:
+                try:
+                    upload_gen.next()
+                except:
+                    pass
 
-        gevent.joinall(flying)
-        self.object_put(object, format='json', hashmap=True,
-            content_type='application/octet-stream', json=hashmap, success=201)
+        return [failure.kwargs['hash'] for failure in failures]
 
-    def upload_object(self, object, f, size=None, hash_cb=None,
-        upload_cb=None):
-        """Create an object by uploading only the missing blocks
-        hash_cb is a generator function taking the total number of blocks to
-        be hashed as an argument. Its next() will be called every time a block
-        is hashed.
-        upload_cb is a generator function with the same properties that is
-        called every time a block is uploaded.
-        """
-        self.assert_container()
+    def upload_object(
+            self, obj, f,
+            size=None,
+            hash_cb=None,
+            upload_cb=None,
+            etag=None,
+            content_encoding=None,
+            content_disposition=None,
+            content_type=None,
+            sharing=None,
+            public=None):
+        """Upload an object using multiple connections (threads)
 
-        meta = self.get_container_info(self.container)
-        blocksize = int(meta['x-container-block-size'])
-        blockhash = meta['x-container-block-hash']
+        :param obj: (str) remote object path
 
-        size = size if size is not None else os.fstat(f.fileno()).st_size
-        nblocks = 1 + (size - 1) // blocksize
-        hashes = []
-        map = {}
+        :param f: open file descriptor (rb)
 
-        offset = 0
+        :param hash_cb: optional progress.bar object for calculating hashes
 
-        if hash_cb:
-            hash_gen = hash_cb(nblocks)
-            hash_gen.next()
+        :param upload_cb: optional progress.bar object for uploading
 
-        for i in range(nblocks):
-            block = f.read(min(blocksize, size - offset))
-            bytes = len(block)
-            hash = pithos_hash(block, blockhash)
-            hashes.append(hash)
-            map[hash] = (offset, bytes)
-            offset += bytes
-            if hash_cb:
-                hash_gen.next()
+        :param etag: (str)
 
-        assert offset == size
+        :param content_encoding: (str)
 
-        hashmap = dict(bytes=size, hashes=hashes)
-        r = self.object_put(object, format='json', hashmap=True,
-            content_type='application/octet-stream', json=hashmap, success=(201, 409))
-        self.reset_headers()
+        :param content_disposition: (str)
 
-        if r.status_code == 201:
-            return
+        :param content_type: (str)
+
+        :param sharing: {'read':[user and/or grp names],
+            'write':[usr and/or grp names]}
 
-        missing = r.json
+        :param public: (bool)
+        """
+        self._assert_container()
+
+        #init
+        block_info = (blocksize, blockhash, size, nblocks) =\
+            self._get_file_block_info(f, size)
+        (hashes, hmap, offset) = ([], {}, 0)
+        if content_type is None:
+            content_type = 'application/octet-stream'
+
+        self._calculate_uploaded_blocks(
+            *block_info,
+            hashes=hashes,
+            hmap=hmap,
+            fileobj=f,
+            hash_cb=hash_cb)
+
+        hashmap = dict(bytes=size, hashes=hashes)
+        missing = self._get_missing_hashes(
+            obj, hashmap,
+            content_type=content_type,
+            size=size,
+            etag=etag,
+            content_encoding=content_encoding,
+            content_disposition=content_disposition,
+            permissions=sharing,
+            public=public)
+
+        if missing is None:
+            return
 
         if upload_cb:
             upload_gen = upload_cb(len(missing))
-            upload_gen.next()
+            for i in range(len(missing), len(hashmap['hashes']) + 1):
+                try:
+                    upload_gen.next()
+                except:
+                    upload_gen = None
+        else:
+            upload_gen = None
+
+        retries = 7
+        try:
+            while retries:
+                sendlog.info('%s blocks missing' % len(missing))
+                num_of_blocks = len(missing)
+                missing = self._upload_missing_blocks(
+                    missing,
+                    hmap,
+                    f,
+                    upload_gen)
+                if missing:
+                    if num_of_blocks == len(missing):
+                        retries -= 1
+                    else:
+                        num_of_blocks = len(missing)
+                else:
+                    break
+            if missing:
+                raise ClientError(
+                    '%s blocks failed to upload' % len(missing),
+                    status=800)
+        except KeyboardInterrupt:
+            sendlog.info('- - - wait for threads to finish')
+            for thread in activethreads():
+                thread.join()
+            raise
+
+        r = self.object_put(
+            obj,
+            format='json',
+            hashmap=True,
+            content_type=content_type,
+            json=hashmap,
+            success=201)
+        r.release()
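
A sketch of the chunked upload with simple generator callbacks standing in for real progress bars; the callback contract (a callable that takes the block count and returns a generator whose next() marks one block) follows how hash_cb/upload_cb are driven above, while file names are illustrative and client is the PithosClient from the earlier sketch:

    def simple_progress(total):
        # yields once per block (plus an initial step), printing progress
        for done in range(total + 1):
            print '%s of %s blocks' % (done, total)
            yield

    with open('disk.img', 'rb') as f:
        client.upload_object(
            'images/disk.img', f,
            hash_cb=simple_progress,
            upload_cb=simple_progress,
            content_type='application/octet-stream')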
+
+    # download_* auxiliary methods
+    def _get_remote_blocks_info(self, obj, **restargs):
+        #retrieve object hashmap
+        myrange = restargs.pop('data_range', None)
+        hashmap = self.get_object_hashmap(obj, **restargs)
+        restargs['data_range'] = myrange
+        blocksize = int(hashmap['block_size'])
+        blockhash = hashmap['block_hash']
+        total_size = hashmap['bytes']
+        #assert total_size/blocksize + 1 == len(hashmap['hashes'])
+        map_dict = {}
+        for i, h in enumerate(hashmap['hashes']):
+            map_dict[h] = i
+        return (blocksize, blockhash, total_size, hashmap['hashes'], map_dict)
+
+    def _dump_blocks_sync(
+            self, obj, remote_hashes, blocksize, total_size, dst, range,
+            **args):
+        for blockid, blockhash in enumerate(remote_hashes):
+            if blockhash:
+                start = blocksize * blockid
+                is_last = start + blocksize > total_size
+                end = (total_size - 1) if is_last else (start + blocksize - 1)
+                (start, end) = _range_up(start, end, range)
+                args['data_range'] = 'bytes=%s-%s' % (start, end)
+                r = self.object_get(obj, success=(200, 206), **args)
+                self._cb_next()
+                dst.write(r.content)
+                dst.flush()
+
+    def _get_block_async(self, obj, **args):
+        event = SilentEvent(self.object_get, obj, success=(200, 206), **args)
+        event.start()
+        return event
+
+    def _hash_from_file(self, fp, start, size, blockhash):
+        fp.seek(start)
+        block = fp.read(size)
+        h = newhashlib(blockhash)
+        h.update(block.rstrip('\x00'))
+        return hexlify(h.digest())
+
+    def _thread2file(self, flying, local_file, offset=0, **restargs):
+        """write the results of a greenleted rest call to a file
+
+        :param offset: the offset of the file up to blocksize
+        - e.g. if the range is 10-100, all blocks will be written to
+        normal_position - 10
+        """
+        finished = []
+        for i, (start, g) in enumerate(flying.items()):
+            if not g.isAlive():
+                if g.exception:
+                    raise g.exception
+                block = g.value.content
+                local_file.seek(start - offset)
+                local_file.write(block)
+                self._cb_next()
+                finished.append(flying.pop(start))
+        local_file.flush()
+        return finished
+
+    def _dump_blocks_async(
+            self, obj, remote_hashes, blocksize, total_size, local_file,
+            blockhash=None, resume=False, filerange=None, **restargs):
+        file_size = fstat(local_file.fileno()).st_size if resume else 0
+        flying = {}
+        finished = []
+        offset = 0
+        if filerange is not None:
+            rstart = int(filerange.split('-')[0])
+            offset = rstart if blocksize > rstart else rstart % blocksize
+
+        self._init_thread_limit()
+        for block_hash, blockid in remote_hashes.items():
+            start = blocksize * blockid
+            if start < file_size and block_hash == self._hash_from_file(
+                    local_file, start, blocksize, blockhash):
+                self._cb_next()
+                continue
+            self._watch_thread_limit(flying.values())
+            finished += self._thread2file(
+                flying,
+                local_file,
+                offset,
+                **restargs)
+            end = total_size - 1 if start + blocksize > total_size\
+                else start + blocksize - 1
+            (start, end) = _range_up(start, end, filerange)
+            if start == end:
+                self._cb_next()
+                continue
+            restargs['async_headers'] = {'Range': 'bytes=%s-%s' % (start, end)}
+            flying[start] = self._get_block_async(obj, **restargs)
+
+        for thread in flying.values():
+            thread.join()
+        finished += self._thread2file(flying, local_file, offset, **restargs)
+
+    def download_object(
+            self, obj, dst,
+            download_cb=None,
+            version=None,
+            resume=False,
+            range_str=None,
+            if_match=None,
+            if_none_match=None,
+            if_modified_since=None,
+            if_unmodified_since=None):
+        """Download an object (multiple connections, random blocks)
+
+        :param obj: (str) remote object path
+
+        :param dst: open file descriptor (wb+)
+
+        :param download_cb: optional progress.bar object for downloading
+
+        :param version: (str) file version
+
+        :param resume: (bool) if set, preserve already downloaded file parts
+
+        :param range_str: (str) from-to, where from and to are file positions (int) in bytes
+
+        :param if_match: (str)
+
+        :param if_none_match: (str)
+
+        :param if_modified_since: (str) formatted date
+
+        :param if_unmodified_since: (str) formatted date"""
+        restargs = dict(
+            version=version,
+            data_range=None if range_str is None else 'bytes=%s' % range_str,
+            if_match=if_match,
+            if_none_match=if_none_match,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since)
+
+        (
+            blocksize,
+            blockhash,
+            total_size,
+            hash_list,
+            remote_hashes) = self._get_remote_blocks_info(obj, **restargs)
+        assert total_size >= 0
+
+        if download_cb:
+            self.progress_bar_gen = download_cb(len(remote_hashes))
+            self._cb_next()
+
+        if dst.isatty():
+            self._dump_blocks_sync(
+                obj,
+                hash_list,
+                blocksize,
+                total_size,
+                dst,
+                range_str,
+                **restargs)
+        else:
+            self._dump_blocks_async(
+                obj,
+                remote_hashes,
+                blocksize,
+                total_size,
+                dst,
+                blockhash,
+                resume,
+                range_str,
+                **restargs)
+            if not range_str:
+                dst.truncate(total_size)
+
+        self._complete_cb()
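
A download sketch; because dst.isatty() decides between the sequential and the threaded path, writing to a regular file (as below) exercises _dump_blocks_async. Paths and the optional range are illustrative, and client is the PithosClient from the earlier sketch:

    with open('disk.img.copy', 'wb+') as dst:
        client.download_object(
            'images/disk.img', dst,
            range_str='0-1048575',   # optional: only the first MiB
            resume=False)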
+
+    #Command Progress Bar method
+    def _cb_next(self):
+        if hasattr(self, 'progress_bar_gen'):
+            try:
+                self.progress_bar_gen.next()
+            except:
+                pass
+
+    def _complete_cb(self):
+        while True:
+            try:
+                self.progress_bar_gen.next()
+            except:
+                break
+
+    def get_object_hashmap(
+            self, obj,
+            version=None,
+            if_match=None,
+            if_none_match=None,
+            if_modified_since=None,
+            if_unmodified_since=None,
+            data_range=None):
+        """
+        :param obj: (str) remote object path
 
-        for hash in missing:
-            offset, bytes = map[hash]
-            f.seek(offset)
-            data = f.read(bytes)
-            self.put_block(data, hash)
-            r = self.put_block(data, hash)
-            if upload_cb:
-                upload_gen.next()
+        :param if_match: (str)
 
-        self.object_put(object, format='json', hashmap=True,
-            content_type='application/octet-stream', json=hashmap, success=201)
+        :param if_none_match: (str)
+
+        :param if_modified_since: (str) formatted date
+
+        :param if_unmodified_since: (str) formatted date
+
+        :param data_range: (str) from-to where from and to are integers
+            denoting file positions in bytes
+
+        :returns: (list)
+        """
+        try:
+            r = self.object_get(
+                obj,
+                hashmap=True,
+                version=version,
+                if_etag_match=if_match,
+                if_etag_not_match=if_none_match,
+                if_modified_since=if_modified_since,
+                if_unmodified_since=if_unmodified_since,
+                data_range=data_range)
+        except ClientError as err:
+            if err.status == 304 or err.status == 412:
+                return {}
+            raise
+        return r.json
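
The returned dict is the same structure _get_remote_blocks_info consumes above; a hedged sketch of the keys it is expected to carry (values are made up, client as in the earlier sketch):

    hmap = client.get_object_hashmap('images/disk.img')
    # hmap['block_size'] -> block size in bytes, e.g. 4194304
    # hmap['block_hash'] -> hash algorithm name, e.g. 'sha256'
    # hmap['bytes']      -> total object size in bytes
    # hmap['hashes']     -> ordered list of block hashes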
 
     def set_account_group(self, group, usernames):
-        self.account_post(update=True, groups = {group:usernames})
+        """
+        :param group: (str)
+
+        :param usernames: (list)
+        """
+        r = self.account_post(update=True, groups={group: usernames})
+        r.release()
 
     def del_account_group(self, group):
-        return self.account_post(update=True, groups={group:[]})
+        """
+        :param group: (str)
+        """
+        r = self.account_post(update=True, groups={group: []})
+        r.release()
 
-    def get_account_info(self):
-        r = self.account_head()
-        from datetime import datetime
-        r = self.account_head(if_modified_since=datetime.now())
+    def get_account_info(self, until=None):
+        """
+        :param until: (str) formatted date
+
+        :returns: (dict)
+        """
+        r = self.account_head(until=until)
         if r.status_code == 401:
             raise ClientError("No authorization")
         return r.headers
 
     def get_account_quota(self):
-        return filter_in(self.get_account_info(), 'X-Account-Policy-Quota', exactMatch = True)
+        """
+        :returns: (dict)
+        """
+        return filter_in(
+            self.get_account_info(),
+            'X-Account-Policy-Quota',
+            exactMatch=True)
 
     def get_account_versioning(self):
-        return filter_in(self.get_account_info(), 'X-Account-Policy-Versioning', exactMatch = True)
+        """
+        :returns: (dict)
+        """
+        return filter_in(
+            self.get_account_info(),
+            'X-Account-Policy-Versioning',
+            exactMatch=True)
 
-    def get_account_meta(self):
-        return filter_in(self.get_account_info(), 'X-Account-Meta-')
+    def get_account_meta(self, until=None):
+        """
+        :param until: (str) formatted date
+
+        :returns: (dict)
+        """
+        return filter_in(self.get_account_info(until=until), 'X-Account-Meta-')
 
     def get_account_group(self):
+        """
+        :returns: (dict)
+        """
         return filter_in(self.get_account_info(), 'X-Account-Group-')
 
     def set_account_meta(self, metapairs):
+        """
+        :param metapairs: (dict) {key1:val1, key2:val2, ...}
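+
+        Usage sketch, assuming an initialized client ``pithos`` (keys and
+        values are illustrative)::
+
+            pithos.set_account_meta({'color': 'blue', 'lang': 'en'})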
+        """
         assert(type(metapairs) is dict)
-        self.account_post(update=True, metadata=metapairs)
+        r = self.account_post(update=True, metadata=metapairs)
+        r.release()
 
     def del_account_meta(self, metakey):
-        self.account_post(update=True, metadata={metakey:''})
+        """
+        :param metakey: (str) metadatum key
+        """
+        r = self.account_post(update=True, metadata={metakey: ''})
+        r.release()
 
     def set_account_quota(self, quota):
-        self.account_post(update=True, quota=quota)
+        """
+        :param quota: (int)
+        """
+        r = self.account_post(update=True, quota=quota)
+        r.release()
 
     def set_account_versioning(self, versioning):
-        self.account_post(update=True, versioning = versioning)
+        """
+        "param versioning: (str)
+        """
+        r = self.account_post(update=True, versioning=versioning)
+        r.release()
 
     def list_containers(self):
+        """
+        :returns: (dict)
+        """
         r = self.account_get()
         return r.json
 
     def del_container(self, until=None, delimiter=None):
-        self.assert_container()
-        r = self.container_delete(until=until, delimiter=delimiter, success=(204, 404, 409))
+        """
+        :param until: (str) formatted date
+
+        :param delimiter: (str) if given as /, the container contents are
+            deleted as well (recursive delete)
+
+        :raises ClientError: 404 Container does not exist
+
+        :raises ClientError: 409 Container is not empty
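+
+        Usage sketch, assuming an initialized client ``pithos`` (the container
+        name is illustrative); delimiter='/' requests recursive deletion of
+        the container contents::
+
+            pithos.container = 'mycontainer'
+            pithos.del_container(delimiter='/')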
+        """
+        self._assert_container()
+        r = self.container_delete(
+            until=until,
+            delimiter=delimiter,
+            success=(204, 404, 409))
+        r.release()
         if r.status_code == 404:
-            raise ClientError('Container "%s" does not exist'%self.container, r.status_code)
+            raise ClientError(
+                'Container "%s" does not exist' % self.container,
+                r.status_code)
         elif r.status_code == 409:
-            raise ClientError('Container "%s" is not empty'%self.container, r.status_code)
+            raise ClientError(
+                'Container "%s" is not empty' % self.container,
+                r.status_code)
 
     def get_container_versioning(self, container):
-        return filter_in(self.get_container_info(container), 'X-Container-Policy-Versioning')
+        """
+        :param container: (str)
+
+        :returns: (dict)
+        """
+        self.container = container
+        return filter_in(
+            self.get_container_info(),
+            'X-Container-Policy-Versioning')
 
     def get_container_quota(self, container):
-        return filter_in(self.get_container_info(container), 'X-Container-Policy-Quota')
+        """
+        :param container: (str)
 
-    def get_container_meta(self, container):
-        return filter_in(self.get_container_info(container), 'X-Container-Meta-')
+        :returns: (dict)
+        """
+        self.container = container
+        return filter_in(self.get_container_info(), 'X-Container-Policy-Quota')
 
-    def get_container_object_meta(self, container):
-        return filter_in(self.get_container_info(container), 'X-Container-Object-Meta')
+    def get_container_info(self, until=None):
+        """
+        :param until: (str) formatted date
+
+        :returns: (dict)
+
+        :raises ClientError: 404 Container not found
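+
+        Usage sketch, assuming ``pithos.container`` is already set; the
+        block-size header is the same one used by the upload helpers below::
+
+            info = pithos.get_container_info()
+            blocksize = int(info['x-container-block-size'])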
+        """
+        try:
+            r = self.container_head(until=until)
+        except ClientError as err:
+            err.details.append('for container %s' % self.container)
+            raise err
+        return r.headers
+
+    def get_container_meta(self, until=None):
+        """
+        :param until: (str) formatted date
+
+        :returns: (dict)
+        """
+        return filter_in(
+            self.get_container_info(until=until),
+            'X-Container-Meta')
+
+    def get_container_object_meta(self, until=None):
+        """
+        :param until: (str) formatted date
+
+        :returns: (dict)
+        """
+        return filter_in(
+            self.get_container_info(until=until),
+            'X-Container-Object-Meta')
 
     def set_container_meta(self, metapairs):
+        """
+        :param metapairs: (dict) {key1:val1, key2:val2, ...}
+        """
         assert(type(metapairs) is dict)
-        self.container_post(update=True, metadata=metapairs)
+        r = self.container_post(update=True, metadata=metapairs)
+        r.release()
 
     def del_container_meta(self, metakey):
-        self.container_post(update=True, metadata={metakey:''})
+        """
+        :param metakey: (str) metadatum key
+        """
+        r = self.container_post(update=True, metadata={metakey: ''})
+        r.release()
 
     def set_container_quota(self, quota):
-        self.container_post(update=True, quota=quota)
+        """
+        :param quota: (int)
+        """
+        r = self.container_post(update=True, quota=quota)
+        r.release()
 
     def set_container_versioning(self, versioning):
-        self.container_post(update=True, versioning=versioning)
+        """
+        :param versioning: (str)
+        """
+        r = self.container_post(update=True, versioning=versioning)
+        r.release()
 
     def del_object(self, obj, until=None, delimiter=None):
-        self.assert_container()
-        self.object_delete(obj, until=until, delimiter=delimiter)
+        """
+        :param obj: (str) remote object path
+
+        :param until: (str) formatted date
+
+        :param delimiter: (str)
+        """
+        self._assert_container()
+        r = self.object_delete(obj, until=until, delimiter=delimiter)
+        r.release()
 
-    def set_object_meta(self, object, metapairs):
+    def set_object_meta(self, obj, metapairs):
+        """
+        :param obj: (str) remote object path
+
+        :param metapairs: (dict) {key1:val1, key2:val2, ...}
+        """
         assert(type(metapairs) is dict)
-        self.object_post(object, update=True, metadata=metapairs)
+        r = self.object_post(obj, update=True, metadata=metapairs)
+        r.release()
+
+    def del_object_meta(self, obj, metakey):
+        """
+        :param obj: (str) remote object path
+
+        :param metakey: (str) metadatum key
+        """
+        r = self.object_post(obj, update=True, metadata={metakey: ''})
+        r.release()
+
+    def publish_object(self, obj):
+        """
+        :param obj: (str) remote object path
+
+        :returns: (str) access url
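+
+        Usage sketch, assuming an initialized client ``pithos`` (the object
+        path is illustrative)::
+
+            public_url = pithos.publish_object('images/photo001.png')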
+        """
+        r = self.object_post(obj, update=True, public=True)
+        r.release()
+        info = self.get_object_info(obj)
+        pref, sep, rest = self.base_url.partition('//')
+        base = rest.split('/')[0]
+        newurl = path4url(
+            '%s%s%s' % (pref, sep, base),
+            info['x-object-public'])
+        return newurl[1:]
+
+    def unpublish_object(self, obj):
+        """
+        :param obj: (str) remote object path
+        """
+        r = self.object_post(obj, update=True, public=False)
+        r.release()
+
+    def get_object_info(self, obj, version=None):
+        """
+        :param obj: (str) remote object path
+
+        :param version: (str)
+
+        :returns: (dict)
+        """
+        try:
+            r = self.object_head(obj, version=version)
+            return r.headers
+        except ClientError as ce:
+            if ce.status == 404:
+                raise ClientError('Object not found', status=404)
+            raise
+
+    def get_object_meta(self, obj, version=None):
+        """
+        :param obj: (str) remote object path
 
-    def del_object_meta(self, metakey, object):
-        self.object_post(object, update=True, metadata={metakey:''})
+        :param version: (str)
 
-    def publish_object(self, object):
-        self.object_post(object, update=True, public=True)
+        :returns: (dict)
+        """
+        return filter_in(
+            self.get_object_info(obj, version=version),
+            'X-Object-Meta')
 
-    def unpublish_object(self, object):
-        self.object_post(object, update=True, public=False)
+    def get_object_sharing(self, obj):
+        """
+        :param obj: (str) remote object path
 
-    def get_object_sharing(self, object):
-        r = filter_in(self.get_object_info(object), 'X-Object-Sharing', exactMatch = True)
+        :returns: (dict)
+        """
+        r = filter_in(
+            self.get_object_info(obj),
+            'X-Object-Sharing',
+            exactMatch=True)
         reply = {}
         if len(r) > 0:
             perms = r['x-object-sharing'].split(';')
@@ -847,67 +940,238 @@ class PithosClient(StorageClient):
                 reply[key] = val
         return reply
 
-    def set_object_sharing(self, object, read_permition = False, write_permition = False):
+    def set_object_sharing(
+            self, obj,
+            read_permition=False, write_permition=False):
         """Give read/write permisions to an object.
-           @param object is the object to change sharing permitions onto
-           @param read_permition is a list of users and user groups that get read permition for this object
-                False means all previous read permitions will be removed
-           @param write_perimition is a list of users and user groups to get write permition for this object
-                False means all previous read permitions will be removed
-        """
-        perms = {}
-        perms['read'] = read_permition if isinstance(read_permition, list) else ''
-        perms['write'] = write_permition if isinstance(write_permition, list) else ''
-        self.object_post(object, update=True, permitions=perms)
-
-    def del_object_sharing(self, object):
-        self.set_object_sharing(object)
-
-    def append_object(self, object, source_file, upload_cb = None):
-        """@param upload_db is a generator for showing progress of upload
-            to caller application, e.g. a progress bar. Its next is called
-            whenever a block is uploaded
-        """
-        self.assert_container()
-        meta = self.get_container_info(self.container)
+
+        :param obj: (str) remote object path
+
+        :param read_permition: (list or False) users and user groups that get
+            read permission for this object; False removes all previous read
+            permissions
+
+        :param write_permition: (list or False) users and user groups that get
+            write permission for this object; False removes all previous write
+            permissions
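+
+        Usage sketch, assuming an initialized client ``pithos`` (object path
+        and user names are illustrative)::
+
+            pithos.set_object_sharing(
+                'docs/report.txt',
+                read_permition=['user1', 'user2'],
+                write_permition=['user1'])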
+        """
+
+        perms = dict(
+            read='' if not read_permition else read_permition,
+            write='' if not write_permition else write_permition)
+        r = self.object_post(obj, update=True, permissions=perms)
+        r.release()
+
+    def del_object_sharing(self, obj):
+        """
+        :param obj: (str) remote object path
+        """
+        self.set_object_sharing(obj)
+
+    def append_object(self, obj, source_file, upload_cb=None):
+        """
+        :param obj: (str) remote object path
+
+        :param source_file: open file descriptor
+
+        :param upload_cb: (function) called with the total number of blocks;
+            the generator it returns is advanced (next()) as blocks are
+            uploaded, e.g. to drive a progress bar
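+
+        Usage sketch, assuming an initialized client ``pithos`` (file and
+        object names are illustrative)::
+
+            with open('local.log', 'rb') as f:
+                pithos.append_object('logs/remote.log', f)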
+        """
+
+        self._assert_container()
+        meta = self.get_container_info()
         blocksize = int(meta['x-container-block-size'])
-        filesize = os.fstat(source_file.fileno()).st_size
-        nblocks = 1 + (filesize - 1)//blocksize
+        filesize = fstat(source_file.fileno()).st_size
+        nblocks = 1 + (filesize - 1) // blocksize
         offset = 0
-        if upload_cb is not None:
+        if upload_cb:
             upload_gen = upload_cb(nblocks)
+            upload_gen.next()
         for i in range(nblocks):
             block = source_file.read(min(blocksize, filesize - offset))
             offset += len(block)
-            self.object_post(object, update=True,
-                content_range='bytes */*', content_type='application/octet-stream',
-                content_length=len(block), data=block)
-            if upload_cb is not None:
+            r = self.object_post(
+                obj,
+                update=True,
+                content_range='bytes */*',
+                content_type='application/octet-stream',
+                content_length=len(block),
+                data=block)
+            r.release()
+
+            if upload_cb:
                 upload_gen.next()
 
-    def truncate_object(self, object, upto_bytes):
-        self.object_post(object, update=True, content_range='bytes 0-%s/*'%upto_bytes,
-            content_type='application/octet-stream', object_bytes=upto_bytes,
-            source_object=path4url(self.container, object))
+    def truncate_object(self, obj, upto_bytes):
+        """
+        :param obj: (str) remote object path
+
+        :param upto_bytes: (int) maximum number of bytes to keep in the
+            remote object
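+
+        Usage sketch, assuming an initialized client ``pithos`` (the object
+        path is illustrative); keep only the first 1024 bytes::
+
+            pithos.truncate_object('logs/remote.log', 1024)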
+        """
+        r = self.object_post(
+            obj,
+            update=True,
+            content_range='bytes 0-%s/*' % upto_bytes,
+            content_type='application/octet-stream',
+            object_bytes=upto_bytes,
+            source_object=path4url(self.container, obj))
+        r.release()
+
+    def overwrite_object(self, obj, start, end, source_file, upload_cb=None):
+        """Overwrite a part of an object from local source file
+
+        :param obj: (str) remote object path
+
+        :param start: (int) position in bytes to start overwriting from
 
-    def overwrite_object(self, object, start, end, source_file, upload_cb=None):
-        """Overwrite a part of an object with given source file
-           @start the part of the remote object to start overwriting from, in bytes
-           @end the part of the remote object to stop overwriting to, in bytes
+        :param end: (int) position in bytes to stop overwriting at
+
+        :param source_file: open file descriptor
+
+        :param upload_cb: (function) called with the total number of blocks;
+            the generator it returns is advanced (next()) as blocks are
+            uploaded, e.g. to drive a progress bar
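+
+        Usage sketch, assuming an initialized client ``pithos`` (file and
+        object names are illustrative); overwrite bytes 0-1023 of the remote
+        object, which must be at least that long, with the start of the local
+        file::
+
+            with open('patch.bin', 'rb') as f:
+                pithos.overwrite_object('data/remote.bin', 0, 1023, f)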
         """
-        self.assert_container()
-        meta = self.get_container_info(self.container)
+
+        r = self.get_object_info(obj)
+        rf_size = int(r['content-length'])
+        if rf_size < int(start):
+            raise ClientError(
+                'Range start exceeds file size',
+                status=416)
+        elif rf_size < int(end):
+            raise ClientError(
+                'Range end exceeds file size',
+                status=416)
+        self._assert_container()
+        meta = self.get_container_info()
         blocksize = int(meta['x-container-block-size'])
-        filesize = os.fstat(source_file.fileno()).st_size
+        filesize = fstat(source_file.fileno()).st_size
         datasize = int(end) - int(start) + 1
-        nblocks = 1 + (datasize - 1)//blocksize
+        nblocks = 1 + (datasize - 1) // blocksize
         offset = 0
-        if upload_cb is not None:
+        if upload_cb:
             upload_gen = upload_cb(nblocks)
+            upload_gen.next()
         for i in range(nblocks):
-            block = source_file.read(min(blocksize, filesize - offset, datasize - offset))
+            read_size = min(blocksize, filesize - offset, datasize - offset)
+            block = source_file.read(read_size)
+            r = self.object_post(
+                obj,
+                update=True,
+                content_type='application/octet-stream',
+                content_length=len(block),
+                content_range='bytes %s-%s/*' % (
+                    int(start) + offset,
+                    int(start) + offset + len(block) - 1),
+                data=block)
             offset += len(block)
-            self.object_post(object, update=True, content_type='application/octet-stream', 
-                content_length=len(block), content_range='bytes %s-%s/*'%(start,end), data=block)
-            if upload_cb is not None:
+            r.release()
+
+            if upload_cb:
                 upload_gen.next()
+
+    def copy_object(
+            self, src_container, src_object, dst_container,
+            dst_object=False,
+            source_version=None,
+            public=False,
+            content_type=None,
+            delimiter=None):
+        """
+        :param src_container: (str) source container
+
+        :param src_object: (str) source object path
+
+        :param dst_container: (str) destination container
+
+        :param dst_object: (str) destination object path
+
+        :param source_version: (str) source object version
+
+        :param public: (bool)
+
+        :param content_type: (str)
+
+        :param delimiter: (str)
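+
+        Usage sketch, assuming an initialized client ``pithos`` (container and
+        object names are illustrative)::
+
+            pithos.copy_object(
+                src_container='pics',
+                src_object='2013/photo.png',
+                dst_container='backup',
+                dst_object='photo-copy.png')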
+        """
+        self._assert_account()
+        self.container = dst_container
+        dst_object = dst_object or src_object
+        src_path = path4url(src_container, src_object)
+        r = self.object_put(
+            dst_object,
+            success=201,
+            copy_from=src_path,
+            content_length=0,
+            source_version=source_version,
+            public=public,
+            content_type=content_type,
+            delimiter=delimiter)
+        r.release()
+
+    def move_object(
+            self, src_container, src_object, dst_container,
+            dst_object=False,
+            source_version=None,
+            public=False,
+            content_type=None,
+            delimiter=None):
+        """
+        :param src_container: (str) source container
+
+        :param src_object: (str) source object path
+
+        :param dst_container: (str) destination container
+
+        :param dst_object: (str) destination object path
+
+        :param source_version: (str) source object version
+
+        :param public: (bool)
+
+        :param content_type: (str)
+
+        :param delimiter: (str)
+        """
+        self._assert_account()
+        self.container = dst_container
+        dst_object = dst_object or src_object
+        src_path = path4url(src_container, src_object)
+        r = self.object_put(
+            dst_object,
+            success=201,
+            move_from=src_path,
+            content_length=0,
+            source_version=source_version,
+            public=public,
+            content_type=content_type,
+            delimiter=delimiter)
+        r.release()
+
+    def get_sharing_accounts(self, limit=None, marker=None, *args, **kwargs):
+        """Get accounts that share with self.account
+
+        :param limit: (str)
+
+        :param marker: (str)
+
+        :returns: (dict)
+        """
+        self._assert_account()
+
+        self.set_param('format', 'json')
+        self.set_param('limit', limit, iff=limit is not None)
+        self.set_param('marker', marker, iff=marker is not None)
+
+        path = ''
+        success = kwargs.pop('success', (200, 204))
+        r = self.get(path, *args, success=success, **kwargs)
+        return r.json
+
+    def get_object_versionlist(self, obj):
+        """
+        :param obj: (str) remote object path
+
+        :returns: (list)
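+
+        Usage sketch, assuming an initialized client ``pithos`` with
+        ``pithos.container`` set (the object path is illustrative)::
+
+            versions = pithos.get_object_versionlist('docs/report.txt')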
+        """
+        self._assert_container()
+        r = self.object_get(obj, format='json', version='list')
+        return r.json['versions']