# Copyright 2011-2013 GRNET S.A. All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
#   1. Redistributions of source code must retain the above
#      copyright notice, this list of conditions and the following
#      disclaimer.
#
#   2. Redistributions in binary form must reproduce the above
#      copyright notice, this list of conditions and the following
#      disclaimer in the documentation and/or other materials
#      provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.

from threading import enumerate as activethreads

from os import fstat
from hashlib import new as newhashlib
from time import time
from StringIO import StringIO

from binascii import hexlify

from kamaki.clients import SilentEvent, sendlog
from kamaki.clients.pithos.rest_api import PithosRestClient
from kamaki.clients.storage import ClientError
from kamaki.clients.utils import path4url, filter_in


def _pithos_hash(block, blockhash):
    h = newhashlib(blockhash)
    h.update(block.rstrip('\x00'))
    return h.hexdigest()


def _range_up(start, end, a_range):
    """Clip the absolute [start, end] byte window of a block against a
    'from-to' range string; return (0, 0) if the two do not overlap"""
    if a_range:
        (rstart, rend) = a_range.split('-')
        (rstart, rend) = (int(rstart), int(rend))
        if rstart > end or rend < start:
            return (0, 0)
        if rstart > start:
            start = rstart
        if rend < end:
            end = rend
    return (start, end)
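

# Illustrative sketch (not part of the original module): worked examples of
# how _range_up clips a block's byte window against a user range string.
def _example_range_up():
    assert _range_up(0, 99, '10-100') == (10, 99)     # clip the start upwards
    assert _range_up(50, 149, '10-100') == (50, 100)  # clip the end downwards
    assert _range_up(200, 299, '10-100') == (0, 0)    # disjoint: empty window
    assert _range_up(0, 99, None) == (0, 99)          # no range: keep as is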


class PithosClient(PithosRestClient):
    """Synnefo Pithos+ API client"""

    def __init__(self, base_url, token, account=None, container=None):
        super(PithosClient, self).__init__(
            base_url, token, account, container)
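
    # Usage sketch (hypothetical endpoint and credentials):
    #
    #     client = PithosClient(
    #         'https://pithos.example.org/object-store/v1',
    #         AUTH_TOKEN, account=USER_UUID, container='pithos')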

    def purge_container(self, container=None):
        """Delete an empty container and destroy associated blocks"""
        cnt_back_up = self.container
        try:
            self.container = container or cnt_back_up
            self.container_delete(until=unicode(time()))
        finally:
            self.container = cnt_back_up
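
    # Usage sketch: purge a container other than the current one; the
    # client's working container is restored afterwards.
    #
    #     client.purge_container('trash')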

    def upload_object_unchunked(
            self, obj, f,
            withHashFile=False,
            size=None,
            etag=None,
            content_encoding=None,
            content_disposition=None,
            content_type=None,
            sharing=None,
            public=None):
        """
        :param obj: (str) remote object path
        :param f: open file descriptor
        :param withHashFile: (bool)
        :param size: (int) size of data to upload
        :param etag: (str)
        :param content_encoding: (str)
        :param content_disposition: (str)
        :param content_type: (str)
        :param sharing: {'read':[user and/or grp names],
            'write':[usr and/or grp names]}
        :param public: (bool)

        :returns: (dict) created object metadata
        """
        self._assert_container()

        if withHashFile:
            data = f.read()
            try:
                import json
                data = json.dumps(json.loads(data))
            except ValueError:
                raise ClientError('"%s" is not json-formatted' % f.name, 1)
            except SyntaxError:
                msg = '"%s" is not a valid hashmap file' % f.name
                raise ClientError(msg, 1)
            f = StringIO(data)
        else:
            data = f.read(size) if size else f.read()

        r = self.object_put(
            obj,
            data=data,
            etag=etag,
            content_encoding=content_encoding,
            content_disposition=content_disposition,
            content_type=content_type,
            permissions=sharing,
            public=public,
            success=201)
        return r.headers
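
    # Usage sketch: single-request upload of a small local file.
    #
    #     with open('notes.txt') as f:
    #         client.upload_object_unchunked(
    #             'notes.txt', f, content_type='text/plain')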

    def create_object_by_manifestation(
            self, obj,
            etag=None,
            content_encoding=None,
            content_disposition=None,
            content_type=None,
            sharing=None,
            public=None):
        """
        :param obj: (str) remote object path
        :param etag: (str)
        :param content_encoding: (str)
        :param content_disposition: (str)
        :param content_type: (str)
        :param sharing: {'read':[user and/or grp names],
            'write':[usr and/or grp names]}
        :param public: (bool)

        :returns: (dict) created object metadata
        """
        self._assert_container()
        r = self.object_put(
            obj,
            content_length=0,
            etag=etag,
            content_encoding=content_encoding,
            content_disposition=content_disposition,
            content_type=content_type,
            permissions=sharing,
            public=public,
            manifest='%s/%s' % (self.container, obj))
        return r.headers
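
    # Usage sketch: after uploading parts as 'video.mp4/1', 'video.mp4/2',
    # etc., a manifest object makes them readable as one concatenated object
    # (names here are hypothetical).
    #
    #     client.create_object_by_manifestation(
    #         'video.mp4', content_type='video/mp4')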

    # upload_* auxiliary methods
    def _put_block_async(self, data, hash, upload_gen=None):
        event = SilentEvent(method=self._put_block, data=data, hash=hash)
        event.start()
        return event

    def _put_block(self, data, hash):
        r = self.container_post(
            update=True,
            content_type='application/octet-stream',
            content_length=len(data),
            data=data,
            format='json')
        assert r.json[0] == hash, 'Local hash does not match server'

    def _get_file_block_info(self, fileobj, size=None, cache=None):
        """
        :param fileobj: (file descriptor) source
        :param size: (int) size of data to upload from source
        :param cache: (dict) if provided, cache container info response to
            avoid redundant calls
        """
        if isinstance(cache, dict):
            try:
                meta = cache[self.container]
            except KeyError:
                meta = self.get_container_info()
                cache[self.container] = meta
        else:
            meta = self.get_container_info()
        blocksize = int(meta['x-container-block-size'])
        blockhash = meta['x-container-block-hash']
        size = size if size is not None else fstat(fileobj.fileno()).st_size
        nblocks = 1 + (size - 1) // blocksize
        return (blocksize, blockhash, size, nblocks)
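
    # Worked example of the block arithmetic above: with a 4MiB blocksize
    # (4194304 bytes), a 10MiB file (10485760 bytes) needs
    # nblocks = 1 + (10485760 - 1) // 4194304 = 3 blocks (4MiB + 4MiB + 2MiB).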

    def _create_or_get_missing_hashes(
            self, obj, json,
            size=None,
            format='json',
            hashmap=True,
            content_type=None,
            if_etag_match=None,
            if_etag_not_match=None,
            content_encoding=None,
            content_disposition=None,
            permissions=None,
            public=None,
            success=(201, 409)):
        # ask the server which block hashes it already has; a 201 means the
        # object was created from existing blocks and nothing is missing
        r = self.object_put(
            obj,
            format=format,
            hashmap=hashmap,
            content_type=content_type,
            json=json,
            if_etag_match=if_etag_match,
            if_etag_not_match=if_etag_not_match,
            content_encoding=content_encoding,
            content_disposition=content_disposition,
            permissions=permissions,
            public=public,
            success=success)
        return (None if r.status_code == 201 else r.json), r.headers

    def _calculate_blocks_for_upload(
            self, blocksize, blockhash, size, nblocks, hashes, hmap, fileobj,
            hash_cb=None):
        offset = 0
        if hash_cb:
            hash_gen = hash_cb(nblocks)
            hash_gen.next()

        for i in range(nblocks):
            block = fileobj.read(min(blocksize, size - offset))
            bytes = len(block)
            hash = _pithos_hash(block, blockhash)
            hashes.append(hash)
            hmap[hash] = (offset, bytes)
            offset += bytes
            if hash_cb:
                hash_gen.next()
        msg = ('Failed to calculate uploaded blocks: '
               'Offset and object size do not match')
        assert offset == size, msg

    def _upload_missing_blocks(self, missing, hmap, fileobj, upload_gen=None):
        """upload missing blocks asynchronously"""
        self._init_thread_limit()

        flying = []
        failures = []
        for hash in missing:
            offset, bytes = hmap[hash]
            fileobj.seek(offset)
            data = fileobj.read(bytes)
            r = self._put_block_async(data, hash, upload_gen)
            flying.append(r)
            unfinished = self._watch_thread_limit(flying)
            for thread in set(flying).difference(unfinished):
                if thread.exception:
                    failures.append(thread)
                    if isinstance(
                            thread.exception,
                            ClientError) and thread.exception.status == 502:
                        self.POOLSIZE = self._thread_limit
                elif thread.isAlive():
                    flying.append(thread)
                elif upload_gen:
                    try:
                        upload_gen.next()
                    except:
                        pass
            flying = unfinished

        for thread in flying:
            thread.join()
            if thread.exception:
                failures.append(thread)
            elif upload_gen:
                try:
                    upload_gen.next()
                except:
                    pass

        return [failure.kwargs['hash'] for failure in failures]

    def upload_object(
            self, obj, f,
            size=None,
            hash_cb=None,
            upload_cb=None,
            etag=None,
            if_etag_match=None,
            if_not_exist=None,
            content_encoding=None,
            content_disposition=None,
            content_type=None,
            sharing=None,
            public=None,
            container_info_cache=None):
        """Upload an object using multiple connections (threads)

        :param obj: (str) remote object path
        :param f: open file descriptor (rb)
        :param size: (int) size of data to upload
        :param hash_cb: optional progress.bar object for calculating hashes
        :param upload_cb: optional progress.bar object for uploading
        :param etag: (str)
        :param if_etag_match: (str) Push that value to if-match header at file
            creation
        :param if_not_exist: (bool) If true, the file will be uploaded ONLY if
            it does not exist remotely, otherwise the operation will fail.
            This covers the case where an object with the same path is created
            while the object is being uploaded.
        :param content_encoding: (str)
        :param content_disposition: (str)
        :param content_type: (str)
        :param sharing: {'read':[user and/or grp names],
            'write':[usr and/or grp names]}
        :param public: (bool)
        :param container_info_cache: (dict) if given, avoid redundant calls to
            server for container info (block size and hash information)
        """
        self._assert_container()

        block_info = (
            blocksize, blockhash, size, nblocks) = self._get_file_block_info(
                f, size, container_info_cache)
        (hashes, hmap, offset) = ([], {}, 0)
        if not content_type:
            content_type = 'application/octet-stream'

        self._calculate_blocks_for_upload(
            *block_info,
            hashes=hashes, hmap=hmap, fileobj=f, hash_cb=hash_cb)

        hashmap = dict(bytes=size, hashes=hashes)
        missing, obj_headers = self._create_or_get_missing_hashes(
            obj, hashmap,
            content_type=content_type,
            size=size,
            if_etag_match=if_etag_match,
            if_etag_not_match='*' if if_not_exist else None,
            content_encoding=content_encoding,
            content_disposition=content_disposition,
            permissions=sharing,
            public=public)

        if missing is None:
            return obj_headers

        if upload_cb:
            upload_gen = upload_cb(len(missing))
            # the initial bar steps account for blocks the server already has
            for i in range(len(missing), len(hashmap['hashes']) + 1):
                try:
                    upload_gen.next()
                except:
                    upload_gen = None
        else:
            upload_gen = None

        retries = 7
        try:
            while retries:
                sendlog.info('%s blocks missing' % len(missing))
                num_of_blocks = len(missing)
                missing = self._upload_missing_blocks(
                    missing, hmap, f, upload_gen)
                if missing:
                    if num_of_blocks == len(missing):
                        retries -= 1
                    else:
                        num_of_blocks = len(missing)
                else:
                    break
            if missing:
                raise ClientError(
                    '%s blocks failed to upload' % len(missing),
                    status=800)
        except KeyboardInterrupt:
            sendlog.info('- - - wait for threads to finish')
            for thread in activethreads():
                thread.join()
            raise

        r = self.object_put(
            obj,
            format='json',
            hashmap=True,
            content_type=content_type,
            if_etag_match=if_etag_match,
            if_etag_not_match='*' if if_not_exist else None,
            etag=etag,
            json=hashmap,
            permissions=sharing,
            public=public,
            success=201)
        return r.headers
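
    # Usage sketch: chunked, multi-threaded upload with a shared
    # container-info cache to avoid one HEAD request per uploaded file.
    #
    #     cache = {}
    #     with open('big.iso', 'rb') as f:
    #         client.upload_object('big.iso', f, container_info_cache=cache)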

    # download_* auxiliary methods
    def _get_remote_blocks_info(self, obj, **restargs):
        # retrieve object hashmap
        myrange = restargs.pop('data_range', None)
        hashmap = self.get_object_hashmap(obj, **restargs)
        restargs['data_range'] = myrange
        blocksize = int(hashmap['block_size'])
        blockhash = hashmap['block_hash']
        total_size = hashmap['bytes']
        #assert total_size/blocksize + 1 == len(hashmap['hashes'])
        map_dict = {}
        for i, h in enumerate(hashmap['hashes']):
            # the same block hash may appear at multiple positions in an
            # object, so keep a list of block indices per hash
            if h in map_dict:
                map_dict[h].append(i)
            else:
                map_dict[h] = [i]
        return (blocksize, blockhash, total_size, hashmap['hashes'], map_dict)

    def _dump_blocks_sync(
            self, obj, remote_hashes, blocksize, total_size, dst, range,
            **args):
        for blockid, blockhash in enumerate(remote_hashes):
            if blockhash:
                start = blocksize * blockid
                is_last = start + blocksize > total_size
                end = (total_size - 1) if is_last else (start + blocksize - 1)
                (start, end) = _range_up(start, end, range)
                args['data_range'] = 'bytes=%s-%s' % (start, end)
                r = self.object_get(obj, success=(200, 206), **args)
                self._cb_next()
                dst.write(r.content)
                dst.flush()

    def _get_block_async(self, obj, **args):
        event = SilentEvent(self.object_get, obj, success=(200, 206), **args)
        event.start()
        return event

    def _hash_from_file(self, fp, start, size, blockhash):
        fp.seek(start)
        block = fp.read(size)
        h = newhashlib(blockhash)
        h.update(block.strip('\x00'))
        return hexlify(h.digest())

    def _thread2file(self, flying, blockids, local_file, offset=0, **restargs):
        """write the results of a threaded rest call to a file

        :param offset: the offset of the file up to blocksize
            - e.g. if the range is 10-100, all blocks will be written to
            normal_position - 10
        """
        for key, g in flying.items():
            if g.isAlive():
                continue
            if g.exception:
                raise g.exception
            block = g.value.content
            for block_start in blockids[key]:
                local_file.seek(block_start + offset)
                local_file.write(block)
                self._cb_next()
            flying.pop(key)
            blockids.pop(key)
        local_file.flush()

    def _dump_blocks_async(
            self, obj, remote_hashes, blocksize, total_size, local_file,
            blockhash=None, resume=False, filerange=None, **restargs):
        file_size = fstat(local_file.fileno()).st_size if resume else 0
        flying = dict()
        blockid_dict = dict()
        offset = 0
        if filerange is not None:
            rstart = int(filerange.split('-')[0])
            offset = rstart if blocksize > rstart else rstart % blocksize

        self._init_thread_limit()
        for block_hash, blockids in remote_hashes.items():
            blockids = [blk * blocksize for blk in blockids]
            unsaved = [blk for blk in blockids if not (
                blk < file_size and block_hash == self._hash_from_file(
                    local_file, blk, blocksize, blockhash))]
            self._cb_next(len(blockids) - len(unsaved))
            if unsaved:
                key = unsaved[0]
                self._watch_thread_limit(flying.values())
                self._thread2file(
                    flying, blockid_dict, local_file, offset,
                    **restargs)
                end = (total_size - 1) if (
                    key + blocksize > total_size) else key + blocksize - 1
                start, end = _range_up(key, end, filerange)
                if start == end == 0:
                    continue
                restargs['async_headers'] = {
                    'Range': 'bytes=%s-%s' % (start, end)}
                flying[key] = self._get_block_async(obj, **restargs)
                blockid_dict[key] = unsaved

        for thread in flying.values():
            thread.join()
        self._thread2file(flying, blockid_dict, local_file, offset, **restargs)

    def download_object(
            self, obj, dst,
            download_cb=None,
            version=None,
            resume=False,
            range_str=None,
            if_match=None,
            if_none_match=None,
            if_modified_since=None,
            if_unmodified_since=None):
        """Download an object (multiple connections, random blocks)

        :param obj: (str) remote object path
        :param dst: open file descriptor (wb+)
        :param download_cb: optional progress.bar object for downloading
        :param version: (str) file version
        :param resume: (bool) if set, preserve already downloaded file parts
        :param range_str: (str) from, to are file positions (int) in bytes
        :param if_match: (str)
        :param if_none_match: (str)
        :param if_modified_since: (str) formatted date
        :param if_unmodified_since: (str) formatted date"""
        restargs = dict(
            version=version,
            data_range=None if range_str is None else 'bytes=%s' % range_str,
            if_match=if_match,
            if_none_match=if_none_match,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since)

        (blocksize, blockhash, total_size, hash_list,
            remote_hashes) = self._get_remote_blocks_info(obj, **restargs)
        assert total_size >= 0

        if download_cb:
            self.progress_bar_gen = download_cb(len(hash_list))
            self._cb_next()

        if dst.isatty():
            self._dump_blocks_sync(
                obj, hash_list, blocksize, total_size, dst, range_str,
                **restargs)
        else:
            self._dump_blocks_async(
                obj, remote_hashes, blocksize, total_size, dst,
                blockhash, resume, range_str, **restargs)
            if not range_str:
                dst.truncate(total_size)

        self._complete_cb()
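
    # Usage sketch: resumable download to a local file opened for update;
    # blocks whose hash already matches on disk are not fetched again.
    #
    #     with open('big.iso', 'wb+') as dst:
    #         client.download_object('big.iso', dst, resume=True)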

    def download_to_string(
            self, obj,
            download_cb=None,
            version=None,
            range_str=None,
            if_match=None,
            if_none_match=None,
            if_modified_since=None,
            if_unmodified_since=None):
        """Download an object to a string (one request per block)

        :param obj: (str) remote object path
        :param download_cb: optional progress.bar object for downloading
        :param version: (str) file version
        :param range_str: (str) from, to are file positions (int) in bytes
        :param if_match: (str)
        :param if_none_match: (str)
        :param if_modified_since: (str) formatted date
        :param if_unmodified_since: (str) formatted date

        :returns: (str) the whole object contents
        """
        restargs = dict(
            version=version,
            data_range=None if range_str is None else 'bytes=%s' % range_str,
            if_match=if_match,
            if_none_match=if_none_match,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since)

        (blocksize, blockhash, total_size, hash_list,
            remote_hashes) = self._get_remote_blocks_info(obj, **restargs)
        assert total_size >= 0

        if download_cb:
            self.progress_bar_gen = download_cb(len(hash_list))
            self._cb_next()

        # iterate hash_list (not the remote_hashes dict) so that blocks are
        # fetched and concatenated in their original order
        blocks = []
        for blockid, blockhash in enumerate(hash_list):
            start = blocksize * blockid
            is_last = start + blocksize > total_size
            end = (total_size - 1) if is_last else (start + blocksize - 1)
            (start, end) = _range_up(start, end, range_str)
            restargs['data_range'] = 'bytes=%s-%s' % (start, end)
            r = self.object_get(obj, success=(200, 206), **restargs)
            blocks.append(r.content)
            self._cb_next()
        self._complete_cb()
        return ''.join(blocks)

    # Command Progress Bar method
    def _cb_next(self, step=1):
        if hasattr(self, 'progress_bar_gen'):
            try:
                for i in xrange(step):
                    self.progress_bar_gen.next()
            except:
                pass

    def _complete_cb(self):
        while True:
            try:
                self.progress_bar_gen.next()
            except:
                break

    def get_object_hashmap(
            self, obj,
            version=None,
            if_match=None,
            if_none_match=None,
            if_modified_since=None,
            if_unmodified_since=None,
            data_range=None):
        """
        :param obj: (str) remote object path
        :param version: (str)
        :param if_match: (str)
        :param if_none_match: (str)
        :param if_modified_since: (str) formatted date
        :param if_unmodified_since: (str) formatted date
        :param data_range: (str) from-to where from and to are integers
            denoting file positions in bytes

        :returns: (dict) the object hashmap
        """
        try:
            r = self.object_get(
                obj,
                hashmap=True,
                version=version,
                if_etag_match=if_match,
                if_etag_not_match=if_none_match,
                if_modified_since=if_modified_since,
                if_unmodified_since=if_unmodified_since,
                data_range=data_range)
        except ClientError as err:
            if err.status == 304 or err.status == 412:
                return {}
            raise
        return r.json
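
    # Usage sketch: the returned hashmap carries the block layout needed to
    # reassemble the object.
    #
    #     hm = client.get_object_hashmap('big.iso')
    #     print hm['block_size'], hm['block_hash'], len(hm['hashes'])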

    def set_account_group(self, group, usernames):
        """
        :param group: (str)
        :param usernames: (list)
        """
        self.account_post(update=True, groups={group: usernames})

    def del_account_group(self, group):
        """
        :param group: (str)
        """
        self.account_post(update=True, groups={group: []})

    def get_account_info(self, until=None):
        """
        :param until: (str) formatted date

        :returns: (dict)
        """
        r = self.account_head(until=until)
        if r.status_code == 401:
            raise ClientError("No authorization", status=401)
        return r.headers

    def get_account_quota(self):
        """
        :returns: (dict)
        """
        return filter_in(
            self.get_account_info(),
            'X-Account-Policy-Quota',
            exactMatch=True)

    def get_account_versioning(self):
        """
        :returns: (dict)
        """
        return filter_in(
            self.get_account_info(),
            'X-Account-Policy-Versioning',
            exactMatch=True)

    def get_account_meta(self, until=None):
        """
        :param until: (str) formatted date

        :returns: (dict)
        """
        return filter_in(self.get_account_info(until=until), 'X-Account-Meta-')

    def get_account_group(self):
        """
        :returns: (dict)
        """
        return filter_in(self.get_account_info(), 'X-Account-Group-')

    def set_account_meta(self, metapairs):
        """
        :param metapairs: (dict) {key1:val1, key2:val2, ...}
        """
        assert(type(metapairs) is dict)
        self.account_post(update=True, metadata=metapairs)

    def del_account_meta(self, metakey):
        """
        :param metakey: (str) metadatum key
        """
        self.account_post(update=True, metadata={metakey: ''})

    def set_account_quota(self, quota):
        """
        :param quota: (int)
        """
        self.account_post(update=True, quota=quota)

    def set_account_versioning(self, versioning):
        """
        :param versioning: (str)
        """
        self.account_post(update=True, versioning=versioning)

    def list_containers(self):
        """
        :returns: (dict)
        """
        r = self.account_get()
        return r.json

    def del_container(self, until=None, delimiter=None):
        """
        :param until: (str) formatted date
        :param delimiter: (str) if '/', the container contents are deleted as
            well (the container is emptied)

        :raises ClientError: 404 Container does not exist
        :raises ClientError: 409 Container is not empty
        """
        self._assert_container()
        r = self.container_delete(
            until=until,
            delimiter=delimiter,
            success=(204, 404, 409))
        if r.status_code == 404:
            raise ClientError(
                'Container "%s" does not exist' % self.container,
                r.status_code)
        elif r.status_code == 409:
            raise ClientError(
                'Container "%s" is not empty' % self.container,
                r.status_code)

    def get_container_versioning(self, container=None):
        """
        :param container: (str)

        :returns: (dict)
        """
        cnt_back_up = self.container
        try:
            self.container = container or cnt_back_up
            return filter_in(
                self.get_container_info(),
                'X-Container-Policy-Versioning')
        finally:
            self.container = cnt_back_up

    def get_container_limit(self, container=None):
        """
        :param container: (str)

        :returns: (dict)
        """
        cnt_back_up = self.container
        try:
            self.container = container or cnt_back_up
            return filter_in(
                self.get_container_info(),
                'X-Container-Policy-Quota')
        finally:
            self.container = cnt_back_up

    def get_container_info(self, until=None):
        """
        :param until: (str) formatted date

        :returns: (dict)

        :raises ClientError: 404 Container not found
        """
        try:
            r = self.container_head(until=until)
        except ClientError as err:
            err.details.append('for container %s' % self.container)
            raise err
        return r.headers

    def get_container_meta(self, until=None):
        """
        :param until: (str) formatted date

        :returns: (dict)
        """
        return filter_in(
            self.get_container_info(until=until),
            'X-Container-Meta')

    def get_container_object_meta(self, until=None):
        """
        :param until: (str) formatted date

        :returns: (dict)
        """
        return filter_in(
            self.get_container_info(until=until),
            'X-Container-Object-Meta')

    def set_container_meta(self, metapairs):
        """
        :param metapairs: (dict) {key1:val1, key2:val2, ...}
        """
        assert(type(metapairs) is dict)
        self.container_post(update=True, metadata=metapairs)

    def del_container_meta(self, metakey):
        """
        :param metakey: (str) metadatum key
        """
        self.container_post(update=True, metadata={metakey: ''})

    def set_container_limit(self, limit):
        """
        :param limit: (int)
        """
        self.container_post(update=True, quota=limit)

    def set_container_versioning(self, versioning):
        """
        :param versioning: (str)
        """
        self.container_post(update=True, versioning=versioning)

    def del_object(self, obj, until=None, delimiter=None):
        """
        :param obj: (str) remote object path
        :param until: (str) formatted date
        :param delimiter: (str)
        """
        self._assert_container()
        self.object_delete(obj, until=until, delimiter=delimiter)

    def set_object_meta(self, obj, metapairs):
        """
        :param obj: (str) remote object path
        :param metapairs: (dict) {key1:val1, key2:val2, ...}
        """
        assert(type(metapairs) is dict)
        self.object_post(obj, update=True, metadata=metapairs)

    def del_object_meta(self, obj, metakey):
        """
        :param obj: (str) remote object path
        :param metakey: (str) metadatum key
        """
        self.object_post(obj, update=True, metadata={metakey: ''})

    def publish_object(self, obj):
        """
        :param obj: (str) remote object path

        :returns: (str) access url
        """
        self.object_post(obj, update=True, public=True)
        info = self.get_object_info(obj)
        pref, sep, rest = self.base_url.partition('//')
        base = rest.split('/')[0]
        return '%s%s%s/%s' % (pref, sep, base, info['x-object-public'])
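
    # Worked example of the URL assembly above (hypothetical values): with
    # base_url 'https://pithos.example.org/object-store/v1' and a returned
    # x-object-public of 'public/abc123', partition('//') yields
    # ('https:', '//', 'pithos.example.org/object-store/v1'), the host part
    # is 'pithos.example.org', and the access url becomes
    # 'https://pithos.example.org/public/abc123'.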

    def unpublish_object(self, obj):
        """
        :param obj: (str) remote object path
        """
        self.object_post(obj, update=True, public=False)

    def get_object_info(self, obj, version=None):
        """
        :param obj: (str) remote object path
        :param version: (str)

        :returns: (dict)
        """
        try:
            r = self.object_head(obj, version=version)
            return r.headers
        except ClientError as ce:
            if ce.status == 404:
                raise ClientError('Object %s not found' % obj, status=404)
            raise

    def get_object_meta(self, obj, version=None):
        """
        :param obj: (str) remote object path
        :param version: (str)

        :returns: (dict)
        """
        return filter_in(
            self.get_object_info(obj, version=version),
            'X-Object-Meta')

    def get_object_sharing(self, obj):
        """
        :param obj: (str) remote object path

        :returns: (dict)
        """
        r = filter_in(
            self.get_object_info(obj),
            'X-Object-Sharing',
            exactMatch=True)
        reply = {}
        if len(r) > 0:
            perms = r['x-object-sharing'].split(';')
            for perm in perms:
                try:
                    perm.index('=')
                except ValueError:
                    raise ClientError('Incorrect reply format')
                (key, val) = perm.strip().split('=')
                reply[key] = val
        return reply

    def set_object_sharing(
            self, obj,
            read_permition=False, write_permition=False):
        """Give read/write permissions to an object.

        :param obj: (str) remote object path
        :param read_permition: (list - bool) users and user groups that get
            read permission for this object - False means all previous read
            permissions will be removed
        :param write_permition: (list - bool) users and user groups that get
            write permission for this object - False means all previous write
            permissions will be removed
        """
        perms = dict(read=read_permition or '', write=write_permition or '')
        self.object_post(obj, update=True, permissions=perms)
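
    # Usage sketch (hypothetical user and group names): grant read access to
    # a user and a group, and drop all write access.
    #
    #     client.set_object_sharing(
    #         'notes.txt', read_permition=['user1', 'account1:devs'])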

    def del_object_sharing(self, obj):
        """
        :param obj: (str) remote object path
        """
        self.set_object_sharing(obj)

    def append_object(self, obj, source_file, upload_cb=None):
        """
        :param obj: (str) remote object path
        :param source_file: open file descriptor
        :param upload_cb: optional progress.bar object for uploading
        """
        self._assert_container()
        meta = self.get_container_info()
        blocksize = int(meta['x-container-block-size'])
        filesize = fstat(source_file.fileno()).st_size
        nblocks = 1 + (filesize - 1) // blocksize
        offset = 0
        if upload_cb:
            upload_gen = upload_cb(nblocks)
            upload_gen.next()
        for i in range(nblocks):
            block = source_file.read(min(blocksize, filesize - offset))
            offset += len(block)
            self.object_post(
                obj,
                update=True,
                content_range='bytes */*',
                content_type='application/octet-stream',
                content_length=len(block),
                data=block)
            if upload_cb:
                upload_gen.next()

    def truncate_object(self, obj, upto_bytes):
        """
        :param obj: (str) remote object path
        :param upto_bytes: max number of bytes to leave on file
        """
        self.object_post(
            obj,
            update=True,
            content_range='bytes 0-%s/*' % upto_bytes,
            content_type='application/octet-stream',
            object_bytes=upto_bytes,
            source_object=path4url(self.container, obj))

    def overwrite_object(self, obj, start, end, source_file, upload_cb=None):
        """Overwrite a part of an object from local source file

        :param obj: (str) remote object path
        :param start: (int) position in bytes to start overwriting from
        :param end: (int) position in bytes to stop overwriting at
        :param source_file: open file descriptor
        :param upload_cb: optional progress.bar object for uploading
        """
        r = self.get_object_info(obj)
        rf_size = int(r['content-length'])
        if rf_size < int(start):
            raise ClientError(
                'Range start exceeds file size',
                status=416)
        elif rf_size < int(end):
            raise ClientError(
                'Range end exceeds file size',
                status=416)
        self._assert_container()
        meta = self.get_container_info()
        blocksize = int(meta['x-container-block-size'])
        filesize = fstat(source_file.fileno()).st_size
        datasize = int(end) - int(start) + 1
        nblocks = 1 + (datasize - 1) // blocksize
        offset = 0
        if upload_cb:
            upload_gen = upload_cb(nblocks)
            upload_gen.next()
        for i in range(nblocks):
            read_size = min(blocksize, filesize - offset, datasize - offset)
            block = source_file.read(read_size)
            self.object_post(
                obj,
                update=True,
                content_type='application/octet-stream',
                content_length=len(block),
                content_range='bytes %s-%s/*' % (
                    start + offset,
                    start + offset + len(block) - 1),
                data=block)
            offset += len(block)
            if upload_cb:
                upload_gen.next()
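
    # Worked example of the block arithmetic above: overwriting bytes
    # 100..349 gives datasize = 349 - 100 + 1 = 250; with a (hypothetical)
    # 100-byte blocksize that is nblocks = 1 + (250 - 1) // 100 = 3 requests.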

    def copy_object(
            self, src_container, src_object, dst_container,
            dst_object=None,
            source_version=None,
            source_account=None,
            public=False,
            content_type=None,
            delimiter=None):
        """
        :param src_container: (str) source container
        :param src_object: (str) source object path
        :param dst_container: (str) destination container
        :param dst_object: (str) destination object path
        :param source_version: (str) source object version
        :param source_account: (str) account to copy from
        :param public: (bool)
        :param content_type: (str)
        :param delimiter: (str)
        """
        self._assert_account()
        self.container = dst_container
        src_path = path4url(src_container, src_object)
        self.object_put(
            dst_object or src_object,
            success=201,
            copy_from=src_path,
            content_length=0,
            source_version=source_version,
            source_account=source_account,
            public=public,
            content_type=content_type,
            delimiter=delimiter)

    def move_object(
            self, src_container, src_object, dst_container,
            dst_object=False,
            source_account=None,
            source_version=None,
            public=False,
            content_type=None,
            delimiter=None):
        """
        :param src_container: (str) source container
        :param src_object: (str) source object path
        :param dst_container: (str) destination container
        :param dst_object: (str) destination object path
        :param source_account: (str) account to move from
        :param source_version: (str) source object version
        :param public: (bool)
        :param content_type: (str)
        :param delimiter: (str)
        """
        self._assert_account()
        self.container = dst_container
        dst_object = dst_object or src_object
        src_path = path4url(src_container, src_object)
        self.object_put(
            dst_object,
            success=201,
            move_from=src_path,
            content_length=0,
            source_account=source_account,
            source_version=source_version,
            public=public,
            content_type=content_type,
            delimiter=delimiter)

    def get_sharing_accounts(self, limit=None, marker=None, *args, **kwargs):
        """Get accounts that share with self.account

        :param limit: (str)
        :param marker: (str)

        :returns: (dict)
        """
        self._assert_account()

        self.set_param('format', 'json')
        self.set_param('limit', limit, iff=limit is not None)
        self.set_param('marker', marker, iff=marker is not None)

        path = ''
        success = kwargs.pop('success', (200, 204))
        r = self.get(path, *args, success=success, **kwargs)
        return r.json

    def get_object_versionlist(self, obj):
        """
        :param obj: (str) remote object path

        :returns: (list)
        """
        self._assert_container()
        r = self.object_get(obj, format='json', version='list')
        return r.json['versions']
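

# End-to-end usage sketch (hypothetical endpoint, token and account UUID):
#
#     client = PithosClient(
#         'https://pithos.example.org/object-store/v1', TOKEN,
#         account=UUID, container='pithos')
#     with open('data.bin', 'rb') as f:
#         client.upload_object('data.bin', f)
#     with open('copy.bin', 'wb+') as f:
#         client.download_object('data.bin', f)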