- download_gen.next()
- except StopIteration:
- pass
- (start, end) = self._get_block_range(i, blocksize, total_size, custom_start, custom_end)
- data_range = 'bytes=%s-%s'%(start, end)
- r = self._get_block(obj, data_range=data_range, **restargs)
- objfile.write(r.content)
- objfile.flush()
-
- def download_object(self, obj, objfile, download_cb=None, version=None, overide=False, range=None,
- if_match=None, if_none_match=None, if_modified_since=None, if_unmodified_since=None):
- """overide is forcing the local file to become exactly as the remote, even if it is
- substantialy different
- """
-
- self.assert_container()
-
- (blocksize, blockhash, total_size, hmap, map_dict) = self._get_object_block_info(obj,
- version=version, if_match=if_match, if_none_match=if_none_match,
- if_modified_since=if_modified_since, if_unmodified_since=if_unmodified_since)
-
- if total_size <= 0:
- return
-
- (custom_start, custom_end) = (None, None) if range is None \
- else self._get_range_limits(range)
-
- #load progress bar
- if download_cb is not None:
- download_gen = download_cb(total_size/blocksize + 1)
- download_gen.next()
-
- resumed = self._get_downloaded_blocks(hmap, objfile, blocksize, blockhash, map_dict,
- overide=overide, download_gen=download_gen)
- restargs=dict(version=version, if_etag_match=if_match, if_etag_not_match=if_none_match,
- if_modified_since=if_modified_since, if_unmodified_since=if_unmodified_since)
-
- if objfile.isatty():
- self._append_missing_blocks(obj, objfile, hmap, resumed, blocksize, total_size,
- download_gen, custom_start=custom_start, custom_end=custom_end, **restargs)
+ block = g.value.content
+ except AttributeError:
+ broken[start] = flying_greenlets.pop(start)
+ continue
+ local_file.seek(start)
+ local_file.write(block)
+ #local_file.flush()
+ self._cb_next()
+ finished.append(flying_greenlets.pop(start))
+ local_file.flush()
+ return finished
+
+ def _dump_blocks_async(self, obj, remote_hashes, blocksize, total_size, local_file, **restargs):
+
+ # Download every block of the remote object concurrently: spawn one
+ # greenlet per (block hash -> block id) entry in remote_hashes, keyed
+ # by the block's byte offset, then repeatedly drain finished greenlets
+ # into local_file via _greenlet2file until none remain in flight.
+ # NOTE(review): _greenlet2file pops completed entries out of
+ # flying_greenlets and parks failed ones (no .content) in broken —
+ # see its except-AttributeError branch earlier in this patch.
+ #let the greenlets fly
+ flying_greenlets = {}
+ finished_greenlets = []
+ # greenlets whose request failed; kept so they could be relaunched
+ broken = {}
+ for block_hash, blockid in remote_hashes.items():
+ # byte range [start, end] of this block inside the object;
+ # the last block is clamped to total_size-1
+ start = blocksize*blockid
+ end = total_size-1 if start+blocksize > total_size else start+blocksize-1
+ restargs['data_range'] = 'bytes=%s-%s'%(start, end)
+ #store info for relaunching greenlet if needed
+ flying_greenlets[start] = self._get_block_async(obj, **restargs)
+ # opportunistically flush any already-finished downloads to disk
+ finished_greenlets += self._greenlet2file(flying_greenlets, local_file, broken,
+ **restargs)
+
+ #check the greenlets: keep draining until none is still in flight
+ while len(flying_greenlets) > 0:
+ sleep(0.1)
+ finished_greenlets += self._greenlet2file(flying_greenlets, local_file, broken,
+ **restargs)
+
+ # wait for all completed greenlets to be fully reaped
+ gevent.joinall(finished_greenlets)
+
+
+ def download_object(self, obj, dst, download_cb=None, version=None, overide=False, resume=False,
+ range=None, if_match=None, if_none_match=None, if_modified_since=None,
+ if_unmodified_since=None):
+
+ #init REST api args
+ restargs=dict(version=version,
+ data_range = None if range is None else 'bytes=%s'%range,
+ if_match=if_match,
+ if_none_match=if_none_match,
+ if_modified_since=if_modified_since,
+ if_unmodified_since=if_unmodified_since)
+
+ #1. get remote object hash info
+ ( blocksize,
+ blockhash,
+ total_size,
+ hash_list,
+ remote_hashes) = self._get_remote_blocks_info(obj, **restargs)
+ assert total_size >= 0
+
+ if download_cb:
+ self.progress_bar_gen = download_cb(len(remote_hashes)+1)
+ self._cb_next()
+
+ if dst.isatty():
+ self._dump_blocks_sync(obj, hash_list, blocksize, total_size, dst, **restargs)
+ elif resume:
+ self._filter_out_downloaded_hashses(remote_hashes, hash_list, dst, blocksize, blockhash)
+ self._dump_blocks_sync(obj, hash_list, blocksize, total_size, dst, **restargs)