root / kamaki / cli / commands / pithos.py @ 74b7c6dc
History | View | Annotate | Download (43.3 kB)
1 |
# Copyright 2011-2013 GRNET S.A. All rights reserved.
|
---|---|
2 |
#
|
3 |
# Redistribution and use in source and binary forms, with or
|
4 |
# without modification, are permitted provided that the following
|
5 |
# conditions are met:
|
6 |
#
|
7 |
# 1. Redistributions of source code must retain the above
|
8 |
# copyright notice, this list of conditions and the following
|
9 |
# disclaimer.
|
10 |
#
|
11 |
# 2. Redistributions in binary form must reproduce the above
|
12 |
# copyright notice, this list of conditions and the following
|
13 |
# disclaimer in the documentation and/or other materials
|
14 |
# provided with the distribution.
|
15 |
#
|
16 |
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
|
17 |
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
18 |
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
19 |
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
|
20 |
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
21 |
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
22 |
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
|
23 |
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
|
24 |
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
25 |
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
26 |
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
27 |
# POSSIBILITY OF SUCH DAMAGE.
|
28 |
#
|
29 |
# The views and conclusions contained in the software and
|
30 |
# documentation are those of the authors and should not be
|
31 |
# interpreted as representing official policies, either expressed
|
32 |
# or implied, of GRNET S.A.
|
33 |
|
34 |
from io import StringIO |
35 |
from pydoc import pager |
36 |
from os import path, walk, makedirs |
37 |
|
38 |
from kamaki.clients.pithos import PithosClient, ClientError |
39 |
|
40 |
from kamaki.cli import command |
41 |
from kamaki.cli.command_tree import CommandTree |
42 |
from kamaki.cli.commands import ( |
43 |
_command_init, errors, addLogSettings, DontRaiseKeyError, _optional_json, |
44 |
_name_filter, _optional_output_cmd) |
45 |
from kamaki.cli.errors import ( |
46 |
CLIBaseUrlError, CLIError, CLIInvalidArgument, raiseCLIError) |
47 |
from kamaki.cli.argument import ( |
48 |
FlagArgument, IntArgument, ValueArgument, DateArgument, |
49 |
ProgressBarArgument, RepeatableArgument) |
50 |
from kamaki.cli.utils import ( |
51 |
format_size, bold, get_path_size, guess_mime_type) |
52 |
|
53 |
# Command trees: one per top-level CLI namespace exposed by this module.
file_cmds = CommandTree('file', 'Pithos+/Storage object level API commands')
container_cmds = CommandTree(
    'container', 'Pithos+/Storage container level API commands')
sharers_commands = CommandTree('sharers', 'Pithos+/Storage sharers')
# Collected by the CLI loader to register every command tree of this module.
_commands = [file_cmds, container_cmds, sharers_commands]
58 |
|
59 |
|
60 |
class _pithos_init(_command_init):
    """Initialize a pithos+ client
    There is always a default account (current user uuid)
    There is always a default container (pithos)
    """

    @DontRaiseKeyError
    def _custom_container(self):
        # Per-cloud override of the default container; DontRaiseKeyError
        # turns a missing config key into None
        return self.config.get_cloud(self.cloud, 'pithos_container')

    @DontRaiseKeyError
    def _custom_uuid(self):
        # Per-cloud override of the account uuid; None if not configured
        return self.config.get_cloud(self.cloud, 'pithos_uuid')

    def _set_account(self):
        # Prefer the configured uuid; otherwise resolve the current user's
        # uuid through astakos (identity service)
        self.account = self._custom_uuid()
        if self.account:
            return
        astakos = getattr(self, 'auth_base', None)
        if astakos:
            self.account = astakos.user_term('id', self.token)
        else:
            raise CLIBaseUrlError(service='astakos')

    @errors.generic.all
    @addLogSettings
    def _run(self):
        cloud = getattr(self, 'cloud', None)
        if cloud:
            self.base_url = self._custom_url('pithos')
        else:
            # NOTE(review): in this branch self.base_url is never assigned
            # here; the later `if not self.base_url` check relies on the
            # attribute existing elsewhere (e.g. on the base class) -- confirm
            self.cloud = 'default'
        self.token = self._custom_token('pithos')
        self.container = self._custom_container() or 'pithos'

        astakos = getattr(self, 'auth_base', None)
        if astakos:
            # Fall back to the astakos session token, then discover the
            # pithos endpoint from the service catalog if no URL is configured
            self.token = self.token or astakos.token
            if not self.base_url:
                pithos_endpoints = astakos.get_service_endpoints(
                    self._custom_type('pithos') or 'object-store',
                    self._custom_version('pithos') or '')
                self.base_url = pithos_endpoints['publicURL']
        else:
            raise CLIBaseUrlError(service='astakos')

        self._set_account()
        self.client = PithosClient(
            self.base_url, self.token, self.account, self.container)

    def main(self):
        self._run()
|
112 |
|
113 |
|
114 |
class _pithos_account(_pithos_init):
    """Setup the account (user uuid) the command will operate on"""

    def __init__(self, arguments={}, auth_base=None, cloud=None):
        super(_pithos_account, self).__init__(arguments, auth_base, cloud)
        self['account'] = ValueArgument(
            'Use (a different) user uuid', ('-A', '--account'))

    def _run(self):
        super(_pithos_account, self)._run()
        # An explicit -A/--account wins; otherwise keep whatever account the
        # initializer resolved, falling back to the client's current setting.
        chosen = self['account']
        if not chosen:
            chosen = getattr(
                self, 'account', getattr(self.client, 'account', None))
        self.client.account = chosen
126 |
|
127 |
|
128 |
class _pithos_container(_pithos_account):
    """Setup the container the command will operate on"""

    def __init__(self, arguments={}, auth_base=None, cloud=None):
        super(_pithos_container, self).__init__(arguments, auth_base, cloud)
        self['container'] = ValueArgument(
            'Use this container (default: pithos)', ('-C', '--container'))

    @staticmethod
    def _is_dir(remote_dict):
        # Directory objects carry this content type; the key may appear with
        # an underscore or a dash depending on which API call produced it
        ctype = remote_dict.get(
            'content_type', remote_dict.get('content-type', ''))
        return ctype == 'application/directory'

    @staticmethod
    def _resolve_pithos_url(url):
        """Split a pithos URL into its components
        Recognized formats:
            pithos://ACCOUNT/CONTAINER/OBJECT_PATH
            /CONTAINER/OBJECT_PATH
        :returns: (account, container, path) -- missing parts default to ''
        """
        account, container, obj_path = '', '', url
        scheme = 'pithos://'
        if url.startswith(scheme):
            account, _, remainder = url[len(scheme):].partition('/')
            url = '/%s' % remainder
        if url.startswith('/'):
            container, _, obj_path = url[1:].partition('/')
        return account, container, obj_path

    def _run(self, url=None):
        acc, con, self.path = self._resolve_pithos_url(url or '')
        self.account = acc or getattr(self, 'account', '')
        super(_pithos_container, self)._run()
        # Resolution order: URL > -C/--container > previous value > client
        container = con or self['container'] or getattr(
            self, 'container', None) or getattr(self.client, 'container', '')
        self.container = container
        self.client.container = container
163 |
|
164 |
|
165 |
@command(file_cmds)
class file_list(_pithos_container, _optional_json, _name_filter):
    """List all objects in a container or a directory object"""

    arguments = dict(
        detail=FlagArgument('detailed output', ('-l', '--list')),
        limit=IntArgument('limit number of listed items', ('-n', '--number')),
        marker=ValueArgument('output greater that marker', '--marker'),
        delimiter=ValueArgument('show output up to delimiter', '--delimiter'),
        meta=ValueArgument(
            'show output with specified meta keys', '--meta',
            default=[]),
        if_modified_since=ValueArgument(
            'show output modified since then', '--if-modified-since'),
        if_unmodified_since=ValueArgument(
            'show output not modified since then', '--if-unmodified-since'),
        until=DateArgument('show metadata until then', '--until'),
        format=ValueArgument(
            'format to parse until data (default: d/m/Y H:M:S )', '--format'),
        shared=FlagArgument('show only shared', '--shared'),
        more=FlagArgument('read long results', '--more'),
        enum=FlagArgument('Enumerate results', '--enumerate'),
        recursive=FlagArgument(
            'Recursively list containers and their contents',
            ('-R', '--recursive'))
    )

    def print_objects(self, object_list):
        # Human-readable listing: sizes are pretty-printed, directories are
        # shown as 'D' and suffixed with '/', names are bold unless paged
        for index, obj in enumerate(object_list):
            pretty_obj = obj.copy()
            index += 1
            # Pad so enumeration numbers right-align against the widest index
            empty_space = ' ' * (len(str(len(object_list))) - len(str(index)))
            if 'subdir' in obj:
                continue
            if self._is_dir(obj):
                size = 'D'
            else:
                size = format_size(obj['bytes'])
                pretty_obj['bytes'] = '%s (%s)' % (obj['bytes'], size)
            oname = obj['name'] if self['more'] else bold(obj['name'])
            prfx = ('%s%s. ' % (empty_space, index)) if self['enum'] else ''
            if self['detail']:
                self.writeln('%s%s' % (prfx, oname))
                # NOTE(review): exclude=('name') is a plain str, not a tuple;
                # works only if print_dict accepts a string -- confirm
                self.print_dict(pretty_obj, exclude=('name'))
                self.writeln()
            else:
                oname = '%s%9s %s' % (prfx, size, oname)
                oname += '/' if self._is_dir(obj) else u''
                self.writeln(oname)

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    def _run(self):
        # --more disables the server-side limit: all results are fetched and
        # paged locally instead
        r = self.client.container_get(
            limit=False if self['more'] else self['limit'],
            marker=self['marker'],
            prefix=self['name_pref'] or '/',
            delimiter=self['delimiter'],
            path=self.path or '',
            if_modified_since=self['if_modified_since'],
            if_unmodified_since=self['if_unmodified_since'],
            until=self['until'],
            meta=self['meta'],
            show_only_shared=self['shared'])
        files = self._filter_by_name(r.json)
        if self['more']:
            # Redirect output into a buffer, then feed it to a pager
            outbu, self._out = self._out, StringIO()
        try:
            if self['json_output'] or self['output_format']:
                self._print(files)
            else:
                self.print_objects(files)
        finally:
            if self['more']:
                pager(self._out.getvalue())
                self._out = outbu

    def main(self, path_or_url='/'):
        super(self.__class__, self)._run(path_or_url)
        self._run()
|
247 |
|
248 |
|
249 |
@command(file_cmds)
class file_create(_pithos_container, _optional_output_cmd):
    """Create an empty file"""

    arguments = dict(
        content_type=ValueArgument(
            'Set content type (default: application/octet-stream)',
            '--content-type',
            default='application/octet-stream')
    )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    def _run(self):
        # A zero-sized object is created; only its content type is settable
        result = self.client.create_object(self.path, self['content_type'])
        self._optional_output(result)

    def main(self, path_or_url):
        super(self.__class__, self)._run(path_or_url)
        self._run()
|
270 |
|
271 |
|
272 |
@command(file_cmds)
class file_mkdir(_pithos_container, _optional_output_cmd):
    """Create a directory: /file create --content-type='application/directory'
    """

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    def _run(self):
        # A directory is just an empty object typed application/directory
        result = self.client.create_directory(self.path)
        self._optional_output(result)

    def main(self, path_or_url):
        super(self.__class__, self)._run(path_or_url)
        self._run()
|
286 |
|
287 |
|
288 |
class _source_destination(_pithos_container, _optional_output_cmd):
    """Base for transfer commands (copy/move): resolves a source and a
    destination (possibly on a different account/container) and computes the
    list of (src, dst) object pairs to transfer.
    """

    sd_arguments = dict(
        destination_user_uuid=ValueArgument(
            'default: current user uuid', '--to-account'),
        destination_container=ValueArgument(
            'default: pithos', '--to-container'),
        source_prefix=FlagArgument(
            'Transfer all files that are prefixed with SOURCE PATH If the '
            'destination path is specified, replace SOURCE_PATH with '
            'DESTINATION_PATH',
            ('-r', '--recursive')),
        force=FlagArgument(
            'Overwrite destination objects, if needed', ('-f', '--force'))
    )

    def __init__(self, arguments={}, auth_base=None, cloud=None):
        # Merge subclass arguments with the shared source/destination ones
        self.arguments.update(arguments)
        self.arguments.update(self.sd_arguments)
        super(_source_destination, self).__init__(
            self.arguments, auth_base, cloud)

    def _report_transfer(self, src, dst, transfer_name):
        # Log one transfer step; a pair with no dst means "delete source
        # directory" (only meaningful for move), a pair with no src means
        # "create destination directory"
        if not dst:
            if transfer_name in ('move', ):
                self.error(' delete source directory %s' % src)
            return
        dst_prf = '' if self.account == self.dst_client.account else (
            'pithos://%s' % self.dst_client.account)
        if src:
            src_prf = '' if self.account == self.dst_client.account else (
                'pithos://%s' % self.account)
            self.error(' %s %s/%s/%s\n --> %s/%s/%s' % (
                transfer_name,
                src_prf, self.container, src,
                dst_prf, self.dst_client.container, dst))
        else:
            self.error(' mkdir %s/%s/%s' % (
                dst_prf, self.dst_client.container, dst))

    @errors.generic.all
    @errors.pithos.account
    def _src_dst(self, version=None):
        """Preconditions:
        self.account, self.container, self.path
        self.dst_acc, self.dst_con, self.dst_path
        They should all be configured properly
        :returns: [(src_path, dst_path), ...], if src_path is None, create
            destination directory
        """
        src_objects, dst_objects, pairs = dict(), dict(), []
        # Index the destination so existing objects can be detected cheaply
        try:
            for obj in self.dst_client.list_objects(
                    prefix=self.dst_path or self.path or '/'):
                dst_objects[obj['name']] = obj
        except ClientError as ce:
            if ce.status in (404, ):
                raise CLIError(
                    'Destination container pithos://%s/%s not found' % (
                        self.dst_client.account, self.dst_client.container))
            raise ce
        if self['source_prefix']:
            # Copy and replace prefixes
            for src_obj in self.client.list_objects(prefix=self.path or '/'):
                src_objects[src_obj['name']] = src_obj
            for src_path, src_obj in src_objects.items():
                # Rebase each source name onto the destination prefix
                dst_path = '%s%s' % (
                    self.dst_path or self.path, src_path[len(self.path):])
                dst_obj = dst_objects.get(dst_path, None)
                if self['force'] or not dst_obj:
                    # Just do it
                    pairs.append((
                        None if self._is_dir(src_obj) else src_path, dst_path))
                    if self._is_dir(src_obj):
                        pairs.append((self.path or dst_path, None))
                elif not (self._is_dir(dst_obj) and self._is_dir(src_obj)):
                    raise CLIError(
                        'Destination object exists', importance=2, details=[
                            'Failed while transfering:',
                            ' pithos://%s/%s/%s' % (
                                self.account,
                                self.container,
                                src_path),
                            '--> pithos://%s/%s/%s' % (
                                self.dst_client.account,
                                self.dst_client.container,
                                dst_path),
                            'Use %s to transfer overwrite' % ('/'.join(
                                self.arguments['force'].parsed_name))])
        else:
            # One object transfer
            try:
                src_obj = self.client.get_object_info(self.path)
            except ClientError as ce:
                if ce.status in (204, ):
                    raise CLIError(
                        'Missing specific path container %s' % self.container,
                        importance=2, details=[
                            'To transfer container contents %s' % (
                                '/'.join(self.arguments[
                                    'source_prefix'].parsed_name))])
                raise
            dst_path = self.dst_path or self.path
            dst_obj = dst_objects.get(dst_path or self.path, None)
            if self['force'] or not dst_obj:
                pairs.append(
                    (None if self._is_dir(src_obj) else self.path, dst_path))
                if self._is_dir(src_obj):
                    pairs.append((self.path or dst_path, None))
            elif self._is_dir(src_obj):
                raise CLIError(
                    'Cannot transfer an application/directory object',
                    importance=2, details=[
                        'The object pithos://%s/%s/%s is a directory' % (
                            self.account,
                            self.container,
                            self.path),
                        'To recursively copy a directory, use',
                        ' %s' % ('/'.join(
                            self.arguments['source_prefix'].parsed_name)),
                        'To create a file, use',
                        ' /file create (general purpose)',
                        ' /file mkdir (a directory object)'])
            else:
                raise CLIError(
                    'Destination object exists',
                    importance=2, details=[
                        'Failed while transfering:',
                        ' pithos://%s/%s/%s' % (
                            self.account,
                            self.container,
                            self.path),
                        '--> pithos://%s/%s/%s' % (
                            self.dst_client.account,
                            self.dst_client.container,
                            dst_path),
                        'Use %s to transfer overwrite' % ('/'.join(
                            self.arguments['force'].parsed_name))])
        return pairs

    def _run(self, source_path_or_url, destination_path_or_url=''):
        super(_source_destination, self)._run(source_path_or_url)
        dst_acc, dst_con, dst_path = self._resolve_pithos_url(
            destination_path_or_url)
        # A second client talks to the destination account/container; it
        # shares the endpoint and token with the source client
        self.dst_client = PithosClient(
            base_url=self.client.base_url, token=self.client.token,
            container=self[
                'destination_container'] or dst_con or self.client.container,
            account=self[
                'destination_user_uuid'] or dst_acc or self.client.account)
        self.dst_path = dst_path or self.path
439 |
|
440 |
|
441 |
@command(file_cmds)
class file_copy(_source_destination):
    """Copy objects, even between different accounts or containers"""

    arguments = dict(
        public=ValueArgument('publish new object', '--public'),
        content_type=ValueArgument(
            'change object\'s content type', '--content-type'),
        source_version=ValueArgument(
            'copy specific version', ('-S', '--source-version'))
    )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.account
    def _run(self):
        # Pairs with src=None mean "create directory dst"; pairs with
        # dst=None are only meaningful for move, so copy skips them
        version = self['source_version']
        for src, dst in self._src_dst(version):
            self._report_transfer(src, dst, 'copy')
            if not dst:
                continue
            if src:
                self.dst_client.copy_object(
                    src_container=self.client.container,
                    src_object=src,
                    dst_container=self.dst_client.container,
                    dst_object=dst,
                    source_account=self.account,
                    source_version=version,
                    public=self['public'],
                    content_type=self['content_type'])
            else:
                self.dst_client.create_directory(dst)

    def main(self, source_path_or_url, destination_path_or_url=None):
        super(file_copy, self)._run(
            source_path_or_url, destination_path_or_url or '')
        self._run()
|
477 |
|
478 |
|
479 |
@command(file_cmds)
class file_move(_source_destination):
    """Move objects, even between different accounts or containers"""

    arguments = dict(
        public=ValueArgument('publish new object', '--public'),
        content_type=ValueArgument(
            'change object\'s content type', '--content-type')
    )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.account
    def _run(self):
        for src, dst in self._src_dst():
            self._report_transfer(src, dst, 'move')
            if not dst:
                # A (src, None) pair: remove the now-transferred source dir
                self.client.del_object(src)
            elif src:
                self.dst_client.move_object(
                    src_container=self.client.container,
                    src_object=src,
                    dst_container=self.dst_client.container,
                    dst_object=dst,
                    source_account=self.account,
                    public=self['public'],
                    content_type=self['content_type'])
            else:
                # A (None, dst) pair: create the destination directory
                self.dst_client.create_directory(dst)

    def main(self, source_path_or_url, destination_path_or_url=None):
        super(file_move, self)._run(
            source_path_or_url, destination_path_or_url or '')
        self._run()
|
514 |
|
515 |
|
516 |
@command(file_cmds)
class file_append(_pithos_container, _optional_output_cmd):
    """Append local file to (existing) remote object
    The remote object should exist.
    If the remote object is a directory, it is transformed into a file.
    In the later case, objects under the directory remain intact.
    """

    arguments = dict(
        progress_bar=ProgressBarArgument(
            'do not show progress bar', ('-N', '--no-progress-bar'),
            default=False),
        max_threads=IntArgument('default: 1', '--threads'),
    )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    def _run(self, local_path):
        threads = self['max_threads']
        if threads > 0:
            self.client.MAX_THREADS = int(threads)
        progress_bar, upload_cb = self._safe_progress_bar('Appending')
        try:
            with open(local_path, 'rb') as source:
                result = self.client.append_object(
                    self.path, source, upload_cb)
                self._optional_output(result)
        finally:
            # Always restore the terminal, even if the upload failed
            self._safe_progress_bar_finish(progress_bar)

    def main(self, local_path, remote_path_or_url):
        super(self.__class__, self)._run(remote_path_or_url)
        self._run(local_path)
|
549 |
|
550 |
|
551 |
@command(file_cmds)
class file_truncate(_pithos_container, _optional_output_cmd):
    """Truncate remote file up to size"""

    arguments = dict(
        size_in_bytes=IntArgument('Length of file after truncation', '--size')
    )
    required = ('size_in_bytes', )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    @errors.pithos.object_size
    def _run(self, size):
        # Everything beyond the first `size` bytes is discarded server-side
        result = self.client.truncate_object(self.path, size)
        self._optional_output(result)

    def main(self, path_or_url):
        super(self.__class__, self)._run(path_or_url)
        self._run(size=self['size_in_bytes'])
571 |
|
572 |
|
573 |
@command(file_cmds)
class file_overwrite(_pithos_container, _optional_output_cmd):
    """Overwrite part of a remote file"""

    arguments = dict(
        progress_bar=ProgressBarArgument(
            'do not show progress bar', ('-N', '--no-progress-bar'),
            default=False),
        start_position=IntArgument('File position in bytes', '--from'),
        end_position=IntArgument('File position in bytes', '--to')
    )
    required = ('start_position', 'end_position')

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    @errors.pithos.object_size
    def _run(self, local_path, start, end):
        start, end = int(start), int(end)
        progress_bar, upload_cb = self._safe_progress_bar(
            'Overwrite %s bytes' % (end - start))
        try:
            with open(path.abspath(local_path), 'rb') as source:
                result = self.client.overwrite_object(
                    obj=self.path,
                    start=start,
                    end=end,
                    source_file=source,
                    upload_cb=upload_cb)
                self._optional_output(result)
        finally:
            self._safe_progress_bar_finish(progress_bar)

    def main(self, local_path, path_or_url):
        super(self.__class__, self)._run(path_or_url)
        # Default remote name: the local file's basename
        self.path = self.path or path.basename(local_path)
        self._run(
            local_path=local_path,
            start=self['start_position'],
            end=self['end_position'])
613 |
|
614 |
|
615 |
@command(file_cmds)
class file_upload(_pithos_container, _optional_output_cmd):
    """Upload a file"""

    arguments = dict(
        max_threads=IntArgument('default: 5', '--threads'),
        content_encoding=ValueArgument(
            'set MIME content type', '--content-encoding'),
        content_disposition=ValueArgument(
            'specify objects presentation style', '--content-disposition'),
        content_type=ValueArgument('specify content type', '--content-type'),
        uuid_for_read_permission=RepeatableArgument(
            'Give read access to a user of group (can be repeated)',
            '--read-permission'),
        uuid_for_write_permission=RepeatableArgument(
            'Give write access to a user of group (can be repeated)',
            '--write-permission'),
        public=FlagArgument('make object publicly accessible', '--public'),
        overwrite=FlagArgument('Force (over)write', ('-f', '--force')),
        recursive=FlagArgument(
            'Recursively upload directory *contents* + subdirectories',
            ('-r', '--recursive')),
        unchunked=FlagArgument(
            'Upload file as one block (not recommended)', '--unchunked'),
        md5_checksum=ValueArgument(
            'Confirm upload with a custom checksum (MD5)', '--etag'),
        use_hashes=FlagArgument(
            'Source file contains hashmap not data', '--source-is-hashmap'),
    )

    def _sharing(self):
        """Build the sharing dict from the permission arguments
        :returns: dict with 'read'/'write' uuid lists, or None if no
            permissions were requested
        """
        sharing = dict()
        readlist = self['uuid_for_read_permission']
        if readlist:
            sharing['read'] = readlist
        writelist = self['uuid_for_write_permission']
        if writelist:
            sharing['write'] = writelist
        return sharing or None

    def _check_container_limit(self, path):
        """Raise CLIError if uploading `path` would exceed the container quota
        :param path: local file or directory to be uploaded
        """
        cl_dict = self.client.get_container_limit()
        container_limit = int(cl_dict['x-container-policy-quota'])
        r = self.client.container_get()
        used_bytes = sum(int(o['bytes']) for o in r.json)
        path_size = get_path_size(path)
        if container_limit and path_size > (container_limit - used_bytes):
            raise CLIError(
                'Container %s (limit(%s) - used(%s)) < (size(%s) of %s)' % (
                    self.client.container,
                    format_size(container_limit),
                    format_size(used_bytes),
                    format_size(path_size),
                    path),
                details=[
                    'Check accound limit: /file quota',
                    'Check container limit:',
                    '\t/file containerlimit get %s' % self.client.container,
                    'Increase container limit:',
                    '\t/file containerlimit set <new limit> %s' % (
                        self.client.container)])

    def _src_dst(self, local_path, remote_path, objlist=None):
        """Yield (open file handle, remote path) pairs to be uploaded
        Handles both single files and (with -r) whole directory trees; a
        remote directory object is created for each local subdirectory.
        :raises CLIError: when the source is missing/invalid or when the
            destination exists and -f/--force is not given
        """
        lpath = path.abspath(local_path)
        short_path = path.basename(path.abspath(local_path))
        rpath = remote_path or short_path
        if path.isdir(lpath):
            if not self['recursive']:
                raise CLIError('%s is a directory' % lpath, details=[
                    'Use %s to upload directories & contents' % '/'.join(
                        self.arguments['recursive'].parsed_name)])
            robj = self.client.container_get(path=rpath)
            if not self['overwrite']:
                if robj.json:
                    raise CLIError(
                        'Objects/files prefixed as %s already exist' % rpath,
                        details=['Existing objects:'] + ['\t%s:\t%s' % (
                            o['name'],
                            o['content_type'][12:]) for o in robj.json] + [
                            'Use -f to add, overwrite or resume'])
                else:
                    try:
                        topobj = self.client.get_object_info(rpath)
                        if not self._is_dir(topobj):
                            raise CLIError(
                                'Object /%s/%s exists but not a directory' % (
                                    self.container, rpath),
                                details=['Use -f to overwrite'])
                    except ClientError as ce:
                        # 404 means the top object does not exist yet - fine
                        if ce.status not in (404, ):
                            raise
            self._check_container_limit(lpath)
            prev = ''
            for top, subdirs, files in walk(lpath):
                if top != prev:
                    prev = top
                    try:
                        # Rebase the local subdir path onto the remote prefix
                        rel_path = rpath + top.split(lpath)[1]
                    except IndexError:
                        rel_path = rpath
                    self.error('mkdir %s:%s' % (
                        self.client.container, rel_path))
                    self.client.create_directory(rel_path)
                for f in files:
                    fpath = path.join(top, f)
                    if path.isfile(fpath):
                        rel_path = rel_path.replace(path.sep, '/')
                        pathfix = f.replace(path.sep, '/')
                        yield open(fpath, 'rb'), '%s/%s' % (rel_path, pathfix)
                    else:
                        self.error('%s is not a regular file' % fpath)
        else:
            if not path.isfile(lpath):
                raise CLIError(('%s is not a regular file' % lpath) if (
                    path.exists(lpath)) else '%s does not exist' % lpath)
            try:
                robj = self.client.get_object_info(rpath)
                if remote_path and self._is_dir(robj):
                    # Uploading into a remote directory: append the basename
                    rpath += '/%s' % (short_path.replace(path.sep, '/'))
                    self.client.get_object_info(rpath)
                if not self['overwrite']:
                    raise CLIError(
                        'Object /%s/%s already exists' % (
                            self.container, rpath),
                        details=['use -f to overwrite / resume'])
            except ClientError as ce:
                if ce.status not in (404, ):
                    raise
            self._check_container_limit(lpath)
            yield open(lpath, 'rb'), rpath

    def _run(self, local_path, remote_path):
        if self['max_threads'] > 0:
            self.client.MAX_THREADS = int(self['max_threads'])
        params = dict(
            content_encoding=self['content_encoding'],
            content_type=self['content_type'],
            content_disposition=self['content_disposition'],
            sharing=self._sharing(),
            public=self['public'])
        # BUG FIX: was `uploaded, container_info_cache = list, dict()`, which
        # bound `uploaded` to the list *type* (missing call parentheses), so
        # uploaded.append(r) raised TypeError whenever output was requested
        uploaded, container_info_cache = [], dict()
        # BUG FIX: the account was never interpolated, so the report printed
        # a literal 'pithos://%s' prefix
        rpref = ('pithos://%s' % self['account']) if self['account'] else ''
        for f, rpath in self._src_dst(local_path, remote_path):
            self.error('%s --> %s/%s/%s' % (
                f.name, rpref, self.client.container, rpath))
            if not (self['content_type'] and self['content_encoding']):
                # Guess whatever the user did not set explicitly
                ctype, cenc = guess_mime_type(f.name)
                params['content_type'] = self['content_type'] or ctype
                params['content_encoding'] = self['content_encoding'] or cenc
            if self['unchunked']:
                r = self.client.upload_object_unchunked(
                    rpath, f,
                    etag=self['md5_checksum'], withHashFile=self['use_hashes'],
                    **params)
                if self['with_output'] or self['json_output']:
                    r['name'] = '/%s/%s' % (self.client.container, rpath)
                    uploaded.append(r)
            else:
                try:
                    (progress_bar, upload_cb) = self._safe_progress_bar(
                        'Uploading %s' % f.name.split(path.sep)[-1])
                    if progress_bar:
                        hash_bar = progress_bar.clone()
                        hash_cb = hash_bar.get_generator(
                            'Calculating block hashes')
                    else:
                        hash_cb = None
                    r = self.client.upload_object(
                        rpath, f,
                        hash_cb=hash_cb,
                        upload_cb=upload_cb,
                        container_info_cache=container_info_cache,
                        **params)
                    if self['with_output'] or self['json_output']:
                        r['name'] = '/%s/%s' % (self.client.container, rpath)
                        uploaded.append(r)
                except Exception:
                    # NOTE: finally below runs too; finish is presumed safe
                    # to call twice (it is the "_safe_" variant)
                    self._safe_progress_bar_finish(progress_bar)
                    raise
                finally:
                    self._safe_progress_bar_finish(progress_bar)
        self._optional_output(uploaded)
        self.error('Upload completed')

    def main(self, local_path, remote_path_or_url):
        super(self.__class__, self)._run(remote_path_or_url)
        remote_path = self.path or path.basename(path.abspath(local_path))
        self._run(local_path=local_path, remote_path=remote_path)
|
803 |
|
804 |
|
805 |
class RangeArgument(ValueArgument):
    """
    :value type: string of the form <start>-<end> where <start> and <end> are
        integers
    :value returns: the input string, after type checking <start> and <end>
    """

    @property
    def value(self):
        return getattr(self, '_value', self.default)

    @value.setter
    def value(self, newvalues):
        # Accept a comma-separated list of ranges; each piece is validated
        # and appended to the accumulated value string
        if newvalues:
            self._value = getattr(self, '_value', self.default)
            for newvalue in newvalues.split(','):
                # Separate accumulated pieces with commas (first piece: '')
                self._value = ('%s,' % self._value) if self._value else ''
                start, sep, end = newvalue.partition('-')
                if sep:
                    if start:
                        # Full START-END range: both bounds must be ints
                        start, end = (int(start), int(end))
                        if start > end:
                            raise CLIInvalidArgument(
                                'Invalid range %s' % newvalue, details=[
                                    'Valid range formats',
                                    ' START-END', ' UP_TO', ' -FROM',
                                    'where all values are integers'])
                        self._value += '%s-%s' % (start, end)
                    else:
                        # '-FROM' form: open-ended suffix range
                        self._value += '-%s' % int(end)
                else:
                    # Bare integer: 'UP_TO' form
                    self._value += '%s' % int(start)
837 |
|
838 |
|
839 |
@command(file_cmds)
class file_cat(_pithos_container):
    """Fetch remote file contents"""

    arguments = dict(
        range=RangeArgument('show range of data', '--range'),
        if_match=ValueArgument('show output if ETags match', '--if-match'),
        # Bug fix: help text was a copy-paste of --if-match ("if ETags
        # match"), which is the opposite of what --if-none-match does.
        if_none_match=ValueArgument(
            'show output if ETags DO NOT match', '--if-none-match'),
        if_modified_since=DateArgument(
            'show output modified since then', '--if-modified-since'),
        if_unmodified_since=DateArgument(
            'show output unmodified since then', '--if-unmodified-since'),
        object_version=ValueArgument(
            'Get contents of the chosen version', '--object-version')
    )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    def _run(self):
        """Stream the (optionally conditional/ranged) object to stdout."""
        self.client.download_object(
            self.path, self._out,
            range_str=self['range'],
            version=self['object_version'],
            if_match=self['if_match'],
            if_none_match=self['if_none_match'],
            if_modified_since=self['if_modified_since'],
            if_unmodified_since=self['if_unmodified_since'])

    def main(self, path_or_url):
        super(self.__class__, self)._run(path_or_url)
        self._run()
|
873 |
|
874 |
|
875 |
@command(file_cmds)
|
876 |
class file_download(_pithos_container): |
877 |
"""Download a remove file or directory object to local file system"""
|
878 |
|
879 |
arguments = dict(
|
880 |
resume=FlagArgument( |
881 |
'Resume/Overwrite (attempt resume, else overwrite)',
|
882 |
('-f', '--resume')), |
883 |
range=RangeArgument('Download only that range of data', '--range'), |
884 |
matching_etag=ValueArgument('download iff ETag match', '--if-match'), |
885 |
non_matching_etag=ValueArgument( |
886 |
'download iff ETags DO NOT match', '--if-none-match'), |
887 |
modified_since_date=DateArgument( |
888 |
'download iff remote file is modified since then',
|
889 |
'--if-modified-since'),
|
890 |
unmodified_since_date=DateArgument( |
891 |
'show output iff remote file is unmodified since then',
|
892 |
'--if-unmodified-since'),
|
893 |
object_version=ValueArgument( |
894 |
'download a file of a specific version',
|
895 |
('-O', '--object-version')), |
896 |
max_threads=IntArgument('default: 5', '--threads'), |
897 |
progress_bar=ProgressBarArgument( |
898 |
'do not show progress bar', ('-N', '--no-progress-bar'), |
899 |
default=False),
|
900 |
recursive=FlagArgument( |
901 |
'Download a remote directory object and its contents',
|
902 |
('-r', '--recursive')) |
903 |
) |
904 |
|
905 |
def _src_dst(self, local_path): |
906 |
"""Create a list of (src, dst) where src is a remote location and dst
|
907 |
is an open file descriptor. Directories are denoted as (None, dirpath)
|
908 |
and they are pretended to other objects in a very strict order (shorter
|
909 |
to longer path)."""
|
910 |
ret = [] |
911 |
try:
|
912 |
obj = self.client.get_object_info(
|
913 |
self.path, version=self['object_version']) |
914 |
obj.setdefault('name', self.path.strip('/')) |
915 |
except ClientError as ce: |
916 |
if ce.status in (404, ): |
917 |
raiseCLIError(ce, details=[ |
918 |
'To download an object, the object must exist either as a'
|
919 |
' file or as a directory.',
|
920 |
'For example, to download everything under prefix/ the '
|
921 |
'directory "prefix" must exist.',
|
922 |
'To see if an remote object is actually there:',
|
923 |
' /file info [/CONTAINER/]OBJECT',
|
924 |
'To create a directory object:',
|
925 |
' /file mkdir [/CONTAINER/]OBJECT'])
|
926 |
if ce.status in (204, ): |
927 |
raise CLIError(
|
928 |
'No file or directory objects to download',
|
929 |
details=[ |
930 |
'To download a container (e.g., %s):' % self.container, |
931 |
' [kamaki] container download %s [LOCAL_PATH]' % (
|
932 |
self.container)])
|
933 |
raise
|
934 |
rpath = self.path.strip('/') |
935 |
local_path = local_path[-1:] if ( |
936 |
local_path.endswith('/')) else local_path |
937 |
|
938 |
if self._is_dir(obj): |
939 |
if self['recursive']: |
940 |
dirs, files = [obj, ], [] |
941 |
objects = self.client.container_get(
|
942 |
path=self.path or '/', |
943 |
if_modified_since=self['modified_since_date'], |
944 |
if_unmodified_since=self['unmodified_since_date']) |
945 |
for obj in objects.json: |
946 |
(dirs if self._is_dir(obj) else files).append(obj) |
947 |
|
948 |
# Put the directories on top of the list
|
949 |
for dpath in sorted(['%s%s' % ( |
950 |
local_path, d['name'][len(rpath):]) for d in dirs]): |
951 |
if path.exists(dpath):
|
952 |
if path.isdir(dpath):
|
953 |
continue
|
954 |
raise CLIError(
|
955 |
'Cannot replace local file %s with a directory '
|
956 |
'of the same name' % dpath,
|
957 |
details=[ |
958 |
'Either remove the file or specify a'
|
959 |
'different target location'])
|
960 |
ret.append((None, dpath, None)) |
961 |
|
962 |
# Append the file objects
|
963 |
for opath in [o['name'] for o in files]: |
964 |
lpath = '%s%s' % (local_path, opath[len(rpath):]) |
965 |
if self['resume']: |
966 |
fxists = path.exists(lpath) |
967 |
if fxists and path.isdir(lpath): |
968 |
raise CLIError(
|
969 |
'Cannot change local dir %s info file' % (
|
970 |
lpath), |
971 |
details=[ |
972 |
'Either remove the file or specify a'
|
973 |
'different target location'])
|
974 |
ret.append((opath, lpath, fxists)) |
975 |
elif path.exists(lpath):
|
976 |
raise CLIError(
|
977 |
'Cannot overwrite %s' % lpath,
|
978 |
details=['To overwrite/resume, use %s' % '/'.join( |
979 |
self.arguments['resume'].parsed_name)]) |
980 |
else:
|
981 |
ret.append((opath, lpath, None))
|
982 |
else:
|
983 |
raise CLIError(
|
984 |
'Remote object /%s/%s is a directory' % (
|
985 |
self.container, local_path),
|
986 |
details=['Use %s to download directories' % '/'.join( |
987 |
self.arguments['recursive'].parsed_name)]) |
988 |
else:
|
989 |
# Remote object is just a file
|
990 |
if path.exists(local_path) and not self['resume']: |
991 |
raise CLIError(
|
992 |
'Cannot overwrite local file %s' % (lpath),
|
993 |
details=['To overwrite/resume, use %s' % '/'.join( |
994 |
self.arguments['resume'].parsed_name)]) |
995 |
ret.append((rpath, local_path, self['resume'])) |
996 |
for r, l, resume in ret: |
997 |
if r:
|
998 |
with open(l, 'rwb+' if resume else 'wb+') as f: |
999 |
yield (r, f)
|
1000 |
else:
|
1001 |
yield (r, l)
|
1002 |
|
1003 |
    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    @errors.pithos.local_path
    def _run(self, local_path):
        """Download the remote object(s) produced by _src_dst into
        local_path, creating local directories as needed, with a progress
        bar per file and graceful handling of user interruption.
        """
        # MAX_THREADS bounds the client's parallel download workers.
        self.client.MAX_THREADS = self['max_threads'] or 5
        progress_bar = None
        try:
            for rpath, output_file in self._src_dst(local_path):
                # A pair with no remote path denotes a local directory to
                # create (see _src_dst).
                if not rpath:
                    self.error('Create local directory %s' % output_file)
                    makedirs(output_file)
                    continue
                self.error('/%s/%s --> %s' % (
                    self.container, rpath, output_file.name))
                progress_bar, download_cb = self._safe_progress_bar(
                    ' download')
                self.client.download_object(
                    rpath, output_file,
                    download_cb=download_cb,
                    range_str=self['range'],
                    version=self['object_version'],
                    if_match=self['matching_etag'],
                    resume=self['resume'],
                    if_none_match=self['non_matching_etag'],
                    if_modified_since=self['modified_since_date'],
                    if_unmodified_since=self['unmodified_since_date'])
        except KeyboardInterrupt:
            # Drain the client's worker threads before exiting, with a
            # progressively longer join timeout per pass.
            # NOTE(review): activeCount/isAlive are the legacy threading
            # aliases, removed in Python 3.9+; use active_count/is_alive
            # if this must run on modern Python 3 — confirm target version.
            from threading import activeCount, enumerate as activethreads
            timeout = 0.5
            while activeCount() > 1:
                self._out.write('\nCancel %s threads: ' % (activeCount() - 1))
                self._out.flush()
                for thread in activethreads():
                    try:
                        thread.join(timeout)
                        # '.' = still alive after join timeout, '*' = done
                        self._out.write('.' if thread.isAlive() else '*')
                    except RuntimeError:
                        # e.g., joining the current thread is not allowed
                        continue
                    finally:
                        self._out.flush()
                timeout += 0.1
            self.error('\nDownload canceled by user')
            if local_path is not None:
                self.error('to resume, re-run with --resume')
        except Exception:
            # Close the bar before propagating so the terminal stays sane.
            self._safe_progress_bar_finish(progress_bar)
            raise
        finally:
            self._safe_progress_bar_finish(progress_bar)
|
1054 |
|
1055 |
def main(self, remote_path_or_url, local_path=None): |
1056 |
super(self.__class__, self)._run(remote_path_or_url) |
1057 |
local_path = local_path or self.path or '.' |
1058 |
self._run(local_path=local_path) |