root / kamaki / cli / commands / pithos.py @ edc1182f
History | View | Annotate | Download (35 kB)
1 |
# Copyright 2011-2013 GRNET S.A. All rights reserved.
|
---|---|
2 |
#
|
3 |
# Redistribution and use in source and binary forms, with or
|
4 |
# without modification, are permitted provided that the following
|
5 |
# conditions are met:
|
6 |
#
|
7 |
# 1. Redistributions of source code must retain the above
|
8 |
# copyright notice, this list of conditions and the following
|
9 |
# disclaimer.
|
10 |
#
|
11 |
# 2. Redistributions in binary form must reproduce the above
|
12 |
# copyright notice, this list of conditions and the following
|
13 |
# disclaimer in the documentation and/or other materials
|
14 |
# provided with the distribution.
|
15 |
#
|
16 |
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
|
17 |
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
18 |
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
19 |
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
|
20 |
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
21 |
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
22 |
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
|
23 |
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
|
24 |
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
25 |
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
26 |
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
27 |
# POSSIBILITY OF SUCH DAMAGE.
|
28 |
#
|
29 |
# The views and conclusions contained in the software and
|
30 |
# documentation are those of the authors and should not be
|
31 |
# interpreted as representing official policies, either expressed
|
32 |
# or implied, of GRNET S.A.
|
33 |
|
34 |
from io import StringIO |
35 |
from pydoc import pager |
36 |
from os import path, walk, makedirs |
37 |
|
38 |
from kamaki.clients.pithos import PithosClient, ClientError |
39 |
|
40 |
from kamaki.cli import command |
41 |
from kamaki.cli.command_tree import CommandTree |
42 |
from kamaki.cli.commands import ( |
43 |
_command_init, errors, addLogSettings, DontRaiseKeyError, _optional_json, |
44 |
_name_filter, _optional_output_cmd) |
45 |
from kamaki.cli.errors import CLIBaseUrlError, CLIError, CLIInvalidArgument |
46 |
from kamaki.cli.argument import ( |
47 |
FlagArgument, IntArgument, ValueArgument, DateArgument, |
48 |
ProgressBarArgument, RepeatableArgument) |
49 |
from kamaki.cli.utils import ( |
50 |
format_size, bold, get_path_size, guess_mime_type) |
51 |
|
52 |
# Command trees group this module's CLI commands by top-level name; the
# @command(<tree>) decorator on each class below registers it in a tree.
file_cmds = CommandTree('file', 'Pithos+/Storage object level API commands')
container_cmds = CommandTree(
    'container', 'Pithos+/Storage container level API commands')
sharers_commands = CommandTree('sharers', 'Pithos+/Storage sharers')
# Exported so the CLI loader can discover all command trees of this module
_commands = [file_cmds, container_cmds, sharers_commands]
57 |
|
58 |
|
59 |
class _pithos_init(_command_init):
    """Initialize a pithos+ client
    There is always a default account (current user uuid)
    There is always a default container (pithos)
    """

    @DontRaiseKeyError
    def _custom_container(self):
        # Per-cloud override of the default container; DontRaiseKeyError
        # turns a missing config key into a None return
        return self.config.get_cloud(self.cloud, 'pithos_container')

    @DontRaiseKeyError
    def _custom_uuid(self):
        # Per-cloud override of the account uuid (None if unset)
        return self.config.get_cloud(self.cloud, 'pithos_uuid')

    def _set_account(self):
        # Prefer an explicitly configured uuid; otherwise resolve the
        # current user's id through the astakos auth service
        self.account = self._custom_uuid()
        if self.account:
            return
        astakos = getattr(self, 'auth_base', None)
        if astakos:
            self.account = astakos.user_term('id', self.token)
        else:
            raise CLIBaseUrlError(service='astakos')

    @errors.generic.all
    @addLogSettings
    def _run(self):
        # Resolve base_url / token / container from cloud-specific config,
        # falling back to the astakos service catalog where needed
        cloud = getattr(self, 'cloud', None)
        if cloud:
            self.base_url = self._custom_url('pithos')
        else:
            self.cloud = 'default'
        # NOTE(review): when no cloud is configured, self.base_url is
        # presumably initialized elsewhere (_command_init / addLogSettings)
        # before the `if not self.base_url` check below -- confirm
        self.token = self._custom_token('pithos')
        self.container = self._custom_container() or 'pithos'

        astakos = getattr(self, 'auth_base', None)
        if astakos:
            self.token = self.token or astakos.token
            if not self.base_url:
                # Look up the object-store endpoint in the service catalog
                pithos_endpoints = astakos.get_service_endpoints(
                    self._custom_type('pithos') or 'object-store',
                    self._custom_version('pithos') or '')
                self.base_url = pithos_endpoints['publicURL']
        else:
            raise CLIBaseUrlError(service='astakos')

        self._set_account()
        self.client = PithosClient(
            self.base_url, self.token, self.account, self.container)

    def main(self):
        self._run()
|
111 |
|
112 |
|
113 |
class _pithos_account(_pithos_init):
    """Setup account"""

    def __init__(self, arguments={}, auth_base=None, cloud=None):
        super(_pithos_account, self).__init__(arguments, auth_base, cloud)
        self['account'] = ValueArgument(
            'Use (a different) user uuid', ('-A', '--account'))

    def _run(self):
        super(_pithos_account, self)._run()
        # Pick the account in priority order: explicit --account argument,
        # then any account already resolved on self, then the client's own.
        account = self['account']
        if not account:
            account = getattr(
                self, 'account', getattr(self.client, 'account', None))
        self.client.account = account
125 |
|
126 |
|
127 |
class _pithos_container(_pithos_account):
    """Setup container"""

    def __init__(self, arguments={}, auth_base=None, cloud=None):
        super(_pithos_container, self).__init__(arguments, auth_base, cloud)
        self['container'] = ValueArgument(
            'Use this container (default: pithos)', ('-C', '--container'))

    @staticmethod
    def _is_dir(remote_dict):
        # A pithos "directory" is an object whose content type is
        # application/directory (either header spelling may appear)
        ctype = remote_dict.get(
            'content_type', remote_dict.get('content-type', ''))
        return ctype == 'application/directory'

    @staticmethod
    def _resolve_pithos_url(url):
        """Match urls of one of the following formats:
        pithos://ACCOUNT/CONTAINER/OBJECT_PATH
        /CONTAINER/OBJECT_PATH
        return account, container, path
        """
        scheme = 'pithos://'
        account, container, obj_path = '', '', url
        if url.startswith(scheme):
            # Strip the scheme, peel off the account, keep the remainder
            # as an absolute path for the container/object split below
            account, _, remainder = url[len(scheme):].partition('/')
            url = '/%s' % remainder
        if url.startswith('/'):
            container, _, obj_path = url[1:].partition('/')
        return account, container, obj_path

    def _run(self, url=None):
        acc, con, self.path = self._resolve_pithos_url(url or '')
        self.account = acc or getattr(self, 'account', '')
        super(_pithos_container, self)._run()
        # Container priority: url component, --container argument,
        # previously resolved attribute, client's current container
        container = con or self['container']
        if not container:
            container = getattr(self, 'container', None) or getattr(
                self.client, 'container', '')
        self.container = container
        self.client.container = container
162 |
|
163 |
|
164 |
@command(file_cmds)
class file_list(_pithos_container, _optional_json, _name_filter):
    """List all objects in a container or a directory object"""

    arguments = dict(
        detail=FlagArgument('detailed output', ('-l', '--list')),
        limit=IntArgument('limit number of listed items', ('-n', '--number')),
        marker=ValueArgument('output greater that marker', '--marker'),
        delimiter=ValueArgument('show output up to delimiter', '--delimiter'),
        meta=ValueArgument(
            'show output with specified meta keys', '--meta',
            default=[]),
        if_modified_since=ValueArgument(
            'show output modified since then', '--if-modified-since'),
        if_unmodified_since=ValueArgument(
            'show output not modified since then', '--if-unmodified-since'),
        until=DateArgument('show metadata until then', '--until'),
        format=ValueArgument(
            'format to parse until data (default: d/m/Y H:M:S )', '--format'),
        shared=FlagArgument('show only shared', '--shared'),
        more=FlagArgument('read long results', '--more'),
        enum=FlagArgument('Enumerate results', '--enumerate'),
        recursive=FlagArgument(
            'Recursively list containers and their contents',
            ('-R', '--recursive'))
    )

    def print_objects(self, object_list):
        """Pretty-print a listing, one object per line.

        Directories show a 'D' size marker and a trailing slash; with
        --enumerate each line is prefixed with a right-aligned index.
        """
        for index, obj in enumerate(object_list):
            pretty_obj = obj.copy()
            index += 1
            # Pad so that indices right-align against the largest index
            empty_space = ' ' * (len(str(len(object_list))) - len(str(index)))
            if 'subdir' in obj:
                continue
            if self._is_dir(obj):
                size = 'D'
            else:
                size = format_size(obj['bytes'])
                pretty_obj['bytes'] = '%s (%s)' % (obj['bytes'], size)
            oname = obj['name'] if self['more'] else bold(obj['name'])
            prfx = ('%s%s. ' % (empty_space, index)) if self['enum'] else ''
            if self['detail']:
                self.writeln('%s%s' % (prfx, oname))
                # BUG FIX: exclude must be a tuple; ('name') is the plain
                # string 'name', which makes substring-based exclusion match
                # unintended keys
                self.print_dict(pretty_obj, exclude=('name',))
                self.writeln()
            else:
                oname = '%s%9s %s' % (prfx, size, oname)
                oname += '/' if self._is_dir(obj) else u''
                self.writeln(oname)

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    def _run(self):
        # Fetch the listing with all the user-provided filters applied
        r = self.client.container_get(
            limit=False if self['more'] else self['limit'],
            marker=self['marker'],
            prefix=self['name_pref'] or '/',
            delimiter=self['delimiter'],
            path=self.path or '',
            if_modified_since=self['if_modified_since'],
            if_unmodified_since=self['if_unmodified_since'],
            until=self['until'],
            meta=self['meta'],
            show_only_shared=self['shared'])
        files = self._filter_by_name(r.json)
        if self['more']:
            # Buffer output so it can be fed to a pager afterwards
            outbu, self._out = self._out, StringIO()
        try:
            if self['json_output'] or self['output_format']:
                self._print(files)
            else:
                self.print_objects(files)
        finally:
            if self['more']:
                pager(self._out.getvalue())
                self._out = outbu

    def main(self, path_or_url='/'):
        super(self.__class__, self)._run(path_or_url)
        self._run()
|
246 |
|
247 |
|
248 |
@command(file_cmds)
class file_create(_pithos_container, _optional_output_cmd):
    """Create an empty file"""

    arguments = dict(
        content_type=ValueArgument(
            'Set content type (default: application/octet-stream)',
            '--content-type',
            default='application/octet-stream')
    )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    def _run(self):
        # Create a zero-length object at self.path with the requested type
        result = self.client.create_object(self.path, self['content_type'])
        self._optional_output(result)

    def main(self, path_or_url):
        super(self.__class__, self)._run(path_or_url)
        self._run()
|
269 |
|
270 |
|
271 |
@command(file_cmds)
class file_mkdir(_pithos_container, _optional_output_cmd):
    """Create a directory: /file create --content-type='application/directory'
    """

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    def _run(self):
        # A pithos "directory" is an empty object of content type
        # application/directory
        self._optional_output(self.client.create_directory(self.path))

    def main(self, container___directory):
        # BUG FIX: _pithos_container._run() accepts only a url argument;
        # the previous call also passed path_is_optional=False, which
        # raised TypeError (unexpected keyword argument) at runtime
        super(self.__class__, self)._run(container___directory)
        self._run()
|
286 |
|
287 |
|
288 |
class _source_destination(_pithos_container, _optional_output_cmd):
    """Base for transfer commands (copy/move): resolves a source and a
    destination client and computes the (src, dst) object pairs to transfer.
    """

    # Arguments shared by all source/destination transfer commands
    sd_arguments = dict(
        destination_user_uuid=ValueArgument(
            'default: current user uuid', '--to-account'),
        destination_container=ValueArgument(
            'default: pithos', '--to-container'),
        source_prefix=FlagArgument(
            'Transfer all files that are prefixed with SOURCE PATH If the '
            'destination path is specified, replace SOURCE_PATH with '
            'DESTINATION_PATH',
            ('-r', '--recursive')),
        force=FlagArgument(
            'Overwrite destination objects, if needed', ('-f', '--force'))
    )

    def __init__(self, arguments={}, auth_base=None, cloud=None):
        # Merge subclass arguments with the shared transfer arguments
        self.arguments.update(arguments)
        self.arguments.update(self.sd_arguments)
        super(_source_destination, self).__init__(
            self.arguments, auth_base, cloud)

    def _report_transfer(self, src, dst, transfer_name):
        # Log a human-readable description of one transfer step.
        # (src, dst) semantics: dst falsy => source-side cleanup (move);
        # src falsy => create destination directory; both set => transfer.
        if not dst:
            if transfer_name in ('move', ):
                self.error(' delete source directory %s' % src)
            return
        # Prefix with pithos://ACCOUNT only for cross-account transfers
        dst_prf = '' if self.account == self.dst_client.account else (
            'pithos://%s' % self.dst_client.account)
        if src:
            src_prf = '' if self.account == self.dst_client.account else (
                'pithos://%s' % self.account)
            self.error(' %s %s/%s/%s\n --> %s/%s/%s' % (
                transfer_name,
                src_prf, self.container, src,
                dst_prf, self.dst_client.container, dst))
        else:
            self.error(' mkdir %s/%s/%s' % (
                dst_prf, self.dst_client.container, dst))

    @errors.generic.all
    @errors.pithos.account
    def _src_dst(self, version=None):
        """Preconditions:
        self.account, self.container, self.path
        self.dst_acc, self.dst_con, self.dst_path
        They should all be configured properly
        :returns: [(src_path, dst_path), ...], if src_path is None, create
            destination directory
        """
        src_objects, dst_objects, pairs = dict(), dict(), []
        # Index existing destination objects by name to detect conflicts
        try:
            for obj in self.dst_client.list_objects(
                    prefix=self.dst_path or self.path or '/'):
                dst_objects[obj['name']] = obj
        except ClientError as ce:
            if ce.status in (404, ):
                raise CLIError(
                    'Destination container pithos://%s/%s not found' % (
                        self.dst_client.account, self.dst_client.container))
            raise ce
        if self['source_prefix']:
            # Copy and replace prefixes
            for src_obj in self.client.list_objects(prefix=self.path or '/'):
                src_objects[src_obj['name']] = src_obj
            for src_path, src_obj in src_objects.items():
                # Destination name: swap the source prefix for the dst prefix
                dst_path = '%s%s' % (
                    self.dst_path or self.path, src_path[len(self.path):])
                dst_obj = dst_objects.get(dst_path, None)
                if self['force'] or not dst_obj:
                    # Just do it
                    pairs.append((
                        None if self._is_dir(src_obj) else src_path, dst_path))
                    if self._is_dir(src_obj):
                        pairs.append((self.path or dst_path, None))
                elif not (self._is_dir(dst_obj) and self._is_dir(src_obj)):
                    raise CLIError(
                        'Destination object exists', importance=2, details=[
                            'Failed while transfering:',
                            ' pithos://%s/%s/%s' % (
                                self.account,
                                self.container,
                                src_path),
                            '--> pithos://%s/%s/%s' % (
                                self.dst_client.account,
                                self.dst_client.container,
                                dst_path),
                            'Use %s to transfer overwrite' % ('/'.join(
                                self.arguments['force'].parsed_name))])
        else:
            # One object transfer
            try:
                src_obj = self.client.get_object_info(self.path)
            except ClientError as ce:
                if ce.status in (204, ):
                    raise CLIError(
                        'Missing specific path container %s' % self.container,
                        importance=2, details=[
                            'To transfer container contents %s' % (
                                '/'.join(self.arguments[
                                    'source_prefix'].parsed_name))])
                raise
            dst_path = self.dst_path or self.path
            # NOTE(review): the `or self.path` fallback makes dst_obj truthy
            # whenever self.path is non-empty, even when no destination
            # object exists, so the overwrite checks below may trigger
            # spuriously -- confirm this is intended
            dst_obj = dst_objects.get(dst_path, None) or self.path
            if self['force'] or not dst_obj:
                pairs.append(
                    (None if self._is_dir(src_obj) else self.path, dst_path))
                if self._is_dir(src_obj):
                    pairs.append((self.path or dst_path, None))
            elif self._is_dir(src_obj):
                raise CLIError(
                    'Cannot transfer an application/directory object',
                    importance=2, details=[
                        'The object pithos://%s/%s/%s is a directory' % (
                            self.account,
                            self.container,
                            self.path),
                        'To recursively copy a directory, use',
                        ' %s' % ('/'.join(
                            self.arguments['source_prefix'].parsed_name)),
                        'To create a file, use',
                        ' /file create (general purpose)',
                        ' /file mkdir (a directory object)'])
            else:
                raise CLIError(
                    'Destination object exists',
                    importance=2, details=[
                        'Failed while transfering:',
                        ' pithos://%s/%s/%s' % (
                            self.account,
                            self.container,
                            self.path),
                        '--> pithos://%s/%s/%s' % (
                            self.dst_client.account,
                            self.dst_client.container,
                            dst_path),
                        'Use %s to transfer overwrite' % ('/'.join(
                            self.arguments['force'].parsed_name))])
        return pairs

    def _run(self, source_path_or_url, destination_path_or_url=''):
        # Resolve source via the base class, then build a second client
        # pointed at the (possibly different) destination account/container
        super(_source_destination, self)._run(source_path_or_url)
        dst_acc, dst_con, dst_path = self._resolve_pithos_url(
            destination_path_or_url)
        self.dst_client = PithosClient(
            base_url=self.client.base_url, token=self.client.token,
            container=self[
                'destination_container'] or dst_con or self.client.container,
            account=self[
                'destination_user_uuid'] or dst_acc or self.client.account)
        self.dst_path = dst_path or self.path
439 |
|
440 |
|
441 |
@command(file_cmds)
class file_copy(_source_destination):
    """Copy objects, even between different accounts or containers"""

    arguments = dict(
        public=ValueArgument('publish new object', '--public'),
        content_type=ValueArgument(
            'change object\'s content type', '--content-type'),
        source_version=ValueArgument(
            'copy specific version', ('-S', '--source-version'))
    )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.account
    def _run(self):
        # Each pair is (src, dst); a missing src means "create directory",
        # a missing dst means nothing to do for a copy.
        for src, dst in self._src_dst(self['source_version']):
            self._report_transfer(src, dst, 'copy')
            if not dst:
                continue
            if not src:
                self.dst_client.create_directory(dst)
                continue
            self.dst_client.copy_object(
                src_container=self.client.container,
                src_object=src,
                dst_container=self.dst_client.container,
                dst_object=dst,
                source_account=self.account,
                source_version=self['source_version'],
                public=self['public'],
                content_type=self['content_type'])

    def main(self, source_path_or_url, destination_path_or_url=None):
        super(file_copy, self)._run(
            source_path_or_url, destination_path_or_url or '')
        self._run()
|
477 |
|
478 |
|
479 |
@command(file_cmds)
class file_move(_source_destination):
    """Move objects, even between different accounts or containers"""

    arguments = dict(
        public=ValueArgument('publish new object', '--public'),
        content_type=ValueArgument(
            'change object\'s content type', '--content-type')
    )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.account
    def _run(self):
        # Each pair is (src, dst); a missing src means "create directory",
        # a missing dst means "delete the source" (directory cleanup).
        for src, dst in self._src_dst():
            self._report_transfer(src, dst, 'move')
            if dst:
                if src:
                    self.dst_client.move_object(
                        src_container=self.client.container,
                        src_object=src,
                        dst_container=self.dst_client.container,
                        dst_object=dst,
                        source_account=self.account,
                        public=self['public'],
                        content_type=self['content_type'])
                else:
                    self.dst_client.create_directory(dst)
            else:
                self.client.del_object(src)

    def main(self, source_path_or_url, destination_path_or_url=None):
        super(file_move, self)._run(
            source_path_or_url, destination_path_or_url or '')
        self._run()
|
514 |
|
515 |
|
516 |
@command(file_cmds)
class file_append(_pithos_container, _optional_output_cmd):
    """Append local file to (existing) remote object
    The remote object should exist.
    If the remote object is a directory, it is transformed into a file.
    In the later case, objects under the directory remain intact.
    """

    arguments = dict(
        progress_bar=ProgressBarArgument(
            'do not show progress bar', ('-N', '--no-progress-bar'),
            default=False),
        max_threads=IntArgument('default: 1', '--threads'),
    )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    def _run(self, local_path):
        if self['max_threads'] > 0:
            self.client.MAX_THREADS = int(self['max_threads'])
        progress_bar, upload_cb = self._safe_progress_bar('Appending')
        try:
            with open(local_path, 'rb') as source:
                result = self.client.append_object(
                    self.path, source, upload_cb)
                self._optional_output(result)
        finally:
            # Always tear the progress bar down, even on failure
            self._safe_progress_bar_finish(progress_bar)

    def main(self, local_path, remote_path_or_url):
        super(self.__class__, self)._run(remote_path_or_url)
        self._run(local_path)
|
549 |
|
550 |
|
551 |
@command(file_cmds)
class file_truncate(_pithos_container, _optional_output_cmd):
    """Truncate remote file up to size"""

    arguments = dict(
        size_in_bytes=IntArgument('Length of file after truncation', '--size')
    )
    required = ('size_in_bytes', )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    @errors.pithos.object_size
    def _run(self, size):
        # Cut the remote object down to exactly `size` bytes
        result = self.client.truncate_object(self.path, size)
        self._optional_output(result)

    def main(self, path_or_url):
        super(self.__class__, self)._run(path_or_url)
        self._run(size=self['size_in_bytes'])
571 |
|
572 |
|
573 |
@command(file_cmds)
class file_overwrite(_pithos_container, _optional_output_cmd):
    """Overwrite part of a remote file"""

    arguments = dict(
        progress_bar=ProgressBarArgument(
            'do not show progress bar', ('-N', '--no-progress-bar'),
            default=False),
        start_position=IntArgument('File position in bytes', '--from'),
        end_position=IntArgument('File position in bytes', '--to')
    )
    required = ('start_position', 'end_position')

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    @errors.pithos.object_size
    def _run(self, local_path, start, end):
        start, end = int(start), int(end)
        progress_bar, upload_cb = self._safe_progress_bar(
            'Overwrite %s bytes' % (end - start))
        try:
            with open(path.abspath(local_path), 'rb') as source:
                # Replace bytes [start, end] of the remote object with the
                # local file's contents
                result = self.client.overwrite_object(
                    obj=self.path,
                    start=start,
                    end=end,
                    source_file=source,
                    upload_cb=upload_cb)
                self._optional_output(result)
        finally:
            self._safe_progress_bar_finish(progress_bar)

    def main(self, local_path, path_or_url):
        super(self.__class__, self)._run(path_or_url)
        self.path = self.path or path.basename(local_path)
        self._run(
            local_path=local_path,
            start=self['start_position'],
            end=self['end_position'])
613 |
|
614 |
|
615 |
@command(file_cmds)
class file_upload(_pithos_container, _optional_output_cmd):
    """Upload a file"""

    arguments = dict(
        max_threads=IntArgument('default: 5', '--threads'),
        content_encoding=ValueArgument(
            'set MIME content type', '--content-encoding'),
        content_disposition=ValueArgument(
            'specify objects presentation style', '--content-disposition'),
        content_type=ValueArgument('specify content type', '--content-type'),
        uuid_for_read_permission=RepeatableArgument(
            'Give read access to a user of group (can be repeated)',
            '--read-permission'),
        uuid_for_write_permission=RepeatableArgument(
            'Give write access to a user of group (can be repeated)',
            '--write-permission'),
        public=FlagArgument('make object publicly accessible', '--public'),
        overwrite=FlagArgument('Force (over)write', ('-f', '--force')),
        recursive=FlagArgument(
            'Recursively upload directory *contents* + subdirectories',
            ('-r', '--recursive')),
        unchunked=FlagArgument(
            'Upload file as one block (not recommended)', '--unchunked'),
        md5_checksum=ValueArgument(
            'Confirm upload with a custom checksum (MD5)', '--etag'),
        use_hashes=FlagArgument(
            'Source file contains hashmap not data', '--source-is-hashmap'),
    )

    def _sharing(self):
        """Build the sharing dict from --read/--write-permission arguments.

        :returns: dict with 'read'/'write' uuid lists, or None if unset
        """
        sharing = dict()
        readlist = self['uuid_for_read_permission']
        if readlist:
            sharing['read'] = self['uuid_for_read_permission']
        writelist = self['uuid_for_write_permission']
        if writelist:
            sharing['write'] = self['uuid_for_write_permission']
        return sharing or None

    def _check_container_limit(self, path):
        """Raise CLIError if the upload would exceed the container quota"""
        cl_dict = self.client.get_container_limit()
        container_limit = int(cl_dict['x-container-policy-quota'])
        r = self.client.container_get()
        used_bytes = sum(int(o['bytes']) for o in r.json)
        path_size = get_path_size(path)
        if container_limit and path_size > (container_limit - used_bytes):
            raise CLIError(
                'Container %s (limit(%s) - used(%s)) < (size(%s) of %s)' % (
                    self.client.container,
                    format_size(container_limit),
                    format_size(used_bytes),
                    format_size(path_size),
                    path),
                details=[
                    'Check accound limit: /file quota',
                    'Check container limit:',
                    '\t/file containerlimit get %s' % self.client.container,
                    'Increase container limit:',
                    '\t/file containerlimit set <new limit> %s' % (
                        self.client.container)])

    def _src_dst(self, local_path, remote_path, objlist=None):
        """Yield (open file handle, remote path) pairs to upload.

        For a directory source (requires --recursive), walks the tree,
        creates matching remote directory objects, and yields every regular
        file; for a file source, yields that single file.
        :raises CLIError: on conflicts unless --force is given
        """
        lpath = path.abspath(local_path)
        short_path = path.basename(path.abspath(local_path))
        rpath = remote_path or short_path
        if path.isdir(lpath):
            if not self['recursive']:
                raise CLIError('%s is a directory' % lpath, details=[
                    'Use %s to upload directories & contents' % '/'.join(
                        self.arguments['recursive'].parsed_name)])
            robj = self.client.container_get(path=rpath)
            if not self['overwrite']:
                if robj.json:
                    raise CLIError(
                        'Objects/files prefixed as %s already exist' % rpath,
                        details=['Existing objects:'] + ['\t%s:\t%s' % (
                            o['name'],
                            o['content_type'][12:]) for o in robj.json] + [
                            'Use -f to add, overwrite or resume'])
                else:
                    try:
                        topobj = self.client.get_object_info(rpath)
                        if not self._is_dir(topobj):
                            raise CLIError(
                                'Object /%s/%s exists but not a directory' % (
                                    self.container, rpath),
                                details=['Use -f to overwrite'])
                    except ClientError as ce:
                        # 404 means no conflicting object -- safe to proceed
                        if ce.status not in (404, ):
                            raise
            self._check_container_limit(lpath)
            prev = ''
            for top, subdirs, files in walk(lpath):
                if top != prev:
                    prev = top
                    try:
                        # Mirror the local subdirectory under rpath
                        rel_path = rpath + top.split(lpath)[1]
                    except IndexError:
                        rel_path = rpath
                    self.error('mkdir %s:%s' % (
                        self.client.container, rel_path))
                    self.client.create_directory(rel_path)
                for f in files:
                    fpath = path.join(top, f)
                    if path.isfile(fpath):
                        # Remote paths always use '/' regardless of OS
                        rel_path = rel_path.replace(path.sep, '/')
                        pathfix = f.replace(path.sep, '/')
                        yield open(fpath, 'rb'), '%s/%s' % (rel_path, pathfix)
                    else:
                        self.error('%s is not a regular file' % fpath)
        else:
            if not path.isfile(lpath):
                raise CLIError(('%s is not a regular file' % lpath) if (
                    path.exists(lpath)) else '%s does not exist' % lpath)
            try:
                robj = self.client.get_object_info(rpath)
                if remote_path and self._is_dir(robj):
                    # Uploading into an existing remote directory: place the
                    # file inside it under its own basename
                    rpath += '/%s' % (short_path.replace(path.sep, '/'))
                    self.client.get_object_info(rpath)
                if not self['overwrite']:
                    raise CLIError(
                        'Object /%s/%s already exists' % (
                            self.container, rpath),
                        details=['use -f to overwrite / resume'])
            except ClientError as ce:
                if ce.status not in (404, ):
                    raise
            self._check_container_limit(lpath)
            yield open(lpath, 'rb'), rpath

    def _run(self, local_path, remote_path):
        if self['max_threads'] > 0:
            self.client.MAX_THREADS = int(self['max_threads'])
        params = dict(
            content_encoding=self['content_encoding'],
            content_type=self['content_type'],
            content_disposition=self['content_disposition'],
            sharing=self._sharing(),
            public=self['public'])
        # BUG FIX: was "uploaded, container_info_cache = list, dict()" --
        # binding the `list` type itself, so uploaded.append(r) raised
        # TypeError whenever output was requested
        uploaded, container_info_cache = list(), dict()
        # BUG FIX: the account uuid was never interpolated (the message
        # printed a literal 'pithos://%s')
        rpref = ('pithos://%s' % self['account']) if self['account'] else ''
        for f, rpath in self._src_dst(local_path, remote_path):
            self.error('%s --> %s/%s/%s' % (
                f.name, rpref, self.client.container, rpath))
            if not (self['content_type'] and self['content_encoding']):
                # Fill in whichever of type/encoding the user did not supply
                ctype, cenc = guess_mime_type(f.name)
                params['content_type'] = self['content_type'] or ctype
                params['content_encoding'] = self['content_encoding'] or cenc
            if self['unchunked']:
                r = self.client.upload_object_unchunked(
                    rpath, f,
                    etag=self['md5_checksum'], withHashFile=self['use_hashes'],
                    **params)
                if self['with_output'] or self['json_output']:
                    r['name'] = '/%s/%s' % (self.client.container, rpath)
                    uploaded.append(r)
            else:
                try:
                    (progress_bar, upload_cb) = self._safe_progress_bar(
                        'Uploading %s' % f.name.split(path.sep)[-1])
                    if progress_bar:
                        hash_bar = progress_bar.clone()
                        hash_cb = hash_bar.get_generator(
                            'Calculating block hashes')
                    else:
                        hash_cb = None
                    r = self.client.upload_object(
                        rpath, f,
                        hash_cb=hash_cb,
                        upload_cb=upload_cb,
                        container_info_cache=container_info_cache,
                        **params)
                    if self['with_output'] or self['json_output']:
                        r['name'] = '/%s/%s' % (self.client.container, rpath)
                        uploaded.append(r)
                except Exception:
                    self._safe_progress_bar_finish(progress_bar)
                    raise
                finally:
                    self._safe_progress_bar_finish(progress_bar)
        self._optional_output(uploaded)
        self.error('Upload completed')

    def main(self, local_path, remote_path_or_url):
        super(self.__class__, self)._run(remote_path_or_url)
        remote_path = self.path or path.basename(path.abspath(local_path))
        self._run(local_path=local_path, remote_path=remote_path)
|
803 |
|
804 |
|
805 |
class RangeArgument(ValueArgument):
    """
    :value type: string of the form <start>-<end> where <start> and <end> are
        integers
    :value returns: the input string, after type checking <start> and <end>
    """

    @property
    def value(self):
        # The accumulated, comma-separated, validated range string
        return getattr(self, '_value', self.default)

    @staticmethod
    def _invalid_range(newvalue):
        # Uniform error for any malformed range token
        return CLIInvalidArgument(
            'Invalid range %s' % newvalue, details=[
                'Valid range formats',
                ' START-END', ' UP_TO', ' -FROM',
                'where all values are integers'])

    @value.setter
    def value(self, newvalues):
        if newvalues:
            self._value = getattr(self, '_value', self.default)
            for newvalue in newvalues.split(','):
                # Append a comma separator before each token after the first
                self._value = ('%s,' % self._value) if self._value else ''
                start, sep, end = newvalue.partition('-')
                try:
                    if sep:
                        if start:
                            start, end = (int(start), int(end))
                            if start > end:
                                raise self._invalid_range(newvalue)
                            self._value += '%s-%s' % (start, end)
                        else:
                            # '-FROM' form
                            self._value += '-%s' % int(end)
                    else:
                        # 'UP_TO' form (a single integer)
                        self._value += '%s' % int(start)
                except ValueError:
                    # ROBUSTNESS FIX: malformed tokens like '1-x' or '5-'
                    # used to escape as a raw ValueError from int();
                    # report them as a proper CLI argument error instead
                    raise self._invalid_range(newvalue)
837 |
|
838 |
|
839 |
@command(file_cmds)
class file_cat(_pithos_container):
    """Fetch remote file contents"""

    arguments = dict(
        range=RangeArgument('show range of data', '--range'),
        if_match=ValueArgument('show output if ETags match', '--if-match'),
        if_none_match=ValueArgument(
            'show output if ETags match', '--if-none-match'),
        if_modified_since=DateArgument(
            'show output modified since then', '--if-modified-since'),
        if_unmodified_since=DateArgument(
            'show output unmodified since then', '--if-unmodified-since'),
        object_version=ValueArgument(
            'Get contents of the chosen version', '--object-version')
    )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    def _run(self):
        # Stream the object's contents straight to the command's output
        download_kwargs = dict(
            range_str=self['range'],
            version=self['object_version'],
            if_match=self['if_match'],
            if_none_match=self['if_none_match'],
            if_modified_since=self['if_modified_since'],
            if_unmodified_since=self['if_unmodified_since'])
        self.client.download_object(self.path, self._out, **download_kwargs)

    def main(self, path_or_url):
        super(self.__class__, self)._run(path_or_url)
        self._run()
|