33 |
33 |
|
34 |
34 |
from io import StringIO
|
35 |
35 |
from pydoc import pager
|
|
36 |
from os import path, walk, makedirs
|
36 |
37 |
|
37 |
38 |
from kamaki.clients.pithos import PithosClient, ClientError
|
38 |
39 |
|
... | ... | |
41 |
42 |
from kamaki.cli.commands import (
|
42 |
43 |
_command_init, errors, addLogSettings, DontRaiseKeyError, _optional_json,
|
43 |
44 |
_name_filter, _optional_output_cmd)
|
44 |
|
from kamaki.cli.errors import (
|
45 |
|
CLIBaseUrlError, CLIError)
|
|
45 |
from kamaki.cli.errors import CLIBaseUrlError, CLIError, CLIInvalidArgument
|
46 |
46 |
from kamaki.cli.argument import (
|
47 |
|
FlagArgument, IntArgument, ValueArgument, DateArgument)
|
48 |
|
from kamaki.cli.utils import (format_size, bold)
|
|
47 |
FlagArgument, IntArgument, ValueArgument, DateArgument,
|
|
48 |
ProgressBarArgument, RepeatableArgument)
|
|
49 |
from kamaki.cli.utils import (
|
|
50 |
format_size, bold, get_path_size, guess_mime_type)
|
49 |
51 |
|
50 |
52 |
file_cmds = CommandTree('file', 'Pithos+/Storage object level API commands')
|
51 |
53 |
container_cmds = CommandTree(
|
... | ... | |
142 |
144 |
/CONTAINER/OBJECT_PATH
|
143 |
145 |
return account, container, path
|
144 |
146 |
"""
|
145 |
|
account, container, path, prefix = '', '', url, 'pithos://'
|
|
147 |
account, container, obj_path, prefix = '', '', url, 'pithos://'
|
146 |
148 |
if url.startswith(prefix):
|
147 |
149 |
account, sep, url = url[len(prefix):].partition('/')
|
148 |
150 |
url = '/%s' % url
|
149 |
151 |
if url.startswith('/'):
|
150 |
|
container, sep, path = url[1:].partition('/')
|
151 |
|
return account, container, path
|
|
152 |
container, sep, obj_path = url[1:].partition('/')
|
|
153 |
return account, container, obj_path
|
152 |
154 |
|
153 |
155 |
def _run(self, url=None):
|
154 |
156 |
acc, con, self.path = self._resolve_pithos_url(url or '')
|
... | ... | |
193 |
195 |
empty_space = ' ' * (len(str(len(object_list))) - len(str(index)))
|
194 |
196 |
if 'subdir' in obj:
|
195 |
197 |
continue
|
196 |
|
if obj['content_type'] == 'application/directory':
|
197 |
|
isDir, size = True, 'D'
|
|
198 |
if self._is_dir(obj):
|
|
199 |
size = 'D'
|
198 |
200 |
else:
|
199 |
|
isDir, size = False, format_size(obj['bytes'])
|
|
201 |
size = format_size(obj['bytes'])
|
200 |
202 |
pretty_obj['bytes'] = '%s (%s)' % (obj['bytes'], size)
|
201 |
203 |
oname = obj['name'] if self['more'] else bold(obj['name'])
|
202 |
204 |
prfx = ('%s%s. ' % (empty_space, index)) if self['enum'] else ''
|
... | ... | |
206 |
208 |
self.writeln()
|
207 |
209 |
else:
|
208 |
210 |
oname = '%s%9s %s' % (prfx, size, oname)
|
209 |
|
oname += '/' if isDir else u''
|
|
211 |
oname += '/' if self._is_dir(obj) else u''
|
210 |
212 |
self.writeln(oname)
|
211 |
213 |
|
212 |
214 |
@errors.generic.all
|
... | ... | |
509 |
511 |
super(file_move, self)._run(
|
510 |
512 |
source_path_or_url, destination_path_or_url or '')
|
511 |
513 |
self._run()
|
|
514 |
|
|
515 |
|
|
516 |
@command(file_cmds)
class file_append(_pithos_container, _optional_output_cmd):
    """Append local file to (existing) remote object
    The remote object should exist.
    If the remote object is a directory, it is transformed into a file.
    In the latter case, objects under the directory remain intact.
    """

    arguments = dict(
        progress_bar=ProgressBarArgument(
            'do not show progress bar', ('-N', '--no-progress-bar'),
            default=False),
        max_threads=IntArgument('default: 1', '--threads'),
    )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    def _run(self, local_path):
        """Append the contents of a local file to the remote object.

        :param local_path: path of the local file to read from
        """
        #  Let the user tune upload parallelism (defaults to 1 thread)
        if self['max_threads'] > 0:
            self.client.MAX_THREADS = int(self['max_threads'])
        (progress_bar, upload_cb) = self._safe_progress_bar('Appending')
        try:
            with open(local_path, 'rb') as f:
                self._optional_output(
                    self.client.append_object(self.path, f, upload_cb))
        finally:
            #  Always restore the terminal, even if the upload failed
            self._safe_progress_bar_finish(progress_bar)

    def main(self, local_path, remote_path_or_url):
        super(self.__class__, self)._run(remote_path_or_url)
        self._run(local_path)
|
|
549 |
|
|
550 |
|
|
551 |
@command(file_cmds)
class file_truncate(_pithos_container, _optional_output_cmd):
    """Truncate remote file up to size"""

    arguments = dict(
        size_in_bytes=IntArgument('Length of file after truncation', '--size')
    )
    required = ('size_in_bytes', )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    @errors.pithos.object_size
    def _run(self, size):
        #  Ask the server to cut the object down to `size` bytes
        r = self.client.truncate_object(self.path, size)
        self._optional_output(r)

    def main(self, path_or_url):
        super(self.__class__, self)._run(path_or_url)
        self._run(size=self['size_in_bytes'])
|
|
571 |
|
|
572 |
|
|
573 |
@command(file_cmds)
class file_overwrite(_pithos_container, _optional_output_cmd):
    """Overwrite part of a remote file"""

    arguments = dict(
        progress_bar=ProgressBarArgument(
            'do not show progress bar', ('-N', '--no-progress-bar'),
            default=False),
        start_position=IntArgument('File position in bytes', '--from'),
        end_position=IntArgument('File position in bytes', '--to')
    )
    required = ('start_position', 'end_position')

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    @errors.pithos.object_size
    def _run(self, local_path, start, end):
        #  Normalize both positions to integers before computing the length
        start, end = int(start), int(end)
        bar, upload_cb = self._safe_progress_bar(
            'Overwrite %s bytes' % (end - start))
        try:
            with open(path.abspath(local_path), 'rb') as source:
                r = self.client.overwrite_object(
                    obj=self.path,
                    start=start,
                    end=end,
                    source_file=source,
                    upload_cb=upload_cb)
                self._optional_output(r)
        finally:
            #  The progress bar must be torn down on every exit path
            self._safe_progress_bar_finish(bar)

    def main(self, local_path, path_or_url):
        super(self.__class__, self)._run(path_or_url)
        #  Default remote object name: the basename of the local file
        self.path = self.path or path.basename(local_path)
        self._run(
            local_path=local_path,
            start=self['start_position'],
            end=self['end_position'])
|
|
613 |
|
|
614 |
|
|
615 |
@command(file_cmds)
class file_upload(_pithos_container, _optional_output_cmd):
    """Upload a file"""

    arguments = dict(
        max_threads=IntArgument('default: 5', '--threads'),
        content_encoding=ValueArgument(
            'set MIME content type', '--content-encoding'),
        content_disposition=ValueArgument(
            'specify objects presentation style', '--content-disposition'),
        content_type=ValueArgument('specify content type', '--content-type'),
        uuid_for_read_permission=RepeatableArgument(
            'Give read access to a user or group (can be repeated)',
            '--read-permission'),
        uuid_for_write_permission=RepeatableArgument(
            'Give write access to a user or group (can be repeated)',
            '--write-permission'),
        public=FlagArgument('make object publicly accessible', '--public'),
        overwrite=FlagArgument('Force (over)write', ('-f', '--force')),
        recursive=FlagArgument(
            'Recursively upload directory *contents* + subdirectories',
            ('-r', '--recursive')),
        unchunked=FlagArgument(
            'Upload file as one block (not recommended)', '--unchunked'),
        md5_checksum=ValueArgument(
            'Confirm upload with a custom checksum (MD5)', '--etag'),
        use_hashes=FlagArgument(
            'Source file contains hashmap not data', '--source-is-hashmap'),
    )

    def _sharing(self):
        """Build a sharing dict from the permission arguments.

        :returns: dict with optional 'read'/'write' uuid lists, or None if
            no permissions were requested
        """
        sharing = dict()
        readlist = self['uuid_for_read_permission']
        if readlist:
            sharing['read'] = self['uuid_for_read_permission']
        writelist = self['uuid_for_write_permission']
        if writelist:
            sharing['write'] = self['uuid_for_write_permission']
        return sharing or None

    def _check_container_limit(self, path):
        """Raise CLIError if the data at `path` exceeds the free space left
        in the current container (limit - used bytes).
        """
        cl_dict = self.client.get_container_limit()
        container_limit = int(cl_dict['x-container-policy-quota'])
        r = self.client.container_get()
        used_bytes = sum(int(o['bytes']) for o in r.json)
        path_size = get_path_size(path)
        if container_limit and path_size > (container_limit - used_bytes):
            raise CLIError(
                'Container %s (limit(%s) - used(%s)) < (size(%s) of %s)' % (
                    self.client.container,
                    format_size(container_limit),
                    format_size(used_bytes),
                    format_size(path_size),
                    path),
                details=[
                    'Check account limit: /file quota',
                    'Check container limit:',
                    '\t/file containerlimit get %s' % self.client.container,
                    'Increase container limit:',
                    '\t/file containerlimit set <new limit> %s' % (
                        self.client.container)])

    def _src_dst(self, local_path, remote_path, objlist=None):
        """Yield (open local file handle, remote object path) pairs.

        For a directory (requires --recursive) the local tree is walked and
        remote directories are created on the fly; for a single file exactly
        one pair is yielded. Existing remote objects block the upload unless
        --force is given.
        """
        lpath = path.abspath(local_path)
        short_path = path.basename(path.abspath(local_path))
        rpath = remote_path or short_path
        if path.isdir(lpath):
            if not self['recursive']:
                raise CLIError('%s is a directory' % lpath, details=[
                    'Use %s to upload directories & contents' % '/'.join(
                        self.arguments['recursive'].parsed_name)])
            robj = self.client.container_get(path=rpath)
            if not self['overwrite']:
                if robj.json:
                    raise CLIError(
                        'Objects/files prefixed as %s already exist' % rpath,
                        details=['Existing objects:'] + ['\t%s:\t%s' % (
                            o['name'],
                            o['content_type'][12:]) for o in robj.json] + [
                            'Use -f to add, overwrite or resume'])
                else:
                    try:
                        topobj = self.client.get_object_info(rpath)
                        if not self._is_dir(topobj):
                            raise CLIError(
                                'Object /%s/%s exists but not a directory' % (
                                    self.container, rpath),
                                details=['Use -f to overwrite'])
                    except ClientError as ce:
                        #  404 means the target is free to use
                        if ce.status not in (404, ):
                            raise
            self._check_container_limit(lpath)
            prev = ''
            for top, subdirs, files in walk(lpath):
                if top != prev:
                    prev = top
                    try:
                        rel_path = rpath + top.split(lpath)[1]
                    except IndexError:
                        rel_path = rpath
                    self.error('mkdir %s:%s' % (
                        self.client.container, rel_path))
                    self.client.create_directory(rel_path)
                for f in files:
                    fpath = path.join(top, f)
                    if path.isfile(fpath):
                        #  Remote paths always use '/' separators
                        rel_path = rel_path.replace(path.sep, '/')
                        pathfix = f.replace(path.sep, '/')
                        yield open(fpath, 'rb'), '%s/%s' % (rel_path, pathfix)
                    else:
                        self.error('%s is not a regular file' % fpath)
        else:
            if not path.isfile(lpath):
                raise CLIError(('%s is not a regular file' % lpath) if (
                    path.exists(lpath)) else '%s does not exist' % lpath)
            try:
                robj = self.client.get_object_info(rpath)
                if remote_path and self._is_dir(robj):
                    #  Uploading into a remote directory: append the basename
                    rpath += '/%s' % (short_path.replace(path.sep, '/'))
                    self.client.get_object_info(rpath)
                if not self['overwrite']:
                    raise CLIError(
                        'Object /%s/%s already exists' % (
                            self.container, rpath),
                        details=['use -f to overwrite / resume'])
            except ClientError as ce:
                if ce.status not in (404, ):
                    raise
            self._check_container_limit(lpath)
            yield open(lpath, 'rb'), rpath

    def _run(self, local_path, remote_path):
        """Upload local_path (file or directory) to remote_path."""
        if self['max_threads'] > 0:
            self.client.MAX_THREADS = int(self['max_threads'])
        params = dict(
            content_encoding=self['content_encoding'],
            content_type=self['content_type'],
            content_disposition=self['content_disposition'],
            sharing=self._sharing(),
            public=self['public'])
        #  BUG FIX: was "uploaded, container_info_cache = list, dict()" which
        #  bound the list *type* to `uploaded`, so uploaded.append(r) and the
        #  final _optional_output would fail or emit the class object
        uploaded, container_info_cache = [], dict()
        #  BUG FIX: the account placeholder was never filled (bare '%s')
        rpref = 'pithos://%s' % self['account'] if self['account'] else ''
        for f, rpath in self._src_dst(local_path, remote_path):
            self.error('%s --> %s/%s/%s' % (
                f.name, rpref, self.client.container, rpath))
            if not (self['content_type'] and self['content_encoding']):
                #  Guess the missing MIME attributes from the file name
                ctype, cenc = guess_mime_type(f.name)
                params['content_type'] = self['content_type'] or ctype
                params['content_encoding'] = self['content_encoding'] or cenc
            if self['unchunked']:
                r = self.client.upload_object_unchunked(
                    rpath, f,
                    etag=self['md5_checksum'], withHashFile=self['use_hashes'],
                    **params)
                if self['with_output'] or self['json_output']:
                    r['name'] = '/%s/%s' % (self.client.container, rpath)
                    uploaded.append(r)
            else:
                try:
                    (progress_bar, upload_cb) = self._safe_progress_bar(
                        'Uploading %s' % f.name.split(path.sep)[-1])
                    if progress_bar:
                        hash_bar = progress_bar.clone()
                        hash_cb = hash_bar.get_generator(
                            'Calculating block hashes')
                    else:
                        hash_cb = None
                    r = self.client.upload_object(
                        rpath, f,
                        hash_cb=hash_cb,
                        upload_cb=upload_cb,
                        container_info_cache=container_info_cache,
                        **params)
                    if self['with_output'] or self['json_output']:
                        r['name'] = '/%s/%s' % (self.client.container, rpath)
                        uploaded.append(r)
                except Exception:
                    self._safe_progress_bar_finish(progress_bar)
                    raise
                finally:
                    self._safe_progress_bar_finish(progress_bar)
        self._optional_output(uploaded)
        self.error('Upload completed')

    def main(self, local_path, remote_path_or_url):
        super(self.__class__, self)._run(remote_path_or_url)
        remote_path = self.path or path.basename(path.abspath(local_path))
        self._run(local_path=local_path, remote_path=remote_path)
|
|
803 |
|
|
804 |
|
|
805 |
class RangeArgument(ValueArgument):
    """
    :value type: string of the form <start>-<end> where <start> and <end> are
        integers
    :value returns: the input string, after type checking <start> and <end>
    """

    @property
    def value(self):
        """The accumulated, comma-joined range string (or the default)."""
        return getattr(self, '_value', self.default)

    @value.setter
    def value(self, newvalues):
        if newvalues:
            self._value = getattr(self, '_value', self.default)
            for newvalue in newvalues.split(','):
                #  Ranges are accumulated as a comma-separated string
                self._value = ('%s,' % self._value) if self._value else ''
                start, sep, end = newvalue.partition('-')
                try:
                    if sep:
                        if start:
                            start, end = (int(start), int(end))
                            if start > end:
                                raise CLIInvalidArgument(
                                    'Invalid range %s' % newvalue, details=[
                                        'Valid range formats',
                                        ' START-END', ' UP_TO', ' -FROM',
                                        'where all values are integers'])
                            self._value += '%s-%s' % (start, end)
                        else:
                            #  '-N' means "the last N bytes"
                            self._value += '-%s' % int(end)
                    else:
                        #  A bare integer means "up to N"
                        self._value += '%s' % int(start)
                except ValueError:
                    #  BUG FIX: non-numeric input used to escape as a raw
                    #  ValueError traceback; report it as a CLI usage error
                    raise CLIInvalidArgument(
                        'Invalid range %s' % newvalue, details=[
                            'Valid range formats',
                            ' START-END', ' UP_TO', ' -FROM',
                            'where all values are integers'])
|
|
837 |
|
|
838 |
|
|
839 |
@command(file_cmds)
class file_cat(_pithos_container):
    """Fetch remote file contents"""

    arguments = dict(
        range=RangeArgument('show range of data', '--range'),
        if_match=ValueArgument('show output if ETags match', '--if-match'),
        if_none_match=ValueArgument(
            'show output if ETags match', '--if-none-match'),
        if_modified_since=DateArgument(
            'show output modified since then', '--if-modified-since'),
        if_unmodified_since=DateArgument(
            'show output unmodified since then', '--if-unmodified-since'),
        object_version=ValueArgument(
            'Get contents of the chosen version', '--object-version')
    )

    @errors.generic.all
    @errors.pithos.connection
    @errors.pithos.container
    @errors.pithos.object_path
    def _run(self):
        #  Collect the conditional-download restrictions from the CLI args
        restrictions = dict(
            range_str=self['range'],
            version=self['object_version'],
            if_match=self['if_match'],
            if_none_match=self['if_none_match'],
            if_modified_since=self['if_modified_since'],
            if_unmodified_since=self['if_unmodified_since'])
        #  Stream the object straight to the command's output
        self.client.download_object(self.path, self._out, **restrictions)

    def main(self, path_or_url):
        super(self.__class__, self)._run(path_or_url)
        self._run()
|