--- /dev/null
+Client Library
+==============
+
+.. automodule:: pithos.lib.client
\ No newline at end of file
--- /dev/null
+Clients
+========
+
+.. toctree::
+ :maxdepth: 1
+
+ client-lib
\ No newline at end of file
devguide
adminguide
backends
-
+ clients
Indices and tables
==================
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.
-from pithos.lib.client import Client, Fault
+from pithos.lib.client import Pithos_Client, Fault
import unittest
from django.utils import simplejson as json
from xml.dom import minidom
+from StringIO import StringIO
import types
import hashlib
import os
class BaseTestCase(unittest.TestCase):
#TODO unauthorized request
def setUp(self):
- self.client = Client(DEFAULT_HOST, DEFAULT_AUTH, DEFAULT_USER, DEFAULT_API)
- self.invalid_client = Client(DEFAULT_HOST, DEFAULT_AUTH, 'non-existing', DEFAULT_API)
- self.unauthorised_client = Client(DEFAULT_HOST, '', DEFAULT_USER, DEFAULT_API)
+ self.client = Pithos_Client(DEFAULT_HOST, DEFAULT_AUTH, DEFAULT_USER, DEFAULT_API)
self.headers = {
'account': ('x-account-container-count',
'x-account-bytes-used',
'last-modified',
'content-length',
'date',
- 'content-type',
+ 'content_type',
'server',),
'object': ('etag',
'content-length',
- 'content-type',
+ 'content_type',
'content-encoding',
'last-modified',
'date',
'server',),
'container': ('x-container-object-count',
'x-container-bytes-used',
- 'content-type',
+ 'content_type',
'last-modified',
'content-length',
'date',
'x-container-object-meta',
'x-container-policy-versioning',
'server',)}
-
+
self.contentTypes = {'xml':'application/xml',
'json':'application/json',
'':'text/plain'}
'content_encoding',
'last_modified',)}
self.return_codes = (400, 401, 404, 503,)
-
+
+ def tearDown(self):
+ for c in self.client.list_containers():
+ for o in self.client.list_objects(c):
+ self.client.delete_object(c, o)
+ self.client.delete_container(c)
+
def assert_status(self, status, codes):
l = [elem for elem in self.return_codes]
if type(codes) == types.ListType:
l.append(codes)
self.assertTrue(status in l)
- def assert_list(self, path, entity, limit=10000, format='text', params=None, **headers):
- status, headers, data = self.client.get(path, format=format,
- headers=headers, params=params)
-
- self.assert_status(status, [200, 204, 304, 412])
- if format == 'text':
- data = data.strip().split('\n') if data else []
- self.assertTrue(len(data) <= limit)
- else:
- exp_content_type = self.contentTypes[format]
- self.assertEqual(headers['content-type'].find(exp_content_type), 0)
- #self.assert_extended(data, format, entity, limit)
- if format == 'json':
- data = json.loads(data) if data else []
- elif format == 'xml':
- data = minidom.parseString(data)
- return status, headers, data
-
- def list_containers(self, limit=10000, marker='', format='text', **headers):
- params = locals()
- params.pop('self')
- return self.assert_list('', 'account', limit, format, params, **headers)
-
- def list_objects(self, container, limit=10000, marker='',
- prefix='', format='', path='', delimiter='', meta='',
- **headers):
- params = locals()
- params.pop('self')
- params.pop('container')
- path = '/' + container
- format = 'text' if format == '' else format
- return self.assert_list(path, 'container', limit, format, params, **headers)
-
- def _assert_get_meta(self, path, entity, params=None, **exp_meta):
- status, headers, data = self.client.head(path, params)
- self.assert_status(status, 204)
- #self.assert_headers(headers, entity, **exp_meta)
- return status, headers, data
-
- def get_account_meta(self, params=None, **exp_meta):
- return self._assert_get_meta('', 'account', params, **exp_meta)
-
- def get_container_meta(self, container, params=None, **exp_meta):
- path = '/%s' % container
- return self._assert_get_meta(path, 'container', params, **exp_meta)
-
- def create_container(self, name, **meta):
- headers = {}
- for k,v in meta.items():
- headers['x-container-meta-%s' %k.strip().upper()] = v.strip()
- status, header, data = self.client.put('/' + name, headers=headers)
- self.assert_status(status, [201, 202])
- return status, header, data
-
- def get_object(self, container, name, format='', version=None, **headers):
- path = '/%s/%s' % (container, name)
- params = {'version':version} if version else None
- status, headers, data = self.client.get(path, format, headers, params)
- self.assert_status(status, [200, 206, 304, 412, 416])
- #if status in [200, 206]:
- # self.assert_headers(headers, 'object')
- return status, headers, data
-
- def update_object(self, container, name, data='', content_type='', **headers):
- if content_type != '':
- headers['content-type'] = content_type
- status, headers, data = self.client.update_object_data(container,
- name,
- data,
- headers)
- self.assert_status(status, [202, 204, 416])
- return status, headers, data
-
+ #def assert_list(self, path, entity, limit=10000, format='text', params=None, **headers):
+ # status, headers, data = self.client.get(path, format=format,
+ # headers=headers, params=params)
+ #
+ # self.assert_status(status, [200, 204, 304, 412])
+ # if format == 'text':
+ # data = data.strip().split('\n') if data else []
+ # self.assertTrue(len(data) <= limit)
+ # else:
+ # exp_content_type = self.contentTypes[format]
+ # self.assertEqual(headers['content_type'].find(exp_content_type), 0)
+ # #self.assert_extended(data, format, entity, limit)
+ # if format == 'json':
+ # data = json.loads(data) if data else []
+ # elif format == 'xml':
+ # data = minidom.parseString(data)
+ # return status, headers, data
+
def assert_headers(self, headers, type, **exp_meta):
prefix = 'x-%s-meta-' %type
system_headers = [h for h in headers if not h.startswith(prefix)]
elif exp_meta:
k = k.split(prefix)[-1]
self.assertEqual(v, exp_meta[k])
-
+
#def assert_extended(self, data, format, type, size):
# if format == 'xml':
# self._assert_xml(data, type, size)
# elif format == 'json':
# self._assert_json(data, type, size)
- #
+
#def _assert_json(self, data, type, size):
# print '#', data
# convert = lambda s: s.lower()
# if 'subdir' in i.keys():
# continue
# self.assertTrue(item in i.keys())
- #
+
#def _assert_xml(self, data, type, size):
# print '#', data
# convert = lambda s: s.lower()
except Fault, f:
self.failUnless(f.status == status)
+ def assert_container_exists(self, container):
+ """
+ asserts the existence of a container
+ """
+ try:
+ self.client.retrieve_container_metadata(container)
+ except Fault, f:
+ self.failIf(f.status == 404)
+
def assert_object_exists(self, container, object):
"""
asserts the existence of an object
"""
self.assert_raises_fault(404, self.client.retrieve_object_metadata,
container, object)
-
+
def upload_random_data(self, container, name, length=1024, type=None,
enc=None, **meta):
data = get_random_data(length)
return self.upload_data(container, name, data, type, enc, **meta)
-
+
def upload_data(self, container, name, data, type=None, enc=None, etag=None,
**meta):
obj = {}
obj['data'] = data
obj['hash'] = compute_md5_hash(obj['data'])
- headers = {}
- for k,v in meta.items():
- key = 'x-object-meta-%s' % k
- headers[key] = v
- headers['etag'] = etag if etag else obj['hash']
+ args = {}
+ args['etag'] = etag if etag else obj['hash']
+
guess = mimetypes.guess_type(name)
type = type if type else guess[0]
enc = enc if enc else guess[1]
- headers['content-type'] = type if type else 'plain/text'
- headers['content-encoding'] = enc if enc else None
- obj['meta'] = headers
+ args['content_type'] = type if type else 'plain/text'
+ args['content_encoding'] = enc if enc else None
+
+ obj['meta'] = args
path = '/%s/%s' % (container, name)
- status, headers, data = self.client.put(path, obj['data'],
- headers=headers)
- if status == 201:
- self.assertTrue('etag' in headers)
- self.assertEqual(obj['hash'], headers['etag'])
- return obj
+ self.client.create_object(container, name, StringIO(obj['data']),
+ meta, **args)
+
+ return obj
except IOError:
return
self.account = 'test'
self.containers = ['apples', 'bananas', 'kiwis', 'oranges', 'pears']
for item in self.containers:
- self.create_container(item)
-
- def tearDown(self):
- for c in self.list_containers()[2]:
- self.client.delete_container(c)
-
+ self.client.create_container(item)
+
def test_get_account_meta(self):
- headers = self.get_account_meta()[1]
+ meta = self.client.retrieve_account_metadata()
- containers = self.list_containers()[2]
+ containers = self.client.list_containers()
l = str(len(containers))
- self.assertEqual(headers['x-account-container-count'], l)
+ self.assertEqual(meta['x-account-container-count'], l)
size = 0
for c in containers:
- h = self.get_container_meta(c)[1]
- size = size + int(h['x-container-bytes-used'])
- self.assertEqual(headers['x-account-bytes-used'], str(size))
-
+ m = self.client.retrieve_container_metadata(c)
+ size = size + int(m['x-container-bytes-used'])
+ self.assertEqual(meta['x-account-bytes-used'], str(size))
+
#def test_get_account_401(self):
# response = self.get_account_meta('non-existing-account')
# print response
#create some containers
self.containers = ['apples', 'bananas', 'kiwis', 'oranges', 'pears']
for item in self.containers:
- self.create_container(item)
-
- def tearDown(self):
- for c in self.list_containers()[2]:
- for o in self.list_objects(c)[2]:
- self.client.delete_object(c, o)
- self.client.delete_container(c)
-
+ self.client.create_container(item)
+
def test_list(self):
#list containers
- containers = self.list_containers()[2]
+ containers = self.client.list_containers()
self.assertEquals(self.containers, containers)
-
+
#def test_list_204(self):
# response = self.list_containers('non-existing-account')
# self.assertEqual(response.status_code, 204)
-
+
def test_list_with_limit(self):
limit = 2
- containers = self.list_containers(limit=limit)[2]
+ containers = self.client.list_containers(limit=limit)
self.assertEquals(len(containers), limit)
self.assertEquals(self.containers[:2], containers)
-
+
def test_list_with_marker(self):
l = 2
m = 'bananas'
- containers = self.list_containers(limit=l, marker=m)[2]
+ containers = self.client.list_containers(limit=l, marker=m)
i = self.containers.index(m) + 1
self.assertEquals(self.containers[i:(i+l)], containers)
m = 'oranges'
- containers = self.list_containers(limit=l, marker=m)[2]
+ containers = self.client.list_containers(limit=l, marker=m)
i = self.containers.index(m) + 1
self.assertEquals(self.containers[i:(i+l)], containers)
-
+
#def test_extended_list(self):
# self.list_containers(self.account, limit=3, format='xml')
# self.list_containers(self.account, limit=3, format='json')
-
+
def test_list_json_with_marker(self):
l = 2
m = 'bananas'
- status, headers, containers = self.list_containers(limit=l, marker=m,
- format='json')
+ containers = self.client.list_containers(limit=l, marker=m, detail=True)
self.assertEqual(containers[0]['name'], 'kiwis')
self.assertEqual(containers[1]['name'], 'oranges')
-
- def test_list_xml_with_marker(self):
- l = 2
- m = 'oranges'
- status, headers, xml = self.list_containers(limit=l, marker=m,
- format='xml')
- nodes = xml.getElementsByTagName('name')
- self.assertEqual(len(nodes), 1)
- self.assertEqual(nodes[0].childNodes[0].data, 'pears')
-
+
+ #def test_list_xml_with_marker(self):
+ # l = 2
+ # m = 'oranges'
+ # status, headers, xml = self.list_containers(limit=l, marker=m, format='xml')
+ # nodes = xml.getElementsByTagName('name')
+ # self.assertEqual(len(nodes), 1)
+ # self.assertEqual(nodes[0].childNodes[0].data, 'pears')
+
def test_if_modified_since(self):
t = datetime.datetime.utcnow()
t2 = t - datetime.timedelta(minutes=10)
#add a new container
- self.create_container('dummy')
-
+ self.client.create_container('dummy')
+
for f in DATE_FORMATS:
past = t2.strftime(f)
-
- headers = {'if-modified-since':'%s' %past}
- status, headers, data = self.list_containers(**headers)
-
- #assert get success
- self.assertEqual(status, 200)
-
+ try:
+ c = self.client.list_containers(if_modified_since=past)
+ self.assertEqual(len(c), len(self.containers) + 1)
+ except Fault, f:
+ self.failIf(f.status == 304) #fail if not modified
+
def test_if_modified_since_invalid_date(self):
- headers = {'if-modified-since':''}
- status, headers, data = self.list_containers(**headers)
-
- #assert get success
- self.assertEqual(status, 200)
-
+ c = self.client.list_containers(if_modified_since='')
+ self.assertEqual(len(c), len(self.containers))
+
def test_if_not_modified_since(self):
now = datetime.datetime.utcnow()
since = now + datetime.timedelta(1)
for f in DATE_FORMATS:
- headers = {'if-modified-since':'%s' %since.strftime(f)}
+ args = {'if_modified_since':'%s' %since.strftime(f)}
+
#assert not modified
- self.assert_raises_fault(304, self.list_containers, **headers)
-
+ self.assert_raises_fault(304, self.client.list_containers, **args)
+
def test_if_unmodified_since(self):
now = datetime.datetime.utcnow()
since = now + datetime.timedelta(1)
for f in DATE_FORMATS:
- headers = {'if-unmodified-since':'%s' %since.strftime(f)}
- status, headers, data = self.list_containers(**headers)
+ c = self.client.list_containers(if_unmodified_since=since.strftime(f))
#assert success
- self.assertEqual(status, 200)
- self.assertEqual(self.containers, data)
-
+ self.assertEqual(self.containers, c)
+
def test_if_unmodified_since_precondition_failed(self):
t = datetime.datetime.utcnow()
t2 = t - datetime.timedelta(minutes=10)
#add a new container
- self.create_container('dummy')
+ self.client.create_container('dummy')
for f in DATE_FORMATS:
past = t2.strftime(f)
- headers = {'if-unmodified-since':'%s' %past}
+ args = {'if_unmodified_since':'%s' %past}
+
#assert precondition failed
- self.assert_raises_fault(412, self.list_containers, **headers)
-
+ self.assert_raises_fault(412, self.client.list_containers, **args)
+
class AccountPost(BaseTestCase):
def setUp(self):
BaseTestCase.setUp(self)
self.account = 'test'
self.containers = ['apples', 'bananas', 'kiwis', 'oranges', 'pears']
for item in self.containers:
- self.create_container(item)
-
- def tearDown(self):
- containers = self.list_containers()[2]
- for c in containers:
- self.client.delete_container(c)
-
+ self.client.create_container(item)
+
def test_update_meta(self):
meta = {'test':'test', 'tost':'tost'}
- status, headers, data = self.get_account_meta(**meta)
+ self.client.update_account_metadata(**meta)
+ self.assertEqual(meta, self.client.retrieve_account_metadata(restricted=True))
#def test_invalid_account_update_meta(self):
# with AssertMappingInvariant(self.get_account_meta, self.account):
# meta = {'HTTP_X_ACCOUNT_META_TEST':'test',
# 'HTTP_X_ACCOUNT_META_TOST':'tost'}
# response = self.update_account_meta('non-existing-account', **meta)
-
+
class ContainerHead(BaseTestCase):
def setUp(self):
BaseTestCase.setUp(self)
self.account = 'test'
self.container = 'apples'
- status = self.create_container(self.container)[0]
-
- def tearDown(self):
- for o in self.list_objects(self.container)[2]:
- self.client.delete_object(self.container, o)
- self.client.delete_container(self.container)
-
+ self.client.create_container(self.container)
+
def test_get_meta(self):
meta = {'trash':'true'}
t1 = datetime.datetime.utcnow()
o = self.upload_random_data(self.container, o_names[0], **meta)
if o:
- status, headers, data = self.get_container_meta(self.container)
+ headers = self.client.retrieve_container_metadata(self.container)
self.assertEqual(headers['x-container-object-count'], '1')
self.assertEqual(headers['x-container-bytes-used'], str(len(o['data'])))
t2 = datetime.datetime.strptime(headers['last-modified'], DATE_FORMATS[2])
self.account = 'test'
self.container = ['pears', 'apples']
for c in self.container:
- self.create_container(c)
+ self.client.create_container(c)
self.obj = []
for o in o_names[:8]:
self.obj.append(self.upload_random_data(self.container[0], o))
for o in o_names[8:]:
self.obj.append(self.upload_random_data(self.container[1], o))
-
- def tearDown(self):
- for c in self.container:
- for obj in self.list_objects(c)[2]:
- self.client.delete_object(c, obj)
- self.client.delete_container(c)
-
+
def test_list_objects(self):
- objects = self.list_objects(self.container[0])[2]
+ objects = self.client.list_objects(self.container[0])
l = [elem['name'] for elem in self.obj[:8]]
l.sort()
self.assertEqual(objects, l)
-
+
def test_list_objects_with_limit_marker(self):
- objects = self.list_objects(self.container[0], limit=2)[2]
+ objects = self.client.list_objects(self.container[0], limit=2)
l = [elem['name'] for elem in self.obj[:8]]
l.sort()
self.assertEqual(objects, l[:2])
'moms_birthday.jpg']
limit = 4
for m in markers:
- objects = self.list_objects(self.container[0], limit=limit,
- marker=m)[2]
+ objects = self.client.list_objects(self.container[0], limit=limit,
+ marker=m)
l = [elem['name'] for elem in self.obj[:8]]
l.sort()
start = l.index(m) + 1
end = start + limit
end = len(l) >= end and end or len(l)
self.assertEqual(objects, l[start:end])
-
+
def test_list_pseudo_hierarchical_folders(self):
- objects = self.list_objects(self.container[1], prefix='photos',
- delimiter='/')[2]
+ objects = self.client.list_objects(self.container[1], prefix='photos',
+ delimiter='/')
self.assertEquals(['photos/animals/', 'photos/me.jpg',
'photos/plants/'], objects)
- objects = self.list_objects(self.container[1], prefix='photos/animals',
- delimiter='/')[2]
+ objects = self.client.list_objects(self.container[1],
+ prefix='photos/animals',
+ delimiter='/')
l = ['photos/animals/cats/', 'photos/animals/dogs/']
self.assertEquals(l, objects)
- objects = self.list_objects(self.container[1], path='photos')[2]
+ objects = self.client.list_objects(self.container[1], path='photos')
self.assertEquals(['photos/me.jpg'], objects)
-
+
def test_extended_list_json(self):
- objects = self.list_objects(self.container[1],
- format='json', limit=2,
- prefix='photos/animals',
- delimiter='/')[2]
+ objects = self.client.list_objects(self.container[1], detail=True,
+ limit=2, prefix='photos/animals',
+ delimiter='/')
self.assertEqual(objects[0]['subdir'], 'photos/animals/cats/')
self.assertEqual(objects[1]['subdir'], 'photos/animals/dogs/')
-
- def test_extended_list_xml(self):
- xml = self.list_objects(self.container[1], format='xml', limit=4,
- prefix='photos', delimiter='/')[2]
- dirs = xml.getElementsByTagName('subdir')
- self.assertEqual(len(dirs), 2)
- self.assertEqual(dirs[0].attributes['name'].value, 'photos/animals/')
- self.assertEqual(dirs[1].attributes['name'].value, 'photos/plants/')
-
- objects = xml.getElementsByTagName('name')
- self.assertEqual(len(objects), 1)
- self.assertEqual(objects[0].childNodes[0].data, 'photos/me.jpg')
-
+
+ #def test_extended_list_xml(self):
+ # xml = self.client.list_objects(self.container[1], format='xml', limit=4,
+ # prefix='photos', delimiter='/')
+ # dirs = xml.getElementsByTagName('subdir')
+ # self.assertEqual(len(dirs), 2)
+ # self.assertEqual(dirs[0].attributes['name'].value, 'photos/animals/')
+ # self.assertEqual(dirs[1].attributes['name'].value, 'photos/plants/')
+ #
+ # objects = xml.getElementsByTagName('name')
+ # self.assertEqual(len(objects), 1)
+ # self.assertEqual(objects[0].childNodes[0].data, 'photos/me.jpg')
+
def test_list_meta_double_matching(self):
meta = {'quality':'aaa', 'stock':'true'}
self.client.update_object_metadata(self.container[0],
self.obj[0]['name'], **meta)
- obj = self.list_objects(self.container[0], meta='Quality,Stock')[2]
+ obj = self.client.list_objects(self.container[0], meta='Quality,Stock')
self.assertEqual(len(obj), 1)
self.assertTrue(obj, self.obj[0]['name'])
-
+
def test_list_using_meta(self):
meta = {'quality':'aaa'}
for o in self.obj[:2]:
self.client.update_object_metadata(self.container[0], o['name'],
**meta)
- status, headers, data = self.list_objects(self.container[0],
- meta='Quality')
- self.assertEqual(status, 200)
- self.assertEqual(len(data), 2)
- self.assertTrue(data, [o['name'] for o in self.obj[:2]])
+ obj = self.client.list_objects(self.container[0], meta='Quality')
+ self.assertEqual(len(obj), 2)
+ self.assertTrue(obj, [o['name'] for o in self.obj[:2]])
# test case insensitive
- status, headers, obj = self.list_objects(self.container[0],
- meta='quality')
- self.assertEqual(status, 200)
+ obj = self.client.list_objects(self.container[0], meta='quality')
self.assertEqual(len(obj), 2)
self.assertTrue(obj, [o['name'] for o in self.obj[:2]])
# test multiple matches
- status, headers, obj = self.list_objects(self.container[0],
- meta='Quality,Stock')
- self.assertEqual(status, 200)
+ obj = self.client.list_objects(self.container[0], meta='Quality,Stock')
self.assertEqual(len(obj), 4)
self.assertTrue(obj, [o['name'] for o in self.obj[:4]])
# test non 1-1 multiple match
- status, headers, obj = self.list_objects(self.container[0],
- meta='Quality,aaaa')
- self.assertEqual(status, 200)
+ obj = self.client.list_objects(self.container[0], meta='Quality,aaaa')
self.assertEqual(len(obj), 2)
self.assertTrue(obj, [o['name'] for o in self.obj[:2]])
-
+
def test_if_modified_since(self):
t = datetime.datetime.utcnow()
t2 = t - datetime.timedelta(minutes=10)
#add a new object
self.upload_random_data(self.container[0], o_names[0])
-
+
for f in DATE_FORMATS:
past = t2.strftime(f)
-
- headers = {'if-modified-since':'%s' %past}
- status, headers, data = self.list_objects(self.container[0],
- **headers)
-
- #assert get success
- self.assertEqual(status, 200)
-
+ try:
+ o = self.client.list_objects(self.container[0],
+ if_modified_since=past)
+ self.assertEqual(o,
+ self.client.list_objects(self.container[0]))
+ except Fault, f:
+ self.failIf(f.status == 304) #fail if not modified
+
def test_if_modified_since_invalid_date(self):
headers = {'if-modified-since':''}
- status, headers, data = self.list_objects(self.container[0], **headers)
-
- #assert get success
- self.assertEqual(status, 200)
-
+ o = self.client.list_objects(self.container[0], if_modified_since='')
+ self.assertEqual(o, self.client.list_objects(self.container[0]))
+
def test_if_not_modified_since(self):
now = datetime.datetime.utcnow()
since = now + datetime.timedelta(1)
for f in DATE_FORMATS:
- headers = {'if-modified-since':'%s' %since.strftime(f)}
+ args = {'if_modified_since':'%s' %since.strftime(f)}
+
#assert not modified
- self.assert_raises_fault(304, self.list_objects, self.container[0],
- **headers)
+ self.assert_raises_fault(304, self.client.list_objects,
+ self.container[0], **args)
def test_if_unmodified_since(self):
now = datetime.datetime.utcnow()
since = now + datetime.timedelta(1)
for f in DATE_FORMATS:
- headers = {'if-unmodified-since':'%s' %since.strftime(f)}
- status, headers, data = self.list_objects(self.container[0], **headers)
+ obj = self.client.list_objects(self.container[0],
+ if_unmodified_since=since.strftime(f))
- #assert success
- self.assertEqual(status, 200)
- objlist = self.list_objects(self.container[0])[2]
- self.assertEqual(data, objlist)
-
+ #assert unmodified
+ self.assertEqual(obj, self.client.list_objects(self.container[0]))
+
def test_if_unmodified_since_precondition_failed(self):
t = datetime.datetime.utcnow()
t2 = t - datetime.timedelta(minutes=10)
#add a new container
- self.create_container('dummy')
-
+ self.client.create_container('dummy')
+
for f in DATE_FORMATS:
past = t2.strftime(f)
- headers = {'if-unmodified-since':'%s' %past}
+ args = {'if_unmodified_since':'%s' %past}
+
#assert precondition failed
- self.assert_raises_fault(412, self.list_objects, self.container[0],
- **headers)
+ self.assert_raises_fault(412, self.client.list_objects,
+ self.container[0], **args)
class ContainerPut(BaseTestCase):
def setUp(self):
BaseTestCase.setUp(self)
self.account = 'test'
self.containers = ['c1', 'c2']
-
- def tearDown(self):
- for c in self.list_containers()[2]:
- r = self.client.delete_container(c)
-
+
def test_create(self):
- status = self.create_container(self.containers[0])[0]
- self.assertEqual(status, 201)
-
- containers = self.list_containers()[2]
+ self.client.create_container(self.containers[0])
+ containers = self.client.list_containers()
self.assertTrue(self.containers[0] in containers)
- status = self.get_container_meta(self.containers[0])[0]
- self.assertEqual(status, 204)
-
+ self.assert_container_exists(self.containers[0])
+
def test_create_twice(self):
- status, header, data = self.create_container(self.containers[0])
- if status == 201:
- status, header, data = self.create_container(self.containers[0])
- self.assertTrue(status, 202)
-
+ self.client.create_container(self.containers[0])
+ self.assertTrue(not self.client.create_container(self.containers[0]))
+
class ContainerPost(BaseTestCase):
def setUp(self):
BaseTestCase.setUp(self)
self.account = 'test'
self.container = 'apples'
- self.create_container(self.container)
-
- def tearDown(self):
- for o in self.list_objects(self.container)[2]:
- self.client.delete_object(self.account, self.container, o)
- self.client.delete_container(self.container)
-
+ self.client.create_container(self.container)
+
def test_update_meta(self):
meta = {'test':'test33',
'tost':'tost22'}
self.client.update_container_metadata(self.container, **meta)
- headers = self.get_container_meta(self.container)[1]
+ headers = self.client.retrieve_container_metadata(self.container)
for k,v in meta.items():
k = 'x-container-meta-%s' % k
self.assertTrue(headers[k])
self.account = 'test'
self.containers = ['c1', 'c2']
for c in self.containers:
- self.create_container(c)
+ self.client.create_container(c)
self.upload_random_data(self.containers[1], o_names[0])
-
- def tearDown(self):
- for c in self.list_containers()[2]:
- for o in self.list_objects(c)[2]:
- self.client.delete_object(c, o)
- self.client.delete_container(c)
-
+
def test_delete(self):
status = self.client.delete_container(self.containers[0])[0]
self.assertEqual(status, 204)
-
+
def test_delete_non_empty(self):
self.assert_raises_fault(409, self.client.delete_container,
self.containers[1])
-
+
def test_delete_invalid(self):
self.assert_raises_fault(404, self.client.delete_container, 'c3')
self.containers = ['c1', 'c2']
#create some containers
for c in self.containers:
- self.create_container(c)
+ self.client.create_container(c)
#upload a file
names = ('obj1', 'obj2')
self.objects = []
for n in names:
self.objects.append(self.upload_random_data(self.containers[1], n))
-
- def tearDown(self):
- for c in self.containers:
- for o in self.list_objects(c)[2]:
- self.client.delete_object(c, o)
- self.client.delete_container(c)
-
+
def test_get(self):
#perform get
- status, headers, data = self.get_object(self.containers[1],
- self.objects[0]['name'],
- self.objects[0]['meta'])
- #assert success
- self.assertEqual(status, 200)
-
- #assert content-type
- self.assertEqual(headers['content-type'],
- self.objects[0]['meta']['content-type'])
-
+ o = self.client.retrieve_object(self.containers[1],
+ self.objects[0]['name'],
+ self.objects[0]['meta'])
+ self.assertEqual(o, self.objects[0]['data'])
+
def test_get_invalid(self):
- self.assert_raises_fault(404, self.get_object, self.containers[0],
- self.objects[0]['name'])
-
+ self.assert_raises_fault(404, self.client.retrieve_object,
+ self.containers[0], self.objects[0]['name'])
+
def test_get_partial(self):
#perform get with range
- headers = {'range':'bytes=0-499'}
- status, headers, data = self.get_object(self.containers[1],
- self.objects[0]['name'],
- **headers)
+ status, headers, data = self.client.request_object(self.containers[1],
+ self.objects[0]['name'],
+ range='bytes=0-499')
#assert successful partial content
self.assertEqual(status, 206)
#assert content-type
self.assertEqual(headers['content-type'],
- self.objects[0]['meta']['content-type'])
+ self.objects[0]['meta']['content_type'])
#assert content length
self.assertEqual(int(headers['content-length']), 500)
#assert content
self.assertEqual(self.objects[0]['data'][:500], data)
-
+
def test_get_final_500(self):
#perform get with range
headers = {'range':'bytes=-500'}
- status, headers, data = self.get_object(self.containers[1],
- self.objects[0]['name'],
- **headers)
+ status, headers, data = self.client.request_object(self.containers[1],
+ self.objects[0]['name'],
+ range='bytes=-500')
#assert successful partial content
self.assertEqual(status, 206)
#assert content-type
self.assertEqual(headers['content-type'],
- self.objects[0]['meta']['content-type'])
+ self.objects[0]['meta']['content_type'])
#assert content length
self.assertEqual(int(headers['content-length']), 500)
#assert content
self.assertTrue(self.objects[0]['data'][-500:], data)
-
+
def test_get_rest(self):
#perform get with range
offset = len(self.objects[0]['data']) - 500
- headers = {'range':'bytes=%s-' %offset}
- status, headers, data = self.get_object(self.containers[1],
+ status, headers, data = self.client.request_object(self.containers[1],
self.objects[0]['name'],
- **headers)
+ range='bytes=%s-' %offset)
#assert successful partial content
self.assertEqual(status, 206)
#assert content-type
self.assertEqual(headers['content-type'],
- self.objects[0]['meta']['content-type'])
+ self.objects[0]['meta']['content_type'])
#assert content length
self.assertEqual(int(headers['content-length']), 500)
#assert content
self.assertTrue(self.objects[0]['data'][-500:], data)
-
+
def test_get_range_not_satisfiable(self):
#perform get with range
offset = len(self.objects[0]['data']) + 1
- headers = {'range':'bytes=0-%s' %offset}
#assert range not satisfiable
- self.assert_raises_fault(416, self.get_object, self.containers[1],
- self.objects[0]['name'], **headers)
-
+ self.assert_raises_fault(416, self.client.retrieve_object,
+ self.containers[1], self.objects[0]['name'],
+ range='bytes=0-%s' %offset)
+
def test_multiple_range(self):
#perform get with multiple range
ranges = ['0-499', '-500', '1000-']
- headers = {'range' : 'bytes=%s' % ','.join(ranges)}
- status, headers, data = self.get_object(self.containers[1],
- self.objects[0]['name'],
- **headers)
+ bytes = 'bytes=%s' % ','.join(ranges)
+ status, headers, data = self.client.request_object(self.containers[1],
+ self.objects[0]['name'],
+ range=bytes)
# assert partial content
self.assertEqual(status, 206)
self.assertEqual(len(fdata), len(sdata))
self.assertEquals(fdata, sdata)
i+=1
-
+
def test_multiple_range_not_satisfiable(self):
#perform get with multiple range
out_of_range = len(self.objects[0]['data']) + 1
ranges = ['0-499', '-500', '%d-' %out_of_range]
- headers = {'range' : 'bytes=%s' % ','.join(ranges)}
+ bytes = 'bytes=%s' % ','.join(ranges)
# assert partial content
- self.assert_raises_fault(416, self.get_object, self.containers[1],
- self.objects[0]['name'], **headers)
-
-
+ self.assert_raises_fault(416, self.client.retrieve_object,
+ self.containers[1],
+ self.objects[0]['name'], range=bytes)
+
def test_get_with_if_match(self):
#perform get with If-Match
- headers = {'if-match':self.objects[0]['hash']}
- status, headers, data = self.get_object(self.containers[1],
- self.objects[0]['name'],
- **headers)
+ etag = self.objects[0]['hash']
+ status, headers, data = self.client.request_object(self.containers[1],
+ self.objects[0]['name'],
+ if_match=etag)
#assert get success
self.assertEqual(status, 200)
#assert content-type
self.assertEqual(headers['content-type'],
- self.objects[0]['meta']['content-type'])
+ self.objects[0]['meta']['content_type'])
#assert response content
self.assertEqual(self.objects[0]['data'], data)
-
+
def test_get_with_if_match_star(self):
#perform get with If-Match *
headers = {'if-match':'*'}
- status, headers, data = self.get_object(self.containers[1],
+ status, headers, data = self.client.request_object(self.containers[1],
self.objects[0]['name'],
**headers)
#assert get success
#assert content-type
self.assertEqual(headers['content-type'],
- self.objects[0]['meta']['content-type'])
+ self.objects[0]['meta']['content_type'])
#assert response content
self.assertEqual(self.objects[0]['data'], data)
-
+
def test_get_with_multiple_if_match(self):
#perform get with If-Match
etags = [i['hash'] for i in self.objects if i]
etags = ','.join('"%s"' % etag for etag in etags)
- headers = {'if-match':etags}
- status, headers, data = self.get_object(self.containers[1],
- self.objects[0]['name'],
- **headers)
+ status, headers, data = self.client.request_object(self.containers[1],
+ self.objects[0]['name'],
+ if_match=etags)
#assert get success
self.assertEqual(status, 200)
#assert content-type
self.assertEqual(headers['content-type'],
- self.objects[0]['meta']['content-type'])
+ self.objects[0]['meta']['content_type'])
#assert content-type
self.assertEqual(headers['content-type'],
- self.objects[0]['meta']['content-type'])
+ self.objects[0]['meta']['content_type'])
#assert response content
self.assertEqual(self.objects[0]['data'], data)
-
+
def test_if_match_precondition_failed(self):
- #perform get with If-Match
- headers = {'if-match':'123'}
-
#assert precondition failed
- self.assert_raises_fault(412, self.get_object, self.containers[1],
- self.objects[0]['name'], **headers)
-
-
+ self.assert_raises_fault(412, self.client.retrieve_object,
+ self.containers[1],
+ self.objects[0]['name'], if_match='123')
+
def test_if_none_match(self):
#perform get with If-None-Match
- headers = {'if-none-match':'123'}
- status, headers, data = self.get_object(self.containers[1],
- self.objects[0]['name'],
- **headers)
+ status, headers, data = self.client.request_object(self.containers[1],
+ self.objects[0]['name'],
+ if_none_match='123')
#assert get success
self.assertEqual(status, 200)
#assert content-type
- self.assertEqual(headers['content-type'],
- self.objects[0]['meta']['content-type'])
-
+ self.assertEqual(headers['content_type'],
+ self.objects[0]['meta']['content_type'])
+
def test_if_none_match(self):
- #perform get with If-None-Match *
- headers = {'if-none-match':'*'}
-
- #assert not modified
- self.assert_raises_fault(304, self.get_object, self.containers[1],
- self.objects[0]['name'],
- **headers)
-
+ #perform get with If-None-Match * and assert not modified
+ self.assert_raises_fault(304, self.client.retrieve_object,
+ self.containers[1],
+ self.objects[0]['name'],
+ if_none_match='*')
+
def test_if_none_match_not_modified(self):
- #perform get with If-None-Match
- headers = {'if-none-match':'%s' %self.objects[0]['hash']}
-
- #assert not modified
- self.assert_raises_fault(304, self.get_object, self.containers[1],
- self.objects[0]['name'],
- **headers)
-
- headers = self.get_object(self.containers[1],
- self.objects[0]['name'])[1]
- self.assertEqual(headers['etag'], self.objects[0]['hash'])
-
+ #perform get with If-None-Match and assert not modified
+ self.assert_raises_fault(304, self.client.retrieve_object,
+ self.containers[1],
+ self.objects[0]['name'],
+ if_none_match=self.objects[0]['hash'])
+
+ meta = self.client.retrieve_object_metadata(self.containers[1],
+ self.objects[0]['name'])
+ self.assertEqual(meta['etag'], self.objects[0]['hash'])
+
def test_if_modified_since(self):
t = datetime.datetime.utcnow()
t2 = t - datetime.timedelta(minutes=10)
past = t2.strftime(f)
headers = {'if-modified-since':'%s' %past}
- status, headers, data = self.get_object(self.containers[1],
- self.objects[0]['name'],
- **headers)
-
- #assert get success
- self.assertEqual(status, 200)
-
- #assert content-type
- self.assertEqual(headers['content-type'],
- self.objects[0]['meta']['content-type'])
-
- def test_if_modified_since_invalid_date(self):
- headers = {'if-modified-since':''}
- status, headers, data = self.get_object(self.containers[1],
+ try:
+ o = self.client.retrieve_object(self.containers[1],
self.objects[0]['name'],
- **headers)
-
- #assert get success
- self.assertEqual(status, 200)
-
- #assert content-type
- self.assertEqual(headers['content-type'],
- self.objects[0]['meta']['content-type'])
-
+ if_modified_since=past)
+ self.assertEqual(o,
+ self.client.retrieve_object(self.containers[1],
+ self.objects[0]['name']))
+ except Fault, f:
+ self.failIf(f.status == 304)
+
+ def test_if_modified_since_invalid_date(self):
+ o = self.client.retrieve_object(self.containers[1],
+ self.objects[0]['name'],
+ if_modified_since='')
+ self.assertEqual(o, self.client.retrieve_object(self.containers[1],
+ self.objects[0]['name']))
+
def test_if_not_modified_since(self):
now = datetime.datetime.utcnow()
since = now + datetime.timedelta(1)
for f in DATE_FORMATS:
- headers = {'if-modified-since':'%s' %since.strftime(f)}
-
#assert not modified
- self.assert_raises_fault(304, self.get_object, self.containers[1],
- self.objects[0]['name'], **headers)
-
-
+ self.assert_raises_fault(304, self.client.retrieve_object,
+ self.containers[1], self.objects[0]['name'],
+ if_modified_since=since.strftime(f))
+
def test_if_unmodified_since(self):
now = datetime.datetime.utcnow()
since = now + datetime.timedelta(1)
for f in DATE_FORMATS:
- headers = {'if-unmodified-since':'%s' %since.strftime(f)}
- status, headers, data = self.get_object(self.containers[1],
- self.objects[0]['name'],
- **headers)
+ t = since.strftime(f)
+ status, headers, data = self.client.request_object(self.containers[1],
+ self.objects[0]['name'],
+ if_unmodified_since=t)
#assert success
self.assertEqual(status, 200)
self.assertEqual(self.objects[0]['data'], data)
#assert content-type
self.assertEqual(headers['content-type'],
- self.objects[0]['meta']['content-type'])
-
+ self.objects[0]['meta']['content_type'])
+
def test_if_unmodified_since_precondition_failed(self):
t = datetime.datetime.utcnow()
t2 = t - datetime.timedelta(minutes=10)
for f in DATE_FORMATS:
past = t2.strftime(f)
-
- headers = {'if-unmodified-since':'%s' %past}
-
#assert precondition failed
- self.assert_raises_fault(412, self.get_object, self.containers[1],
- self.objects[0]['name'], **headers)
-
-
+ self.assert_raises_fault(412, self.client.retrieve_object,
+ self.containers[1], self.objects[0]['name'],
+ if_unmodified_since=past)
+
def test_hashes(self):
l = 8388609
fname = 'largefile'
o = self.upload_random_data(self.containers[1], fname, l)
if o:
- data = self.get_object(self.containers[1],
- fname,
- 'json')[2]
+ data = self.client.retrieve_object(self.containers[1], fname, detail=True)
body = json.loads(data)
hashes = body['hashes']
block_size = body['block_size']
BaseTestCase.setUp(self)
self.account = 'test'
self.container = 'c1'
- self.create_container(self.container)
+ self.client.create_container(self.container)
- def tearDown(self):
- objects = self.list_objects(self.container)[2]
- for o in objects:
- self.client.delete_object(self.container, o)
- self.client.delete_container(self.container)
-
def test_upload(self):
name = o_names[0]
meta = {'test':'test1'}
self.assertEqual(headers['test'], meta['test'])
#assert uploaded content
- status, headers, content = self.get_object(self.container, name)
- self.assertEqual(len(o['data']), int(headers['content-length']))
- self.assertEqual(o['data'], content)
-
+ status, h, data = self.client.request_object(self.container, name)
+ self.assertEqual(len(o['data']), int(h['content-length']))
+ self.assertEqual(o['data'], data)
+
+ #assert content-type
+ self.assertEqual(h['content-type'], o['meta']['content_type'])
+
def test_upload_unprocessable_entity(self):
meta={'etag':'123', 'test':'test1'}
#assert unprocessable entity
- self.assert_raises_fault(422, self.upload_random_data,self.container,
+ self.assert_raises_fault(422, self.upload_random_data, self.container,
o_names[0], **meta)
-
+
def test_chucked_transfer(self):
- fname = './api/tests.py'
- objname = os.path.split(fname)[-1:][0]
- f = open(fname, 'r')
- status = self.client.create_object(self.container,
- objname,
- f,
- chunked=True)[0]
- self.assertEqual(status, 201)
+ data = get_random_data()
+ objname = 'object'
+ self.client.create_object_using_chunks(self.container, objname,
+ StringIO(data))
- uploaded_data = self.get_object(self.container,
- objname)[2]
- f = open(fname, 'r')
- actual_data = f.read()
- self.assertEqual(actual_data, uploaded_data)
+ uploaded_data = self.client.retrieve_object(self.container, objname)
+ self.assertEqual(data, uploaded_data)
class ObjectCopy(BaseTestCase):
def setUp(self):
self.account = 'test'
self.containers = ['c1', 'c2']
for c in self.containers:
- self.create_container(c)
+ self.client.create_container(c)
self.obj = self.upload_random_data(self.containers[0], o_names[0])
-
- def tearDown(self):
- for c in self.containers:
- for o in self.list_objects(c)[2]:
- self.client.delete_object(c, o)
- self.client.delete_container(c)
-
+
def test_copy(self):
with AssertMappingInvariant(self.client.retrieve_object_metadata,
self.containers[0], self.obj['name']):
#assert src object still exists
self.assert_object_exists(self.containers[0], self.obj['name'])
-
+
def test_copy_from_different_container(self):
with AssertMappingInvariant(self.client.retrieve_object_metadata,
self.containers[0], self.obj['name']):
#assert src object still exists
self.assert_object_exists(self.containers[0], self.obj['name'])
-
+
def test_copy_invalid(self):
#copy from invalid object
meta = {'test':'testcopy'}
self.account = 'test'
self.containers = ['c1', 'c2']
for c in self.containers:
- self.create_container(c)
+ self.client.create_container(c)
self.obj = self.upload_random_data(self.containers[0], o_names[0])
-
- def tearDown(self):
- for c in self.containers:
- for o in self.list_objects(c)[2]:
- self.client.delete_object(c, o)
- self.client.delete_container(c)
-
+
def test_update_meta(self):
#perform update metadata
more = {'foo':'foo', 'bar':'bar'}
for k,v in more.items():
self.assertTrue(k in headers.keys())
self.assertTrue(headers[k], v)
-
+
def test_update_object(self,
first_byte_pos=0,
last_byte_pos=499,
length)
partial = last_byte_pos - first_byte_pos + 1
data = get_random_data(partial)
- headers = {'content-range':range,
- 'content-type':'application/octet-stream'}
+ args = {'content_type':'application/octet-stream',
+ 'content_range':'%s' %range}
if content_length:
- headers.update({'content-length':'%s' % content_length})
-
- status = self.client.update_object_data(self.containers[0],
- self.obj['name'],
- data,
- headers)[0]
-
+ args['content_length'] = content_length
+ status = self.client.update_object(self.containers[0], self.obj['name'],
+ StringIO(data), **args)[0]
if partial < 0 or (instance_length and l <= last_byte_pos):
self.assertEqual(status, 202)
else:
- self.assertEqual(status, 204)
-
+ self.assertEqual(status, 204)
#check modified object
- content = self.get_object(self.containers[0], self.obj['name'])[2]
+ content = self.client.retrieve_object(self.containers[0],
+ self.obj['name'])
self.assertEqual(content[0:partial], data)
self.assertEqual(content[partial:l], self.obj['data'][partial:l])
-
+
def test_update_object_no_content_length(self):
self.test_update_object(content_length = None)
-
-
- #fails if the server resets the content-legth
- #def test_update_object_invalid_content_length(self):
- # with AssertContentInvariant(self.get_object, self.containers[0],
- # self.obj['name']):
- # self.test_update_object(content_length = 1000)
-
- def test_update_object_with_unknown_instance_length(self):
- self.test_update_object(instance_length = False)
-
+
+ def test_update_object_invalid_content_length(self):
+ with AssertContentInvariant(self.client.retrieve_object,
+ self.containers[0], self.obj['name']):
+ self.assert_raises_fault(400, self.test_update_object,
+ content_length = 1000)
+
def test_update_object_invalid_range(self):
- with AssertContentInvariant(self.get_object, self.containers[0],
- self.obj['name']):
+ with AssertContentInvariant(self.client.retrieve_object,
+ self.containers[0], self.obj['name']):
self.test_update_object(499, 0, True)
#no use if the server resets the content-legth
def test_update_object_invalid_range_and_length(self):
- with AssertContentInvariant(self.get_object, self.containers[0],
- self.obj['name']):
+ with AssertContentInvariant(self.client.retrieve_object,
+ self.containers[0], self.obj['name']):
self.test_update_object(499, 0, True, -1)
#no use if the server resets the content-legth
def test_update_object_invalid_range_with_no_content_length(self):
- with AssertContentInvariant(self.get_object, self.containers[0],
- self.obj['name']):
+ with AssertContentInvariant(self.client.retrieve_object,
+ self.containers[0], self.obj['name']):
self.test_update_object(499, 0, True, content_length = None)
def test_update_object_out_of_limits(self):
- with AssertContentInvariant(self.get_object, self.containers[0],
- self.obj['name']):
+ with AssertContentInvariant(self.client.retrieve_object,
+ self.containers[0], self.obj['name']):
l = len(self.obj['data'])
self.assert_raises_fault(416, self.test_update_object, 0, l+1, True)
-
+
def test_append(self):
data = get_random_data(500)
- headers = {'content-type':'application/octet-stream',
- 'content-length':'500'}
- status = self.client.update_object_data(self.containers[0],
- self.obj['name'],
- data, headers)[0]
-
- self.assertEqual(status, 204)
+ headers = {}
+ self.client.update_object(self.containers[0], self.obj['name'],
+ StringIO(data), content_length=500,
+ content_type='application/octet-stream')
- content = self.get_object(self.containers[0], self.obj['name'])[2]
+ content = self.client.retrieve_object(self.containers[0],
+ self.obj['name'])
self.assertEqual(len(content), len(self.obj['data']) + 500)
self.assertEqual(content[:-500], self.obj['data'])
-
+
def test_update_with_chunked_transfer(self):
- data, pure = create_random_chunked_data()
- dl = len(pure)
+ data = get_random_data(500)
+ dl = len(data)
fl = len(self.obj['data'])
- meta = {'transfer-encoding':'chunked',
- 'content-range':'bytes 0-/%d' %fl}
- self.update_object(self.containers[0], self.obj['name'], data,
- 'application/octet-stream', **meta)
- #check modified object
- content = self.get_object(self.containers[0], self.obj['name'])[2]
- self.assertEqual(content[0:dl], pure)
- self.assertEqual(content[dl:fl], self.obj['data'][dl:fl])
-
- def test_update_with_chunked_transfer_strict_range(self):
- data, pure = create_random_chunked_data()
- dl = len(pure) - 1
- fl = len(self.obj['data'])
- meta = {'transfer-encoding':'chunked',
- 'content-range':'bytes 0-%d/%d' %(dl, fl)}
- self.update_object(self.containers[0], self.obj['name'], data,
- 'application/octet-stream', **meta)
+ self.client.update_object_using_chunks(self.containers[0],
+ self.obj['name'], StringIO(data),
+ offset=0,
+ content_type='application/octet-stream')
#check modified object
- content = self.get_object(self.containers[0], self.obj['name'])[2]
- self.assertEqual(content[0:dl+1], pure)
- self.assertEqual(content[dl+1:fl], self.obj['data'][dl+1:fl])
+ content = self.client.retrieve_object(self.containers[0],
+ self.obj['name'])
+ self.assertEqual(content[0:dl], data)
+ self.assertEqual(content[dl:fl], self.obj['data'][dl:fl])
class ObjectDelete(BaseTestCase):
def setUp(self):
self.account = 'test'
self.containers = ['c1', 'c2']
for c in self.containers:
- self.create_container(c)
+ self.client.create_container(c)
self.obj = self.upload_random_data(self.containers[0], o_names[0])
-
- def tearDown(self):
- for c in self.containers:
- for o in self.list_objects(c)[2]:
- self.client.delete_object(c, o)
- self.client.delete_container(c)
-
+
def test_delete(self):
#perform delete object
self.client.delete_object(self.containers[0], self.obj['name'])[0]
-
+
def test_delete_invalid(self):
#assert item not found
self.assert_raises_fault(404, self.client.delete_object, self.containers[1],
self.callable = callable
self.args = args
self.kwargs = kwargs
-
+
def __enter__(self):
self.map = self.callable(*self.args, **self.kwargs)
return self.map
-
+
def __exit__(self, type, value, tb):
map = self.callable(*self.args, **self.kwargs)
for k in self.map.keys():
self.callable = callable
self.args = args
self.kwargs = kwargs
-
+
def __enter__(self):
self.content = self.callable(*self.args, **self.kwargs)[2]
return self.content
-
+
def __exit__(self, type, value, tb):
content = self.callable(*self.args, **self.kwargs)[2]
assert self.content == content
h.update(data.rstrip('\x00'))
return h.hexdigest()
-def create_chunked_update_test_file(src, dest):
- fr = open(src, 'r')
- fw = open(dest, 'w')
- data = fr.readline()
- while data:
- fw.write(hex(len(data)))
- fw.write('\r\n')
- fw.write(data)
- data = fr.readline()
- fw.write(hex(0))
- fw.write('\r\n')
-
-def create_random_chunked_data(rows=5):
- i = 0
- out = []
- pure= []
- while i < rows:
- data = get_random_data(random.randint(1, 100))
- out.append(hex(len(data)))
- out.append(data)
- pure.append(data)
- i+=1
- out.append(hex(0))
- out.append('\r\n')
- return '\r\n'.join(out), ''.join(pure)
-
def get_random_data(length=500):
char_set = string.ascii_uppercase + string.digits
return ''.join(random.choice(char_set) for x in range(length))
'photos/me.jpg']
if __name__ == "__main__":
- unittest.main()
+ unittest.main()
\ No newline at end of file
self.debug = debug
self.token = token
- def _chunked_transfer(self, path, method='PUT', f=stdin, headers=None,
- blocksize=1024):
-
- http = HTTPConnection(self.host)
-
- # write header
- path = '/%s/%s%s' % (self.api, self.account, path)
- http.putrequest(method, path)
- http.putheader('x-auth-token', self.token)
- http.putheader('content-type', 'application/octet-stream')
- http.putheader('transfer-encoding', 'chunked')
- if headers:
- for header,value in headers.items():
- http.putheader(header, value)
- http.endheaders()
-
- # write body
- data = ''
- while True:
- if f.closed:
- break
- block = f.read(blocksize)
- if block == '':
- break
- data = '%s\r\n%s\r\n' % (hex(len(block)), block)
- try:
- http.send(data)
- except:
- #retry
- http.send(data)
- data = '0x0\r\n'
- try:
- http.send(data)
- except:
- #retry
- http.send(data)
-
- # get response
- resp = http.getresponse()
-
- headers = dict(resp.getheaders())
-
- if self.verbose:
- print '%d %s' % (resp.status, resp.reason)
- for key, val in headers.items():
- print '%s: %s' % (key.capitalize(), val)
- print
-
- length = resp.getheader('Content-length', None)
- data = resp.read(length)
- if self.debug:
- print data
- print
-
- if int(resp.status) in ERROR_CODES.keys():
- raise Fault(data, int(resp.status))
-
- #print '*', resp.status, headers, data
- return resp.status, headers, data
-
- def _req(self, method, path, body=None, headers=None, format='text',
- params=None):
+ def _req(self, method, path, body=None, headers={}, format='text',
+ params={}):
full_path = '/%s/%s%s?format=%s' % (self.api, self.account, path,
format)
- if params:
- for k,v in params.items():
- if v:
- full_path = '%s&%s=%s' %(full_path, k, v)
- else:
- full_path = '%s&%s' %(full_path, k)
+ for k,v in params.items():
+ if v:
+ full_path = '%s&%s=%s' %(full_path, k, v)
+ else:
+ full_path = '%s&%s' %(full_path, k)
conn = HTTPConnection(self.host)
#encode whitespace
full_path = full_path.replace(' ', '%20')
kwargs = {}
- kwargs['headers'] = headers or {}
+ for k,v in headers.items():
+ headers.pop(k)
+ k = k.replace('_', '-')
+ headers[k] = v
+
+ kwargs['headers'] = headers
kwargs['headers']['X-Auth-Token'] = self.token
- if not headers or \
- 'transfer-encoding' not in headers \
- or headers['transfer-encoding'] != 'chunked':
- kwargs['headers']['content-length'] = len(body) if body else 0
if body:
kwargs['body'] = body
else:
kwargs['headers']['content-type'] = ''
+ kwargs['headers'].setdefault('content-length', len(body) if body else 0)
kwargs['headers'].setdefault('content-type', 'application/octet-stream')
try:
#print '*', method, full_path, kwargs
#print '*', resp.status, headers, data
return resp.status, headers, data
- def delete(self, path, format='text'):
- return self._req('DELETE', path, format=format)
+ def delete(self, path, format='text', params={}):
+ return self._req('DELETE', path, format=format, params=params)
- def get(self, path, format='text', headers=None, params=None):
+ def get(self, path, format='text', headers=None, params={}):
return self._req('GET', path, headers=headers, format=format,
params=params)
- def head(self, path, format='text', params=None):
+ def head(self, path, format='text', params={}):
return self._req('HEAD', path, format=format, params=params)
- def post(self, path, body=None, format='text', headers=None, params=None):
+ def post(self, path, body=None, format='text', headers=None, params={}):
return self._req('POST', path, body, headers=headers, format=format,
params=params)
def put(self, path, body=None, format='text', headers=None):
return self._req('PUT', path, body, headers=headers, format=format)
- def _list(self, path, detail=False, params=None, headers=None):
+ def _list(self, path, detail=False, params={}, **headers):
format = 'json' if detail else 'text'
status, headers, data = self.get(path, format=format, headers=headers,
params=params)
if detail:
data = json.loads(data) if data else ''
else:
- data = data.strip().split('\n')
+ data = data.strip().split('\n') if data else ''
return data
- def _get_metadata(self, path, prefix=None, params=None):
+ def _get_metadata(self, path, prefix=None, params={}):
status, headers, data = self.head(path, params=params)
prefixlen = len(prefix) if prefix else 0
meta = {}
meta[key] = val
return meta
- def _update_metadata(self, path, entity, **meta):
+ def _filter(self, l, d):
"""
- adds new and updates the values of previously set metadata
+ filter out from l elements having the metadata values provided
"""
- params = {'update':None}
+ ll = l
+ for elem in l:
+ if type(elem) == types.DictionaryType:
+ for key in d.keys():
+ k = 'x_object_meta_%s' % key
+ if k in elem.keys() and elem[k] == d[key]:
+ ll.remove(elem)
+ break
+ return ll
+
+class OOS_Client(Client):
+    """Openstack Object Storage Client"""
+
+ def _update_metadata(self, path, entity, **meta):
+ """adds new and updates the values of previously set metadata"""
+ ex_meta = self.retrieve_account_metadata(restricted=True)
+ ex_meta.update(meta)
headers = {}
prefix = 'x-%s-meta-' % entity
- for k,v in meta.items():
+ for k,v in ex_meta.items():
k = '%s%s' % (prefix, k)
headers[k] = v
return self.post(path, headers=headers, params=params)
def _delete_metadata(self, path, entity, meta=[]):
- """
- delete previously set metadata
- """
- params = {'update':None}
+        """deletes previously set metadata"""
+ ex_meta = self.retrieve_account_metadata(restricted=True)
headers = {}
prefix = 'x-%s-meta-' % entity
- for m in meta:
- headers['%s%s' % (prefix, m)] = None
+ for k in ex_meta.keys():
+ if k in meta:
+ headers['%s%s' % (prefix, k)] = ex_meta[k]
return self.post(path, headers=headers)
# Storage Account Services
- def list_containers(self, detail=False, params=None, headers=None):
- return self._list('', detail, params, headers)
+ def list_containers(self, detail=False, limit=10000, marker=None, params={},
+ **headers):
+ """lists containers"""
+ if not params:
+ params = {}
+ params.update({'limit':limit, 'marker':marker})
+ return self._list('', detail, params, **headers)
- def account_metadata(self, restricted=False, until=None):
+ def retrieve_account_metadata(self, restricted=False, **params):
+ """returns the account metadata"""
prefix = 'x-account-meta-' if restricted else None
- params = {'until':until} if until else None
- return self._get_metadata('', prefix, params=params)
+ return self._get_metadata('', prefix, params)
def update_account_metadata(self, **meta):
+ """updates the account metadata"""
return self._update_metadata('', 'account', **meta)
def delete_account_metadata(self, meta=[]):
+ """deletes the account metadata"""
return self._delete_metadata('', 'account', meta)
- def set_account_groups(self, **groups):
- """
- create account groups
- """
- headers = {}
- for key, val in groups.items():
- headers['x-account-group-%s' % key] = val
- params = {'update':None}
- return self.post('', headers=headers, params=params)
-
- def unset_account_groups(self, groups=[]):
- """
- delete account groups
- """
- headers = {}
- for elem in groups:
- headers['x-account-group-%s' % elem] = ''
- params = {'update':None}
- return self.post('', headers=headers, params=params)
-
# Storage Container Services
- def _filter(self, l, d):
- """
- filter out from l elements having the metadata values provided
- """
- ll = l
- for elem in l:
- if type(elem) == types.DictionaryType:
- for key in d.keys():
- k = 'x_object_meta_%s' % key
- if k in elem.keys() and elem[k] == d[key]:
- ll.remove(elem)
- break
- return ll
-
def _filter_trashed(self, l):
return self._filter(l, {'trash':'true'})
- def list_objects(self, container, detail=False, headers=None,
- include_trashed=False, **params):
- l = self._list('/' + container, detail, params, headers)
+ def list_objects(self, container, detail=False, limit=10000, marker=None,
+ prefix=None, delimiter=None, path=None,
+ include_trashed=False, params={}, **headers):
+ """returns a list with the container objects"""
+ params.update({'limit':limit, 'marker':marker, 'prefix':prefix,
+ 'delimiter':delimiter, 'path':path})
+ l = self._list('/' + container, detail, params, **headers)
if not include_trashed:
l = self._filter_trashed(l)
return l
- def create_container(self, container, headers=None, **meta):
+ def create_container(self, container, **meta):
+ """creates a container"""
+ headers = {}
for k,v in meta.items():
headers['x-container-meta-%s' %k.strip().upper()] = v.strip()
status, header, data = self.put('/' + container, headers=headers)
raise Fault(data, int(status))
return True
- def delete_container(self, container):
- return self.delete('/' + container)
+ def delete_container(self, container, params={}):
+ """deletes a container"""
+ return self.delete('/' + container, params=params)
- def retrieve_container_metadata(self, container, restricted=False,
- until=None):
+ def retrieve_container_metadata(self, container, restricted=False, **params):
+ """returns the container metadata"""
prefix = 'x-container-meta-' if restricted else None
- params = {'until':until} if until else None
- return self._get_metadata('/%s' % container, prefix, params=params)
+ return self._get_metadata('/%s' % container, prefix, params)
def update_container_metadata(self, container, **meta):
+        """updates the container metadata"""
return self._update_metadata('/' + container, 'container', **meta)
def delete_container_metadata(self, container, meta=[]):
+ """deletes the container metadata"""
path = '/%s' % (container)
return self._delete_metadata(path, 'container', meta)
- def set_container_policies(self, container, **policies):
- path = '/%s' % (container)
- headers = {}
- print ''
- for key, val in policies.items():
- headers['x-container-policy-%s' % key] = val
- return self.post(path, headers=headers)
-
# Storage Object Services
- def retrieve_object(self, container, object, detail=False, headers=None,
- version=None):
+ def request_object(self, container, object, detail=False, params={},
+ **headers):
+        """returns a tuple containing the status, headers and data response for an object request"""
path = '/%s/%s' % (container, object)
- format = 'json' if detail else 'text'
- params = {'version':version} if version else None
+ format = 'json' if detail else 'text'
status, headers, data = self.get(path, format, headers, params)
- return data
+ return status, headers, data
+
+ def retrieve_object(self, container, object, detail=False, params={},
+ **headers):
+ """returns an object's data"""
+ t = self.request_object(container, object, detail, params, **headers)
+ return t[2]
def create_directory_marker(self, container, object):
+        """creates a directory marker"""
if not object:
raise Fault('Directory markers have to be nested in a container')
h = {'Content-Type':'application/directory'}
- return self.create_object(container, object, f=None, headers=h)
-
- def _set_public_header(self, headers, public=False):
- """
- sets the public header
- """
- if public == None:
- return
- elif public:
- headers['x-object-public'] = public
- else:
- headers['x-object-public'] = ''
-
- def create_object(self, container, object, f=stdin, chunked=False,
- blocksize=1024, headers={}, use_hashes=False,
- public=None, **meta):
- """
- creates an object
- if f is None then creates a zero length object
- if f is stdin or chunked is set then performs chunked transfer
- """
- path = '/%s/%s' % (container, object)
- for k,v in meta.items():
- headers['x-object-meta-%s' %k.strip().upper()] = v.strip()
- self._set_public_header(headers, public)
- headers = headers if headers else None
- if not chunked:
- format = 'json' if use_hashes else 'text'
- data = f.read() if f else None
- if data:
- if format == 'json':
- try:
- data = eval(data)
- data = json.dumps(data)
- except SyntaxError:
- raise Fault('Invalid formatting')
- return self.put(path, data, headers=headers, format=format)
- else:
- return self._chunked_transfer(path, 'PUT', f, headers=headers,
- blocksize=1024)
+ return self.create_zero_length_object(container, object, **h)
- def update_object_data(self, container, object, data=None, headers={},
- offset=None, public=None, **meta):
+ def create_object(self, container, object, f=stdin, format='text', meta={},
+ etag=None, content_type=None, content_encoding=None,
+ content_disposition=None, **headers):
+ """creates an object"""
path = '/%s/%s' % (container, object)
+ for k, v in headers.items():
+ if not v:
+ headers.pop(k)
+
+ l = ['etag', 'content_encoding', 'content_disposition', 'content_type']
+ l = [elem for elem in l if eval(elem)]
+ for elem in l:
+ headers.update({elem:eval(elem)})
+
for k,v in meta.items():
headers['x-object-meta-%s' %k.strip()] = v.strip()
- if 'content-range' not in headers.keys():
- if offset:
- headers['content-range'] = 'bytes %s-/*' % offset
- else:
- headers['content-range'] = 'bytes */*'
- self._set_public_header(headers, public)
- headers = headers if headers else None
- return self.post(path, data, headers=headers)
+ data = f.read() if f else None
+ return self.put(path, data, format, headers=headers)
- def update_object(self, container, object, f=stdin, chunked=False,
- blocksize=1024, headers={}, offset=None, public=None,
- **meta):
+ def update_object(self, container, object, f=stdin, offset=None, meta={},
+ content_length=None, content_type=None,
+ content_encoding=None, content_disposition=None,
+ **headers):
path = '/%s/%s' % (container, object)
+ for k, v in headers.items():
+ if not v:
+ headers.pop(k)
+
+ l = ['content_encoding', 'content_disposition', 'content_type',
+ 'content_length']
+ l = [elem for elem in l if eval(elem)]
+ for elem in l:
+ headers.update({elem:eval(elem)})
+
+ if 'content_range' not in headers.keys():
+ if offset != None:
+ headers['content_range'] = 'bytes %s-/*' % offset
+ else:
+ headers['content_range'] = 'bytes */*'
+
for k,v in meta.items():
headers['x-object-meta-%s' %k.strip()] = v.strip()
- if offset:
- headers['content-range'] = 'bytes %s-/*' % offset
- else:
- headers['content-range'] = 'bytes */*'
- self._set_public_header(headers, public)
- headers = headers if headers else None
- if not chunked and f != stdin:
- data = f.read() if f else None
- return self.post(path, data, headers=headers)
- else:
- return self._chunked_transfer(path, 'POST', f, headers=headers,
- blocksize=1024)
+ data = f.read() if f else None
+ return self.post(path, data, headers=headers)
def _change_obj_location(self, src_container, src_object, dst_container,
dst_object, remove=False, public=False, **meta):
dst_container, dst_object, True,
public, **meta)
- def delete_object(self, container, object):
- return self.delete('/%s/%s' % (container, object))
+ def delete_object(self, container, object, params={}):
+ return self.delete('/%s/%s' % (container, object), params=params)
def retrieve_object_metadata(self, container, object, restricted=False,
version=None):
"""
path = '/%s/%s' % (container, object)
prefix = 'x-object-meta-' if restricted else None
- params = {'version':version} if version else None
+ params = {'version':version} if version else {}
return self._get_metadata(path, prefix, params=params)
def update_object_metadata(self, container, object, **meta):
path = '/%s/%s' % (container, object)
return self._delete_metadata(path, 'object', meta)
- def trash_object(self, container, object):
+class Pithos_Client(OOS_Client):
+ """Pithos Storage Client. Extends OOS_Client"""
+
def _chunked_transfer(self, path, method='PUT', f=stdin, headers=None,
                      blocksize=1024):
    """Perform a streaming request with Transfer-Encoding: chunked.

    Reads `f` in `blocksize` pieces and sends each as an HTTP chunk.
    Returns (status, response-headers dict, response body).
    Raises Fault for statuses listed in ERROR_CODES.
    """
    # Use a raw HTTPConnection so the body can be streamed piecewise;
    # the higher-level request() helpers buffer the whole body.
    http = HTTPConnection(self.host)

    # write header
    path = '/%s/%s%s' % (self.api, self.account, path)
    http.putrequest(method, path)
    http.putheader('x-auth-token', self.token)
    http.putheader('content-type', 'application/octet-stream')
    http.putheader('transfer-encoding', 'chunked')
    if headers:
        for header,value in headers.items():
            http.putheader(header, value)
    http.endheaders()

    # write body
    data = ''
    while True:
        if f.closed:
            break
        block = f.read(blocksize)
        if block == '':
            break
        # chunked encoding frame: hex length, CRLF, payload, CRLF
        data = '%s\r\n%s\r\n' % (hex(len(block)), block)
        try:
            http.send(data)
        except:
            # HACK: one blind retry on any send failure — TODO confirm
            # this is intended; a failed retry will raise out of here.
            #retry
            http.send(data)
    # zero-length chunk terminates the chunked body
    data = '0x0\r\n'
    try:
        http.send(data)
    except:
        #retry
        http.send(data)

    # get response
    resp = http.getresponse()

    headers = dict(resp.getheaders())

    if self.verbose:
        print '%d %s' % (resp.status, resp.reason)
        for key, val in headers.items():
            print '%s: %s' % (key.capitalize(), val)
        print

    length = resp.getheader('Content-length', None)
    data = resp.read(length)
    if self.debug:
        print data
        print

    # Translate HTTP error statuses into Fault, as the non-chunked
    # request paths do.
    if int(resp.status) in ERROR_CODES.keys():
        raise Fault(data, int(resp.status))

    #print '*', resp.status, headers, data
    return resp.status, headers, data
+
+ def _update_metadata(self, path, entity, **meta):
+ """
+ adds new and updates the values of previously set metadata
+ """
+ params = {'update':None}
+ headers = {}
+ prefix = 'x-%s-meta-' % entity
+ for k,v in meta.items():
+ k = '%s%s' % (prefix, k)
+ headers[k] = v
+ return self.post(path, headers=headers, params=params)
+
+ def _delete_metadata(self, path, entity, meta=[]):
"""
- trashes an object
- actually resets all object metadata with trash = true
+ delete previously set metadata
"""
+ params = {'update':None}
+ headers = {}
+ prefix = 'x-%s-meta-' % entity
+ for m in meta:
+ headers['%s%s' % (prefix, m)] = None
+ return self.post(path, headers=headers, params=params)
+
+ # Storage Account Services
+
def list_containers(self, detail=False, if_modified_since=None,
                    if_unmodified_since=None, limit=1000, marker=None,
                    until=None):
    """Return the account's containers, optionally as of a past timestamp."""
    params = {'until':until} if until else None
    conditions = {'if-modified-since':if_modified_since,
                  'if-unmodified-since':if_unmodified_since}
    return OOS_Client.list_containers(self, detail=detail, limit=limit,
                                      marker=marker, params=params,
                                      **conditions)
+
def retrieve_account_metadata(self, restricted=False, until=None):
    """Return the account metadata, optionally as of a past timestamp."""
    extra = {'until':until} if until else {}
    return OOS_Client.retrieve_account_metadata(self, restricted=restricted,
                                                **extra)
+
def set_account_groups(self, **groups):
    """Create/update account groups."""
    headers = dict(('x-account-group-%s' % key, val)
                   for key, val in groups.items())
    return self.post('', headers=headers, params={'update':None})
+
def unset_account_groups(self, groups=[]):
    """Delete account groups (by posting them with empty values)."""
    headers = dict(('x-account-group-%s' % name, '') for name in groups)
    return self.post('', headers=headers, params={'update':None})
+
+ # Storage Container Services
+
def list_objects(self, container, detail=False, limit=10000, marker=None,
                 prefix=None, delimiter=None, path=None,
                 include_trashed=False, params={}, if_modified_since=None,
                 if_unmodified_since=None, meta={}, until=None):
    """Return the container's objects.

    NOTE: the `params` argument is accepted for interface compatibility
    but is ignored (as before): the forwarded params are always rebuilt
    from `until` and `meta`.  Forwarding is now explicit instead of the
    fragile locals()/pop trick.
    """
    params = {'until':until, 'meta':meta}
    return OOS_Client.list_objects(self, container, params=params,
                                   detail=detail, limit=limit, marker=marker,
                                   prefix=prefix, delimiter=delimiter,
                                   path=path, include_trashed=include_trashed,
                                   if_modified_since=if_modified_since,
                                   if_unmodified_since=if_unmodified_since)
+
def retrieve_container_metadata(self, container, restricted=False,
                                until=None):
    """Return a container's metadata, optionally as of a past timestamp."""
    extra = {'until':until} if until else {}
    return OOS_Client.retrieve_container_metadata(self, container,
                                                  restricted=restricted,
                                                  **extra)
+
def set_container_policies(self, container, **policies):
    """Set the container's policies (e.g. quota, versioning).

    Fix: removed a stray debug `print ''` statement.
    """
    path = '/%s' % (container)
    headers = {}
    for key, val in policies.items():
        headers['x-container-policy-%s' % key] = val
    return self.post(path, headers=headers)
+
def delete_container(self, container, until=None):
    """Delete a container, or only its history up to `until`."""
    extra = {'until':until} if until else {}
    return OOS_Client.delete_container(self, container, extra)
+
+ # Storage Object Services
+
def retrieve_object(self, container, object, params={}, detail=False, range=None,
                    if_range=None, if_match=None, if_none_match=None,
                    if_modified_since=None, if_unmodified_since=None,
                    **headers):
    """Return an object's data (or metadata listing when detail is set).

    NOTE: extra **headers keyword arguments are discarded, preserving the
    previous behavior (the dict was immediately rebound to {}); only the
    explicitly named conditional headers are forwarded.  The eval() based
    header assembly was replaced with explicit checks.
    """
    headers = {}
    if range:
        headers['range'] = range
    if if_range:
        headers['if_range'] = if_range
    if if_match:
        headers['if_match'] = if_match
    if if_none_match:
        headers['if_none_match'] = if_none_match
    if if_modified_since:
        headers['if_modified_since'] = if_modified_since
    if if_unmodified_since:
        headers['if_unmodified_since'] = if_unmodified_since
    return OOS_Client.retrieve_object(self, container, object, detail=detail,
                                      params=params, **headers)
+
def retrieve_object_version(self, container, object, version, detail=False,
                            range=None, if_range=None, if_match=None,
                            if_none_match=None, if_modified_since=None,
                            if_unmodified_since=None):
    """Return a specific version of an object."""
    # Forward every optional argument explicitly; the original built the
    # same kwargs set via locals().  `version` is also forwarded as a
    # keyword (retrieve_object swallows it via **headers), as before.
    params = {'version':version}
    return self.retrieve_object(container, object, params,
                                version=version, detail=detail, range=range,
                                if_range=if_range, if_match=if_match,
                                if_none_match=if_none_match,
                                if_modified_since=if_modified_since,
                                if_unmodified_since=if_unmodified_since)
+
def retrieve_object_versionlist(self, container, object, range=None,
                                if_range=None, if_match=None,
                                if_none_match=None, if_modified_since=None,
                                if_unmodified_since=None):
    """Return the full version list of an object."""
    # The version list is exposed as the special version 'list' with
    # detailed (json) output.
    return self.retrieve_object_version(container, object, version='list',
                                        detail=True, range=range,
                                        if_range=if_range, if_match=if_match,
                                        if_none_match=if_none_match,
                                        if_modified_since=if_modified_since,
                                        if_unmodified_since=if_unmodified_since)
+
def create_object(self, container, object, f=stdin, meta={},
                  etag=None, content_type=None, content_encoding=None,
                  content_disposition=None, x_object_manifest=None,
                  x_object_sharing=None, x_object_public=None):
    """Create an object, forwarding every optional attribute explicitly."""
    return OOS_Client.create_object(self, container, object, f=f, meta=meta,
                                    etag=etag, content_type=content_type,
                                    content_encoding=content_encoding,
                                    content_disposition=content_disposition,
                                    x_object_manifest=x_object_manifest,
                                    x_object_sharing=x_object_sharing,
                                    x_object_public=x_object_public)
+
def create_object_using_chunks(self, container, object, f=stdin,
                               blocksize=1024, meta={}, etag=None,
                               content_type=None, content_encoding=None,
                               content_disposition=None,
                               x_object_sharing=None,
                               x_object_manifest=None,
                               x_object_public=None):
    """Create an object, streaming its data with chunked transfer encoding.

    The eval() based header assembly was replaced with an explicit
    mapping of the optional arguments.
    """
    path = '/%s/%s' % (container, object)
    headers = {}
    optional = {'etag':etag, 'content_type':content_type,
                'content_encoding':content_encoding,
                'content_disposition':content_disposition,
                'x_object_sharing':x_object_sharing,
                'x_object_manifest':x_object_manifest,
                'x_object_public':x_object_public}
    # only forward the attributes the caller actually set
    for k, v in optional.items():
        if v:
            headers[k] = v

    for k, v in meta.items():
        headers['x-object-meta-%s' % k.strip()] = v.strip()

    return self._chunked_transfer(path, 'PUT', f, headers=headers,
                                  blocksize=blocksize)
+
def create_object_by_hashmap(self, container, object, f=stdin, format='json',
                             meta={}, etag=None, content_encoding=None,
                             content_disposition=None, content_type=None,
                             x_object_sharing=None, x_object_manifest=None,
                             x_object_public=None):
    """Create an object by uploading hashes representing its data.

    Fixes: the original signature was missing `self` (any call raised
    TypeError), and forwarded the unsupported `format` keyword to
    create_object.
    NOTE(review): `data` is parsed/re-serialized but never forwarded —
    this mirrors the original behavior; confirm whether the hashmap body
    was meant to be sent instead of `f`.
    """
    data = f.read() if f else None
    if data and format == 'json':
        # SECURITY: eval() of uploaded content is dangerous on untrusted
        # input; kept only for compatibility — consider ast.literal_eval.
        try:
            data = eval(data)
            data = json.dumps(data)
        except SyntaxError:
            raise Fault('Invalid formatting')

    #TODO check with xml
    return self.create_object(container, object, f=f, meta=meta, etag=etag,
                              content_encoding=content_encoding,
                              content_disposition=content_disposition,
                              content_type=content_type,
                              x_object_sharing=x_object_sharing,
                              x_object_manifest=x_object_manifest,
                              x_object_public=x_object_public)
+
def create_manifestation(self, container, object, manifest):
    """Create a zero-length manifest object pointing at uploaded parts."""
    extra = {'x_object_manifest':manifest}
    return self.create_object(container, object, f=None, **extra)
+
def update_object(self, container, object, f=stdin, offset=None, meta={},
                  content_length=None, content_type=None, content_range=None,
                  content_encoding=None, content_disposition=None,
                  x_object_bytes=None, x_object_manifest=None,
                  x_object_sharing=None, x_object_public=None):
    """Update an object's data and/or attributes.

    Fix: the original defined a leftover `spath` local before capturing
    locals(), so a bogus `spath` header was forwarded with every request.
    Arguments are now forwarded explicitly.
    """
    return OOS_Client.update_object(self, container, object, f=f,
                                    offset=offset, meta=meta,
                                    content_length=content_length,
                                    content_type=content_type,
                                    content_range=content_range,
                                    content_encoding=content_encoding,
                                    content_disposition=content_disposition,
                                    x_object_bytes=x_object_bytes,
                                    x_object_manifest=x_object_manifest,
                                    x_object_sharing=x_object_sharing,
                                    x_object_public=x_object_public)
+
def update_object_using_chunks(self, container, object, f=stdin,
                               blocksize=1024, offset=None, meta={},
                               content_type=None, content_encoding=None,
                               content_disposition=None, x_object_bytes=None,
                               x_object_manifest=None, x_object_sharing=None,
                               x_object_public=None):
    """Update an object, streaming the new data with chunked encoding.

    The eval() based header assembly was replaced with an explicit
    mapping of the optional arguments.
    """
    path = '/%s/%s' % (container, object)
    headers = {}
    optional = {'content_type':content_type,
                'content_encoding':content_encoding,
                'content_disposition':content_disposition,
                'x_object_bytes':x_object_bytes,
                'x_object_manifest':x_object_manifest,
                'x_object_sharing':x_object_sharing,
                'x_object_public':x_object_public}
    # only forward the attributes the caller actually set
    for k, v in optional.items():
        if v:
            headers[k] = v

    # 'bytes <offset>-/*' patches at the given offset; '*/*' appends
    if offset is not None:
        headers['content_range'] = 'bytes %s-/*' % offset
    else:
        headers['content_range'] = 'bytes */*'

    for k, v in meta.items():
        headers['x-object-meta-%s' % k.strip()] = v.strip()

    return self._chunked_transfer(path, 'POST', f, headers=headers,
                                  blocksize=blocksize)
+
def delete_object(self, container, object, until=None):
    """Delete an object, or only its history up to `until`."""
    extra = {'until':until} if until else {}
    return OOS_Client.delete_object(self, container, object, extra)
+
def trash_object(self, container, object):
    """Move an object to the trash by setting its trash=true meta flag."""
    target = '/%s/%s' % (container, object)
    return self._update_metadata(target, 'object', trash='true')
def restore_object(self, container, object):
    """Restore a trashed object by removing its trash meta flag."""
    return self.delete_object_metadata(container, object, ['trash'])
+ def _set_public_header(self, headers, public=False):
+ """sets the public header"""
+ if not headers:
+ headers = {}
+ if public == None:
+ return
+ else:
+ headers['x-object-public'] = public if public else ''
+
def publish_object(self, container, object):
    """Make a previously created object publicly accessible."""
    target = '/%s/%s' % (container, object)
    headers = {'content_range':'bytes */*'}
    self._set_public_header(headers, public=True)
    return self.post(target, headers=headers)
def unpublish_object(self, container, object):
    """Revoke public access from an object."""
    target = '/%s/%s' % (container, object)
    headers = {'content_range':'bytes */*'}
    self._set_public_header(headers, public=False)
    return self.post(target, headers=headers)
--- /dev/null
+# Copyright 2011 GRNET S.A. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or
+# without modification, are permitted provided that the following
+# conditions are met:
+#
+# 1. Redistributions of source code must retain the above
+# copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
+# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
+# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+#
+# The views and conclusions contained in the software and
+# documentation are those of the authors and should not be
+# interpreted as representing official policies, either expressed
+# or implied, of GRNET S.A.
+
# Django settings for pithos project.
import os

# Absolute path of the package directory, with a trailing slash.
PROJECT_PATH = os.path.dirname(os.path.abspath(__file__)) + '/'

DEBUG = True
TEMPLATE_DEBUG = DEBUG

# Selects the test storage backend below.
TEST = True

ADMINS = (
    # ('Your Name', 'your_email@domain.com'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': PROJECT_PATH + 'pithos.db', # Or path to database file if using sqlite3.
        'USER': '', # Not used with sqlite3.
        'PASSWORD': '', # Not used with sqlite3.
        'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '', # Set to empty string for default. Not used with sqlite3.
    }
}

# The backend to use and its initilization options.
if TEST:
    BACKEND = ('SimpleBackend', (os.path.join(PROJECT_PATH, 'data/testpithos.db'),))
else:
    BACKEND = ('SimpleBackend', (os.path.join(PROJECT_PATH, 'data/pithos.db'),))

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'UTC'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''

# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'

# Make this unique, and don't share it with anybody.
# SECURITY NOTE: committed secret key — rotate before any real deployment.
SECRET_KEY = '$j0cdrfm*0sc2j+e@@2f-&3-_@2=^!z#+b-8o4_i10@2%ev7si'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
#     'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
#    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'pithos.middleware.LoggingConfigMiddleware',
    'pithos.api.auth.DummyAuthMiddleware'
)

ROOT_URLCONF = 'pithos.urls'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
#    'django.contrib.admin',
#    'django.contrib.admindocs',
    'api',
    'public'
)

# Development-only token -> username map consumed by DummyAuthMiddleware.
# NOTE(review): hard-coded credentials — for local testing only.
AUTH_TOKENS = {
    '0000': 'test',
    '0001': 'verigak',
    '0002': 'chazapis',
    '0003': 'gtsouk',
    '0004': 'papagian',
    '0005': 'louridas',
    '0006': 'chstath',
    '0007': 'pkanavos'}
from optparse import OptionParser
from os import environ
from sys import argv, exit, stdin, stdout
-from pithos.lib.client import Client, Fault
+from pithos.lib.client import Pithos_Client, Fault
from datetime import datetime
import json
import time as _time
import os
-DEFAULT_HOST = 'pithos.dev.grnet.gr'
-#DEFAULT_HOST = '127.0.0.1:8000'
-#DEFAULT_API = 'v1'
#DEFAULT_HOST = 'pithos.dev.grnet.gr'
# Default target server; can be overridden via $PITHOS_SERVER (see _get_server).
DEFAULT_HOST = '127.0.0.1:8000'
DEFAULT_API = 'v1'
# Command-name registry — presumably populated by the cli_command
# decorator used below; confirm against the decorator's definition.
_cli_commands = {}
def __init__(self, name, argv):
parser = OptionParser('%%prog %s [options] %s' % (name, self.syntax))
parser.add_option('--host', dest='host', metavar='HOST',
- default=DEFAULT_HOST, help='use server HOST')
+ default=_get_server(), help='use server HOST')
parser.add_option('--user', dest='user', metavar='USERNAME',
default=_get_user(),
help='use account USERNAME')
val = getattr(options, key)
setattr(self, key, val)
- self.client = Client(self.host, self.token, self.user, self.api, self.verbose,
+ self.client = Pithos_Client(self.host, self.token, self.user, self.api, self.verbose,
self.debug)
self.parser = parser
self.args = args
-
+
+ def _build_args(self, attrs):
+ args = {}
+ for a in [a for a in attrs if getattr(self, a)]:
+ args[a] = getattr(self, a)
+ return args
+
def add_options(self, parser):
    """Hook for subclasses to register command-specific parser options."""
    pass
parser.add_option('-l', action='store_true', dest='detail',
default=False, help='show detailed output')
parser.add_option('-n', action='store', type='int', dest='limit',
- default=1000, help='show limited output')
+ default=10000, help='show limited output')
parser.add_option('--marker', action='store', type='str',
dest='marker', default=None,
help='show output greater then marker')
self.list_containers()
def list_containers(self):
    """List the account's containers, honouring the CLI filter options."""
    attrs = ['detail', 'limit', 'marker', 'if_modified_since',
             'if_unmodified_since']
    args = self._build_args(attrs)
    if self.until:
        parsed = _time.strptime(self.until, self.format)
        args['until'] = int(_time.mktime(parsed))
    print_list(self.client.list_containers(**args))
def list_objects(self, container):
    """List a container's objects, honouring the CLI filter options.

    Cleanup: removed dead locals (`params`, `detail`, `show_trashed`)
    left over from the previous client API, and fixed comment typos.
    """
    # prepare request arguments from the set CLI options
    attrs = ['detail', 'limit', 'marker', 'prefix', 'delimiter', 'path',
             'meta', 'if_modified_since', 'if_unmodified_since']
    args = self._build_args(attrs)
    if self.until:
        t = _time.strptime(self.until, self.format)
        args['until'] = int(_time.mktime(t))
    container, sep, object = container.partition('/')
    if object:
        # an object path was given instead of a container name
        return
    l = self.client.list_objects(container, **args)
    print_list(l, detail=self.detail)
@cli_command('meta')
def execute(self, path=''):
container, sep, object = path.partition('/')
+ args = {'restricted':self.restricted}
if self.until:
t = _time.strptime(self.until, self.format)
- self.until = int(_time.mktime(t))
+ args['until'] = int(_time.mktime(t))
+
if object:
meta = self.client.retrieve_object_metadata(container, object,
self.restricted,
self.version)
elif container:
- meta = self.client.retrieve_container_metadata(container,
- self.restricted,
- self.until)
+ meta = self.client.retrieve_container_metadata(container, **args)
else:
- meta = self.client.account_metadata(self.restricted, self.until)
+ meta = self.client.retrieve_account_metadata(**args)
if meta == None:
print 'Entity does not exist'
else:
description = 'create a container'
def execute(self, container, *args):
- headers = {}
meta = {}
for arg in args:
key, sep, val = arg.partition('=')
meta[key] = val
- ret = self.client.create_container(container, headers, **meta)
+ ret = self.client.create_container(container, **meta)
if not ret:
print 'Container already exists'
syntax = '<container>[/<object>]'
description = 'delete a container or an object'
def add_options(self, parser):
    """Register the --until/--format options for history deletion."""
    parser.add_option('--until', action='store', dest='until',
                      default=False, help='remove history until that date')
    parser.add_option('--format', action='store', dest='format',
                      default='%d/%m/%Y', help='format to parse until date')
+
def execute(self, path):
    """Delete a container or object; with --until, only its history.

    Fix: `until` was unbound (NameError) whenever --until was not given.
    """
    container, sep, object = path.partition('/')
    until = None
    if self.until:
        t = _time.strptime(self.until, self.format)
        until = int(_time.mktime(t))

    if object:
        self.client.delete_object(container, object, until)
    else:
        self.client.delete_container(container, until)
@cli_command('get')
class GetObject(Command):
default=False, help='show detailed output')
parser.add_option('--range', action='store', dest='range',
default=None, help='show range of data')
- parser.add_option('--if-range', action='store', dest='if-range',
+ parser.add_option('--if-range', action='store', dest='if_range',
default=None, help='show range of data')
- parser.add_option('--if-match', action='store', dest='if-match',
+ parser.add_option('--if-match', action='store', dest='if_match',
default=None, help='show output if ETags match')
parser.add_option('--if-none-match', action='store',
- dest='if-none-match', default=None,
+ dest='if_none_match', default=None,
help='show output if ETags don\'t match')
parser.add_option('--if-modified-since', action='store', type='str',
- dest='if-modified-since', default=None,
+ dest='if_modified_since', default=None,
help='show output if modified since then')
parser.add_option('--if-unmodified-since', action='store', type='str',
- dest='if-unmodified-since', default=None,
+ dest='if_unmodified_since', default=None,
help='show output if not modified since then')
parser.add_option('-o', action='store', type='str',
dest='file', default=None,
help='get the full object version list')
def execute(self, path):
- headers = {}
+ attrs = ['detail', 'if_match', 'if_none_match', 'if_modified_since',
+ 'if_unmodified_since']
+ args = self._build_args(attrs)
+
if self.range:
- headers['RANGE'] = 'bytes=%s' %self.range
- if getattr(self, 'if-range'):
- headers['IF_RANGE'] = 'If-Range:%s' % getattr(self, 'if-range')
- attrs = ['if-match', 'if-none-match', 'if-modified-since',
- 'if-unmodified-since']
- attrs = [a for a in attrs if getattr(self, a)]
- for a in attrs:
- headers[a.replace('-', '_').upper()] = getattr(self, a)
+ args['range'] = 'bytes=%s' %self.range
+ if getattr(self, 'if_range'):
+ args['if-range'] = 'If-Range:%s' % getattr(self, 'if_range')
+
container, sep, object = path.partition('/')
+ data = None
if self.versionlist:
- self.version = 'list'
- self.detail = True
- data = self.client.retrieve_object(container, object, self.detail,
- headers, self.version)
+ if 'detail' in args.keys():
+ args.pop('detail')
+ data = self.client.retrieve_object_versionlist(container, object, **args)
+ elif self.version:
+ data = self.client.retrieve_object_version(container, object,
+ self.version, **args)
+ else:
+ data = self.client.retrieve_object(container, object, **args)
+
f = self.file and open(self.file, 'w') or stdout
if self.detail:
data = json.loads(data)
parser.add_option('--etag', action='store', dest='etag',
default=None, help='check written data')
parser.add_option('--content-encoding', action='store',
- dest='content-encoding', default=None,
+ dest='content_encoding', default=None,
help='provide the object MIME content type')
parser.add_option('--content-disposition', action='store', type='str',
- dest='content-disposition', default=None,
+ dest='content_disposition', default=None,
help='provide the presentation style of the object')
- parser.add_option('-S', action='store',
- dest='segment-size', default=False,
- help='use for large file support')
+ #parser.add_option('-S', action='store',
+ # dest='segment_size', default=False,
+ # help='use for large file support')
parser.add_option('--manifest', action='store_true',
- dest='manifest', default=None,
+ dest='x_object_manifest', default=None,
help='upload a manifestation file')
- parser.add_option('--type', action='store',
- dest='content-type', default=False,
+ parser.add_option('--content-type', action='store',
+ dest='content_type', default=None,
help='create object with specific content type')
parser.add_option('--sharing', action='store',
- dest='sharing', default=None,
+ dest='x_object_sharing', default=None,
help='define sharing object policy')
parser.add_option('-f', action='store',
dest='srcpath', default=None,
help='file descriptor to read from (pass - for standard input)')
- parser.add_option('--public', action='store',
- dest='public', default=None,
+ parser.add_option('--public', action='store_true',
+ dest='x_object_public', default=False,
help='make object publicly accessible (\'True\'/\'False\')')
def execute(self, path, *args):
key, sep, val = arg.partition('=')
meta[key] = val
- headers = {}
- manifest = getattr(self, 'manifest')
- if manifest:
- # if it's manifestation file
- # send zero-byte data with X-Object-Manifest header
- self.touch = True
- headers['X_OBJECT_MANIFEST'] = manifest
- if self.sharing:
- headers['X_OBJECT_SHARING'] = self.sharing
-
- attrs = ['etag', 'content-encoding', 'content-disposition',
- 'content-type']
- attrs = [a for a in attrs if getattr(self, a)]
- for a in attrs:
- headers[a.replace('-', '_').upper()] = getattr(self, a)
+ attrs = ['etag', 'content_encoding', 'content_disposition',
+ 'content_type', 'x_object_sharing', 'x_object_public']
+ args = self._build_args(attrs)
container, sep, object = path.partition('/')
if self.use_hashes and not f:
raise Fault('Illegal option combination')
- if self.public not in ['True', 'False', None]:
- raise Fault('Not acceptable value for public')
- public = eval(self.public) if self.public else None
- self.client.create_object(container, object, f, chunked=self.chunked,
- headers=headers, use_hashes=self.use_hashes,
- public=public, **meta)
+
+ if self.chunked:
+ self.client.create_object_using_chunks(container, object, f,
+ meta=meta, **args)
+ elif self.use_hashes:
+ format = 'json' if detail else 'text'
+ self.client.create_object_by_hashmap(container, object, f, format,
+ meta=meta, **args)
+ elif self.x_object_manifest:
+ self.client.create_manifestation(container, object, self.x_object_manifest)
+ else:
+ data = f.read() if f else None
+ self.client.create_object(container, object, data, meta=meta, **args)
if f:
f.close()
parser.add_option('--chunked', action='store_true', dest='chunked',
default=False, help='set chunked transfer mode')
parser.add_option('--content-encoding', action='store',
- dest='content-encoding', default=None,
+ dest='content_encoding', default=None,
help='provide the object MIME content type')
parser.add_option('--content-disposition', action='store', type='str',
- dest='content-disposition', default=None,
+ dest='content_disposition', default=None,
help='provide the presentation style of the object')
parser.add_option('--manifest', action='store', type='str',
- dest='manifest', default=None,
+ dest='x_object_manifest', default=None,
help='use for large file support')
parser.add_option('--sharing', action='store',
- dest='sharing', default=None,
+ dest='x_object_sharing', default=None,
help='define sharing object policy')
parser.add_option('--nosharing', action='store_true',
dest='no_sharing', default=None,
dest='srcpath', default=None,
help='file descriptor to read from: pass - for standard input')
parser.add_option('--public', action='store',
- dest='public', default=None,
+ dest='x_object_public', default=False,
help='publish/unpublish object (\'True\'/\'False\')')
def execute(self, path, *args):
    """Update an object's data and/or attributes from the CLI.

    Fix: the non-chunked path pre-read the file and passed the resulting
    string as update_object's `f` argument; update_object itself calls
    f.read(), so this raised AttributeError on str.  The open file is
    now passed through.
    """
    if path.find('=') != -1:
        # the first positional looks like a meta pair, so no path was given
        raise Fault('Missing path argument')
    #prepare user defined meta
    meta = {}
    for arg in args:
        key, sep, val = arg.partition('=')
        meta[key] = val
    if self.no_sharing:
        self.x_object_sharing = ''

    attrs = ['content_encoding', 'content_disposition', 'x_object_sharing',
             'x_object_public']
    args = self._build_args(attrs)

    container, sep, object = path.partition('/')
    f = None
    if self.srcpath:
        f = open(self.srcpath) if self.srcpath != '-' else stdin

    if self.chunked:
        self.client.update_object_using_chunks(container, object, f,
                                               meta=meta, **args)
    else:
        # pass the file object itself — update_object performs the read
        self.client.update_object(container, object, f, meta=meta, **args)
    if f:
        f.close()
patterns = ['^x_(account|container|object)_meta_(\w+)$']
patterns.append(patterns[0].replace('_', '-'))
for key, val in sorted(d.items()):
- for p in patterns:
- p = re.compile(p)
- m = p.match(key)
- if m:
- key = m.group(2)
f.write('%s: %s\n' % (key.rjust(30), val))
def print_list(l, verbose=False, f=stdout, detail=True):
return os.environ['PITHOS_AUTH']
except KeyError:
return '0000'
-
+
+def _get_server():
+ try:
+ return os.environ['PITHOS_SERVER']
+ except KeyError:
+ return DEFAULT_HOST
def main():
try:
cmd = cls(name, argv[2:])
+ #cmd.execute(*cmd.args)
try:
cmd.execute(*cmd.args)
except TypeError, e: