root / snf-pithos-app / pithos / api / test / __init__.py @ 7c36f3fb
History | View | Annotate | Download (19.1 kB)
1 |
#!/usr/bin/env python
|
---|---|
2 |
#coding=utf8
|
3 |
|
4 |
# Copyright 2011-2013 GRNET S.A. All rights reserved.
|
5 |
#
|
6 |
# Redistribution and use in source and binary forms, with or
|
7 |
# without modification, are permitted provided that the following
|
8 |
# conditions are met:
|
9 |
#
|
10 |
# 1. Redistributions of source code must retain the above
|
11 |
# copyright notice, this list of conditions and the following
|
12 |
# disclaimer.
|
13 |
#
|
14 |
# 2. Redistributions in binary form must reproduce the above
|
15 |
# copyright notice, this list of conditions and the following
|
16 |
# disclaimer in the documentation and/or other materials
|
17 |
# provided with the distribution.
|
18 |
#
|
19 |
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
|
20 |
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
21 |
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
22 |
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
|
23 |
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
24 |
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
25 |
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
|
26 |
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
|
27 |
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
28 |
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
29 |
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
30 |
# POSSIBILITY OF SUCH DAMAGE.
|
31 |
#
|
32 |
# The views and conclusions contained in the software and
|
33 |
# documentation are those of the authors and should not be
|
34 |
# interpreted as representing official policies, either expressed
|
35 |
# or implied, of GRNET S.A.
|
36 |
|
37 |
from urlparse import urlunsplit, urlsplit |
38 |
from xml.dom import minidom |
39 |
|
40 |
from snf_django.utils.testing import with_settings, astakos_user |
41 |
|
42 |
from pithos.api import settings as pithos_settings |
43 |
from pithos.api.test.util import is_date, get_random_data |
44 |
|
45 |
from synnefo.lib.services import get_service_path |
46 |
from synnefo.lib import join_urls |
47 |
|
48 |
from django.test import TestCase |
49 |
from django.conf import settings |
50 |
from django.utils.http import urlencode |
51 |
|
52 |
import django.utils.simplejson as json |
53 |
|
54 |
import random |
55 |
import threading |
56 |
import functools |
57 |
|
58 |
|
59 |
# Decorator/context factory: temporarily override pithos settings in a test.
pithos_test_settings = functools.partial(with_settings, pithos_settings)

# Accepted HTTP date formats (asctime, RFC 850, RFC 1123).
DATE_FORMATS = ["%a %b %d %H:%M:%S %Y",
                "%A, %d-%b-%y %H:%M:%S GMT",
                "%a, %d %b %Y %H:%M:%S GMT"]

# Sample object names (flat and nested paths) used by listing tests.
o_names = ['kate.jpg',
           'kate_beckinsale.jpg',
           'How To Win Friends And Influence People.pdf',
           'moms_birthday.jpg',
           'poodle_strut.mov',
           'Disturbed - Down With The Sickness.mp3',
           'army_of_darkness.avi',
           'the_mad.avi',
           'photos/animals/dogs/poodle.jpg',
           'photos/animals/dogs/terrier.jpg',
           'photos/animals/cats/persian.jpg',
           'photos/animals/cats/siamese.jpg',
           'photos/plants/fern.jpg',
           'photos/plants/rose.jpg',
           'photos/me.jpg']

# Fields expected per entity in extended (json/xml) listing responses.
details = {'container': ('name', 'count', 'bytes', 'last_modified',
                         'x_container_policy'),
           'object': ('name', 'hash', 'bytes', 'content_type',
                      'content_encoding', 'last_modified',)}

# Error status codes that are always acceptable in assert_status().
return_codes = (400, 401, 403, 404, 503)

# Small block size / fast hash used to speed up backend tests.
TEST_BLOCK_SIZE = 1024
TEST_HASH_ALGORITHM = 'sha256'
|
90 |
|
91 |
|
92 |
class PithosAPITest(TestCase):
    """Base test case with helpers for exercising the Pithos API.

    The request helpers (head/get/post/put/delete) wrap the django test
    client inside an ``astakos_user`` context so requests are authenticated,
    and the higher-level helpers assert the expected HTTP status codes.
    """

    def setUp(self):
        # Point the sqlalchemy backend at the django test database.
        if (pithos_settings.BACKEND_DB_MODULE ==
                'pithos.backends.lib.sqlalchemy'):
            pithos_settings.BACKEND_DB_CONNECTION = django_to_sqlalchemy()
            pithos_settings.BACKEND_POOL_SIZE = 1

        # Override default block size to speed up tests
        pithos_settings.BACKEND_BLOCK_SIZE = TEST_BLOCK_SIZE
        pithos_settings.BACKEND_HASH_ALGORITHM = TEST_HASH_ALGORITHM

        self.user = 'user'
        self.pithos_path = join_urls(get_service_path(
            pithos_settings.pithos_services, 'object-store'))

    def tearDown(self):
        # delete additionally created metadata
        meta = self.get_account_meta()
        self.delete_account_meta(meta)

        # delete additionally created groups
        groups = self.get_account_groups()
        self.delete_account_groups(groups)

        self._clean_account()

    def head(self, url, user='user', *args, **kwargs):
        """Issue an authenticated HEAD request and return the response."""
        with astakos_user(user):
            response = self.client.head(url, *args, **kwargs)
        return response

    def get(self, url, user='user', *args, **kwargs):
        """Issue an authenticated GET request and return the response."""
        with astakos_user(user):
            response = self.client.get(url, *args, **kwargs)
        return response

    def delete(self, url, user='user', *args, **kwargs):
        """Issue an authenticated DELETE request and return the response."""
        with astakos_user(user):
            response = self.client.delete(url, *args, **kwargs)
        return response

    def post(self, url, user='user', *args, **kwargs):
        """Issue an authenticated POST request (octet-stream by default)."""
        with astakos_user(user):
            kwargs.setdefault('content_type', 'application/octet-stream')
            response = self.client.post(url, *args, **kwargs)
        return response

    def put(self, url, user='user', *args, **kwargs):
        """Issue an authenticated PUT request (octet-stream by default)."""
        with astakos_user(user):
            kwargs.setdefault('content_type', 'application/octet-stream')
            response = self.client.put(url, *args, **kwargs)
        return response

    def _clean_account(self):
        """Empty and delete every container of the test account."""
        for c in self.list_containers():
            self.delete_container_content(c['name'])
            self.delete_container(c['name'])

    def update_account_meta(self, meta):
        """Set the given account metadata and verify the change applied."""
        kwargs = dict(
            ('HTTP_X_ACCOUNT_META_%s' % k, str(v)) for k, v in meta.items())
        url = join_urls(self.pithos_path, self.user)
        r = self.post('%s?update=' % url, **kwargs)
        self.assertEqual(r.status_code, 202)
        account_meta = self.get_account_meta()
        # BUG FIX: these assertions used to be wrapped in generator
        # expressions that were never consumed, so they never executed.
        # Header values are sent as str(v), so compare against str(v).
        for k, v in meta.items():
            self.assertTrue('X-Account-Meta-%s' % k in account_meta)
            self.assertEqual(account_meta['X-Account-Meta-%s' % k], str(v))

    def reset_account_meta(self, meta):
        """Replace the account metadata wholesale and verify the result."""
        kwargs = dict(
            ('HTTP_X_ACCOUNT_META_%s' % k, str(v)) for k, v in meta.items())
        url = join_urls(self.pithos_path, self.user)
        r = self.post(url, **kwargs)
        self.assertEqual(r.status_code, 202)
        account_meta = self.get_account_meta()
        # BUG FIX: formerly dead generator expressions; now executed.
        for k, v in meta.items():
            self.assertTrue('X-Account-Meta-%s' % k in account_meta)
            self.assertEqual(account_meta['X-Account-Meta-%s' % k], str(v))

    def delete_account_meta(self, meta):
        """Unset the given account meta headers and verify their removal.

        `meta` keys are full header names, e.g. 'X-Account-Meta-Foo'
        (as returned by get_account_meta()).
        """
        transform = lambda k: 'HTTP_%s' % k.replace('-', '_').upper()
        kwargs = dict((transform(k), '') for k, v in meta.items())
        url = join_urls(self.pithos_path, self.user)
        r = self.post('%s?update=' % url, **kwargs)
        self.assertEqual(r.status_code, 202)
        account_meta = self.get_account_meta()
        # BUG FIX: formerly a dead generator expression; the keys are
        # already full header names, so check them directly.
        for k in meta.keys():
            self.assertTrue(k not in account_meta)
        return r

    def delete_account_groups(self, groups):
        """POST the given group headers to the account update endpoint."""
        # NOTE(review): `groups` keys are passed through verbatim; unlike
        # delete_account_meta there is no HTTP_ transform here — confirm
        # callers pass request-ready keys.
        url = join_urls(self.pithos_path, self.user)
        r = self.post('%s?update=' % url, **groups)
        self.assertEqual(r.status_code, 202)
        return r

    def get_account_info(self, until=None):
        """HEAD the account (optionally at a point in time) -> response."""
        url = join_urls(self.pithos_path, self.user)
        if until is not None:
            parts = list(urlsplit(url))
            parts[3] = urlencode({
                'until': until
            })
            url = urlunsplit(parts)
        r = self.head(url)
        self.assertEqual(r.status_code, 204)
        return r

    def get_account_meta(self, until=None):
        """Return only the X-Account-Meta-* response headers as a dict."""
        r = self.get_account_info(until=until)
        headers = dict(r._headers.values())
        for k in list(headers.keys()):
            if not k.startswith('X-Account-Meta-'):
                headers.pop(k)
        return headers

    def get_account_groups(self, until=None):
        """Return only the X-Account-Group-* response headers as a dict."""
        r = self.get_account_info(until=until)
        headers = dict(r._headers.values())
        for k in list(headers.keys()):
            if not k.startswith('X-Account-Group-'):
                headers.pop(k)
        return headers

    def get_container_info(self, container, until=None):
        """HEAD a container (optionally at a point in time) -> response."""
        url = join_urls(self.pithos_path, self.user, container)
        if until is not None:
            parts = list(urlsplit(url))
            parts[3] = urlencode({
                'until': until
            })
            url = urlunsplit(parts)
        r = self.head(url)
        self.assertEqual(r.status_code, 204)
        return r

    def get_container_meta(self, container, until=None):
        """Return only the X-Container-Meta-* response headers as a dict."""
        r = self.get_container_info(container, until=until)
        headers = dict(r._headers.values())
        for k in list(headers.keys()):
            if not k.startswith('X-Container-Meta-'):
                headers.pop(k)
        return headers

    def update_container_meta(self, container, meta):
        """Set the given container metadata and verify the change applied."""
        kwargs = dict(
            ('HTTP_X_CONTAINER_META_%s' % k, str(v)) for k, v in meta.items())
        url = join_urls(self.pithos_path, self.user, container)
        r = self.post('%s?update=' % url, **kwargs)
        self.assertEqual(r.status_code, 202)
        container_meta = self.get_container_meta(container)
        # BUG FIX: formerly dead generator expressions; now executed.
        for k, v in meta.items():
            self.assertTrue('X-Container-Meta-%s' % k in container_meta)
            self.assertEqual(container_meta['X-Container-Meta-%s' % k],
                             str(v))

    def list_containers(self, format='json', headers=None, **params):
        """List account containers; the return type depends on `format`.

        Returns a list of names (format=None), parsed json (format='json')
        or a minidom document (format='xml').
        """
        headers = headers or {}  # BUG FIX: avoid mutable default argument
        _url = join_urls(self.pithos_path, self.user)
        parts = list(urlsplit(_url))
        params['format'] = format
        parts[3] = urlencode(params)
        url = urlunsplit(parts)
        _headers = dict(('HTTP_%s' % k.upper(), str(v))
                        for k, v in headers.items())
        r = self.get(url, **_headers)

        if format is None:
            containers = r.content.split('\n')
            if '' in containers:
                containers.remove('')
            return containers
        elif format == 'json':
            try:
                containers = json.loads(r.content)
            except ValueError:  # narrowed from a bare except
                self.fail('json format expected')
            return containers
        elif format == 'xml':
            return minidom.parseString(r.content)

    def delete_container_content(self, cname):
        """Delete every object in the container via delimiter deletion."""
        url = join_urls(self.pithos_path, self.user, cname)
        r = self.delete('%s?delimiter=/' % url)
        self.assertEqual(r.status_code, 204)
        return r

    def delete_container(self, cname):
        """Delete an (empty) container."""
        url = join_urls(self.pithos_path, self.user, cname)
        r = self.delete(url)
        self.assertEqual(r.status_code, 204)
        return r

    def create_container(self, cname):
        """Create a container; 201 (created) or 202 (existed) accepted."""
        url = join_urls(self.pithos_path, self.user, cname)
        r = self.put(url, data='')
        self.assertTrue(r.status_code in (202, 201))
        return r

    def upload_object(self, cname, oname=None, length=None, verify=True,
                      **meta):
        """Upload random data as an object; return (name, data, response)."""
        oname = oname or get_random_data(8)
        length = length or random.randint(TEST_BLOCK_SIZE, 2 * TEST_BLOCK_SIZE)
        data = get_random_data(length=length)
        headers = dict(('HTTP_X_OBJECT_META_%s' % k.upper(), v)
                       for k, v in meta.iteritems())
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.put(url, data=data, **headers)
        if verify:
            self.assertEqual(r.status_code, 201)
        return oname, data, r

    def update_object_data(self, cname, oname=None, length=None,
                           content_type=None, content_range=None,
                           verify=True, **meta):
        """POST random data to an object; return (name, data, response)."""
        oname = oname or get_random_data(8)
        length = length or random.randint(TEST_BLOCK_SIZE, 2 * TEST_BLOCK_SIZE)
        content_type = content_type or 'application/octet-stream'
        data = get_random_data(length=length)
        headers = dict(('HTTP_X_OBJECT_META_%s' % k.upper(), v)
                       for k, v in meta.iteritems())
        if content_range:
            headers['HTTP_CONTENT_RANGE'] = content_range
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.post(url, data=data, content_type=content_type, **headers)
        if verify:
            self.assertEqual(r.status_code, 204)
        return oname, data, r

    def append_object_data(self, cname, oname=None, length=None,
                           content_type=None):
        """Append random data to an object ('bytes */*' content range)."""
        return self.update_object_data(cname, oname=oname,
                                       length=length,
                                       content_type=content_type,
                                       content_range='bytes */*')

    def create_folder(self, cname, oname=None, **headers):
        """Create a directory marker object; return (name, response)."""
        oname = oname or get_random_data(8)
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.put(url, data='', content_type='application/directory',
                     **headers)
        self.assertEqual(r.status_code, 201)
        return oname, r

    def list_objects(self, cname, prefix=None):
        """List container objects (json), optionally filtered by prefix."""
        url = join_urls(self.pithos_path, self.user, cname)
        path = '%s?format=json' % url
        if prefix is not None:
            path = '%s&prefix=%s' % (path, prefix)
        r = self.get(path)
        self.assertTrue(r.status_code in (200, 204))
        try:
            objects = json.loads(r.content)
        except ValueError:  # narrowed from a bare except
            self.fail('json format expected')
        return objects

    def get_object_info(self, container, object, version=None, until=None):
        """HEAD an object (optionally a version / point in time)."""
        url = join_urls(self.pithos_path, self.user, container, object)
        if until is not None:
            parts = list(urlsplit(url))
            parts[3] = urlencode({
                'until': until
            })
            url = urlunsplit(parts)
        if version:
            url = '%s?version=%s' % (url, version)
        r = self.head(url)
        self.assertEqual(r.status_code, 200)
        return r

    def get_object_meta(self, container, object, version=None, until=None):
        """Return only the X-Object-Meta-* response headers as a dict."""
        r = self.get_object_info(container, object, version, until=until)
        headers = dict(r._headers.values())
        for k in list(headers.keys()):
            if not k.startswith('X-Object-Meta-'):
                headers.pop(k)
        return headers

    def update_object_meta(self, container, object, meta):
        """Set the given object metadata and verify the change applied."""
        kwargs = dict(
            ('HTTP_X_OBJECT_META_%s' % k, str(v)) for k, v in meta.items())
        url = join_urls(self.pithos_path, self.user, container, object)
        r = self.post('%s?update=' % url, content_type='', **kwargs)
        self.assertEqual(r.status_code, 202)
        object_meta = self.get_object_meta(container, object)
        # BUG FIX: formerly dead generator expressions, one of which also
        # had a typo ('X-Objecr-Meta-%s'); now corrected and executed.
        for k, v in meta.items():
            self.assertTrue('X-Object-Meta-%s' % k in object_meta)
            self.assertEqual(object_meta['X-Object-Meta-%s' % k], str(v))

    def assert_status(self, status, codes):
        """Assert `status` is one of `codes` or a generic error code."""
        l = list(return_codes)
        if isinstance(codes, list):
            l.extend(codes)
        else:
            l.append(codes)
        self.assertTrue(status in l)

    def assert_extended(self, data, format, type, size=10000):
        """Dispatch extended-listing checks by response format."""
        if format == 'xml':
            self._assert_xml(data, type, size)
        elif format == 'json':
            self._assert_json(data, type, size)

    def _assert_json(self, data, type, size):
        """Check each json listing entry carries the expected fields."""
        convert = lambda s: s.lower()
        info = [convert(elem) for elem in details[type]]
        self.assertTrue(len(data) <= size)
        for item in info:
            for i in data:
                if 'subdir' in i.keys():
                    # virtual directory entries carry no detail fields
                    continue
                self.assertTrue(item in i.keys())

    def _assert_xml(self, data, type, size):
        """Check each xml listing entity carries the expected elements."""
        convert = lambda s: s.lower()
        info = [convert(elem) for elem in details[type]]
        try:
            # content_encoding is not reported in xml listings
            info.remove('content_encoding')
        except ValueError:
            pass
        xml = data
        entities = xml.getElementsByTagName(type)
        self.assertTrue(len(entities) <= size)
        for e in entities:
            for item in info:
                self.assertTrue(e.getElementsByTagName(item))
422 |
|
423 |
|
424 |
class AssertMappingInvariant(object):
    """Context manager asserting that the mapping produced by a callable
    is unchanged after the managed block runs (date values excluded)."""

    def __init__(self, callable, *args, **kwargs):
        self.callable = callable
        self.args = args
        self.kwargs = kwargs

    def __enter__(self):
        # Snapshot the mapping before the managed block executes.
        self.map = self.callable(*self.args, **self.kwargs)
        return self.map

    def __exit__(self, type, value, tb):
        after = self.callable(*self.args, **self.kwargs)
        for key, before in self.map.items():
            # Timestamps are expected to drift between the two calls.
            if is_date(before):
                continue

            assert key in after, '%s not in map' % key
            assert before == after[key]
442 |
|
443 |
|
444 |
class AssertUUidInvariant(object):
    """Context manager asserting that the 'x-object-uuid' entry of the
    mapping produced by a callable is identical before and after the
    managed block runs."""

    def __init__(self, callable, *args, **kwargs):
        self.callable = callable
        self.args = args
        self.kwargs = kwargs

    def __enter__(self):
        self.map = self.callable(*self.args, **self.kwargs)
        assert('x-object-uuid' in self.map)
        self.uuid = self.map['x-object-uuid']
        return self.map

    def __exit__(self, type, value, tb):
        map = self.callable(*self.args, **self.kwargs)
        # BUG FIX: the presence check used to inspect the stale snapshot
        # (self.map) instead of the freshly fetched mapping, so a dropped
        # uuid raised KeyError rather than AssertionError.
        assert('x-object-uuid' in map)
        uuid = map['x-object-uuid']
        assert(uuid == self.uuid)
461 |
|
462 |
|
463 |
# Map django database ENGINE paths to sqlalchemy URL dialect names.
# BUG FIX: 'mysql' was mapped to '' and 'sqlite3' to 'mssql' — neither is
# a valid sqlalchemy dialect for those engines.
django_sqlalchemy_engines = {
    'django.db.backends.postgresql_psycopg2': 'postgresql+psycopg2',
    'django.db.backends.postgresql': 'postgresql',
    'django.db.backends.mysql': 'mysql',
    'django.db.backends.sqlite3': 'sqlite',
    'django.db.backends.oracle': 'oracle'}
469 |
|
470 |
|
471 |
def django_to_sqlalchemy():
    """Convert the django default database to a sqlalchemy connection string.

    Uses TEST_NAME when present, otherwise 'test_<NAME>'. sqlite databases
    get a file URL; everything else gets a full scheme://user:pwd@host URL.
    """
    # TODO support for more complex configuration
    db = settings.DATABASES['default']
    name = db.get('TEST_NAME', 'test_%s' % db['NAME'])
    if db['ENGINE'] == 'django.db.backends.sqlite3':
        # BUG FIX: removed a dead `db.get('TEST_NAME', db['NAME'])` whose
        # result was discarded; `name` computed above is used instead.
        return 'sqlite:///%s' % name
    else:
        d = dict(scheme=django_sqlalchemy_engines.get(db['ENGINE']),
                 user=db['USER'],
                 pwd=db['PASSWORD'],
                 host=db['HOST'].lower(),
                 port=int(db['PORT']) if db['PORT'] != '' else '',
                 name=name)
        return '%(scheme)s://%(user)s:%(pwd)s@%(host)s:%(port)s/%(name)s' % d
487 |
|
488 |
|
489 |
def test_concurrently(times=2):
    """
    Add this decorator to small pieces of code that you want to test
    concurrently to make sure they don't raise exceptions when run at the
    same time. E.g., some Django views that do a SELECT and then a subsequent
    INSERT might fail when the INSERT assumes that the data has not changed
    since the SELECT.
    """
    def test_concurrently_decorator(test_func):
        @functools.wraps(test_func)
        def wrapper(*args, **kwargs):
            exceptions = []

            def call_test_func():
                try:
                    test_func(*args, **kwargs)
                except Exception as e:
                    exceptions.append(e)
                    raise

            # BUG FIX: the threads were previously constructed without a
            # target, so the decorated function was never executed.
            threads = []
            for i in range(times):
                threads.append(threading.Thread(target=call_test_func))
            for t in threads:
                t.start()
            for t in threads:
                t.join()
            if exceptions:
                # BUG FIX: the message was a tuple formatted with '%',
                # which raises TypeError; join the parts into one string.
                raise Exception(
                    'test_concurrently intercepted %s '
                    'exceptions: %s' % (len(exceptions), exceptions))
        return wrapper
    return test_concurrently_decorator
|