root / snf-pithos-app / pithos / api / test / __init__.py @ 5fe43b8c
History | View | Annotate | Download (19 kB)
1 |
#!/usr/bin/env python
|
---|---|
2 |
#coding=utf8
|
3 |
|
4 |
# Copyright 2011-2013 GRNET S.A. All rights reserved.
|
5 |
#
|
6 |
# Redistribution and use in source and binary forms, with or
|
7 |
# without modification, are permitted provided that the following
|
8 |
# conditions are met:
|
9 |
#
|
10 |
# 1. Redistributions of source code must retain the above
|
11 |
# copyright notice, this list of conditions and the following
|
12 |
# disclaimer.
|
13 |
#
|
14 |
# 2. Redistributions in binary form must reproduce the above
|
15 |
# copyright notice, this list of conditions and the following
|
16 |
# disclaimer in the documentation and/or other materials
|
17 |
# provided with the distribution.
|
18 |
#
|
19 |
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
|
20 |
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
21 |
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
22 |
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
|
23 |
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
24 |
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
25 |
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
|
26 |
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
|
27 |
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
28 |
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
29 |
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
30 |
# POSSIBILITY OF SUCH DAMAGE.
|
31 |
#
|
32 |
# The views and conclusions contained in the software and
|
33 |
# documentation are those of the authors and should not be
|
34 |
# interpreted as representing official policies, either expressed
|
35 |
# or implied, of GRNET S.A.
|
36 |
|
37 |
from urlparse import urlunsplit, urlsplit |
38 |
from xml.dom import minidom |
39 |
|
40 |
from snf_django.utils.testing import with_settings, astakos_user |
41 |
|
42 |
from pithos.api import settings as pithos_settings |
43 |
from pithos.api.test.util import is_date, get_random_data |
44 |
|
45 |
from synnefo.lib.services import get_service_path |
46 |
from synnefo.lib import join_urls |
47 |
|
48 |
from django.test import TestCase |
49 |
from django.utils.http import urlencode |
50 |
from django.conf import settings |
51 |
|
52 |
import django.utils.simplejson as json |
53 |
|
54 |
import random |
55 |
import threading |
56 |
import functools |
57 |
|
58 |
|
59 |
# Decorator factory: override pithos settings for the duration of a test.
pithos_test_settings = functools.partial(with_settings, pithos_settings)

# Date formats accepted/emitted by the API (asctime, RFC 850, RFC 1123).
DATE_FORMATS = ["%a %b %d %H:%M:%S %Y",
                "%A, %d-%b-%y %H:%M:%S GMT",
                "%a, %d %b %Y %H:%M:%S GMT"]

# Sample object names used as fixtures, including nested (pseudo-folder)
# paths for listing/prefix tests.
o_names = ['kate.jpg',
           'kate_beckinsale.jpg',
           'How To Win Friends And Influence People.pdf',
           'moms_birthday.jpg',
           'poodle_strut.mov',
           'Disturbed - Down With The Sickness.mp3',
           'army_of_darkness.avi',
           'the_mad.avi',
           'photos/animals/dogs/poodle.jpg',
           'photos/animals/dogs/terrier.jpg',
           'photos/animals/cats/persian.jpg',
           'photos/animals/cats/siamese.jpg',
           'photos/plants/fern.jpg',
           'photos/plants/rose.jpg',
           'photos/me.jpg']

# Fields expected in extended (json/xml) listings, keyed by entity type.
details = {'container': ('name', 'count', 'bytes', 'last_modified',
                         'x_container_policy'),
           'object': ('name', 'hash', 'bytes', 'content_type',
                      'content_encoding', 'last_modified',)}

# Error status codes that any request may legitimately return, in addition
# to the expected success codes (see PithosAPITest.assert_status).
return_codes = (400, 401, 403, 404, 503)

# Small backend block size/hash so object data fixtures stay tiny and fast.
TEST_BLOCK_SIZE = 1024
TEST_HASH_ALGORITHM = 'sha256'
90 |
|
91 |
|
92 |
class PithosAPITest(TestCase):
    """Base class for the Pithos API test suite.

    Configures a sqlalchemy-backed Pithos backend with a small block size
    (to keep data fixtures fast) and provides helpers for issuing
    authenticated requests and for manipulating account, container and
    object metadata.
    """

    def setUp(self):
        pithos_settings.BACKEND_DB_MODULE = 'pithos.backends.lib.sqlalchemy'
        pithos_settings.BACKEND_DB_CONNECTION = django_to_sqlalchemy()
        pithos_settings.BACKEND_POOL_SIZE = 1

        # Override default block size to speed up tests
        pithos_settings.BACKEND_BLOCK_SIZE = TEST_BLOCK_SIZE
        pithos_settings.BACKEND_HASH_ALGORITHM = TEST_HASH_ALGORITHM

        self.user = 'user'
        self.pithos_path = join_urls(get_service_path(
            pithos_settings.pithos_services, 'object-store'))

    def tearDown(self):
        # delete additionally created metadata
        meta = self.get_account_meta()
        self.delete_account_meta(meta)

        # delete additionally created groups
        groups = self.get_account_groups()
        self.delete_account_groups(groups)

        self._clean_account()

    def head(self, url, user='user', *args, **kwargs):
        """Issue a HEAD request authenticated as `user`."""
        with astakos_user(user):
            response = self.client.head(url, *args, **kwargs)
        return response

    def get(self, url, user='user', *args, **kwargs):
        """Issue a GET request authenticated as `user`."""
        with astakos_user(user):
            response = self.client.get(url, *args, **kwargs)
        return response

    def delete(self, url, user='user', *args, **kwargs):
        """Issue a DELETE request authenticated as `user`."""
        with astakos_user(user):
            response = self.client.delete(url, *args, **kwargs)
        return response

    def post(self, url, user='user', *args, **kwargs):
        """Issue a POST request authenticated as `user`.

        Defaults the content type to application/octet-stream, as the
        API expects for data-bearing requests.
        """
        with astakos_user(user):
            kwargs.setdefault('content_type', 'application/octet-stream')
            response = self.client.post(url, *args, **kwargs)
        return response

    def put(self, url, user='user', *args, **kwargs):
        """Issue a PUT request authenticated as `user`."""
        with astakos_user(user):
            kwargs.setdefault('content_type', 'application/octet-stream')
            response = self.client.put(url, *args, **kwargs)
        return response

    def _clean_account(self):
        # Remove every container (and its contents) of the test account.
        for c in self.list_containers():
            self.delete_container_content(c['name'])
            self.delete_container(c['name'])

    def update_account_meta(self, meta):
        """POST the given metadata (short names) to the account and verify
        that it is reflected in a subsequent HEAD."""
        kwargs = dict(
            ('HTTP_X_ACCOUNT_META_%s' % k, str(v)) for k, v in meta.items())
        url = join_urls(self.pithos_path, self.user)
        r = self.post('%s?update=' % url, **kwargs)
        self.assertEqual(r.status_code, 202)
        account_meta = self.get_account_meta()
        # BUG FIX: these assertions were unconsumed generator expressions
        # and therefore never executed; run them explicitly.
        for k, v in meta.items():
            self.assertTrue('X-Account-Meta-%s' % k in account_meta)
            self.assertEqual(account_meta['X-Account-Meta-%s' % k], v)

    def reset_account_meta(self, meta):
        """POST the given metadata without ?update=, replacing all existing
        account metadata, and verify the result."""
        kwargs = dict(
            ('HTTP_X_ACCOUNT_META_%s' % k, str(v)) for k, v in meta.items())
        url = join_urls(self.pithos_path, self.user)
        r = self.post(url, **kwargs)
        self.assertEqual(r.status_code, 202)
        account_meta = self.get_account_meta()
        # BUG FIX: unconsumed generator expressions; run the checks.
        for k, v in meta.items():
            self.assertTrue('X-Account-Meta-%s' % k in account_meta)
            self.assertEqual(account_meta['X-Account-Meta-%s' % k], v)

    def delete_account_meta(self, meta):
        """Delete the given account metadata.

        `meta` maps full header names (as returned by get_account_meta)
        to values; each is reset to the empty string via ?update=.
        """
        transform = lambda k: 'HTTP_%s' % k.replace('-', '_').upper()
        kwargs = dict((transform(k), '') for k in meta)
        url = join_urls(self.pithos_path, self.user)
        r = self.post('%s?update=' % url, **kwargs)
        self.assertEqual(r.status_code, 202)
        account_meta = self.get_account_meta()
        # BUG FIX: the check was an unconsumed generator expression that
        # also re-prefixed already-full header names; verify directly.
        for k in meta:
            self.assertTrue(k not in account_meta)
        return r

    def delete_account_groups(self, groups):
        """Reset the given account groups (full header names) to empty."""
        url = join_urls(self.pithos_path, self.user)
        r = self.post('%s?update=' % url, **groups)
        self.assertEqual(r.status_code, 202)
        return r

    def get_account_info(self, until=None):
        """HEAD the account (optionally at a point in time) and return the
        204 response."""
        url = join_urls(self.pithos_path, self.user)
        if until is not None:
            parts = list(urlsplit(url))
            parts[3] = urlencode({'until': until})
            url = urlunsplit(parts)
        r = self.head(url)
        self.assertEqual(r.status_code, 204)
        return r

    def get_account_meta(self, until=None):
        """Return only the X-Account-Meta-* response headers as a dict."""
        r = self.get_account_info(until=until)
        headers = dict(r._headers.values())
        return dict((k, v) for k, v in headers.items()
                    if k.startswith('X-Account-Meta-'))

    def get_account_groups(self, until=None):
        """Return only the X-Account-Group-* response headers as a dict."""
        r = self.get_account_info(until=until)
        headers = dict(r._headers.values())
        return dict((k, v) for k, v in headers.items()
                    if k.startswith('X-Account-Group-'))

    def get_container_info(self, container, until=None):
        """HEAD a container (optionally at a point in time) and return the
        204 response."""
        url = join_urls(self.pithos_path, self.user, container)
        if until is not None:
            parts = list(urlsplit(url))
            parts[3] = urlencode({'until': until})
            url = urlunsplit(parts)
        r = self.head(url)
        self.assertEqual(r.status_code, 204)
        return r

    def get_container_meta(self, container, until=None):
        """Return only the X-Container-Meta-* response headers as a dict."""
        r = self.get_container_info(container, until=until)
        headers = dict(r._headers.values())
        return dict((k, v) for k, v in headers.items()
                    if k.startswith('X-Container-Meta-'))

    def update_container_meta(self, container, meta):
        """POST the given metadata (short names) to a container and verify
        it is reflected in a subsequent HEAD."""
        kwargs = dict(
            ('HTTP_X_CONTAINER_META_%s' % k, str(v)) for k, v in meta.items())
        url = join_urls(self.pithos_path, self.user, container)
        r = self.post('%s?update=' % url, **kwargs)
        self.assertEqual(r.status_code, 202)
        container_meta = self.get_container_meta(container)
        # BUG FIX: unconsumed generator expressions; run the checks.
        for k, v in meta.items():
            self.assertTrue('X-Container-Meta-%s' % k in container_meta)
            self.assertEqual(container_meta['X-Container-Meta-%s' % k], v)

    def list_containers(self, format='json', headers=None, **params):
        """List the account's containers in the requested format.

        Returns a list of names (format None), parsed json (format
        'json') or a minidom document (format 'xml').
        """
        # BUG FIX: `headers` used to default to a shared mutable {}.
        headers = headers or {}
        _url = join_urls(self.pithos_path, self.user)
        parts = list(urlsplit(_url))
        params['format'] = format
        parts[3] = urlencode(params)
        url = urlunsplit(parts)
        _headers = dict(('HTTP_%s' % k.upper(), str(v))
                        for k, v in headers.items())
        r = self.get(url, **_headers)

        if format is None:
            containers = r.content.split('\n')
            if '' in containers:
                containers.remove('')
            return containers
        elif format == 'json':
            try:
                containers = json.loads(r.content)
            except ValueError:
                # narrowed from a bare except: json.loads raises ValueError
                self.fail('json format expected')
            return containers
        elif format == 'xml':
            return minidom.parseString(r.content)

    def delete_container_content(self, cname):
        """Delete every object in a container via ?delimiter=/."""
        url = join_urls(self.pithos_path, self.user, cname)
        r = self.delete('%s?delimiter=/' % url)
        self.assertEqual(r.status_code, 204)
        return r

    def delete_container(self, cname):
        """Delete an (empty) container."""
        url = join_urls(self.pithos_path, self.user, cname)
        r = self.delete(url)
        self.assertEqual(r.status_code, 204)
        return r

    def create_container(self, cname):
        """Create a container; 202 means it already existed."""
        url = join_urls(self.pithos_path, self.user, cname)
        r = self.put(url, data='')
        self.assertTrue(r.status_code in (202, 201))
        return r

    def upload_object(self, cname, oname=None, length=None, verify=True,
                      **meta):
        """PUT a random object and return (name, data, response)."""
        oname = oname or get_random_data(8)
        length = length or random.randint(TEST_BLOCK_SIZE, 2 * TEST_BLOCK_SIZE)
        data = get_random_data(length=length)
        headers = dict(('HTTP_X_OBJECT_META_%s' % k.upper(), v)
                       for k, v in meta.iteritems())
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.put(url, data=data, **headers)
        if verify:
            self.assertEqual(r.status_code, 201)
        return oname, data, r

    def update_object_data(self, cname, oname=None, length=None,
                           content_type=None, content_range=None,
                           verify=True, **meta):
        """POST random data to an object (optionally a byte range) and
        return (name, data, response)."""
        oname = oname or get_random_data(8)
        length = length or random.randint(TEST_BLOCK_SIZE, 2 * TEST_BLOCK_SIZE)
        content_type = content_type or 'application/octet-stream'
        data = get_random_data(length=length)
        headers = dict(('HTTP_X_OBJECT_META_%s' % k.upper(), v)
                       for k, v in meta.iteritems())
        if content_range:
            headers['HTTP_CONTENT_RANGE'] = content_range
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.post(url, data=data, content_type=content_type, **headers)
        if verify:
            self.assertEqual(r.status_code, 204)
        return oname, data, r

    def append_object_data(self, cname, oname=None, length=None,
                           content_type=None):
        """Append random data to an object ('bytes */*' content range)."""
        return self.update_object_data(cname, oname=oname,
                                       length=length,
                                       content_type=content_type,
                                       content_range='bytes */*')

    def create_folder(self, cname, oname=None, **headers):
        """Create a pseudo-folder (application/directory object)."""
        oname = oname or get_random_data(8)
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.put(url, data='', content_type='application/directory',
                     **headers)
        self.assertEqual(r.status_code, 201)
        return oname, r

    def list_objects(self, cname, prefix=None):
        """List a container's objects (json), optionally prefix-filtered."""
        url = join_urls(self.pithos_path, self.user, cname)
        path = '%s?format=json' % url
        if prefix is not None:
            path = '%s&prefix=%s' % (path, prefix)
        r = self.get(path)
        self.assertTrue(r.status_code in (200, 204))
        try:
            objects = json.loads(r.content)
        except ValueError:
            # narrowed from a bare except: json.loads raises ValueError
            self.fail('json format expected')
        return objects

    def get_object_info(self, container, object, version=None, until=None):
        """HEAD an object, optionally at a specific version and/or point
        in time, and return the 200 response."""
        url = join_urls(self.pithos_path, self.user, container, object)
        if until is not None:
            parts = list(urlsplit(url))
            parts[3] = urlencode({'until': until})
            url = urlunsplit(parts)
        if version:
            # BUG FIX: when 'until' already added a query string the
            # original appended a second '?'; use '&' in that case.
            sep = '&' if '?' in url else '?'
            url = '%s%sversion=%s' % (url, sep, version)
        r = self.head(url)
        self.assertEqual(r.status_code, 200)
        return r

    def get_object_meta(self, container, object, version=None, until=None):
        """Return only the X-Object-Meta-* response headers as a dict."""
        r = self.get_object_info(container, object, version, until=until)
        headers = dict(r._headers.values())
        return dict((k, v) for k, v in headers.items()
                    if k.startswith('X-Object-Meta-'))

    def update_object_meta(self, container, object, meta):
        """POST the given metadata (short names) to an object and verify
        it is reflected in a subsequent HEAD."""
        kwargs = dict(
            ('HTTP_X_OBJECT_META_%s' % k, str(v)) for k, v in meta.items())
        url = join_urls(self.pithos_path, self.user, container, object)
        r = self.post('%s?update=' % url, content_type='', **kwargs)
        self.assertEqual(r.status_code, 202)
        object_meta = self.get_object_meta(container, object)
        # BUG FIX: unconsumed generator expressions (one of which also
        # carried an 'X-Objecr-Meta' typo); run the checks.
        for k, v in meta.items():
            self.assertTrue('X-Object-Meta-%s' % k in object_meta)
            self.assertEqual(object_meta['X-Object-Meta-%s' % k], v)

    def assert_status(self, status, codes):
        """Assert `status` is one of `codes` or a generic error code."""
        expected = list(return_codes)
        if isinstance(codes, list):
            expected.extend(codes)
        else:
            expected.append(codes)
        self.assertTrue(status in expected)

    def assert_extended(self, data, format, type, size=10000):
        """Dispatch extended-listing validation by serialization format."""
        if format == 'xml':
            self._assert_xml(data, type, size)
        elif format == 'json':
            self._assert_json(data, type, size)

    def _assert_json(self, data, type, size):
        # Every non-subdir entry must carry all fields listed in `details`.
        info = [elem.lower() for elem in details[type]]
        self.assertTrue(len(data) <= size)
        for item in info:
            for i in data:
                if 'subdir' in i.keys():
                    continue
                self.assertTrue(item in i.keys())

    def _assert_xml(self, data, type, size):
        # content_encoding is optional in xml listings.
        info = [elem.lower() for elem in details[type]]
        try:
            info.remove('content_encoding')
        except ValueError:
            pass
        entities = data.getElementsByTagName(type)
        self.assertTrue(len(entities) <= size)
        for e in entities:
            for item in info:
                self.assertTrue(e.getElementsByTagName(item))
|
421 |
|
422 |
|
423 |
class AssertMappingInvariant(object):
    """Context manager verifying that the mapping produced by a callable
    is unchanged after the managed block runs.

    Date-valued entries are skipped, since timestamps are expected to
    drift between the two snapshots.
    """

    def __init__(self, callable, *args, **kwargs):
        self.callable = callable
        self.args = args
        self.kwargs = kwargs

    def __enter__(self):
        # Take the "before" snapshot and hand it to the with-body.
        self.map = self.callable(*self.args, **self.kwargs)
        return self.map

    def __exit__(self, exc_type, exc_value, traceback):
        # Take the "after" snapshot and compare entry by entry.
        current = self.callable(*self.args, **self.kwargs)
        for key, val in self.map.items():
            if is_date(val):
                continue

            assert key in current, '%s not in map' % key
            assert val == current[key]
441 |
|
442 |
|
443 |
class AssertUUidInvariant(object):
    """Context manager verifying that an object's 'x-object-uuid' is
    preserved across the managed block.

    The callable must return a mapping containing 'x-object-uuid'.
    """

    def __init__(self, callable, *args, **kwargs):
        self.callable = callable
        self.args = args
        self.kwargs = kwargs

    def __enter__(self):
        self.map = self.callable(*self.args, **self.kwargs)
        assert('x-object-uuid' in self.map)
        self.uuid = self.map['x-object-uuid']
        return self.map

    def __exit__(self, type, value, tb):
        map = self.callable(*self.args, **self.kwargs)
        # BUG FIX: the original re-checked the stale self.map snapshot, so
        # a fresh mapping lacking the key raised KeyError instead of a
        # clean assertion; check the fresh mapping instead.
        assert('x-object-uuid' in map)
        uuid = map['x-object-uuid']
        assert(uuid == self.uuid)
460 |
|
461 |
|
462 |
# Map a django database ENGINE setting to the corresponding sqlalchemy
# dialect name used when assembling a connection URL.
# BUG FIX: mysql was mapped to '' and sqlite3 to 'mssql' — both wrong
# dialect names (sqlite is special-cased in django_to_sqlalchemy, but the
# entry should still be correct).
django_sqlalchemy_engines = {
    'django.db.backends.postgresql_psycopg2': 'postgresql+psycopg2',
    'django.db.backends.postgresql': 'postgresql',
    'django.db.backends.mysql': 'mysql',
    'django.db.backends.sqlite3': 'sqlite',
    'django.db.backends.oracle': 'oracle'}
468 |
|
469 |
|
470 |
def django_to_sqlalchemy():
    """Convert the django default database to sqlalchemy connection string

    Targets the *test* database: TEST_NAME if configured, otherwise
    'test_' prefixed to NAME.
    """
    # TODO support for more complex configuration
    db = settings.DATABASES['default']
    name = db.get('TEST_NAME', 'test_%s' % db['NAME'])
    if db['ENGINE'] == 'django.db.backends.sqlite3':
        # BUG FIX: a dead `db.get('TEST_NAME', db['NAME'])` statement whose
        # result was discarded has been removed; sqlite needs no
        # credentials or host, just the file name.
        return 'sqlite:///%s' % name
    else:
        d = dict(scheme=django_sqlalchemy_engines.get(db['ENGINE']),
                 user=db['USER'],
                 pwd=db['PASSWORD'],
                 # sqlalchemy expects a lowercase host name
                 host=db['HOST'].lower(),
                 port=int(db['PORT']) if db['PORT'] != '' else '',
                 name=name)
        return '%(scheme)s://%(user)s:%(pwd)s@%(host)s:%(port)s/%(name)s' % d
486 |
|
487 |
|
488 |
def test_concurrently(times=2):
    """
    Add this decorator to small pieces of code that you want to test
    concurrently to make sure they don't raise exceptions when run at the
    same time. E.g., some Django views that do a SELECT and then a subsequent
    INSERT might fail when the INSERT assumes that the data has not changed
    since the SELECT.
    """
    def test_concurrently_decorator(test_func):
        def wrapper(*args, **kwargs):
            exceptions = []

            def call_test_func():
                # Record the exception for the main thread, then re-raise
                # so the worker thread's traceback is still reported.
                try:
                    test_func(*args, **kwargs)
                except Exception as e:
                    exceptions.append(e)
                    raise

            # BUG FIX: threads were created without a target, so the
            # decorated function was never actually executed.
            threads = [threading.Thread(target=call_test_func)
                       for _ in range(times)]
            for t in threads:
                t.start()
            for t in threads:
                t.join()
            if exceptions:
                # BUG FIX: the original applied '%' to a tuple of two
                # strings, which raises TypeError; build one message.
                raise Exception(
                    'test_concurrently intercepted %s exceptions: %s' %
                    (len(exceptions), exceptions))
        return wrapper
    return test_concurrently_decorator