root / snf-pithos-app / pithos / api / test / objects.py @ 800af189
History | View | Annotate | Download (45.1 kB)
1 |
#!/usr/bin/env python
|
---|---|
2 |
#coding=utf8
|
3 |
|
4 |
# Copyright 2011-2013 GRNET S.A. All rights reserved.
|
5 |
#
|
6 |
# Redistribution and use in source and binary forms, with or
|
7 |
# without modification, are permitted provided that the following
|
8 |
# conditions are met:
|
9 |
#
|
10 |
# 1. Redistributions of source code must retain the above
|
11 |
# copyright notice, this list of conditions and the following
|
12 |
# disclaimer.
|
13 |
#
|
14 |
# 2. Redistributions in binary form must reproduce the above
|
15 |
# copyright notice, this list of conditions and the following
|
16 |
# disclaimer in the documentation and/or other materials
|
17 |
# provided with the distribution.
|
18 |
#
|
19 |
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
|
20 |
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
21 |
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
22 |
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
|
23 |
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
24 |
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
25 |
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
|
26 |
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
|
27 |
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
28 |
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
29 |
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
30 |
# POSSIBILITY OF SUCH DAMAGE.
|
31 |
#
|
32 |
# The views and conclusions contained in the software and
|
33 |
# documentation are those of the authors and should not be
|
34 |
# interpreted as representing official policies, either expressed
|
35 |
# or implied, of GRNET S.A.
|
36 |
|
37 |
from collections import defaultdict |
38 |
from urllib import quote |
39 |
from functools import partial |
40 |
|
41 |
from pithos.api.test import (PithosAPITest, pithos_settings, |
42 |
AssertMappingInvariant, AssertUUidInvariant, |
43 |
TEST_BLOCK_SIZE, TEST_HASH_ALGORITHM, |
44 |
DATE_FORMATS) |
45 |
from pithos.api.test.util import md5_hash, merkle, strnextling, get_random_data |
46 |
|
47 |
from synnefo.lib import join_urls |
48 |
|
49 |
import django.utils.simplejson as json |
50 |
|
51 |
import random |
52 |
import re |
53 |
import datetime |
54 |
import time as _time |
55 |
|
56 |
# Pre-bind the test block size and hash algorithm so tests can call
# merkle(data) without repeating configuration.
merkle = partial(
    merkle, blocksize=TEST_BLOCK_SIZE, blockhash=TEST_HASH_ALGORITHM)
59 |
|
60 |
|
61 |
class ObjectGet(PithosAPITest):
    """Tests for object GET: versions, ranges, conditional requests and
    hashmaps."""

    def setUp(self):
        PithosAPITest.setUp(self)
        self.containers = ['c1', 'c2']

        # create some containers
        for c in self.containers:
            self.create_container(c)

        # upload files
        self.objects = defaultdict(list)
        self.objects['c1'].append(self.upload_object('c1')[0])

    def test_versions(self):
        c = 'c1'
        o = self.objects[c][0]
        url = join_urls(self.pithos_path, self.user, c, o)

        meta = {'HTTP_X_OBJECT_META_QUALITY': 'AAA'}
        r = self.post(url, content_type='', **meta)
        self.assertEqual(r.status_code, 202)

        url = join_urls(self.pithos_path, self.user, c, o)
        r = self.get('%s?version=list&format=json' % url)
        self.assertEqual(r.status_code, 200)
        l1 = json.loads(r.content)['versions']
        self.assertEqual(len(l1), 2)

        # update meta
        meta = {'HTTP_X_OBJECT_META_QUALITY': 'AB',
                'HTTP_X_OBJECT_META_STOCK': 'True'}
        r = self.post(url, content_type='', **meta)
        self.assertEqual(r.status_code, 202)

        # assert a newly created version has been created
        r = self.get('%s?version=list&format=json' % url)
        self.assertEqual(r.status_code, 200)
        l2 = json.loads(r.content)['versions']
        self.assertEqual(len(l2), len(l1) + 1)
        self.assertEqual(l2[:-1], l1)

        # the next-to-last version is the one before the meta update
        vserial, _ = l2[-2]
        self.assertEqual(self.get_object_meta(c, o, version=vserial),
                         {'X-Object-Meta-Quality': 'AAA'})

        # update data
        self.append_object_data(c, o)

        # assert a newly created version has been created
        r = self.get('%s?version=list&format=json' % url)
        self.assertEqual(r.status_code, 200)
        l3 = json.loads(r.content)['versions']
        self.assertEqual(len(l3), len(l2) + 1)
        self.assertEqual(l3[:-1], l2)

    def test_objects_with_trailing_spaces(self):
        # create object
        oname = self.upload_object('c1')[0]
        url = join_urls(self.pithos_path, self.user, 'c1', oname)

        # a trailing space must not resolve to the original object
        r = self.get(quote('%s ' % url))
        self.assertEqual(r.status_code, 404)

        # delete object
        self.delete(url)

        r = self.get(url)
        self.assertEqual(r.status_code, 404)

        # upload object with trailing space
        oname = self.upload_object('c1', quote('%s ' % get_random_data(8)))[0]

        url = join_urls(self.pithos_path, self.user, 'c1', oname)
        r = self.get(url)
        self.assertEqual(r.status_code, 200)

        # the name without the trailing space is a different object
        url = join_urls(self.pithos_path, self.user, 'c1', oname[:-1])
        r = self.get(url)
        self.assertEqual(r.status_code, 404)

    def test_get_partial(self):
        cname = self.containers[0]
        oname, odata = self.upload_object(cname, length=512)[:-1]
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.get(url, HTTP_RANGE='bytes=0-499')
        self.assertEqual(r.status_code, 206)
        data = r.content
        self.assertEqual(data, odata[:500])
        self.assertTrue('Content-Range' in r)
        self.assertEqual(r['Content-Range'], 'bytes 0-499/%s' % len(odata))
        self.assertTrue('Content-Type' in r)
        # BUGFIX: was assertTrue(r['Content-Type'], '...') which treats the
        # second argument as a failure message and asserts nothing
        self.assertEqual(r['Content-Type'], 'application/octet-stream')

    def test_get_final_500(self):
        cname = self.containers[0]
        oname, odata = self.upload_object(cname, length=512)[:-1]
        size = len(odata)
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.get(url, HTTP_RANGE='bytes=-500')
        self.assertEqual(r.status_code, 206)
        self.assertEqual(r.content, odata[-500:])
        self.assertTrue('Content-Range' in r)
        self.assertEqual(r['Content-Range'],
                         'bytes %s-%s/%s' % (size - 500, size - 1, size))
        self.assertTrue('Content-Type' in r)
        # BUGFIX: was a no-op two-argument assertTrue
        self.assertEqual(r['Content-Type'], 'application/octet-stream')

    def test_get_rest(self):
        cname = self.containers[0]
        oname, odata = self.upload_object(cname, length=512)[:-1]
        size = len(odata)
        url = join_urls(self.pithos_path, self.user, cname, oname)
        offset = len(odata) - random.randint(1, 512)
        r = self.get(url, HTTP_RANGE='bytes=%s-' % offset)
        self.assertEqual(r.status_code, 206)
        self.assertEqual(r.content, odata[offset:])
        self.assertTrue('Content-Range' in r)
        self.assertEqual(r['Content-Range'],
                         'bytes %s-%s/%s' % (offset, size - 1, size))
        self.assertTrue('Content-Type' in r)
        # BUGFIX: was a no-op two-argument assertTrue
        self.assertEqual(r['Content-Type'], 'application/octet-stream')

    def test_get_range_not_satisfiable(self):
        cname = self.containers[0]
        oname, odata = self.upload_object(cname, length=512)[:-1]
        url = join_urls(self.pithos_path, self.user, cname, oname)

        # TODO
        #r = self.get(url, HTTP_RANGE='bytes=50-10')
        #self.assertEqual(r.status_code, 416)

        offset = len(odata) + 1
        r = self.get(url, HTTP_RANGE='bytes=0-%s' % offset)
        self.assertEqual(r.status_code, 416)

    def test_multiple_range(self):
        cname = self.containers[0]
        oname, odata = self.upload_object(cname)[:-1]
        url = join_urls(self.pithos_path, self.user, cname, oname)

        l = ['0-499', '-500', '1000-']
        ranges = 'bytes=%s' % ','.join(l)
        r = self.get(url, HTTP_RANGE=ranges)
        self.assertEqual(r.status_code, 206)
        self.assertTrue('content-type' in r)
        p = re.compile(
            r'multipart/byteranges; boundary=(?P<boundary>[0-9a-f]{32}\Z)',
            re.I)
        m = p.match(r['content-type'])
        if m is None:
            self.fail('Invalid multiple range content type')
        boundary = m.groupdict()['boundary']
        cparts = r.content.split('--%s' % boundary)[1:-1]

        # assert content parts length
        self.assertEqual(len(cparts), len(l))

        # for each content part assert headers
        for i, cpart in enumerate(cparts):
            content = cpart.split('\r\n')
            headers = content[1:3]
            content_range = headers[0].split(': ')
            self.assertEqual(content_range[0], 'Content-Range')

            # BUGFIX: the spec used to shadow the response variable `r`,
            # and the both-empty branch left start/end undefined (stale
            # values / NameError); skip such a spec instead
            spec = l[i].split('-')
            if not spec[0] and not spec[1]:
                continue
            elif not spec[0]:
                start = len(odata) - int(spec[1])
                end = len(odata)
            elif not spec[1]:
                start = int(spec[0])
                end = len(odata)
            else:
                start = int(spec[0])
                end = int(spec[1]) + 1
            fdata = odata[start:end]
            sdata = '\r\n'.join(content[4:-1])
            self.assertEqual(len(fdata), len(sdata))
            self.assertEqual(fdata, sdata)

    def test_multiple_range_not_satisfiable(self):
        # perform get with multiple range
        cname = self.containers[0]
        oname, odata = self.upload_object(cname)[:-1]
        out_of_range = len(odata) + 1
        l = ['0-499', '-500', '%d-' % out_of_range]
        ranges = 'bytes=%s' % ','.join(l)
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.get(url, HTTP_RANGE=ranges)
        self.assertEqual(r.status_code, 416)

    def test_get_if_match(self):
        cname = self.containers[0]
        oname, odata = self.upload_object(cname)[:-1]

        # perform get with If-Match
        url = join_urls(self.pithos_path, self.user, cname, oname)

        if pithos_settings.UPDATE_MD5:
            etag = md5_hash(odata)
        else:
            etag = merkle(odata)

        r = self.get(url, HTTP_IF_MATCH=etag)

        # assert get success
        self.assertEqual(r.status_code, 200)

        # assert response content
        self.assertEqual(r.content, odata)

    def test_get_if_match_star(self):
        cname = self.containers[0]
        oname, odata = self.upload_object(cname)[:-1]

        # perform get with If-Match *
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.get(url, HTTP_IF_MATCH='*')

        # assert get success
        self.assertEqual(r.status_code, 200)

        # assert response content
        self.assertEqual(r.content, odata)

    def test_get_multiple_if_match(self):
        cname = self.containers[0]
        oname, odata = self.upload_object(cname)[:-1]

        # perform get with If-Match
        url = join_urls(self.pithos_path, self.user, cname, oname)

        if pithos_settings.UPDATE_MD5:
            etag = md5_hash(odata)
        else:
            etag = merkle(odata)

        # one matching etag among several is enough
        quoted = lambda s: '"%s"' % s
        r = self.get(url, HTTP_IF_MATCH=','.join(
            [quoted(etag), quoted(get_random_data(64))]))

        # assert get success
        self.assertEqual(r.status_code, 200)

        # assert response content
        self.assertEqual(r.content, odata)

    def test_if_match_precondition_failed(self):
        cname = self.containers[0]
        oname, odata = self.upload_object(cname)[:-1]

        # perform get with a non-matching If-Match
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.get(url, HTTP_IF_MATCH=get_random_data(8))
        self.assertEqual(r.status_code, 412)

    def test_if_none_match(self):
        # upload object
        cname = self.containers[0]
        oname, odata = self.upload_object(cname)[:-1]

        if pithos_settings.UPDATE_MD5:
            etag = md5_hash(odata)
        else:
            etag = merkle(odata)

        # perform get with If-None-Match
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.get(url, HTTP_IF_NONE_MATCH=etag)

        # assert not modified
        self.assertEqual(r.status_code, 304)

        # update object data
        r = self.append_object_data(cname, oname)[-1]
        self.assertTrue(etag != r['ETag'])

        # perform get with If-None-Match
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.get(url, HTTP_IF_NONE_MATCH=etag)

        # assert get success
        self.assertEqual(r.status_code, 200)

    def test_if_none_match_star(self):
        # upload object
        cname = self.containers[0]
        oname, odata = self.upload_object(cname)[:-1]

        # perform get with If-None-Match with star
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.get(url, HTTP_IF_NONE_MATCH='*')

        self.assertEqual(r.status_code, 304)

    def test_if_modified_since(self):
        # upload object
        cname = self.containers[0]
        oname, odata = self.upload_object(cname)[:-1]
        object_info = self.get_object_info(cname, oname)
        last_modified = object_info['Last-Modified']
        t1 = datetime.datetime.strptime(last_modified, DATE_FORMATS[-1])
        # materialize since the formats are iterated twice
        t1_formats = list(map(t1.strftime, DATE_FORMATS))

        # Check not modified since
        url = join_urls(self.pithos_path, self.user, cname, oname)
        for t in t1_formats:
            r = self.get(url, HTTP_IF_MODIFIED_SINCE=t)
            self.assertEqual(r.status_code, 304)

        _time.sleep(1)

        # update object data
        appended_data = self.append_object_data(cname, oname)[1]

        # Check modified since
        url = join_urls(self.pithos_path, self.user, cname, oname)
        for t in t1_formats:
            r = self.get(url, HTTP_IF_MODIFIED_SINCE=t)
            self.assertEqual(r.status_code, 200)
            self.assertEqual(r.content, odata + appended_data)

    def test_if_modified_since_invalid_date(self):
        cname = self.containers[0]
        oname, odata = self.upload_object(cname)[:-1]
        url = join_urls(self.pithos_path, self.user, cname, oname)
        # an unparsable date must be ignored, i.e. a plain 200 GET
        r = self.get(url, HTTP_IF_MODIFIED_SINCE='Monday')
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.content, odata)

    def test_if_not_modified_since(self):
        cname = self.containers[0]
        oname, odata = self.upload_object(cname)[:-1]
        url = join_urls(self.pithos_path, self.user, cname, oname)
        object_info = self.get_object_info(cname, oname)
        last_modified = object_info['Last-Modified']
        t = datetime.datetime.strptime(last_modified, DATE_FORMATS[-1])

        # Check unmodified
        t1 = t + datetime.timedelta(seconds=1)
        t1_formats = list(map(t1.strftime, DATE_FORMATS))
        for t in t1_formats:
            r = self.get(url, HTTP_IF_UNMODIFIED_SINCE=t)
            self.assertEqual(r.status_code, 200)
            # BUGFIX: was assertEqual(odata, odata) - a tautology that
            # never checked the response body
            self.assertEqual(r.content, odata)

        # modify object
        _time.sleep(2)
        self.append_object_data(cname, oname)

        object_info = self.get_object_info(cname, oname)
        last_modified = object_info['Last-Modified']
        t = datetime.datetime.strptime(last_modified, DATE_FORMATS[-1])
        t2 = t - datetime.timedelta(seconds=1)
        t2_formats = list(map(t2.strftime, DATE_FORMATS))

        # check modified
        for t in t2_formats:
            r = self.get(url, HTTP_IF_UNMODIFIED_SINCE=t)
            self.assertEqual(r.status_code, 412)

        # modify account: update object meta
        _time.sleep(1)
        self.update_object_meta(cname, oname, {'foo': 'bar'})

        object_info = self.get_object_info(cname, oname)
        last_modified = object_info['Last-Modified']
        t = datetime.datetime.strptime(last_modified, DATE_FORMATS[-1])
        t3 = t - datetime.timedelta(seconds=1)
        t3_formats = list(map(t3.strftime, DATE_FORMATS))

        # check modified
        for t in t3_formats:
            r = self.get(url, HTTP_IF_UNMODIFIED_SINCE=t)
            self.assertEqual(r.status_code, 412)

    def test_if_unmodified_since(self):
        cname = self.containers[0]
        oname, odata = self.upload_object(cname)[:-1]
        url = join_urls(self.pithos_path, self.user, cname, oname)
        object_info = self.get_object_info(cname, oname)
        last_modified = object_info['Last-Modified']
        t = datetime.datetime.strptime(last_modified, DATE_FORMATS[-1])
        t = t + datetime.timedelta(seconds=1)
        t_formats = map(t.strftime, DATE_FORMATS)

        for tf in t_formats:
            r = self.get(url, HTTP_IF_UNMODIFIED_SINCE=tf)
            self.assertEqual(r.status_code, 200)
            self.assertEqual(r.content, odata)

    def test_if_unmodified_since_precondition_failed(self):
        cname = self.containers[0]
        oname, odata = self.upload_object(cname)[:-1]
        url = join_urls(self.pithos_path, self.user, cname, oname)
        object_info = self.get_object_info(cname, oname)
        last_modified = object_info['Last-Modified']
        t = datetime.datetime.strptime(last_modified, DATE_FORMATS[-1])
        t = t - datetime.timedelta(seconds=1)
        t_formats = map(t.strftime, DATE_FORMATS)

        for tf in t_formats:
            r = self.get(url, HTTP_IF_UNMODIFIED_SINCE=tf)
            self.assertEqual(r.status_code, 412)

    def test_hashes(self):
        l = random.randint(2, 5) * pithos_settings.BACKEND_BLOCK_SIZE
        cname = self.containers[0]
        oname, odata = self.upload_object(cname, length=l)[:-1]
        size = len(odata)

        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.get('%s?format=json&hashmap' % url)
        self.assertEqual(r.status_code, 200)
        body = json.loads(r.content)

        hashes = body['hashes']
        block_size = body['block_size']
        # BUGFIX: the block count condition used `size / block_size == 0`
        # instead of `size % block_size == 0`, and the check was a no-op
        # two-argument assertTrue instead of assertEqual
        block_num = size // block_size if size % block_size == 0 else \
            size // block_size + 1
        self.assertEqual(len(hashes), block_num)
        # verify each block hash against the locally computed merkle hash
        for i, h in enumerate(hashes):
            start = i * block_size
            end = (i + 1) * block_size
            block_hash = merkle(odata[start:end])
            self.assertEqual(h, block_hash)
493 |
|
494 |
class ObjectPut(PithosAPITest):
    """Tests for object PUT: plain uploads, quotas, manifests and hashmaps."""

    def setUp(self):
        PithosAPITest.setUp(self)
        self.container = get_random_data(8)
        self.create_container(self.container)

    def test_upload(self):
        cname = self.container
        oname = get_random_data(8)
        data = get_random_data()
        meta = {'test': 'test1'}
        headers = dict(('HTTP_X_OBJECT_META_%s' % k.upper(), v)
                       for k, v in meta.iteritems())
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.put(url, data=data, content_type='application/pdf', **headers)
        self.assertEqual(r.status_code, 201)
        self.assertTrue('ETag' in r)
        self.assertTrue('X-Object-Version' in r)

        info = self.get_object_info(cname, oname)

        # assert object meta
        self.assertTrue('X-Object-Meta-Test' in info)
        self.assertEqual(info['X-Object-Meta-Test'], 'test1')

        # assert content-type
        self.assertEqual(info['content-type'], 'application/pdf')

        # assert uploaded content
        r = self.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.content, data)

    def test_maximum_upload_size_exceeds(self):
        cname = self.container
        oname = get_random_data(8)

        # set container quota to 100
        url = join_urls(self.pithos_path, self.user, cname)
        r = self.post(url, HTTP_X_CONTAINER_POLICY_QUOTA='100')
        self.assertEqual(r.status_code, 202)

        info = self.get_container_info(cname)
        # one byte more than the quota allows
        length = int(info['X-Container-Policy-Quota']) + 1

        data = get_random_data(length)
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.put(url, data=data)
        self.assertEqual(r.status_code, 413)

    def test_upload_with_name_containing_slash(self):
        cname = self.container
        oname = '/%s' % get_random_data(8)
        data = get_random_data()
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.put(url, data=data)
        self.assertEqual(r.status_code, 201)
        self.assertTrue('ETag' in r)
        self.assertTrue('X-Object-Version' in r)

        r = self.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.content, data)

    def test_upload_unprocessable_entity(self):
        cname = self.container
        oname = get_random_data(8)
        data = get_random_data()
        # an ETag that cannot match the uploaded content
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.put(url, data=data, HTTP_ETAG='123')
        self.assertEqual(r.status_code, 422)

    # def test_chunked_transfer(self):
    #     cname = self.container
    #     oname = '/%s' % get_random_data(8)
    #     data = get_random_data()
    #     url = join_urls(self.pithos_path, self.user, cname, oname)
    #     r = self.put(url, data=data, HTTP_TRANSFER_ENCODING='chunked')
    #     self.assertEqual(r.status_code, 201)
    #     self.assertTrue('ETag' in r)
    #     self.assertTrue('X-Object-Version' in r)

    def test_manifestation(self):
        cname = self.container
        prefix = 'myobject/'
        data = ''
        # upload a random number of parts under the shared prefix
        for i in range(random.randint(2, 10)):
            part = '%s%d' % (prefix, i)
            data += self.upload_object(cname, oname=part)[1]

        manifest = '%s/%s' % (cname, prefix)
        oname = get_random_data(8)
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.put(url, data='', HTTP_X_OBJECT_MANIFEST=manifest)
        self.assertEqual(r.status_code, 201)

        # assert object exists
        r = self.get(url)
        self.assertEqual(r.status_code, 200)

        # assert its content
        self.assertEqual(r.content, data)

        # invalid manifestation
        invalid_manifestation = '%s/%s' % (cname, get_random_data(8))
        # BUGFIX: the PUT response used to be silently dropped
        r = self.put(url, data='', HTTP_X_OBJECT_MANIFEST=invalid_manifestation)
        self.assertEqual(r.status_code, 201)
        # a manifest matching no parts yields an empty body
        r = self.get(url)
        self.assertEqual(r.content, '')

    def test_create_zero_length_object(self):
        cname = self.container
        oname = get_random_data(8)
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.put(url, data='')
        self.assertEqual(r.status_code, 201)

        r = self.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(int(r['Content-Length']), 0)
        self.assertEqual(r.content, '')

        r = self.get('%s?hashmap=&format=json' % url)
        self.assertEqual(r.status_code, 200)
        body = json.loads(r.content)
        hashes = body['hashes']
        # the hashmap of an empty object is the hash of the empty string
        hash = merkle('')
        self.assertEqual(hashes, [hash])

    def test_create_object_by_hashmap(self):
        cname = self.container
        block_size = pithos_settings.BACKEND_BLOCK_SIZE

        # upload an object spanning more than one block
        oname, data = self.upload_object(cname, length=block_size + 1)[:-1]
        # get its hashmap
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.get('%s?hashmap=&format=json' % url)
        # BUGFIX: the response was fed back without checking it succeeded
        self.assertEqual(r.status_code, 200)

        oname = get_random_data(8)
        url = join_urls(self.pithos_path, self.user, cname, oname)
        r = self.put('%s?hashmap=' % url, data=r.content)
        self.assertEqual(r.status_code, 201)

        r = self.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.content, data)
641 |
|
642 |
class ObjectCopy(PithosAPITest):
    """Tests for object copy via PUT with X-Copy-From."""

    def setUp(self):
        PithosAPITest.setUp(self)
        # one container with a single uploaded object to copy from
        self.container = 'c1'
        self.create_container(self.container)
        self.object, self.data = self.upload_object(self.container)[:-1]

        url = join_urls(
            self.pithos_path, self.user, self.container, self.object)
        r = self.head(url)
        # remember the content hash to verify copies share it
        self.etag = r['X-Object-Hash']

    def test_copy(self):
        # the source object's info must be unaffected by the copy
        with AssertMappingInvariant(self.get_object_info, self.container,
                                    self.object):
            # copy object
            oname = get_random_data(8)
            url = join_urls(self.pithos_path, self.user, self.container, oname)
            r = self.put(url, data='', HTTP_X_OBJECT_META_TEST='testcopy',
                         HTTP_X_COPY_FROM='/%s/%s' % (
                             self.container, self.object))

            # assert copy success
            self.assertEqual(r.status_code, 201)

            # assert access the new object
            r = self.head(url)
            self.assertEqual(r.status_code, 200)
            self.assertTrue('X-Object-Meta-Test' in r)
            self.assertEqual(r['X-Object-Meta-Test'], 'testcopy')

            # assert etag is the same
            self.assertTrue('X-Object-Hash' in r)
            self.assertEqual(r['X-Object-Hash'], self.etag)

    def test_copy_from_different_container(self):
        # copy into a freshly created second container
        cname = 'c2'
        self.create_container(cname)
        with AssertMappingInvariant(self.get_object_info, self.container,
                                    self.object):
            oname = get_random_data(8)
            url = join_urls(self.pithos_path, self.user, cname, oname)
            r = self.put(url, data='', HTTP_X_OBJECT_META_TEST='testcopy',
                         HTTP_X_COPY_FROM='/%s/%s' % (
                             self.container, self.object))

            # assert copy success
            self.assertEqual(r.status_code, 201)

            # assert access the new object
            r = self.head(url)
            self.assertEqual(r.status_code, 200)
            self.assertTrue('X-Object-Meta-Test' in r)
            self.assertEqual(r['X-Object-Meta-Test'], 'testcopy')

            # assert etag is the same
            self.assertTrue('X-Object-Hash' in r)
            self.assertEqual(r['X-Object-Hash'], self.etag)

    def test_copy_invalid(self):
        # copy from non-existent object
        oname = get_random_data(8)
        url = join_urls(self.pithos_path, self.user, self.container, oname)
        r = self.put(url, data='', HTTP_X_OBJECT_META_TEST='testcopy',
                     HTTP_X_COPY_FROM='/%s/%s' % (
                         self.container, get_random_data(8)))
        self.assertEqual(r.status_code, 404)

        # copy from non-existent container
        oname = get_random_data(8)
        url = join_urls(self.pithos_path, self.user, self.container, oname)
        r = self.put(url, data='', HTTP_X_OBJECT_META_TEST='testcopy',
                     HTTP_X_COPY_FROM='/%s/%s' % (
                         get_random_data(8), self.object))
        self.assertEqual(r.status_code, 404)

    def test_copy_dir(self):
        # build a small tree: folder/subfolder plus one object in each
        folder = self.create_folder(self.container)[0]
        subfolder = self.create_folder(
            self.container, oname='%s/%s' % (folder, get_random_data(8)))[0]
        objects = [subfolder]
        append = objects.append
        append(self.upload_object(self.container,
                                  '%s/%s' % (folder, get_random_data(8)),
                                  HTTP_X_OBJECT_META_DEPTH='1')[0])
        append(self.upload_object(self.container,
                                  '%s/%s' % (subfolder, get_random_data(8)),
                                  HTTP_X_OBJECT_META_DEPTH='2')[0])
        # an object lexicographically after the folder prefix; it must
        # NOT be swept up by the recursive copy
        other = self.upload_object(self.container, strnextling(folder))[0]

        # copy dir (delimiter=/ triggers recursive copy)
        copy_folder = self.create_folder(self.container)[0]
        url = join_urls(self.pithos_path, self.user, self.container,
                        copy_folder)
        r = self.put('%s?delimiter=/' % url, data='',
                     HTTP_X_COPY_FROM='/%s/%s' % (self.container, folder))
        self.assertEqual(r.status_code, 201)

        for obj in objects:
            # assert object exists
            url = join_urls(self.pithos_path, self.user, self.container,
                            obj.replace(folder, copy_folder))
            r = self.head(url)
            self.assertEqual(r.status_code, 200)

            # assert metadata: the copy carries the source object's meta
            meta = self.get_object_meta(self.container, obj)
            for k in meta.keys():
                self.assertTrue(k in r)
                self.assertEqual(r[k], meta[k])

        # assert other has not been created under copy folder
        url = join_urls(self.pithos_path, self.user, self.container,
                        '%s/%s' % (copy_folder,
                                   other.replace(folder, copy_folder)))
        r = self.head(url)
        self.assertEqual(r.status_code, 404)
760 |
|
761 |
class ObjectMove(PithosAPITest):
    """Tests for object move via PUT with X-Move-From."""

    def setUp(self):
        PithosAPITest.setUp(self)
        self.container = 'c1'
        self.create_container(self.container)
        self.object, self.data = self.upload_object(self.container)[:-1]

        url = join_urls(
            self.pithos_path, self.user, self.container, self.object)
        r = self.head(url)
        # remember the content hash of the source object
        self.etag = r['X-Object-Hash']

    def test_move(self):
        # move object
        oname = get_random_data(8)
        url = join_urls(self.pithos_path, self.user, self.container, oname)
        r = self.put(url, data='', HTTP_X_OBJECT_META_TEST='testcopy',
                     HTTP_X_MOVE_FROM='/%s/%s' % (
                         self.container, self.object))

        # assert move success
        self.assertEqual(r.status_code, 201)

        # assert access the new object
        r = self.head(url)
        self.assertEqual(r.status_code, 200)
        self.assertTrue('X-Object-Meta-Test' in r)
        self.assertEqual(r['X-Object-Meta-Test'], 'testcopy')

        # assert etag is the same
        self.assertTrue('X-Object-Hash' in r)

        # assert the initial object has been deleted
        url = join_urls(self.pithos_path, self.user, self.container,
                        self.object)
        r = self.head(url)
        self.assertEqual(r.status_code, 404)

    def test_move_dir(self):
        # build a small tree: folder/subfolder plus one object in each,
        # remembering the expected metadata per object
        folder = self.create_folder(self.container)[0]
        subfolder = self.create_folder(
            self.container, oname='%s/%s' % (folder, get_random_data(8)))[0]
        objects = [subfolder]
        append = objects.append
        meta = {}
        meta[objects[0]] = {}
        append(self.upload_object(self.container,
                                  '%s/%s' % (folder, get_random_data(8)),
                                  HTTP_X_OBJECT_META_DEPTH='1')[0])
        meta[objects[1]] = {'X-Object-Meta-Depth': '1'}
        append(self.upload_object(self.container,
                                  '%s/%s' % (subfolder, get_random_data(8)),
                                  HTTP_X_OBJECT_META_DEPTH='2')[0])
        # BUGFIX: this used to assign meta[objects[1]] again, clobbering
        # the depth-1 entry and leaving the depth-2 object unrecorded
        meta[objects[2]] = {'X-Object-Meta-Depth': '2'}
        # an object lexicographically after the folder prefix; it must
        # NOT be swept up by the recursive move
        other = self.upload_object(self.container, strnextling(folder))[0]

        # move dir (delimiter=/ triggers recursive move)
        copy_folder = self.create_folder(self.container)[0]
        url = join_urls(self.pithos_path, self.user, self.container,
                        copy_folder)
        r = self.put('%s?delimiter=/' % url, data='',
                     HTTP_X_MOVE_FROM='/%s/%s' % (self.container, folder))
        self.assertEqual(r.status_code, 201)

        for obj in objects:
            # assert initial object does not exist
            url = join_urls(self.pithos_path, self.user, self.container, obj)
            r = self.head(url)
            self.assertEqual(r.status_code, 404)

            # assert new object was created
            url = join_urls(self.pithos_path, self.user, self.container,
                            obj.replace(folder, copy_folder))
            r = self.head(url)
            self.assertEqual(r.status_code, 200)

            # # assert metadata
            # for k in meta[obj].keys():
            #     self.assertTrue(k in r)
            #     self.assertEqual(r[k], meta[obj][k])

        # assert other has not been created under copy folder
        url = join_urls(self.pithos_path, self.user, self.container,
                        '%s/%s' % (copy_folder,
                                   other.replace(folder, copy_folder)))
        r = self.head(url)
        self.assertEqual(r.status_code, 404)
849 |
|
850 |
class ObjectPost(PithosAPITest):
    """Tests for the object POST API call: metadata updates, partial
    (ranged) content updates, appends and updates sourced from another
    object."""

    def setUp(self):
        # Fresh container with one pre-uploaded object per test.
        PithosAPITest.setUp(self)
        self.container = 'c1'
        self.create_container(self.container)
        self.object, self.object_data = self.upload_object(self.container)[:2]

    def test_update_meta(self):
        """POST updates object metadata; oversized header keys/values are
        rejected with 400; the object UUID stays invariant throughout."""
        with AssertUUidInvariant(self.get_object_info,
                                 self.container,
                                 self.object):
            # update metadata
            d = {'a' * 114: 'b' * 256}
            kwargs = dict(('HTTP_X_OBJECT_META_%s' % k, v) for
                          k, v in d.items())
            url = join_urls(self.pithos_path, self.user, self.container,
                            self.object)
            r = self.post(url, content_type='', **kwargs)
            self.assertEqual(r.status_code, 202)

            # assert metadata have been updated
            meta = self.get_object_meta(self.container, self.object)

            for k, v in d.items():
                key = 'X-Object-Meta-%s' % k.title()
                self.assertTrue(key in meta)
                # BUG FIX: was assertTrue(meta[key], v) — that passes `v`
                # as the assertion *message*, so the value was never
                # actually compared.
                self.assertEqual(meta[key], v)

            # Header key too large
            d = {'a' * 115: 'b' * 256}
            kwargs = dict(('HTTP_X_OBJECT_META_%s' % k, v) for
                          k, v in d.items())
            r = self.post(url, content_type='', **kwargs)
            self.assertEqual(r.status_code, 400)

            # Header value too large
            d = {'a' * 114: 'b' * 257}
            kwargs = dict(('HTTP_X_OBJECT_META_%s' % k, v) for
                          k, v in d.items())
            r = self.post(url, content_type='', **kwargs)
            self.assertEqual(r.status_code, 400)

            # TODO: re-enable the disabled utf-8 metadata checks (same
            # scenarios with percent-encoded multi-byte keys/values).

    def test_update_object(self):
        """Overwrite a random mid-object byte range via a Content-Range
        POST and verify the resulting content and ETag."""
        block_size = pithos_settings.BACKEND_BLOCK_SIZE
        oname, odata = self.upload_object(
            self.container, length=random.randint(
                block_size + 1, 2 * block_size))[:2]

        length = len(odata)
        first_byte_pos = random.randint(1, block_size)
        last_byte_pos = random.randint(block_size + 1, length - 1)
        # renamed from `range`: do not shadow the builtin
        content_range = 'bytes %s-%s/%s' % (first_byte_pos, last_byte_pos,
                                            length)
        kwargs = {'content_type': 'application/octet-stream',
                  'HTTP_CONTENT_RANGE': content_range}

        url = join_urls(self.pithos_path, self.user, self.container, oname)
        # renamed from `partial`: do not shadow functools.partial
        partial_length = last_byte_pos - first_byte_pos + 1
        data = get_random_data(partial_length)
        r = self.post(url, data=data, **kwargs)

        self.assertEqual(r.status_code, 204)
        self.assertTrue('ETag' in r)
        # BUG FIX: was odata.replace(slice, data), which substitutes the
        # first *occurrence* of the byte pattern — not necessarily the
        # requested position. Splice by offset instead.
        updated_data = (odata[:first_byte_pos] + data +
                        odata[last_byte_pos + 1:])
        if pithos_settings.UPDATE_MD5:
            etag = md5_hash(updated_data)
        else:
            etag = merkle(updated_data)
        #self.assertEqual(r['ETag'], etag)

        # check modified object
        r = self.get(url)

        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.content, updated_data)
        self.assertEqual(etag, r['ETag'])

    def test_update_object_divided_by_blocksize(self):
        """Overwrite exactly the second backend block (range aligned on
        block boundaries) and verify content and ETag."""
        block_size = pithos_settings.BACKEND_BLOCK_SIZE
        oname, odata = self.upload_object(self.container,
                                          length=2 * block_size)[:2]

        length = len(odata)
        first_byte_pos = block_size
        last_byte_pos = 2 * block_size - 1
        content_range = 'bytes %s-%s/%s' % (first_byte_pos, last_byte_pos,
                                            length)
        kwargs = {'content_type': 'application/octet-stream',
                  'HTTP_CONTENT_RANGE': content_range}

        url = join_urls(self.pithos_path, self.user, self.container, oname)
        partial_length = last_byte_pos - first_byte_pos + 1
        data = get_random_data(partial_length)
        r = self.post(url, data=data, **kwargs)

        self.assertEqual(r.status_code, 204)
        self.assertTrue('ETag' in r)
        # BUG FIX: splice by offset instead of str.replace (see
        # test_update_object).
        updated_data = (odata[:first_byte_pos] + data +
                        odata[last_byte_pos + 1:])
        if pithos_settings.UPDATE_MD5:
            etag = md5_hash(updated_data)
        else:
            etag = merkle(updated_data)
        #self.assertEqual(r['ETag'], etag)

        # check modified object
        r = self.get(url)

        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.content, updated_data)
        self.assertEqual(etag, r['ETag'])

    def test_update_object_invalid_content_length(self):
        """A Content-Length that disagrees with the supplied body must be
        rejected with 400."""
        block_size = pithos_settings.BACKEND_BLOCK_SIZE
        oname, odata = self.upload_object(
            self.container, length=random.randint(
                block_size + 1, 2 * block_size))[:2]

        length = len(odata)
        first_byte_pos = random.randint(1, block_size)
        last_byte_pos = random.randint(block_size + 1, length - 1)
        partial_length = last_byte_pos - first_byte_pos + 1
        data = get_random_data(partial_length)
        content_range = 'bytes %s-%s/%s' % (first_byte_pos, last_byte_pos,
                                            length)
        kwargs = {'content_type': 'application/octet-stream',
                  'HTTP_CONTENT_RANGE': content_range,
                  # deliberately off by one w.r.t. len(data)
                  'CONTENT_LENGTH': partial_length + 1}

        url = join_urls(self.pithos_path, self.user, self.container, oname)
        r = self.post(url, data=data, **kwargs)

        self.assertEqual(r.status_code, 400)

    def test_update_object_invalid_range(self):
        """A Content-Range whose end precedes its start must yield 416."""
        block_size = pithos_settings.BACKEND_BLOCK_SIZE
        oname, odata = self.upload_object(
            self.container, length=random.randint(block_size + 1,
                                                  2 * block_size))[:2]

        length = len(odata)
        first_byte_pos = random.randint(1, block_size)
        last_byte_pos = first_byte_pos - 1
        content_range = 'bytes %s-%s/%s' % (first_byte_pos, last_byte_pos,
                                            length)
        kwargs = {'content_type': 'application/octet-stream',
                  'HTTP_CONTENT_RANGE': content_range}

        url = join_urls(self.pithos_path, self.user, self.container, oname)
        r = self.post(url, data=get_random_data(), **kwargs)

        self.assertEqual(r.status_code, 416)

    def test_update_object_out_of_limits(self):
        """A Content-Range extending past the object's length must yield
        416."""
        block_size = pithos_settings.BACKEND_BLOCK_SIZE
        oname, odata = self.upload_object(
            self.container, length=random.randint(block_size + 1,
                                                  2 * block_size))[:2]

        length = len(odata)
        first_byte_pos = random.randint(1, block_size)
        last_byte_pos = length + 1
        content_range = 'bytes %s-%s/%s' % (first_byte_pos, last_byte_pos,
                                            length)
        kwargs = {'content_type': 'application/octet-stream',
                  'HTTP_CONTENT_RANGE': content_range}

        url = join_urls(self.pithos_path, self.user, self.container, oname)
        r = self.post(url, data=get_random_data(), **kwargs)

        self.assertEqual(r.status_code, 416)

    def test_append(self):
        """POST with Content-Range 'bytes */*' appends to the object."""
        data = get_random_data()
        length = len(data)
        url = join_urls(self.pithos_path, self.user, self.container,
                        self.object)
        r = self.post(url, data=data, content_type='application/octet-stream',
                      HTTP_CONTENT_LENGTH=str(length),
                      HTTP_CONTENT_RANGE='bytes */*')
        self.assertEqual(r.status_code, 204)

        r = self.get(url)
        content = r.content
        self.assertEqual(len(content), len(self.object_data) + length)
        self.assertEqual(content, self.object_data + data)

    # TODO Fix the test
    def _test_update_with_chunked_transfer(self):
        """Update the object head via chunked transfer encoding
        (currently disabled — see TODO above)."""
        data = get_random_data()
        length = len(data)

        url = join_urls(self.pithos_path, self.user, self.container,
                        self.object)
        r = self.post(url, data=data, content_type='application/octet-stream',
                      HTTP_CONTENT_RANGE='bytes 0-/*',
                      HTTP_TRANSFER_ENCODING='chunked')
        self.assertEqual(r.status_code, 204)

        # check modified object
        r = self.get(url)
        content = r.content
        self.assertEqual(content[0:length], data)
        self.assertEqual(content[length:], self.object_data[length:])

    def test_update_from_other_object(self):
        """Server-side update of a zero-length object from a source
        object: content and X-Object-Hash match; UUIDs differ."""
        src = self.object
        dest = get_random_data(8)

        url = join_urls(self.pithos_path, self.user, self.container, src)
        r = self.get(url)
        source_data = r.content
        source_meta = self.get_object_info(self.container, src)

        # update zero length object
        url = join_urls(self.pithos_path, self.user, self.container, dest)
        r = self.put(url, data='')
        self.assertEqual(r.status_code, 201)

        r = self.post(url,
                      HTTP_CONTENT_RANGE='bytes */*',
                      HTTP_X_SOURCE_OBJECT='/%s/%s' % (self.container, src))
        self.assertEqual(r.status_code, 204)

        r = self.get(url)
        dest_data = r.content
        dest_meta = self.get_object_info(self.container, dest)

        self.assertEqual(source_data, dest_data)
        #self.assertEqual(source_meta['ETag'], dest_meta['ETag'])
        self.assertEqual(source_meta['X-Object-Hash'],
                         dest_meta['X-Object-Hash'])
        self.assertTrue(
            source_meta['X-Object-UUID'] != dest_meta['X-Object-UUID'])

    def test_update_range_from_other_object(self):
        """Server-side update of a random byte range of an object from a
        source object; the rest of the destination is untouched."""
        src = self.object
        dest = get_random_data(8)

        url = join_urls(self.pithos_path, self.user, self.container, src)
        r = self.get(url)
        source_data = r.content

        # update zero length object
        url = join_urls(self.pithos_path, self.user, self.container, dest)
        initial_data = get_random_data()
        length = len(initial_data)
        r = self.put(url, data=initial_data)
        self.assertEqual(r.status_code, 201)

        offset = random.randint(1, length - 2)
        upto = random.randint(offset, length - 1)
        r = self.post(url,
                      HTTP_CONTENT_RANGE='bytes %s-%s/*' % (offset, upto),
                      HTTP_X_SOURCE_OBJECT='/%s/%s' % (self.container, src))
        self.assertEqual(r.status_code, 204)

        r = self.get(url)
        content = r.content
        self.assertEqual(content, (initial_data[:offset] +
                                   source_data[:upto - offset + 1] +
                                   initial_data[upto + 1:]))
1141 |
class ObjectDelete(PithosAPITest):
    """Tests for the object DELETE API call."""

    def setUp(self):
        # Fresh container with one pre-uploaded object per test.
        PithosAPITest.setUp(self)
        self.container = 'c1'
        self.create_container(self.container)
        self.object, self.object_data = self.upload_object(self.container)[:2]

    def test_delete(self):
        """Deleting an existing object returns 204 and the object is
        subsequently gone (HEAD returns 404)."""
        url = join_urls(self.pithos_path, self.user, self.container,
                        self.object)
        r = self.delete(url)
        self.assertEqual(r.status_code, 204)

        r = self.head(url)
        self.assertEqual(r.status_code, 404)

    def test_delete_non_existent(self):
        """Deleting a missing object returns 404."""
        url = join_urls(self.pithos_path, self.user, self.container,
                        get_random_data(8))
        r = self.delete(url)
        self.assertEqual(r.status_code, 404)

    def test_delete_dir(self):
        """DELETE with delimiter=/ removes the folder and everything
        under it, while a sibling object outside the folder survives."""
        folder = self.create_folder(self.container)[0]
        subfolder = self.create_folder(
            self.container, oname='%s/%s' % (folder, get_random_data(8)))[0]
        objects = [subfolder]
        append = objects.append
        meta = {}
        meta[objects[0]] = {}
        append(self.upload_object(self.container,
                                  '%s/%s' % (folder, get_random_data(8)),
                                  HTTP_X_OBJECT_META_DEPTH='1')[0])
        meta[objects[1]] = {'X-Object-Meta-Depth': '1'}
        append(self.upload_object(self.container,
                                  '%s/%s' % (subfolder, get_random_data(8)),
                                  HTTP_X_OBJECT_META_DEPTH='2')[0])
        # BUG FIX: was meta[objects[1]] — this entry describes the third
        # uploaded object, so it belongs under objects[2].
        meta[objects[2]] = {'X-Object-Meta-Depth': '2'}
        # sibling object just outside the folder's prefix range
        other = self.upload_object(self.container, strnextling(folder))[0]

        # delete dir (original comment said "move dir": copy/paste leftover)
        url = join_urls(self.pithos_path, self.user, self.container, folder)
        r = self.delete('%s?delimiter=/' % url)
        self.assertEqual(r.status_code, 204)

        for obj in objects:
            # assert object does not exist
            url = join_urls(self.pithos_path, self.user, self.container, obj)
            r = self.head(url)
            self.assertEqual(r.status_code, 404)

        # assert other has not been deleted
        url = join_urls(self.pithos_path, self.user, self.container, other)
        r = self.head(url)
        self.assertEqual(r.status_code, 200)