# Copyright 2011-2012 GRNET S.A. All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
#   1. Redistributions of source code must retain the above
#      copyright notice, this list of conditions and the following
#      disclaimer.
#
#   2. Redistributions in binary form must reproduce the above
#      copyright notice, this list of conditions and the following
#      disclaimer in the documentation and/or other materials
#      provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.
from functools import wraps
from datetime import datetime
from urllib import quote, unquote

from django.http import HttpResponse, Http404, HttpResponseForbidden
from django.template.loader import render_to_string
from django.utils import simplejson as json
from django.utils.http import http_date, parse_etags
from django.utils.encoding import smart_unicode, smart_str
from django.core.files.uploadhandler import FileUploadHandler
from django.core.files.uploadedfile import UploadedFile
from django.core.urlresolvers import reverse

from snf_django.lib.api.parsedate import parse_http_date_safe, parse_http_date
from snf_django.lib import api
from snf_django.lib.api import faults, utils

from pithos.api.settings import (BACKEND_DB_MODULE, BACKEND_DB_CONNECTION,
                                 BACKEND_BLOCK_MODULE, BACKEND_BLOCK_PATH,
                                 BACKEND_BLOCK_UMASK,
                                 BACKEND_QUEUE_MODULE, BACKEND_QUEUE_HOSTS,
                                 BACKEND_QUEUE_EXCHANGE,
                                 ASTAKOSCLIENT_POOLSIZE,
                                 SERVICE_TOKEN,
                                 ASTAKOS_BASE_URL,
                                 BACKEND_ACCOUNT_QUOTA,
                                 BACKEND_CONTAINER_QUOTA,
                                 BACKEND_VERSIONING,
                                 BACKEND_FREE_VERSIONING, BACKEND_POOL_SIZE,
                                 RADOS_STORAGE, RADOS_POOL_BLOCKS,
                                 RADOS_POOL_MAPS, TRANSLATE_UUIDS,
                                 PUBLIC_URL_SECURITY,
                                 PUBLIC_URL_ALPHABET,
                                 COOKIE_NAME, BASE_HOST)
from pithos.api.resources import resources
from pithos.backends.base import (NotAllowedError, QuotaError, ItemNotExists,
                                  VersionNotExists)

from synnefo.lib import join_urls

from astakosclient import AstakosClient
from astakosclient.errors import NoUserName, NoUUID

import logging
import re
import hashlib
import uuid
import decimal

logger = logging.getLogger(__name__)


def json_encode_decimal(obj):
    if isinstance(obj, decimal.Decimal):
        return str(obj)
    raise TypeError(repr(obj) + " is not JSON serializable")


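# Illustrative usage (not part of the original module): json_encode_decimal
# is meant to be passed as the 'default' hook of json.dumps, so that
# decimal.Decimal values serialize as strings, e.g.
#   json.dumps({'bytes': decimal.Decimal('10.5')},
#              default=json_encode_decimal)
# yields '{"bytes": "10.5"}'.

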
def rename_meta_key(d, old, new):
    if old not in d:
        return
    d[new] = d[old]
    del(d[old])


def printable_header_dict(d):
    """Format a meta dictionary for printing out json/xml.

    Convert all keys to lower case and replace dashes with underscores.
    Format 'last_modified' timestamp.
    """

    if 'last_modified' in d and d['last_modified']:
        d['last_modified'] = utils.isoformat(
            datetime.fromtimestamp(d['last_modified']))
    return dict([(k.lower().replace('-', '_'), v) for k, v in d.iteritems()])


def format_header_key(k):
    """Convert underscores to dashes and capitalize intra-dash strings."""
    return '-'.join([x.capitalize() for x in k.replace('_', '-').split('-')])


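# Illustrative example (behavior inferred from the code above):
#   format_header_key('x_account_group_devs')  ->  'X-Account-Group-Devs'
# Each dash- or underscore-separated part is capitalized and the parts
# are rejoined with dashes.

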
def get_header_prefix(request, prefix):
    """Get all prefix-* request headers in a dict.

    Reformat keys with format_header_key().
    """

    prefix = 'HTTP_' + prefix.upper().replace('-', '_')
    # TODO: Document or remove '~' replacing.
    return dict([(format_header_key(k[5:]), v.replace('~', ''))
                 for k, v in request.META.iteritems()
                 if k.startswith(prefix) and len(k) > len(prefix)])


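# Illustrative example (assumed WSGI header mapping): a request carrying
# 'X-Object-Meta-Color: blue' appears in request.META as
# {'HTTP_X_OBJECT_META_COLOR': 'blue'}; get_header_prefix(request,
# 'X-Object-Meta-') then returns {'X-Object-Meta-Color': 'blue'}.

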
def check_meta_headers(meta):
    if len(meta) > 90:
        raise faults.BadRequest('Too many headers.')
    for k, v in meta.iteritems():
        if len(k) > 128:
            raise faults.BadRequest('Header name too large.')
        if len(v) > 256:
            raise faults.BadRequest('Header value too large.')


def get_account_headers(request):
    meta = get_header_prefix(request, 'X-Account-Meta-')
    check_meta_headers(meta)
    groups = {}
    for k, v in get_header_prefix(request, 'X-Account-Group-').iteritems():
        n = k[16:].lower()
        if '-' in n or '_' in n:
            raise faults.BadRequest('Bad characters in group name')
        groups[n] = v.replace(' ', '').split(',')
        while '' in groups[n]:
            groups[n].remove('')
    return meta, groups


def put_account_headers(response, meta, groups, policy):
    if 'count' in meta:
        response['X-Account-Container-Count'] = meta['count']
    if 'bytes' in meta:
        response['X-Account-Bytes-Used'] = meta['bytes']
    response['Last-Modified'] = http_date(int(meta['modified']))
    for k in [x for x in meta.keys() if x.startswith('X-Account-Meta-')]:
        response[smart_str(
            k, strings_only=True)] = smart_str(meta[k], strings_only=True)
    if 'until_timestamp' in meta:
        response['X-Account-Until-Timestamp'] = http_date(
            int(meta['until_timestamp']))
    for k, v in groups.iteritems():
        k = smart_str(k, strings_only=True)
        k = format_header_key('X-Account-Group-' + k)
        v = smart_str(','.join(v), strings_only=True)
        response[k] = v
    for k, v in policy.iteritems():
        response[smart_str(format_header_key('X-Account-Policy-' + k),
                           strings_only=True)] = smart_str(v, strings_only=True)


def get_container_headers(request):
    meta = get_header_prefix(request, 'X-Container-Meta-')
    check_meta_headers(meta)
    policy = dict([(k[19:].lower(), v.replace(' ', '')) for k, v in
                   get_header_prefix(request,
                                     'X-Container-Policy-').iteritems()])
    return meta, policy


def put_container_headers(request, response, meta, policy):
    if 'count' in meta:
        response['X-Container-Object-Count'] = meta['count']
    if 'bytes' in meta:
        response['X-Container-Bytes-Used'] = meta['bytes']
    response['Last-Modified'] = http_date(int(meta['modified']))
    for k in [x for x in meta.keys() if x.startswith('X-Container-Meta-')]:
        response[smart_str(
            k, strings_only=True)] = smart_str(meta[k], strings_only=True)
    obj_meta = [smart_str(x, strings_only=True) for x in meta['object_meta']
                if x.startswith('X-Object-Meta-')]
    response['X-Container-Object-Meta'] = ','.join([x[14:] for x in obj_meta])
    response['X-Container-Block-Size'] = request.backend.block_size
    response['X-Container-Block-Hash'] = request.backend.hash_algorithm
    if 'until_timestamp' in meta:
        response['X-Container-Until-Timestamp'] = http_date(
            int(meta['until_timestamp']))
    for k, v in policy.iteritems():
        response[smart_str(format_header_key('X-Container-Policy-' + k),
                           strings_only=True)] = smart_str(v, strings_only=True)


def get_object_headers(request):
    content_type = request.META.get('CONTENT_TYPE', None)
    meta = get_header_prefix(request, 'X-Object-Meta-')
    check_meta_headers(meta)
    if request.META.get('HTTP_CONTENT_ENCODING'):
        meta['Content-Encoding'] = request.META['HTTP_CONTENT_ENCODING']
    if request.META.get('HTTP_CONTENT_DISPOSITION'):
        meta['Content-Disposition'] = request.META['HTTP_CONTENT_DISPOSITION']
    if request.META.get('HTTP_X_OBJECT_MANIFEST'):
        meta['X-Object-Manifest'] = request.META['HTTP_X_OBJECT_MANIFEST']
    return content_type, meta, get_sharing(request), get_public(request)


def put_object_headers(response, meta, restricted=False, token=None):
    response['ETag'] = meta['checksum']
    response['Content-Length'] = meta['bytes']
    response.override_serialization = True
    response['Content-Type'] = meta.get('type', 'application/octet-stream')
    response['Last-Modified'] = http_date(int(meta['modified']))
    if not restricted:
        response['X-Object-Hash'] = meta['hash']
        response['X-Object-UUID'] = meta['uuid']
        if TRANSLATE_UUIDS:
            meta['modified_by'] = \
                retrieve_displayname(token, meta['modified_by'])
        response['X-Object-Modified-By'] = smart_str(
            meta['modified_by'], strings_only=True)
        response['X-Object-Version'] = meta['version']
        response['X-Object-Version-Timestamp'] = http_date(
            int(meta['version_timestamp']))
        for k in [x for x in meta.keys() if x.startswith('X-Object-Meta-')]:
            response[smart_str(
                k, strings_only=True)] = smart_str(meta[k], strings_only=True)
        for k in ('Content-Encoding', 'Content-Disposition',
                  'X-Object-Manifest', 'X-Object-Sharing',
                  'X-Object-Shared-By', 'X-Object-Allowed-To',
                  'X-Object-Public'):
            if k in meta:
                response[k] = smart_str(meta[k], strings_only=True)
    else:
        for k in ('Content-Encoding', 'Content-Disposition'):
            if k in meta:
                response[k] = smart_str(meta[k], strings_only=True)


def update_manifest_meta(request, v_account, meta):
    """Update metadata if the object has an X-Object-Manifest."""

    if 'X-Object-Manifest' in meta:
        etag = ''
        bytes = 0
        try:
            src_container, src_name = split_container_object_string(
                '/' + meta['X-Object-Manifest'])
            objects = request.backend.list_objects(
                request.user_uniq, v_account,
                src_container, prefix=src_name, virtual=False)
            for x in objects:
                src_meta = request.backend.get_object_meta(
                    request.user_uniq, v_account, src_container,
                    x[0], 'pithos', x[1])
                etag += src_meta['checksum']
                bytes += src_meta['bytes']
        except:
            # Ignore errors.
            return
        meta['bytes'] = bytes
        md5 = hashlib.md5()
        md5.update(etag)
        meta['checksum'] = md5.hexdigest().lower()


def is_uuid(s):
    if s is None:
        return False
    try:
        uuid.UUID(s)
    except ValueError:
        return False
    else:
        return True


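# Illustrative examples:
#   is_uuid('6fb3e926-5d36-4bc5-a7e3-a72d4a39ef4c')  ->  True
#   is_uuid('alice@example.com')                     ->  False

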
##########################
# USER CATALOG utilities #
##########################

def retrieve_displayname(token, uuid, fail_silently=True):
    astakos = AstakosClient(ASTAKOS_BASE_URL, retry=2, use_pool=True,
                            logger=logger)
    try:
        displayname = astakos.get_username(token, uuid)
    except NoUserName:
        if not fail_silently:
            raise ItemNotExists(uuid)
        else:
            # just return the uuid
            return uuid
    return displayname


def retrieve_displaynames(token, uuids, return_dict=False, fail_silently=True):
    astakos = AstakosClient(ASTAKOS_BASE_URL, retry=2, use_pool=True,
                            logger=logger)
    catalog = astakos.get_usernames(token, uuids) or {}
    missing = list(set(uuids) - set(catalog))
    if missing and not fail_silently:
        raise ItemNotExists('Unknown uuids: %s' % ', '.join(missing))
    return catalog if return_dict else [catalog.get(i) for i in uuids]


def retrieve_uuid(token, displayname):
    if is_uuid(displayname):
        return displayname

    astakos = AstakosClient(ASTAKOS_BASE_URL, retry=2, use_pool=True,
                            logger=logger)
    try:
        uuid = astakos.get_uuid(token, displayname)
    except NoUUID:
        raise ItemNotExists(displayname)
    return uuid


def retrieve_uuids(token, displaynames, return_dict=False, fail_silently=True):
    astakos = AstakosClient(ASTAKOS_BASE_URL, retry=2, use_pool=True,
                            logger=logger)
    catalog = astakos.get_uuids(token, displaynames) or {}
    missing = list(set(displaynames) - set(catalog))
    if missing and not fail_silently:
        raise ItemNotExists('Unknown displaynames: %s' % ', '.join(missing))
    return catalog if return_dict else [catalog.get(i) for i in displaynames]


def replace_permissions_displayname(token, holder):
    if holder == '*':
        return holder
    try:
        # check first for a group permission
        account, group = holder.split(':', 1)
    except ValueError:
        return retrieve_uuid(token, holder)
    else:
        return ':'.join([retrieve_uuid(token, account), group])


def replace_permissions_uuid(token, holder):
    if holder == '*':
        return holder
    try:
        # check first for a group permission
        account, group = holder.split(':', 1)
    except ValueError:
        return retrieve_displayname(token, holder)
    else:
        return ':'.join([retrieve_displayname(token, account), group])


def update_sharing_meta(request, permissions, v_account,
                        v_container, v_object, meta):
    if permissions is None:
        return
    allowed, perm_path, perms = permissions
    if len(perms) == 0:
        return

    # replace uuid with displayname
    if TRANSLATE_UUIDS:
        perms['read'] = [replace_permissions_uuid(
            getattr(request, 'token', None), x)
            for x in perms.get('read', [])]
        perms['write'] = [replace_permissions_uuid(
            getattr(request, 'token', None), x)
            for x in perms.get('write', [])]

    ret = []

    r = ','.join(perms.get('read', []))
    if r:
        ret.append('read=' + r)
    w = ','.join(perms.get('write', []))
    if w:
        ret.append('write=' + w)
    meta['X-Object-Sharing'] = '; '.join(ret)
    if '/'.join((v_account, v_container, v_object)) != perm_path:
        meta['X-Object-Shared-By'] = perm_path
    if request.user_uniq != v_account:
        meta['X-Object-Allowed-To'] = allowed


def update_public_meta(public, meta):
    if not public:
        return
    meta['X-Object-Public'] = join_urls(
        BASE_HOST, reverse('pithos.api.public.public_demux', args=(public,)))


def validate_modification_preconditions(request, meta):
    """Check that the modified timestamp conforms with the preconditions set."""

    if 'modified' not in meta:
        return  # TODO: Always return?

    if_modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE')
    if if_modified_since is not None:
        if_modified_since = parse_http_date_safe(if_modified_since)
    if (if_modified_since is not None
            and int(meta['modified']) <= if_modified_since):
        raise faults.NotModified('Resource has not been modified')

    if_unmodified_since = request.META.get('HTTP_IF_UNMODIFIED_SINCE')
    if if_unmodified_since is not None:
        if_unmodified_since = parse_http_date_safe(if_unmodified_since)
    if (if_unmodified_since is not None
            and int(meta['modified']) > if_unmodified_since):
        raise faults.PreconditionFailed('Resource has been modified')


def validate_matching_preconditions(request, meta):
    """Check that the ETag conforms with the preconditions set."""

    etag = meta['checksum']
    if not etag:
        etag = None

    if_match = request.META.get('HTTP_IF_MATCH')
    if if_match is not None:
        if etag is None:
            raise faults.PreconditionFailed('Resource does not exist')
        if (if_match != '*'
                and etag not in [x.lower() for x in parse_etags(if_match)]):
            raise faults.PreconditionFailed('Resource ETag does not match')

    if_none_match = request.META.get('HTTP_IF_NONE_MATCH')
    if if_none_match is not None:
        # TODO: If this passes, must ignore If-Modified-Since header.
        if etag is not None:
            if (if_none_match == '*'
                    or etag in [x.lower()
                                for x in parse_etags(if_none_match)]):
                # TODO: Continue if an If-Modified-Since header is present.
                if request.method in ('HEAD', 'GET'):
                    raise faults.NotModified('Resource ETag matches')
                raise faults.PreconditionFailed(
                    'Resource exists or ETag matches')


def split_container_object_string(s):
    if not s or s[0] != '/':
        raise ValueError
    s = s[1:]
    pos = s.find('/')
    if pos == -1 or pos == len(s) - 1:
        raise ValueError
    return s[:pos], s[(pos + 1):]


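# Illustrative example: splitting a '/container/object' path into its parts,
#   split_container_object_string('/pics/summer.jpg') -> ('pics', 'summer.jpg')
# A missing leading slash or an empty object part raises ValueError.

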
def copy_or_move_object(request, src_account, src_container, src_name,
                        dest_account, dest_container, dest_name,
                        move=False, delimiter=None):
    """Copy or move an object."""

    if ('ignore_content_type' in request.GET
            and 'CONTENT_TYPE' in request.META):
        del(request.META['CONTENT_TYPE'])
    content_type, meta, permissions, public = get_object_headers(request)
    src_version = request.META.get('HTTP_X_SOURCE_VERSION')
    try:
        if move:
            version_id = request.backend.move_object(
                request.user_uniq, src_account, src_container, src_name,
                dest_account, dest_container, dest_name,
                content_type, 'pithos', meta, False, permissions, delimiter)
        else:
            version_id = request.backend.copy_object(
                request.user_uniq, src_account, src_container, src_name,
                dest_account, dest_container, dest_name,
                content_type, 'pithos', meta, False, permissions,
                src_version, delimiter)
    except NotAllowedError:
        raise faults.Forbidden('Not allowed')
    except (ItemNotExists, VersionNotExists):
        raise faults.ItemNotFound('Container or object does not exist')
    except ValueError:
        raise faults.BadRequest('Invalid sharing header')
    except QuotaError, e:
        raise faults.RequestEntityTooLarge('Quota error: %s' % e)
    if public is not None:
        try:
            request.backend.update_object_public(
                request.user_uniq, dest_account,
                dest_container, dest_name, public)
        except NotAllowedError:
            raise faults.Forbidden('Not allowed')
        except ItemNotExists:
            raise faults.ItemNotFound('Object does not exist')
    return version_id


def get_int_parameter(p):
    if p is not None:
        try:
            p = int(p)
        except ValueError:
            return None
        if p < 0:
            return None
    return p


def get_content_length(request):
    content_length = get_int_parameter(request.META.get('CONTENT_LENGTH'))
    if content_length is None:
        raise faults.LengthRequired('Missing or invalid Content-Length header')
    return content_length


def get_range(request, size):
    """Parse a Range header from the request.

    Returns None when the header is missing or should be ignored, or a
    list of (offset, length) tuples - the caller must still validate them
    against the object size.
    """

    ranges = request.META.get('HTTP_RANGE', '').replace(' ', '')
    if not ranges.startswith('bytes='):
        return None

    ret = []
    for r in (x.strip() for x in ranges[6:].split(',')):
        p = re.compile(r'^(?P<offset>\d*)-(?P<upto>\d*)$')
        m = p.match(r)
        if not m:
            return None
        offset = m.group('offset')
        upto = m.group('upto')
        if offset == '' and upto == '':
            return None

        if offset != '':
            offset = int(offset)
            if upto != '':
                upto = int(upto)
                if offset > upto:
                    return None
                ret.append((offset, upto - offset + 1))
            else:
                ret.append((offset, size - offset))
        else:
            length = int(upto)
            ret.append((size - length, length))

    return ret


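# Illustrative examples for a 1000-byte object:
#   'bytes=0-99'    ->  [(0, 100)]             first hundred bytes
#   'bytes=500-'    ->  [(500, 500)]           from offset 500 to the end
#   'bytes=-500'    ->  [(500, 500)]           the last 500 bytes (suffix)
#   'bytes=0-99,-1' ->  [(0, 100), (999, 1)]
# The returned tuples are not yet checked against the object size;
# object_data_response() below performs that validation.

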
def get_content_range(request):
    """Parse a Content-Range header from the request.

    Returns None when the header is missing or invalid, or an
    (offset, length, total) tuple - check further, as length and total
    may be None.
    Returns (None, None, None) if the provided range is '*/*'.
    """

    ranges = request.META.get('HTTP_CONTENT_RANGE', '')
    if not ranges:
        return None

    p = re.compile(r'^bytes (?P<offset>\d+)-(?P<upto>\d*)/(?P<total>(\d+|\*))$')
    m = p.match(ranges)
    if not m:
        if ranges == 'bytes */*':
            return (None, None, None)
        return None
    offset = int(m.group('offset'))
    upto = m.group('upto')
    total = m.group('total')
    if upto != '':
        upto = int(upto)
    else:
        upto = None
    if total != '*':
        total = int(total)
    else:
        total = None
    if (upto is not None and offset > upto) or \
       (total is not None and offset >= total) or \
       (total is not None and upto is not None and upto >= total):
        return None

    if upto is None:
        length = None
    else:
        length = upto - offset + 1
    return (offset, length, total)


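# Illustrative examples:
#   'bytes 0-499/1000'   ->  (0, 500, 1000)
#   'bytes 500-/*'       ->  (500, None, None)   open-ended, unknown total
#   'bytes */*'          ->  (None, None, None)
#   'bytes 500-499/1000' ->  None                offset past upto

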
def get_sharing(request):
    """Parse an X-Object-Sharing header from the request.

    Raises BadRequest on error.
    """

    permissions = request.META.get('HTTP_X_OBJECT_SHARING')
    if permissions is None:
        return None

    # TODO: Document or remove '~' replacing.
    permissions = permissions.replace('~', '')

    ret = {}
    permissions = permissions.replace(' ', '')
    if permissions == '':
        return ret
    for perm in (x for x in permissions.split(';')):
        if perm.startswith('read='):
            ret['read'] = list(set(
                [v.replace(' ', '').lower() for v in perm[5:].split(',')]))
            if '' in ret['read']:
                ret['read'].remove('')
            if '*' in ret['read']:
                ret['read'] = ['*']
            if len(ret['read']) == 0:
                raise faults.BadRequest(
                    'Bad X-Object-Sharing header value: invalid length')
        elif perm.startswith('write='):
            ret['write'] = list(set(
                [v.replace(' ', '').lower() for v in perm[6:].split(',')]))
            if '' in ret['write']:
                ret['write'].remove('')
            if '*' in ret['write']:
                ret['write'] = ['*']
            if len(ret['write']) == 0:
                raise faults.BadRequest(
                    'Bad X-Object-Sharing header value: invalid length')
        else:
            raise faults.BadRequest(
                'Bad X-Object-Sharing header value: missing prefix')

    # replace displayname with uuid
    if TRANSLATE_UUIDS:
        try:
            ret['read'] = [replace_permissions_displayname(
                getattr(request, 'token', None), x)
                for x in ret.get('read', [])]
            ret['write'] = [replace_permissions_displayname(
                getattr(request, 'token', None), x)
                for x in ret.get('write', [])]
        except ItemNotExists, e:
            raise faults.BadRequest(
                'Bad X-Object-Sharing header value: unknown account: %s' % e)

    # Keep duplicates only in write list.
    dups = [x for x in ret.get(
        'read', []) if x in ret.get('write', []) and x != '*']
    if dups:
        for x in dups:
            ret['read'].remove(x)
        if len(ret['read']) == 0:
            del(ret['read'])

    return ret


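# Illustrative example (list order may vary, since duplicates are dropped
# through a set): the header
#   X-Object-Sharing: read=*; write=alice,bob
# parses to {'read': ['*'], 'write': ['alice', 'bob']}. A holder that
# appears in both lists is kept only in the write list, so
# 'read=alice; write=alice' yields just {'write': ['alice']}.

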
def get_public(request):
    """Parse an X-Object-Public header from the request.

    Raises BadRequest on error.
    """

    public = request.META.get('HTTP_X_OBJECT_PUBLIC')
    if public is None:
        return None

    public = public.replace(' ', '').lower()
    if public == 'true':
        return True
    elif public == 'false' or public == '':
        return False
    raise faults.BadRequest('Bad X-Object-Public header value')


def raw_input_socket(request):
    """Return the socket for reading the rest of the request."""

    server_software = request.META.get('SERVER_SOFTWARE')
    if server_software and server_software.startswith('mod_python'):
        return request._req
    if 'wsgi.input' in request.environ:
        return request.environ['wsgi.input']
    raise NotImplementedError('Unknown server software')

MAX_UPLOAD_SIZE = 5 * (1024 * 1024 * 1024)  # 5GB


def socket_read_iterator(request, length=0, blocksize=4096):
    """Return a maximum of blocksize data read from the socket per iteration.

    Read up to 'length'. If 'length' is negative, will attempt a chunked read.
    The maximum amount of data read is controlled by MAX_UPLOAD_SIZE.
    """

    sock = raw_input_socket(request)
    if length < 0:  # Chunked transfers
        # Small version (server does the dechunking).
        if (request.environ.get('mod_wsgi.input_chunked', None)
                or request.META['SERVER_SOFTWARE'].startswith('gunicorn')):
            while length < MAX_UPLOAD_SIZE:
                data = sock.read(blocksize)
                if data == '':
                    return
                length += len(data)  # Count read data towards the limit.
                yield data
            raise faults.BadRequest('Maximum size is reached')

        # Long version (do the dechunking).
        data = ''
        while length < MAX_UPLOAD_SIZE:
            # Get chunk size.
            if hasattr(sock, 'readline'):
                chunk_length = sock.readline()
            else:
                chunk_length = ''
                while chunk_length[-1:] != '\n':
                    chunk_length += sock.read(1)
            chunk_length = chunk_length.strip()  # strip() returns a new str.
            pos = chunk_length.find(';')
            if pos >= 0:
                chunk_length = chunk_length[:pos]
            try:
                chunk_length = int(chunk_length, 16)
            except Exception:
                raise faults.BadRequest('Bad chunk size')
                # TODO: Change to something more appropriate.
            # Check if done.
            if chunk_length == 0:
                if len(data) > 0:
                    yield data
                return
            # Get the actual data.
            while chunk_length > 0:
                chunk = sock.read(min(chunk_length, blocksize))
                chunk_length -= len(chunk)
                length += len(chunk)  # Count read data towards the limit.
                data += chunk
                if len(data) >= blocksize:
                    ret = data[:blocksize]
                    data = data[blocksize:]
                    yield ret
            sock.read(2)  # CRLF
        raise faults.BadRequest('Maximum size is reached')
    else:
        if length > MAX_UPLOAD_SIZE:
            raise faults.BadRequest('Maximum size is reached')
        while length > 0:
            data = sock.read(min(length, blocksize))
            if not data:
                raise faults.BadRequest()
            length -= len(data)
            yield data


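# For reference (illustrative, HTTP/1.1 chunked transfer coding): the
# "long version" above parses a body of the form
#   4\r\n
#   Wiki\r\n
#   0\r\n
#   \r\n
# i.e. a hex chunk size (optionally followed by ';extension'), the chunk
# bytes, a trailing CRLF per chunk, and a final zero-length chunk.

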
class SaveToBackendHandler(FileUploadHandler):
    """Handle a file from an HTML form the django way."""

    def __init__(self, request=None):
        super(SaveToBackendHandler, self).__init__(request)
        self.backend = request.backend

    def put_data(self, length):
        if len(self.data) >= length:
            block = self.data[:length]
            self.file.hashmap.append(self.backend.put_block(block))
            self.md5.update(block)
            self.data = self.data[length:]

    def new_file(self, field_name, file_name, content_type,
                 content_length, charset=None):
        self.md5 = hashlib.md5()
        self.data = ''
        self.file = UploadedFile(
            name=file_name, content_type=content_type, charset=charset)
        self.file.size = 0
        self.file.hashmap = []

    def receive_data_chunk(self, raw_data, start):
        self.data += raw_data
        self.file.size += len(raw_data)
        self.put_data(self.request.backend.block_size)
        return None

    def file_complete(self, file_size):
        remaining = len(self.data)
        if remaining > 0:
            self.put_data(remaining)
        self.file.etag = self.md5.hexdigest().lower()
        return self.file


class ObjectWrapper(object):
    """Return the object's data block-per-block in each iteration.

    Read from the object using the offset and length provided
    in each entry of the range list.
    """

    def __init__(self, backend, ranges, sizes, hashmaps, boundary):
        self.backend = backend
        self.ranges = ranges
        self.sizes = sizes
        self.hashmaps = hashmaps
        self.boundary = boundary
        self.size = sum(self.sizes)

        self.file_index = 0
        self.block_index = 0
        self.block_hash = -1
        self.block = ''

        self.range_index = -1
        self.offset, self.length = self.ranges[0]

    def __iter__(self):
        return self

    def part_iterator(self):
        if self.length > 0:
            # Get the file for the current offset.
            file_size = self.sizes[self.file_index]
            while self.offset >= file_size:
                self.offset -= file_size
                self.file_index += 1
                file_size = self.sizes[self.file_index]

            # Get the block for the current position.
            self.block_index = int(self.offset / self.backend.block_size)
            if self.block_hash != \
                    self.hashmaps[self.file_index][self.block_index]:
                self.block_hash = self.hashmaps[
                    self.file_index][self.block_index]
                try:
                    self.block = self.backend.get_block(self.block_hash)
                except ItemNotExists:
                    raise faults.ItemNotFound('Block does not exist')

            # Get the data from the block.
            bo = self.offset % self.backend.block_size
            bs = self.backend.block_size
            if (self.block_index == len(self.hashmaps[self.file_index]) - 1 and
                    self.sizes[self.file_index] % self.backend.block_size):
                bs = self.sizes[self.file_index] % self.backend.block_size
            bl = min(self.length, bs - bo)
            data = self.block[bo:bo + bl]
            self.offset += bl
            self.length -= bl
            return data
        else:
            raise StopIteration

    def next(self):
        if len(self.ranges) == 1:
            return self.part_iterator()
        if self.range_index == len(self.ranges):
            raise StopIteration
        try:
            if self.range_index == -1:
                raise StopIteration
            return self.part_iterator()
        except StopIteration:
            self.range_index += 1
            out = []
            if self.range_index < len(self.ranges):
                # Part header.
                self.offset, self.length = self.ranges[self.range_index]
                self.file_index = 0
                if self.range_index > 0:
                    out.append('')
                out.append('--' + self.boundary)
                out.append('Content-Range: bytes %d-%d/%d' % (
                    self.offset, self.offset + self.length - 1, self.size))
                out.append('Content-Transfer-Encoding: binary')
                out.append('')
                out.append('')
                return '\r\n'.join(out)
            else:
                # Footer.
                out.append('')
                out.append('--' + self.boundary + '--')
                out.append('')
                return '\r\n'.join(out)


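# For reference (illustrative): with more than one range, next() emits a
# multipart/byteranges payload, interleaving part headers with block data:
#   --<boundary>\r\n
#   Content-Range: bytes 0-99/1000\r\n
#   Content-Transfer-Encoding: binary\r\n
#   \r\n
#   <data...>
#   \r\n--<boundary>\r\n
#   ...
#   \r\n--<boundary>--\r\n
# object_data_response() below picks the boundary and sets the matching
# multipart Content-Type header.

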
def object_data_response(request, sizes, hashmaps, meta, public=False):
    """Get the HttpResponse object for replying with the object's data."""

    # Range handling.
    size = sum(sizes)
    ranges = get_range(request, size)
    if ranges is None:
        ranges = [(0, size)]
        ret = 200
    else:
        check = [True for offset, length in ranges if
                 length <= 0 or length > size or
                 offset < 0 or offset >= size or
                 offset + length > size]
        if len(check) > 0:
            raise faults.RangeNotSatisfiable(
                'Requested range exceeds object limits')
        ret = 206
        if_range = request.META.get('HTTP_IF_RANGE')
        if if_range:
            try:
                # Modification time has passed instead.
                last_modified = parse_http_date(if_range)
                if last_modified != meta['modified']:
                    ranges = [(0, size)]
                    ret = 200
            except ValueError:
                if if_range != meta['checksum']:
                    ranges = [(0, size)]
                    ret = 200

    if ret == 206 and len(ranges) > 1:
        boundary = uuid.uuid4().hex
    else:
        boundary = ''
    wrapper = ObjectWrapper(request.backend, ranges, sizes, hashmaps, boundary)
    response = HttpResponse(wrapper, status=ret)
    put_object_headers(
        response, meta, restricted=public,
        token=getattr(request, 'token', None))
    if ret == 206:
        if len(ranges) == 1:
            offset, length = ranges[0]
            # Update with the correct length.
            response['Content-Length'] = length
            response['Content-Range'] = 'bytes %d-%d/%d' % (
                offset, offset + length - 1, size)
        else:
            del(response['Content-Length'])
            response['Content-Type'] = 'multipart/byteranges; boundary=%s' % (
                boundary,)
    return response


def put_object_block(request, hashmap, data, offset):
    """Put one block of data at the given offset."""

    bi = int(offset / request.backend.block_size)
    bo = offset % request.backend.block_size
    bl = min(len(data), request.backend.block_size - bo)
    if bi < len(hashmap):
        hashmap[bi] = request.backend.update_block(hashmap[bi], data[:bl], bo)
    else:
        hashmap.append(request.backend.put_block(('\x00' * bo) + data[:bl]))
    return bl  # Return amount of data written.


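# Worked example (illustrative): with a 4 MiB block size, writing at
# offset 5 MiB gives bi = 1 (second block), bo = 1 MiB (position inside
# that block) and bl = min(len(data), 3 MiB), i.e. at most the rest of
# the block; callers loop, advancing the offset by the returned bl,
# until all data is written.

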
def hashmap_md5(backend, hashmap, size):
    """Produce the MD5 sum from the data in the hashmap."""

    # TODO: Search backend for the MD5 of another object
    #       with the same hashmap and size...
    md5 = hashlib.md5()
    bs = backend.block_size
    for bi, h in enumerate(hashmap):
        data = backend.get_block(h)  # Blocks come in padded.
        if bi == len(hashmap) - 1:
            data = data[:size % bs]
        md5.update(data)
    return md5.hexdigest().lower()


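# Worked example (illustrative): for size = 10000 and a 4096-byte block
# size, the hashmap holds three blocks; the first two contribute 4096
# bytes each and the last, padded block is trimmed to
# 10000 % 4096 = 1808 bytes before being fed to the MD5.

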
def simple_list_response(request, lst):
    if request.serialization == 'text':
        return '\n'.join(lst) + '\n'
    if request.serialization == 'xml':
        return render_to_string('items.xml', {'items': lst})
    if request.serialization == 'json':
        return json.dumps(lst)


from pithos.backends.util import PithosBackendPool

if RADOS_STORAGE:
    BLOCK_PARAMS = {'mappool': RADOS_POOL_MAPS,
                    'blockpool': RADOS_POOL_BLOCKS, }
else:
    BLOCK_PARAMS = {'mappool': None,
                    'blockpool': None, }


_pithos_backend_pool = PithosBackendPool(
    size=BACKEND_POOL_SIZE,
    db_module=BACKEND_DB_MODULE,
    db_connection=BACKEND_DB_CONNECTION,
    block_module=BACKEND_BLOCK_MODULE,
    block_path=BACKEND_BLOCK_PATH,
    block_umask=BACKEND_BLOCK_UMASK,
    queue_module=BACKEND_QUEUE_MODULE,
    queue_hosts=BACKEND_QUEUE_HOSTS,
    queue_exchange=BACKEND_QUEUE_EXCHANGE,
    astakos_url=ASTAKOS_BASE_URL,
    service_token=SERVICE_TOKEN,
    astakosclient_poolsize=ASTAKOSCLIENT_POOLSIZE,
    free_versioning=BACKEND_FREE_VERSIONING,
    block_params=BLOCK_PARAMS,
    public_url_security=PUBLIC_URL_SECURITY,
    public_url_alphabet=PUBLIC_URL_ALPHABET,
    account_quota_policy=BACKEND_ACCOUNT_QUOTA,
    container_quota_policy=BACKEND_CONTAINER_QUOTA,
    container_versioning_policy=BACKEND_VERSIONING)


def get_backend():
    backend = _pithos_backend_pool.pool_get()
    backend.messages = []
    return backend


def update_request_headers(request):
    # Handle URL-encoded keys and values.
    meta = dict([(
        k, v) for k, v in request.META.iteritems() if k.startswith('HTTP_')])
    for k, v in meta.iteritems():
        try:
            k.decode('ascii')
            v.decode('ascii')
        except UnicodeDecodeError:
            raise faults.BadRequest('Bad character in headers.')
        if '%' in k or '%' in v:
            del(request.META[k])
            request.META[unquote(k)] = smart_unicode(unquote(v),
                                                     strings_only=True)


def update_response_headers(request, response):
    if (not response.has_header('Content-Length') and
        not (response.has_header('Content-Type') and
             response['Content-Type'].startswith('multipart/byteranges'))):
        response['Content-Length'] = len(response.content)

    # URL-encode unicode in headers.
    meta = response.items()
    for k, v in meta:
        if (k.startswith('X-Account-') or k.startswith('X-Container-') or
                k.startswith('X-Object-') or k.startswith('Content-')):
            del(response[k])
            response[quote(k)] = quote(v, safe='/=,:@; ')


def get_pithos_usage(token):
    """Get Pithos usage from astakos."""
    astakos = AstakosClient(ASTAKOS_BASE_URL, retry=2, use_pool=True,
                            logger=logger)
    quotas = astakos.get_quotas(token)['system']
    pithos_resources = [r['name'] for r in resources]
    map(quotas.pop, filter(lambda k: k not in pithos_resources, quotas.keys()))
    return quotas.popitem()[-1]  # assume only one resource


def api_method(http_method=None, token_required=True, user_required=True,
               logger=None, format_allowed=False,
               default_serialization="json"):
    def decorator(func):
        @api.api_method(http_method=http_method,
                        token_required=token_required,
                        user_required=user_required,
                        logger=logger, format_allowed=format_allowed,
                        astakos_url=ASTAKOS_BASE_URL,
                        default_serialization=default_serialization)
        @wraps(func)
        def wrapper(request, *args, **kwargs):
            # The args variable may contain up to (account, container,
            # object).
            if len(args) > 1 and len(args[1]) > 256:
                raise faults.BadRequest('Container name too large.')
            if len(args) > 2 and len(args[2]) > 1024:
                raise faults.BadRequest('Object name too large.')

            try:
                # Add a PithosBackend as attribute of the request object.
                request.backend = get_backend()
                # Many API methods expect the X-Auth-Token in request.token.
                request.token = request.x_auth_token
                update_request_headers(request)
                response = func(request, *args, **kwargs)
                update_response_headers(request, response)
                return response
            finally:
                # Always close the PithosBackend connection.
                if getattr(request, "backend", None) is not None:
                    request.backend.close()
        return wrapper
    return decorator


def get_token_from_cookie(request):
    assert request.method == 'GET', \
        "Cookie based authentication is only allowed to GET requests"
    token = None
    if COOKIE_NAME in request.COOKIES:
        cookie_value = unquote(request.COOKIES.get(COOKIE_NAME, ''))
        account, sep, token = cookie_value.partition('|')
    return token


def view_method():
    """Decorator function for views."""

    def decorator(func):
        @wraps(func)
        def wrapper(request, *args, **kwargs):
            request.META['HTTP_X_AUTH_TOKEN'] = get_token_from_cookie(request)
            # Get the response object
            response = func(request, *args, **kwargs)
            if response.status_code == 200:
                return response
            elif response.status_code == 404:
                raise Http404()
            elif response.status_code in [401, 403]:
                return HttpResponseForbidden()
            else:
                raise Exception(response)
        return wrapper
    return decorator
|